diff --git a/R/compat-plyr.R b/R/compat-plyr.R
index 898aacdf05..4139018873 100644
--- a/R/compat-plyr.R
+++ b/R/compat-plyr.R
@@ -307,7 +307,7 @@ rbind_dfs <- function(dfs) {
   for (col in names(col_levels)) {
     out[[col]] <- factor(out[[col]], levels = col_levels[[col]])
   }
-  attributes(out) <- list(class = "data.frame", names = names(out), row.names = .set_row_names(total))
+  attributes(out) <- list(class = c("tbl_df", "tbl", "data.frame"), names = names(out), row.names = .set_row_names(total))
   out
 }
 #' Apply function to unique subsets of a data.frame
diff --git a/R/facet-grid-.r b/R/facet-grid-.r
index 13d213ca1f..e769ca7160 100644
--- a/R/facet-grid-.r
+++ b/R/facet-grid-.r
@@ -237,7 +237,6 @@ FacetGrid <- ggproto("FacetGrid", Facet,
 
     panels <- new_data_frame(c(list(PANEL = panel, ROW = rows, COL = cols), base))
     panels <- panels[order(panels$PANEL), , drop = FALSE]
-    rownames(panels) <- NULL
 
     panels$SCALE_X <- if (params$free$x) panels$COL else 1L
     panels$SCALE_Y <- if (params$free$y) panels$ROW else 1L
diff --git a/R/facet-wrap.r b/R/facet-wrap.r
index 8b8a29b2ee..a2ad71ac4b 100644
--- a/R/facet-wrap.r
+++ b/R/facet-wrap.r
@@ -165,7 +165,7 @@ FacetWrap <- ggproto("FacetWrap", Facet,
 
     panels <- cbind(layout, unrowname(base))
     panels <- panels[order(panels$PANEL), , drop = FALSE]
-    rownames(panels) <- NULL
+    panels <- new_data_frame(panels)
 
     # Add scale identification
     panels$SCALE_X <- if (params$free$x) seq_len(n) else 1L
diff --git a/R/fortify-multcomp.r b/R/fortify-multcomp.r
index eaa8d316bd..cd1fac92ba 100644
--- a/R/fortify-multcomp.r
+++ b/R/fortify-multcomp.r
@@ -33,7 +33,7 @@ NULL
 #' @rdname fortify-multcomp
 #' @export
 fortify.glht <- function(model, data, ...) {
-  unrowname(base::data.frame(
+  unrowname(data_frame(
     lhs = rownames(model$linfct),
     rhs = model$rhs,
     estimate = stats::coef(model),
@@ -48,7 +48,7 @@ fortify.confint.glht <- function(model, data, ...) {
   coef <- model$confint
   colnames(coef) <- to_lower_ascii(colnames(coef))
 
-  unrowname(base::data.frame(
+  unrowname(data_frame(
     lhs = rownames(coef),
     rhs = model$rhs,
     coef,
@@ -64,7 +64,7 @@ fortify.summary.glht <- function(model, data, ...) {
     model$test[c("coefficients", "sigma", "tstat", "pvalues")])
   names(coef) <- c("estimate", "se", "t", "p")
 
-  unrowname(base::data.frame(
+  unrowname(data_frame(
     lhs = rownames(coef),
     rhs = model$rhs,
     coef,
@@ -77,7 +77,7 @@ fortify.summary.glht <- function(model, data, ...) {
 #' @rdname fortify-multcomp
 #' @export
 fortify.cld <- function(model, data, ...) {
-  unrowname(base::data.frame(
+  unrowname(data_frame(
     lhs = names(model$mcletters$Letters),
     letters = model$mcletters$Letters,
     check.names = FALSE,
diff --git a/R/fortify-spatial.r b/R/fortify-spatial.r
index dcf9550ef2..4059c1902e 100644
--- a/R/fortify-spatial.r
+++ b/R/fortify-spatial.r
@@ -22,7 +22,7 @@ NULL
 #' @export
 #' @method fortify SpatialPolygonsDataFrame
 fortify.SpatialPolygonsDataFrame <- function(model, data, region = NULL, ...) {
-  attr <- as.data.frame(model)
+  attr <- new_data_frame(as.data.frame(model))
   # If not specified, split into regions based on polygons
   if (is.null(region)) {
     coords <- rbind_dfs(lapply(model@polygons,fortify))
@@ -67,7 +67,7 @@ fortify.Polygons <- function(model, data, ...) {
 #' @export
 #' @method fortify Polygon
 fortify.Polygon <- function(model, data, ...) {
-  df <- as.data.frame(model@coords)
+  df <- new_data_frame(as.data.frame(model@coords))
   names(df) <- c("long", "lat")
   df$order <- 1:nrow(df)
   df$hole <- model@hole
@@ -103,7 +103,7 @@ fortify.Lines <- function(model, data, ...) {
 #' @export
 #' @method fortify Line
 fortify.Line <- function(model, data, ...) {
-  df <- as.data.frame(model@coords)
+  df <- new_data_frame(as.data.frame(model@coords))
   names(df) <- c("long", "lat")
   df$order <- 1:nrow(df)
   df
diff --git a/R/geom-path.r b/R/geom-path.r
index 379355c563..cc5685bf34 100644
--- a/R/geom-path.r
+++ b/R/geom-path.r
@@ -305,7 +305,7 @@ GeomStep <- ggproto("GeomStep", GeomPath,
 # @keyword internal
 stairstep <- function(data, direction="hv") {
   direction <- match.arg(direction, c("hv", "vh"))
-  data <- as.data.frame(data)[order(data$x), ]
+  data <- new_data_frame(as.data.frame(data)[order(data$x), ])
   n <- nrow(data)
 
   if (n <= 1) {
diff --git a/R/performance.R b/R/performance.R
index 2657e9ac09..fc30951f61 100644
--- a/R/performance.R
+++ b/R/performance.R
@@ -12,7 +12,7 @@ new_data_frame <- function(x = list(), n = NULL) {
     x[[i]] <- rep(x[[i]], n)
   }
 
-  class(x) <- "data.frame"
+  class(x) <- c("tbl_df", "tbl", "data.frame")
 
   attr(x, "row.names") <- .set_row_names(n)
   x
@@ -32,7 +32,7 @@ split_matrix <- function(x, col_names = colnames(x)) {
   if (!is.null(col_names)) names(x) <- col_names
   x
 }
-
+
 mat_2_df <- function(x, col_names = colnames(x)) {
   new_data_frame(split_matrix(x, col_names))
 }
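The performance.R and compat-plyr.R hunks are the core of the change: every data frame ggplot2 builds internally through new_data_frame() and rbind_dfs() is now stamped with the tibble classes rather than plain "data.frame", without calling tibble itself. A minimal sketch of the object layout this produces, using only base R (the column values are illustrative, not taken from the package):

# Mirrors what new_data_frame() now does, per the performance.R hunk above.
df <- list(x = 1:3, fill = "grey20")
n <- 3L
df <- lapply(df, function(col) if (length(col) == n) col else rep(col, n))  # recycle length-1 columns
class(df) <- c("tbl_df", "tbl", "data.frame")
attr(df, "row.names") <- .set_row_names(n)
str(df)   # classed tbl_df/tbl/data.frame, but built without tibble::tibble()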
diff --git a/R/position-dodge2.r b/R/position-dodge2.r
index 2bab0ba4fc..d4fb466cde 100644
--- a/R/position-dodge2.r
+++ b/R/position-dodge2.r
@@ -102,7 +102,7 @@ pos_dodge2 <- function(df, width, n = NULL, padding = 0.1) {
 
   # Set the elements in place
   for (i in seq_along(starts)) {
-    divisions <- cumsum(c(starts[i], df[df$xid == i, "new_width"]))
+    divisions <- cumsum(c(starts[i], df[df$xid == i, ]$new_width))
     df[df$xid == i, "xmin"] <- divisions[-length(divisions)]
     df[df$xid == i, "xmax"] <- divisions[-1]
   }
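The pos_dodge2() tweak above is the one subsetting change that follows from the new class: with tibble's `[` method in play, df[rows, "new_width"] comes back as a one-column tibble rather than a bare numeric vector, which is presumably why the indexing switches to `$`, which always returns the underlying vector for cumsum(). A small illustration, assuming the tibble package is installed (the toy data below is illustrative, not from ggplot2):

library(tibble)   # assumed available; supplies the tbl_df method for `[`

df <- tibble(xid = c(1, 1, 2), new_width = c(0.3, 0.3, 0.4))

df[df$xid == 1, "new_width"]    # a 2 x 1 tibble, not a bare numeric vector
df[df$xid == 1, ]$new_width     # 0.3 0.3 -- what cumsum() needs
cumsum(c(0.5, df[df$xid == 1, ]$new_width))   # 0.5 0.8 1.1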
diff --git a/R/stat-smooth-methods.r b/R/stat-smooth-methods.r
index 311958e16d..4dc144f501 100644
--- a/R/stat-smooth-methods.r
+++ b/R/stat-smooth-methods.r
@@ -16,9 +16,9 @@ predictdf.default <- function(model, xseq, se, level) {
   if (se) {
     fit <- as.data.frame(pred$fit)
     names(fit) <- c("y", "ymin", "ymax")
-    base::data.frame(x = xseq, fit, se = pred$se.fit)
+    data_frame(x = xseq, fit, se = pred$se.fit)
   } else {
-    base::data.frame(x = xseq, y = as.vector(pred))
+    data_frame(x = xseq, y = as.vector(pred))
   }
 }
 
@@ -29,7 +29,7 @@ predictdf.glm <- function(model, xseq, se, level) {
 
   if (se) {
     std <- stats::qnorm(level / 2 + 0.5)
-    base::data.frame(
+    data_frame(
       x = xseq,
       y = model$family$linkinv(as.vector(pred$fit)),
       ymin = model$family$linkinv(as.vector(pred$fit - std * pred$se.fit)),
@@ -37,7 +37,7 @@
       se = as.vector(pred$se.fit)
     )
   } else {
-    base::data.frame(x = xseq, y = model$family$linkinv(as.vector(pred)))
+    data_frame(x = xseq, y = model$family$linkinv(as.vector(pred)))
   }
 }
 
@@ -50,9 +50,9 @@ predictdf.loess <- function(model, xseq, se, level) {
     ci <- pred$se.fit * stats::qt(level / 2 + .5, pred$df)
     ymin = y - ci
     ymax = y + ci
-    base::data.frame(x = xseq, y, ymin, ymax, se = pred$se.fit)
+    data_frame(x = xseq, y, ymin, ymax, se = pred$se.fit)
   } else {
-    base::data.frame(x = xseq, y = as.vector(pred))
+    data_frame(x = xseq, y = as.vector(pred))
   }
 }
 
@@ -64,8 +64,8 @@ predictdf.locfit <- function(model, xseq, se, level) {
     y = pred$fit
     ymin = y - pred$se.fit
     ymax = y + pred$se.fit
-    base::data.frame(x = xseq, y, ymin, ymax, se = pred$se.fit)
+    data_frame(x = xseq, y, ymin, ymax, se = pred$se.fit)
   } else {
-    base::data.frame(x = xseq, y = as.vector(pred))
+    data_frame(x = xseq, y = as.vector(pred))
   }
 }
diff --git a/R/utilities.r b/R/utilities.r
index b99c949a75..85f6426960 100644
--- a/R/utilities.r
+++ b/R/utilities.r
@@ -58,7 +58,6 @@ try_require <- function(package, fun) {
 # @keyword internal
 uniquecols <- function(df) {
   df <- df[1, sapply(df, function(x) length(unique(x)) == 1), drop = FALSE]
-  rownames(df) <- 1:nrow(df)
   df
 }
 
diff --git a/R/zzz.r b/R/zzz.r
index 9e569b2087..8532937920 100644
--- a/R/zzz.r
+++ b/R/zzz.r
@@ -17,6 +17,13 @@
 }
 
 .onLoad <- function(...) {
+  vctrs::s3_register("base::$", "tbl_df", base::.subset2)
+  vctrs::s3_register("base::transform", "tbl_df", function(`_data`, ...) {
+    res <- NextMethod()
+    class(res) <- class(`_data`)
+    res
+  })
+
   backport_unit_methods()
 
   # To avoid namespace clash with dplyr.
diff --git a/tests/testthat/test-fortify.r b/tests/testthat/test-fortify.r
index d6510493f8..fe6f7cc2f5 100644
--- a/tests/testthat/test-fortify.r
+++ b/tests/testthat/test-fortify.r
@@ -17,7 +17,6 @@ test_that("spatial polygons have correct ordering", {
   }
 
   fake_data <- data_frame(ids = 1:5, region = c(1,1,2,3,4))
-  rownames(fake_data) <- 1:5
   polys <- list(sp::Polygons(list(make_square(), make_hole()), 1),
                 sp::Polygons(list(make_square(1,0), make_square(2, 0)), 2),
                 sp::Polygons(list(make_square(1,1)), 3),
diff --git a/tests/testthat/test-geom-tile.R b/tests/testthat/test-geom-tile.R
index 46ee3094e9..d536304c2c 100644
--- a/tests/testthat/test-geom-tile.R
+++ b/tests/testthat/test-geom-tile.R
@@ -19,10 +19,10 @@ test_that("accepts width and height aesthetics", {
     geom_tile(fill = NA, colour = "black", size = 1)
   out <- layer_data(p)
 
-  boundary <- as.data.frame(tibble::tribble(
+  boundary <- tibble::tribble(
     ~xmin, ~xmax, ~ymin, ~ymax,
     -1,    1,     -1,    1,
     -2,    2,     -2,    2
-  ))
+  )
   expect_equal(out[c("xmin", "xmax", "ymin", "ymax")], boundary)
 })
diff --git a/tests/testthat/test-layer.r b/tests/testthat/test-layer.r
index f07228adec..2ae343a299 100644
--- a/tests/testthat/test-layer.r
+++ b/tests/testthat/test-layer.r
@@ -23,7 +23,7 @@ test_that("column vectors are allowed (#2609)", {
   df <- data_frame(x = 1:10)
   df$y <- scale(1:10) # Returns a column vector
   p <- ggplot(df, aes(x, y))
-  expect_is(layer_data(p), "data.frame")
+  expect_is(layer_data(p), "tbl_df")
 })
 
 test_that("missing aesthetics trigger informative error", {
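The zzz.r hunk registers two S3 methods when the package loads so that the tibble-classed frames above keep behaving like plain data frames even when tibble itself is not attached: `$` falls back to exact-name extraction via base::.subset2(), and transform() keeps the original class instead of downgrading to a bare data.frame. A self-contained sketch of the same idea, assuming only base R (df and its columns are illustrative, not part of the patch):

# Sketch only: mirrors the methods registered in .onLoad() above.
`$.tbl_df` <- base::.subset2            # exact-name column extraction, no partial matching

transform.tbl_df <- function(`_data`, ...) {
  res <- NextMethod()                   # base transform() returns a plain data.frame
  class(res) <- class(`_data`)          # restore c("tbl_df", "tbl", "data.frame")
  res
}

# A data frame classed the way new_data_frame() now classes its output
df <- structure(
  list(x = 1:3),
  class = c("tbl_df", "tbl", "data.frame"),
  row.names = .set_row_names(3L)
)

df$x                              # 1 2 3, via .subset2()
class(transform(df, y = x * 2))   # "tbl_df" "tbl" "data.frame"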