[backport] [R] Fix CRAN test notes. (#8428) (#8440)
- Limit the number of used CPU cores in examples.
- Add a note for the constraint.
- Bring back the cleanup script.
trivialfis committed Nov 8, 2022
Commit a347cd5 (1 parent: 9ff0c08)
Showing 22 changed files with 81 additions and 49 deletions.
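
The recurring change in the R examples below is to cap the number of CPU cores at two, both when constructing the DMatrix and in the training parameters, so that CRAN's example-timing check does not flag the package for using too many cores. A minimal sketch of the pattern (based on the examples changed in this commit, using the bundled agaricus.train data):

```r
library(xgboost)
data(agaricus.train, package = "xgboost")

# Limit the example to two CPU cores: pass nthread when building the DMatrix
# and again in the training parameters.
dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
bst <- xgb.train(
  params = list(objective = "binary:logistic", max_depth = 2, eta = 1, nthread = 2),
  data = dtrain,
  nrounds = 2
)
```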
6 changes: 6 additions & 0 deletions .github/workflows/r_tests.yml
@@ -5,6 +5,7 @@ on: [push, pull_request]
env:
R_PACKAGES: c('XML', 'data.table', 'ggplot2', 'DiagrammeR', 'Ckmeans.1d.dp', 'vcd', 'testthat', 'lintr', 'knitr', 'rmarkdown', 'e1071', 'cplm', 'devtools', 'float', 'titanic')
GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
_R_CHECK_EXAMPLE_TIMING_CPU_TO_ELAPSED_THRESHOLD_: 2.5

permissions:
contents: read # to fetch code (actions/checkout)
@@ -68,6 +69,7 @@ jobs:
- {os: windows-latest, r: 'release', compiler: 'mingw', build: 'cmake'}
env:
R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
_R_CHECK_EXAMPLE_TIMING_CPU_TO_ELAPSED_THRESHOLD_: 2.5
RSPM: ${{ matrix.config.rspm }}

steps:
@@ -121,6 +123,10 @@ jobs:
config:
- {r: 'release'}

env:
_R_CHECK_EXAMPLE_TIMING_CPU_TO_ELAPSED_THRESHOLD_: 2.5
MAKE: "make -j$(nproc)"

steps:
- uses: actions/checkout@v2
with:
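The _R_CHECK_EXAMPLE_TIMING_CPU_TO_ELAPSED_THRESHOLD_ variable added above makes R CMD check emit a NOTE when an example's CPU time exceeds 2.5 times its elapsed time, which is how CRAN detects examples that use too many cores. A sketch of reproducing the same check locally, assuming the rcmdcheck package is installed (environment variables set in the session are normally inherited by the child check process):

```r
# Mirror the threshold used in the CI workflow and by CRAN's incoming checks.
Sys.setenv("_R_CHECK_EXAMPLE_TIMING_CPU_TO_ELAPSED_THRESHOLD_" = "2.5")

# Run R CMD check on the package sources (the R package lives in R-package/).
rcmdcheck::rcmdcheck("R-package", args = "--as-cran")
```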
2 changes: 1 addition & 1 deletion R-package/DESCRIPTION
@@ -66,5 +66,5 @@ Imports:
methods,
data.table (>= 1.9.6),
jsonlite (>= 1.0),
RoxygenNote: 7.1.1
RoxygenNote: 7.2.1
SystemRequirements: GNU make, C++14
12 changes: 7 additions & 5 deletions R-package/R/callbacks.R
@@ -544,9 +544,11 @@ cb.cv.predict <- function(save_models = FALSE) {
#'
#' @return
#' Results are stored in the \code{coefs} element of the closure.
#' The \code{\link{xgb.gblinear.history}} convenience function provides an easy way to access it.
#' The \code{\link{xgb.gblinear.history}} convenience function provides an easy
#' way to access it.
#' With \code{xgb.train}, it is either a dense of a sparse matrix.
#' While with \code{xgb.cv}, it is a list (an element per each fold) of such matrices.
#' While with \code{xgb.cv}, it is a list (an element per each fold) of such
#' matrices.
#'
#' @seealso
#' \code{\link{callbacks}}, \code{\link{xgb.gblinear.history}}.
@@ -558,7 +560,7 @@ cb.cv.predict <- function(save_models = FALSE) {
#' # without considering the 2nd order interactions:
#' x <- model.matrix(Species ~ .^2, iris)[,-1]
#' colnames(x)
#' dtrain <- xgb.DMatrix(scale(x), label = 1*(iris$Species == "versicolor"))
#' dtrain <- xgb.DMatrix(scale(x), label = 1*(iris$Species == "versicolor"), nthread = 2)
#' param <- list(booster = "gblinear", objective = "reg:logistic", eval_metric = "auc",
#' lambda = 0.0003, alpha = 0.0003, nthread = 2)
#' # For 'shotgun', which is a default linear updater, using high eta values may result in
@@ -583,14 +585,14 @@ cb.cv.predict <- function(save_models = FALSE) {
#'
#' # For xgb.cv:
#' bst <- xgb.cv(param, dtrain, nfold = 5, nrounds = 100, eta = 0.8,
#' callbacks = list(cb.gblinear.history()))
#' callbacks = list(cb.gblinear.history()))
#' # coefficients in the CV fold #3
#' matplot(xgb.gblinear.history(bst)[[3]], type = 'l')
#'
#'
#' #### Multiclass classification:
#' #
#' dtrain <- xgb.DMatrix(scale(x), label = as.numeric(iris$Species) - 1)
#' dtrain <- xgb.DMatrix(scale(x), label = as.numeric(iris$Species) - 1, nthread = 2)
#' param <- list(booster = "gblinear", objective = "multi:softprob", num_class = 3,
#' lambda = 0.0003, alpha = 0.0003, nthread = 2)
#' # For the default linear updater 'shotgun' it sometimes is helpful
14 changes: 7 additions & 7 deletions R-package/R/xgb.DMatrix.R
@@ -18,7 +18,7 @@
#'
#' @examples
#' data(agaricus.train, package='xgboost')
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label))
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#' xgb.DMatrix.save(dtrain, 'xgb.DMatrix.data')
#' dtrain <- xgb.DMatrix('xgb.DMatrix.data')
#' if (file.exists('xgb.DMatrix.data')) file.remove('xgb.DMatrix.data')
@@ -110,7 +110,7 @@ xgb.get.DMatrix <- function(data, label = NULL, missing = NA, weight = NULL, nth
#' @examples
#' data(agaricus.train, package='xgboost')
#' train <- agaricus.train
#' dtrain <- xgb.DMatrix(train$data, label=train$label)
#' dtrain <- xgb.DMatrix(train$data, label=train$label, nthread = 2)
#'
#' stopifnot(nrow(dtrain) == nrow(train$data))
#' stopifnot(ncol(dtrain) == ncol(train$data))
@@ -138,7 +138,7 @@ dim.xgb.DMatrix <- function(x) {
#' @examples
#' data(agaricus.train, package='xgboost')
#' train <- agaricus.train
#' dtrain <- xgb.DMatrix(train$data, label=train$label)
#' dtrain <- xgb.DMatrix(train$data, label=train$label, nthread = 2)
#' dimnames(dtrain)
#' colnames(dtrain)
#' colnames(dtrain) <- make.names(1:ncol(train$data))
@@ -193,7 +193,7 @@ dimnames.xgb.DMatrix <- function(x) {
#'
#' @examples
#' data(agaricus.train, package='xgboost')
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label))
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#'
#' labels <- getinfo(dtrain, 'label')
#' setinfo(dtrain, 'label', 1-labels)
@@ -249,7 +249,7 @@ getinfo.xgb.DMatrix <- function(object, name, ...) {
#'
#' @examples
#' data(agaricus.train, package='xgboost')
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label))
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#'
#' labels <- getinfo(dtrain, 'label')
#' setinfo(dtrain, 'label', 1-labels)
@@ -345,7 +345,7 @@ setinfo.xgb.DMatrix <- function(object, name, info, ...) {
#'
#' @examples
#' data(agaricus.train, package='xgboost')
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label))
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#'
#' dsub <- slice(dtrain, 1:42)
#' labels1 <- getinfo(dsub, 'label')
@@ -401,7 +401,7 @@ slice.xgb.DMatrix <- function(object, idxset, ...) {
#'
#' @examples
#' data(agaricus.train, package='xgboost')
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label))
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#'
#' dtrain
#' print(dtrain, verbose=TRUE)
2 changes: 1 addition & 1 deletion R-package/R/xgb.DMatrix.save.R
@@ -7,7 +7,7 @@
#'
#' @examples
#' data(agaricus.train, package='xgboost')
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label))
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#' xgb.DMatrix.save(dtrain, 'xgb.DMatrix.data')
#' dtrain <- xgb.DMatrix('xgb.DMatrix.data')
#' if (file.exists('xgb.DMatrix.data')) file.remove('xgb.DMatrix.data')
14 changes: 9 additions & 5 deletions R-package/R/xgb.create.features.R
@@ -48,8 +48,8 @@
#' @examples
#' data(agaricus.train, package='xgboost')
#' data(agaricus.test, package='xgboost')
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label))
#' dtest <- with(agaricus.test, xgb.DMatrix(data, label = label))
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#' dtest <- with(agaricus.test, xgb.DMatrix(data, label = label, nthread = 2))
#'
#' param <- list(max_depth=2, eta=1, silent=1, objective='binary:logistic')
#' nrounds = 4
@@ -65,8 +65,12 @@
#' new.features.test <- xgb.create.features(model = bst, agaricus.test$data)
#'
#' # learning with new features
#' new.dtrain <- xgb.DMatrix(data = new.features.train, label = agaricus.train$label)
#' new.dtest <- xgb.DMatrix(data = new.features.test, label = agaricus.test$label)
#' new.dtrain <- xgb.DMatrix(
#' data = new.features.train, label = agaricus.train$label, nthread = 2
#' )
#' new.dtest <- xgb.DMatrix(
#' data = new.features.test, label = agaricus.test$label, nthread = 2
#' )
#' watchlist <- list(train = new.dtrain)
#' bst <- xgb.train(params = param, data = new.dtrain, nrounds = nrounds, nthread = 2)
#'
@@ -79,7 +83,7 @@
#' accuracy.after, "!\n"))
#'
#' @export
xgb.create.features <- function(model, data, ...){
xgb.create.features <- function(model, data, ...) {
check.deprecation(...)
pred_with_leaf <- predict(model, data, predleaf = TRUE)
cols <- lapply(as.data.frame(pred_with_leaf), factor)
6 changes: 3 additions & 3 deletions R-package/R/xgb.cv.R
@@ -110,9 +110,9 @@
#'
#' @examples
#' data(agaricus.train, package='xgboost')
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label))
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#' cv <- xgb.cv(data = dtrain, nrounds = 3, nthread = 2, nfold = 5, metrics = list("rmse","auc"),
#' max_depth = 3, eta = 1, objective = "binary:logistic")
#' max_depth = 3, eta = 1, objective = "binary:logistic")
#' print(cv)
#' print(cv, verbose=TRUE)
#'
@@ -192,7 +192,7 @@ xgb.cv <- function(params=list(), data, nrounds, nfold, label = NULL, missing =

# create the booster-folds
# train_folds
dall <- xgb.get.DMatrix(data, label, missing)
dall <- xgb.get.DMatrix(data, label, missing, nthread = params$nthread)
bst_folds <- lapply(seq_along(folds), function(k) {
dtest <- slice(dall, folds[[k]])
# code originally contributed by @RolandASc on stackoverflow
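With this change, the nthread value supplied in params is forwarded to the internal DMatrix construction as well, so a cross-validation call on a raw matrix stays within the same core budget as the rest of the example. A sketch of the user-facing effect (hypothetical call, mirroring the xgb.cv example above):

```r
data(agaricus.train, package = "xgboost")

# When data is a raw matrix, xgb.cv now builds its internal DMatrix with the
# nthread given in params (two here) instead of all available cores.
cv <- xgb.cv(
  params = list(objective = "binary:logistic", max_depth = 3, eta = 1, nthread = 2),
  data = agaricus.train$data,
  label = agaricus.train$label,
  nrounds = 3,
  nfold = 5
)
```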
4 changes: 2 additions & 2 deletions R-package/R/xgb.train.R
@@ -192,8 +192,8 @@
#' data(agaricus.train, package='xgboost')
#' data(agaricus.test, package='xgboost')
#'
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label))
#' dtest <- with(agaricus.test, xgb.DMatrix(data, label = label))
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#' dtest <- with(agaricus.test, xgb.DMatrix(data, label = label, nthread = 2))
#' watchlist <- list(train = dtrain, eval = dtest)
#'
#' ## A simple xgb.train example:
The remaining changes touch generated documentation files, which are not rendered by default:

12 changes: 7 additions & 5 deletions R-package/man/cb.gblinear.history.Rd
2 changes: 1 addition & 1 deletion R-package/man/dim.xgb.DMatrix.Rd
2 changes: 1 addition & 1 deletion R-package/man/dimnames.xgb.DMatrix.Rd
2 changes: 1 addition & 1 deletion R-package/man/getinfo.Rd
2 changes: 1 addition & 1 deletion R-package/man/print.xgb.DMatrix.Rd
2 changes: 1 addition & 1 deletion R-package/man/setinfo.Rd
2 changes: 1 addition & 1 deletion R-package/man/slice.xgb.DMatrix.Rd
2 changes: 1 addition & 1 deletion R-package/man/xgb.DMatrix.Rd
2 changes: 1 addition & 1 deletion R-package/man/xgb.DMatrix.save.Rd
12 changes: 8 additions & 4 deletions R-package/man/xgb.create.features.Rd
4 changes: 2 additions & 2 deletions R-package/man/xgb.cv.Rd
4 changes: 2 additions & 2 deletions R-package/man/xgb.train.Rd
