From c7cb1c91ee38b99642dca32c497b875638e88eec Mon Sep 17 00:00:00 2001
From: James Lamb
Date: Thu, 9 Apr 2020 21:17:10 -0500
Subject: [PATCH 1/2] [R-package] [docs] Simplified examples to cut example run
 time (fixes #2988)

---
 R-package/R/lgb.Booster.R                    | 21 ++++++-----
 R-package/R/lgb.Dataset.R                    | 11 ------
 R-package/R/lgb.cv.R                         |  6 ++--
 R-package/R/lgb.importance.R                 |  8 +++--
 R-package/R/lgb.interprete.R                 |  8 +++--
 R-package/R/lgb.plot.importance.R            | 10 +++---
 R-package/R/lgb.plot.interpretation.R        | 37 ++++++++++++++------
 R-package/R/lgb.prepare.R                    |  1 -
 R-package/R/lgb.prepare2.R                   |  1 -
 R-package/R/lgb.prepare_rules.R              |  1 -
 R-package/R/lgb.prepare_rules2.R             |  1 -
 R-package/R/lgb.train.R                      |  5 ++-
 R-package/R/lgb.unloader.R                   |  4 +--
 R-package/R/readRDS.lgb.Booster.R            |  3 +-
 R-package/R/saveRDS.lgb.Booster.R            |  2 ++
 R-package/man/dim.Rd                         |  1 -
 R-package/man/dimnames.lgb.Dataset.Rd        |  1 -
 R-package/man/getinfo.Rd                     |  1 -
 R-package/man/lgb.Dataset.Rd                 |  1 -
 R-package/man/lgb.Dataset.construct.Rd       |  1 -
 R-package/man/lgb.Dataset.create.valid.Rd    |  1 -
 R-package/man/lgb.Dataset.save.Rd            |  1 -
 R-package/man/lgb.Dataset.set.categorical.Rd |  1 -
 R-package/man/lgb.Dataset.set.reference.Rd   |  1 -
 R-package/man/lgb.cv.Rd                      |  6 ++--
 R-package/man/lgb.dump.Rd                    |  3 +-
 R-package/man/lgb.get.eval.result.Rd         |  4 +--
 R-package/man/lgb.importance.Rd              |  8 +++--
 R-package/man/lgb.interprete.Rd              |  8 +++--
 R-package/man/lgb.load.Rd                    |  8 ++---
 R-package/man/lgb.plot.importance.Rd         | 10 +++---
 R-package/man/lgb.plot.interpretation.Rd     | 37 ++++++++++++++------
 R-package/man/lgb.prepare.Rd                 |  1 -
 R-package/man/lgb.prepare2.Rd                |  1 -
 R-package/man/lgb.prepare_rules.Rd           |  1 -
 R-package/man/lgb.prepare_rules2.Rd          |  1 -
 R-package/man/lgb.save.Rd                    |  2 ++
 R-package/man/lgb.train.Rd                   |  5 ++-
 R-package/man/lgb.unloader.Rd                |  4 +--
 R-package/man/lgb_shared_params.Rd           |  3 +-
 R-package/man/lightgbm.Rd                    |  3 +-
 R-package/man/predict.lgb.Booster.Rd         |  4 +--
 R-package/man/readRDS.lgb.Booster.Rd         |  3 +-
 R-package/man/saveRDS.lgb.Booster.Rd         |  2 ++
 R-package/man/setinfo.Rd                     |  1 -
 R-package/man/slice.Rd                       |  1 -
 46 files changed, 126 insertions(+), 118 deletions(-)

diff --git a/R-package/R/lgb.Booster.R b/R-package/R/lgb.Booster.R
index f7aa4d10f49d..3e1bd0de0b78 100644
--- a/R-package/R/lgb.Booster.R
+++ b/R-package/R/lgb.Booster.R
@@ -711,7 +711,6 @@ Booster <- R6::R6Class(
 #' number of columns corresponding to the number of trees.
#' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -723,11 +722,10 @@ Booster <- R6::R6Class( #' model <- lgb.train( #' params = params #' , data = dtrain -#' , nrounds = 10L +#' , nrounds = 5L #' , valids = valids #' , min_data = 1L #' , learning_rate = 1.0 -#' , early_stopping_rounds = 5L #' ) #' preds <- predict(model, test$data) #' @export @@ -769,7 +767,7 @@ predict.lgb.Booster <- function(object, #' @return lgb.Booster #' #' @examples -#' library(lightgbm) +#' \donttest{ #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -781,17 +779,17 @@ predict.lgb.Booster <- function(object, #' model <- lgb.train( #' params = params #' , data = dtrain -#' , nrounds = 10L +#' , nrounds = 5L #' , valids = valids #' , min_data = 1L #' , learning_rate = 1.0 -#' , early_stopping_rounds = 5L +#' , early_stopping_rounds = 3L #' ) #' lgb.save(model, "model.txt") #' load_booster <- lgb.load(filename = "model.txt") #' model_string <- model$save_model_to_string(NULL) # saves best iteration #' load_booster_from_str <- lgb.load(model_str = model_string) -#' +#' } #' @export lgb.load <- function(filename = NULL, model_str = NULL) { @@ -828,6 +826,7 @@ lgb.load <- function(filename = NULL, model_str = NULL) { #' @return lgb.Booster #' #' @examples +#' \donttest{ #' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train @@ -847,6 +846,7 @@ lgb.load <- function(filename = NULL, model_str = NULL) { #' , early_stopping_rounds = 5L #' ) #' lgb.save(model, "model.txt") +#' } #' @export lgb.save <- function(booster, filename, num_iteration = NULL) { @@ -874,6 +874,7 @@ lgb.save <- function(booster, filename, num_iteration = NULL) { #' @return json format of model #' #' @examples +#' \donttest{ #' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train @@ -893,7 +894,7 @@ lgb.save <- function(booster, filename, num_iteration = NULL) { #' , early_stopping_rounds = 5L #' ) #' json_model <- lgb.dump(model) -#' +#' } #' @export lgb.dump <- function(booster, num_iteration = NULL) { @@ -922,7 +923,6 @@ lgb.dump <- function(booster, num_iteration = NULL) { #' #' @examples #' # train a regression model -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -934,11 +934,10 @@ lgb.dump <- function(booster, num_iteration = NULL) { #' model <- lgb.train( #' params = params #' , data = dtrain -#' , nrounds = 10L +#' , nrounds = 5L #' , valids = valids #' , min_data = 1L #' , learning_rate = 1.0 -#' , early_stopping_rounds = 5L #' ) #' #' # Examine valid data_name values diff --git a/R-package/R/lgb.Dataset.R b/R-package/R/lgb.Dataset.R index c361a6c423c3..22afc2666718 100644 --- a/R-package/R/lgb.Dataset.R +++ b/R-package/R/lgb.Dataset.R @@ -725,7 +725,6 @@ Dataset <- R6::R6Class( #' @return constructed dataset #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -770,7 +769,6 @@ lgb.Dataset <- function(data, #' @return constructed dataset #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -797,7 +795,6 @@ 
lgb.Dataset.create.valid <- function(dataset, data, info = list(), ...) { #' @param dataset Object of class \code{lgb.Dataset} #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -828,7 +825,6 @@ lgb.Dataset.construct <- function(dataset) { #' be directly used with an \code{lgb.Dataset} object. #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -863,7 +859,6 @@ dim.lgb.Dataset <- function(x, ...) { #' Since row names are irrelevant, it is recommended to use \code{colnames} directly. #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -936,7 +931,6 @@ dimnames.lgb.Dataset <- function(x) { #' @return constructed sub dataset #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -983,7 +977,6 @@ slice.lgb.Dataset <- function(dataset, idxset, ...) { #' } #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -1037,7 +1030,6 @@ getinfo.lgb.Dataset <- function(dataset, name, ...) { #' } #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -1078,7 +1070,6 @@ setinfo.lgb.Dataset <- function(dataset, name, info, ...) { #' @return passed dataset #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -1109,7 +1100,6 @@ lgb.Dataset.set.categorical <- function(dataset, categorical_feature) { #' @return passed dataset #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package ="lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -1141,7 +1131,6 @@ lgb.Dataset.set.reference <- function(dataset, reference) { #' @return passed dataset #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/R/lgb.cv.R b/R-package/R/lgb.cv.R index 3433aade6594..c0bcbb058f59 100644 --- a/R-package/R/lgb.cv.R +++ b/R-package/R/lgb.cv.R @@ -56,7 +56,6 @@ CVBooster <- R6::R6Class( #' @return a trained model \code{lgb.CVBooster}. 
#' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -64,11 +63,10 @@ CVBooster <- R6::R6Class( #' model <- lgb.cv( #' params = params #' , data = dtrain -#' , nrounds = 10L +#' , nrounds = 5L #' , nfold = 3L #' , min_data = 1L -#' , learning_rate = 1.0 -#' , early_stopping_rounds = 5L +#' , learning_rate = 0.05 #' ) #' @importFrom data.table data.table setorderv #' @export diff --git a/R-package/R/lgb.importance.R b/R-package/R/lgb.importance.R index acef193ebbc4..b1540ba1114b 100644 --- a/R-package/R/lgb.importance.R +++ b/R-package/R/lgb.importance.R @@ -13,7 +13,6 @@ #' } #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -21,12 +20,15 @@ #' params <- list( #' objective = "binary" #' , learning_rate = 0.01 -#' , num_leaves = 63L #' , max_depth = -1L #' , min_data_in_leaf = 1L #' , min_sum_hessian_in_leaf = 1.0 #' ) -#' model <- lgb.train(params, dtrain, 10L) +#' model <- lgb.train( +#' params = params +#' , data = dtrain +#' , nrounds = 5L +#' ) #' #' tree_imp1 <- lgb.importance(model, percentage = TRUE) #' tree_imp2 <- lgb.importance(model, percentage = FALSE) diff --git a/R-package/R/lgb.interprete.R b/R-package/R/lgb.interprete.R index eb0ecd94a6a1..e2433719e958 100644 --- a/R-package/R/lgb.interprete.R +++ b/R-package/R/lgb.interprete.R @@ -16,7 +16,6 @@ #' Contribution columns to each class. #' #' @examples -#' Sigmoid <- function(x) 1.0 / (1.0 + exp(-x)) #' Logit <- function(x) log(x / (1.0 - x)) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train @@ -28,12 +27,15 @@ #' params <- list( #' objective = "binary" #' , learning_rate = 0.01 -#' , num_leaves = 63L #' , max_depth = -1L #' , min_data_in_leaf = 1L #' , min_sum_hessian_in_leaf = 1.0 #' ) -#' model <- lgb.train(params, dtrain, 10L) +#' model <- lgb.train( +#' params = params +#' , data = dtrain +#' , nrounds = 3L +#' ) #' #' tree_interpretation <- lgb.interprete(model, test$data, 1L:5L) #' diff --git a/R-package/R/lgb.plot.importance.R b/R-package/R/lgb.plot.importance.R index b05dac39b91a..548c4f7821b5 100644 --- a/R-package/R/lgb.plot.importance.R +++ b/R-package/R/lgb.plot.importance.R @@ -25,16 +25,18 @@ #' params <- list( #' objective = "binary" #' , learning_rate = 0.01 -#' , num_leaves = 63L -#' , max_depth = -1L #' , min_data_in_leaf = 1L #' , min_sum_hessian_in_leaf = 1.0 #' ) #' -#' model <- lgb.train(params, dtrain, 10L) +#' model <- lgb.train( +#' params = params +#' , data = dtrain +#' , nrounds = 5L +#' ) #' #' tree_imp <- lgb.importance(model, percentage = TRUE) -#' lgb.plot.importance(tree_imp, top_n = 10L, measure = "Gain") +#' lgb.plot.importance(tree_imp, top_n = 5L, measure = "Gain") #' @importFrom graphics barplot par #' @export lgb.plot.importance <- function(tree_imp, diff --git a/R-package/R/lgb.plot.interpretation.R b/R-package/R/lgb.plot.interpretation.R index 2914ddf94f97..7fa5e72c50b5 100644 --- a/R-package/R/lgb.plot.interpretation.R +++ b/R-package/R/lgb.plot.interpretation.R @@ -15,28 +15,43 @@ #' The \code{lgb.plot.interpretation} function creates a \code{barplot}. 
#' #' @examples -#' library(lightgbm) -#' Sigmoid <- function(x) {1.0 / (1.0 + exp(-x))} -#' Logit <- function(x) {log(x / (1.0 - x))} +#' \donttest{ +#' Logit <- function(x) { +#' log(x / (1.0 - x)) +#' } #' data(agaricus.train, package = "lightgbm") -#' train <- agaricus.train -#' dtrain <- lgb.Dataset(train$data, label = train$label) -#' setinfo(dtrain, "init_score", rep(Logit(mean(train$label)), length(train$label))) +#' labels <- agaricus.train$label +#' dtrain <- lgb.Dataset( +#' agaricus.train$data +#' , label = labels +#' ) +#' setinfo(dtrain, "init_score", rep(Logit(mean(labels)), length(labels))) +#' #' data(agaricus.test, package = "lightgbm") -#' test <- agaricus.test #' #' params <- list( #' objective = "binary" #' , learning_rate = 0.01 -#' , num_leaves = 63L #' , max_depth = -1L #' , min_data_in_leaf = 1L #' , min_sum_hessian_in_leaf = 1.0 #' ) -#' model <- lgb.train(params, dtrain, 10L) +#' model <- lgb.train( +#' params = params +#' , data = dtrain +#' , nrounds = 5L +#' ) #' -#' tree_interpretation <- lgb.interprete(model, test$data, 1L:5L) -#' lgb.plot.interpretation(tree_interpretation[[1L]], top_n = 10L) +#' tree_interpretation <- lgb.interprete( +#' model = model +#' , data = agaricus.test$data +#' , idxset = 1L:5L +#' ) +#' lgb.plot.interpretation( +#' tree_interpretation_dt = tree_interpretation[[1L]] +#' , top_n = 5L +#' ) +#' } #' @importFrom data.table setnames #' @importFrom graphics barplot par #' @export diff --git a/R-package/R/lgb.prepare.R b/R-package/R/lgb.prepare.R index 42a9daa0d434..863271e06040 100644 --- a/R-package/R/lgb.prepare.R +++ b/R-package/R/lgb.prepare.R @@ -8,7 +8,6 @@ #' for input in \code{lgb.Dataset}. #' #' @examples -#' library(lightgbm) #' data(iris) #' #' str(iris) diff --git a/R-package/R/lgb.prepare2.R b/R-package/R/lgb.prepare2.R index f2fdc89d0c2a..0d7179ed9496 100644 --- a/R-package/R/lgb.prepare2.R +++ b/R-package/R/lgb.prepare2.R @@ -11,7 +11,6 @@ #' for input in \code{lgb.Dataset}. #' #' @examples -#' library(lightgbm) #' data(iris) #' #' str(iris) diff --git a/R-package/R/lgb.prepare_rules.R b/R-package/R/lgb.prepare_rules.R index e6efe89ab25f..307a69e32a38 100644 --- a/R-package/R/lgb.prepare_rules.R +++ b/R-package/R/lgb.prepare_rules.R @@ -10,7 +10,6 @@ #' in \code{lgb.Dataset}. #' #' @examples -#' library(lightgbm) #' data(iris) #' #' str(iris) diff --git a/R-package/R/lgb.prepare_rules2.R b/R-package/R/lgb.prepare_rules2.R index dab2ae5f5271..62688a765b47 100644 --- a/R-package/R/lgb.prepare_rules2.R +++ b/R-package/R/lgb.prepare_rules2.R @@ -13,7 +13,6 @@ #' \code{lgb.Dataset}. #' #' @examples -#' library(lightgbm) #' data(iris) #' #' str(iris) diff --git a/R-package/R/lgb.train.R b/R-package/R/lgb.train.R index d0dacecc0bd1..c41f32e15c8e 100644 --- a/R-package/R/lgb.train.R +++ b/R-package/R/lgb.train.R @@ -29,7 +29,6 @@ #' @return a trained booster model \code{lgb.Booster}. 
#' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -41,11 +40,11 @@ #' model <- lgb.train( #' params = params #' , data = dtrain -#' , nrounds = 10L +#' , nrounds = 5L #' , valids = valids #' , min_data = 1L #' , learning_rate = 1.0 -#' , early_stopping_rounds = 5L +#' , early_stopping_rounds = 3L #' ) #' @export lgb.train <- function(params = list(), diff --git a/R-package/R/lgb.unloader.R b/R-package/R/lgb.unloader.R index cb80e2f01ff6..aaafca019358 100644 --- a/R-package/R/lgb.unloader.R +++ b/R-package/R/lgb.unloader.R @@ -14,7 +14,6 @@ #' @return NULL invisibly. #' #' @examples -#' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train #' dtrain <- lgb.Dataset(train$data, label = train$label) @@ -26,11 +25,10 @@ #' model <- lgb.train( #' params = params #' , data = dtrain -#' , nrounds = 10L +#' , nrounds = 5L #' , valids = valids #' , min_data = 1L #' , learning_rate = 1.0 -#' , early_stopping_rounds = 5L #' ) #' #' \dontrun{ diff --git a/R-package/R/readRDS.lgb.Booster.R b/R-package/R/readRDS.lgb.Booster.R index e68dd8c963de..f0c862f33c74 100644 --- a/R-package/R/readRDS.lgb.Booster.R +++ b/R-package/R/readRDS.lgb.Booster.R @@ -7,6 +7,7 @@ #' @return \code{lgb.Booster}. #' #' @examples +#' \donttest{ #' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train @@ -27,7 +28,7 @@ #' ) #' saveRDS.lgb.Booster(model, "model.rds") #' new_model <- readRDS.lgb.Booster("model.rds") -#' +#' } #' @export readRDS.lgb.Booster <- function(file = "", refhook = NULL) { diff --git a/R-package/R/saveRDS.lgb.Booster.R b/R-package/R/saveRDS.lgb.Booster.R index 21bd8483628f..855e1e1b6c8e 100644 --- a/R-package/R/saveRDS.lgb.Booster.R +++ b/R-package/R/saveRDS.lgb.Booster.R @@ -18,6 +18,7 @@ #' @return NULL invisibly. #' #' @examples +#' \donttest{ #' library(lightgbm) #' data(agaricus.train, package = "lightgbm") #' train <- agaricus.train @@ -37,6 +38,7 @@ #' , early_stopping_rounds = 5L #' ) #' saveRDS.lgb.Booster(model, "model.rds") +#' } #' @export saveRDS.lgb.Booster <- function(object, file = "", diff --git a/R-package/man/dim.Rd b/R-package/man/dim.Rd index 4fdb64252f7e..55fde26d6a5b 100644 --- a/R-package/man/dim.Rd +++ b/R-package/man/dim.Rd @@ -22,7 +22,6 @@ Note: since \code{nrow} and \code{ncol} internally use \code{dim}, they can also be directly used with an \code{lgb.Dataset} object. } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/man/dimnames.lgb.Dataset.Rd b/R-package/man/dimnames.lgb.Dataset.Rd index 5f85ff12bd6f..22be85149646 100644 --- a/R-package/man/dimnames.lgb.Dataset.Rd +++ b/R-package/man/dimnames.lgb.Dataset.Rd @@ -24,7 +24,6 @@ Generic \code{dimnames} methods are used by \code{colnames}. Since row names are irrelevant, it is recommended to use \code{colnames} directly. 
} \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/man/getinfo.Rd b/R-package/man/getinfo.Rd index 29254d8622f3..2925308ed7e9 100644 --- a/R-package/man/getinfo.Rd +++ b/R-package/man/getinfo.Rd @@ -33,7 +33,6 @@ The \code{name} field can be one of the following: } } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/man/lgb.Dataset.Rd b/R-package/man/lgb.Dataset.Rd index b6cbb5327a14..fb1d1067a53e 100644 --- a/R-package/man/lgb.Dataset.Rd +++ b/R-package/man/lgb.Dataset.Rd @@ -40,7 +40,6 @@ Construct \code{lgb.Dataset} object from dense matrix, sparse matrix or local file (that was created previously by saving an \code{lgb.Dataset}). } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/man/lgb.Dataset.construct.Rd b/R-package/man/lgb.Dataset.construct.Rd index 23dfc0e9f67b..4338f84b669c 100644 --- a/R-package/man/lgb.Dataset.construct.Rd +++ b/R-package/man/lgb.Dataset.construct.Rd @@ -13,7 +13,6 @@ lgb.Dataset.construct(dataset) Construct Dataset explicitly } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/man/lgb.Dataset.create.valid.Rd b/R-package/man/lgb.Dataset.create.valid.Rd index e48c93772a39..0669f1887171 100644 --- a/R-package/man/lgb.Dataset.create.valid.Rd +++ b/R-package/man/lgb.Dataset.create.valid.Rd @@ -22,7 +22,6 @@ constructed dataset Construct validation data according to training data } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/man/lgb.Dataset.save.Rd b/R-package/man/lgb.Dataset.save.Rd index d8446f030936..26895999d11a 100644 --- a/R-package/man/lgb.Dataset.save.Rd +++ b/R-package/man/lgb.Dataset.save.Rd @@ -19,7 +19,6 @@ Please note that \code{init_score} is not saved in binary file. If you need it, please set it again after loading Dataset. } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/man/lgb.Dataset.set.categorical.Rd b/R-package/man/lgb.Dataset.set.categorical.Rd index e1f03cfbf9e6..0ab44b56bb0d 100644 --- a/R-package/man/lgb.Dataset.set.categorical.Rd +++ b/R-package/man/lgb.Dataset.set.categorical.Rd @@ -21,7 +21,6 @@ Set the categorical features of an \code{lgb.Dataset} object. Use this function to tell LightGBM which features should be treated as categorical. 
} \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/man/lgb.Dataset.set.reference.Rd b/R-package/man/lgb.Dataset.set.reference.Rd index fabe7c03e6fd..e8bd41820286 100644 --- a/R-package/man/lgb.Dataset.set.reference.Rd +++ b/R-package/man/lgb.Dataset.set.reference.Rd @@ -18,7 +18,6 @@ passed dataset If you want to use validation data, you should set reference to training data } \examples{ -library(lightgbm) data(agaricus.train, package ="lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/man/lgb.cv.Rd b/R-package/man/lgb.cv.Rd index 01473339cf6f..8ab61fc70074 100644 --- a/R-package/man/lgb.cv.Rd +++ b/R-package/man/lgb.cv.Rd @@ -100,7 +100,6 @@ a trained model \code{lgb.CVBooster}. Cross validation logic used by LightGBM } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) @@ -108,10 +107,9 @@ params <- list(objective = "regression", metric = "l2") model <- lgb.cv( params = params , data = dtrain - , nrounds = 10L + , nrounds = 5L , nfold = 3L , min_data = 1L - , learning_rate = 1.0 - , early_stopping_rounds = 5L + , learning_rate = 0.05 ) } diff --git a/R-package/man/lgb.dump.Rd b/R-package/man/lgb.dump.Rd index 828ba4ac7ea9..6fbc5cbe9b43 100644 --- a/R-package/man/lgb.dump.Rd +++ b/R-package/man/lgb.dump.Rd @@ -18,6 +18,7 @@ json format of model Dump LightGBM model to json } \examples{ +\donttest{ library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train @@ -37,5 +38,5 @@ model <- lgb.train( , early_stopping_rounds = 5L ) json_model <- lgb.dump(model) - +} } diff --git a/R-package/man/lgb.get.eval.result.Rd b/R-package/man/lgb.get.eval.result.Rd index c5473825e61a..5707d8ccb6c4 100644 --- a/R-package/man/lgb.get.eval.result.Rd +++ b/R-package/man/lgb.get.eval.result.Rd @@ -33,7 +33,6 @@ Given a \code{lgb.Booster}, return evaluation results for a } \examples{ # train a regression model -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) @@ -45,11 +44,10 @@ valids <- list(test = dtest) model <- lgb.train( params = params , data = dtrain - , nrounds = 10L + , nrounds = 5L , valids = valids , min_data = 1L , learning_rate = 1.0 - , early_stopping_rounds = 5L ) # Examine valid data_name values diff --git a/R-package/man/lgb.importance.Rd b/R-package/man/lgb.importance.Rd index 3d6c1fae5217..d4eba713f1c2 100644 --- a/R-package/man/lgb.importance.Rd +++ b/R-package/man/lgb.importance.Rd @@ -24,7 +24,6 @@ For a tree model, a \code{data.table} with the following columns: Creates a \code{data.table} of feature importances in a model. 
} \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) @@ -32,12 +31,15 @@ dtrain <- lgb.Dataset(train$data, label = train$label) params <- list( objective = "binary" , learning_rate = 0.01 - , num_leaves = 63L , max_depth = -1L , min_data_in_leaf = 1L , min_sum_hessian_in_leaf = 1.0 ) -model <- lgb.train(params, dtrain, 10L) +model <- lgb.train( + params = params + , data = dtrain + , nrounds = 5L +) tree_imp1 <- lgb.importance(model, percentage = TRUE) tree_imp2 <- lgb.importance(model, percentage = FALSE) diff --git a/R-package/man/lgb.interprete.Rd b/R-package/man/lgb.interprete.Rd index aa8aedf156f4..0793b338944d 100644 --- a/R-package/man/lgb.interprete.Rd +++ b/R-package/man/lgb.interprete.Rd @@ -29,7 +29,6 @@ For regression, binary classification and lambdarank model, a \code{list} of \co Computes feature contribution components of rawscore prediction. } \examples{ -Sigmoid <- function(x) 1.0 / (1.0 + exp(-x)) Logit <- function(x) log(x / (1.0 - x)) data(agaricus.train, package = "lightgbm") train <- agaricus.train @@ -41,12 +40,15 @@ test <- agaricus.test params <- list( objective = "binary" , learning_rate = 0.01 - , num_leaves = 63L , max_depth = -1L , min_data_in_leaf = 1L , min_sum_hessian_in_leaf = 1.0 ) -model <- lgb.train(params, dtrain, 10L) +model <- lgb.train( + params = params + , data = dtrain + , nrounds = 3L +) tree_interpretation <- lgb.interprete(model, test$data, 1L:5L) diff --git a/R-package/man/lgb.load.Rd b/R-package/man/lgb.load.Rd index c2b1500e9bc8..5f7c2354733e 100644 --- a/R-package/man/lgb.load.Rd +++ b/R-package/man/lgb.load.Rd @@ -19,7 +19,7 @@ Load LightGBM takes in either a file path or model string. If both are provided, Load will default to loading from file } \examples{ -library(lightgbm) +\donttest{ data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) @@ -31,15 +31,15 @@ valids <- list(test = dtest) model <- lgb.train( params = params , data = dtrain - , nrounds = 10L + , nrounds = 5L , valids = valids , min_data = 1L , learning_rate = 1.0 - , early_stopping_rounds = 5L + , early_stopping_rounds = 3L ) lgb.save(model, "model.txt") load_booster <- lgb.load(filename = "model.txt") model_string <- model$save_model_to_string(NULL) # saves best iteration load_booster_from_str <- lgb.load(model_str = model_string) - +} } diff --git a/R-package/man/lgb.plot.importance.Rd b/R-package/man/lgb.plot.importance.Rd index 97775efd704d..9bcea189b881 100644 --- a/R-package/man/lgb.plot.importance.Rd +++ b/R-package/man/lgb.plot.importance.Rd @@ -44,14 +44,16 @@ dtrain <- lgb.Dataset(train$data, label = train$label) params <- list( objective = "binary" , learning_rate = 0.01 - , num_leaves = 63L - , max_depth = -1L , min_data_in_leaf = 1L , min_sum_hessian_in_leaf = 1.0 ) -model <- lgb.train(params, dtrain, 10L) +model <- lgb.train( + params = params + , data = dtrain + , nrounds = 5L +) tree_imp <- lgb.importance(model, percentage = TRUE) -lgb.plot.importance(tree_imp, top_n = 10L, measure = "Gain") +lgb.plot.importance(tree_imp, top_n = 5L, measure = "Gain") } diff --git a/R-package/man/lgb.plot.interpretation.Rd b/R-package/man/lgb.plot.interpretation.Rd index f5fa6497f2c0..95b15c034e74 100644 --- a/R-package/man/lgb.plot.interpretation.Rd +++ b/R-package/man/lgb.plot.interpretation.Rd @@ -34,26 +34,41 @@ The graph represents each feature as a horizontal bar of length proportional to contribution 
of a feature. Features are shown ranked in a decreasing contribution order. } \examples{ -library(lightgbm) -Sigmoid <- function(x) {1.0 / (1.0 + exp(-x))} -Logit <- function(x) {log(x / (1.0 - x))} +\donttest{ +Logit <- function(x) { + log(x / (1.0 - x)) +} data(agaricus.train, package = "lightgbm") -train <- agaricus.train -dtrain <- lgb.Dataset(train$data, label = train$label) -setinfo(dtrain, "init_score", rep(Logit(mean(train$label)), length(train$label))) +labels <- agaricus.train$label +dtrain <- lgb.Dataset( + agaricus.train$data + , label = labels +) +setinfo(dtrain, "init_score", rep(Logit(mean(labels)), length(labels))) + data(agaricus.test, package = "lightgbm") -test <- agaricus.test params <- list( objective = "binary" , learning_rate = 0.01 - , num_leaves = 63L , max_depth = -1L , min_data_in_leaf = 1L , min_sum_hessian_in_leaf = 1.0 ) -model <- lgb.train(params, dtrain, 10L) +model <- lgb.train( + params = params + , data = dtrain + , nrounds = 5L +) -tree_interpretation <- lgb.interprete(model, test$data, 1L:5L) -lgb.plot.interpretation(tree_interpretation[[1L]], top_n = 10L) +tree_interpretation <- lgb.interprete( + model = model + , data = agaricus.test$data + , idxset = 1L:5L +) +lgb.plot.interpretation( + tree_interpretation_dt = tree_interpretation[[1L]] + , top_n = 5L +) +} } diff --git a/R-package/man/lgb.prepare.Rd b/R-package/man/lgb.prepare.Rd index dc1fed72e698..db726b15d36a 100644 --- a/R-package/man/lgb.prepare.Rd +++ b/R-package/man/lgb.prepare.Rd @@ -19,7 +19,6 @@ Attempts to prepare a clean dataset to prepare to put in a \code{lgb.Dataset}. \code{\link{lgb.prepare_rules}} if you want to apply this transformation to other datasets. } \examples{ -library(lightgbm) data(iris) str(iris) diff --git a/R-package/man/lgb.prepare2.Rd b/R-package/man/lgb.prepare2.Rd index e4eaf53df2f6..eef44758f42b 100644 --- a/R-package/man/lgb.prepare2.Rd +++ b/R-package/man/lgb.prepare2.Rd @@ -22,7 +22,6 @@ Attempts to prepare a clean dataset to prepare to put in a \code{lgb.Dataset}. input. Consider this as a half memory technique which is dangerous, especially for LightGBM. } \examples{ -library(lightgbm) data(iris) str(iris) diff --git a/R-package/man/lgb.prepare_rules.Rd b/R-package/man/lgb.prepare_rules.Rd index a766b7f26af6..69821ef7d240 100644 --- a/R-package/man/lgb.prepare_rules.Rd +++ b/R-package/man/lgb.prepare_rules.Rd @@ -22,7 +22,6 @@ Attempts to prepare a clean dataset to prepare to put in a \code{lgb.Dataset}. so you can convert other datasets using this converter. } \examples{ -library(lightgbm) data(iris) str(iris) diff --git a/R-package/man/lgb.prepare_rules2.Rd b/R-package/man/lgb.prepare_rules2.Rd index b19f275f2009..7fe3de7c4929 100644 --- a/R-package/man/lgb.prepare_rules2.Rd +++ b/R-package/man/lgb.prepare_rules2.Rd @@ -25,7 +25,6 @@ Attempts to prepare a clean dataset to prepare to put in a \code{lgb.Dataset}. Consider this as a half memory technique which is dangerous, especially for LightGBM. 
} \examples{ -library(lightgbm) data(iris) str(iris) diff --git a/R-package/man/lgb.save.Rd b/R-package/man/lgb.save.Rd index 70bd098a0913..91e2befb738f 100644 --- a/R-package/man/lgb.save.Rd +++ b/R-package/man/lgb.save.Rd @@ -20,6 +20,7 @@ lgb.Booster Save LightGBM model } \examples{ +\donttest{ library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train @@ -40,3 +41,4 @@ model <- lgb.train( ) lgb.save(model, "model.txt") } +} diff --git a/R-package/man/lgb.train.Rd b/R-package/man/lgb.train.Rd index 98298ab6f954..b471e0c7601f 100644 --- a/R-package/man/lgb.train.Rd +++ b/R-package/man/lgb.train.Rd @@ -83,7 +83,6 @@ a trained booster model \code{lgb.Booster}. Logic to train with LightGBM } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) @@ -95,10 +94,10 @@ valids <- list(test = dtest) model <- lgb.train( params = params , data = dtrain - , nrounds = 10L + , nrounds = 5L , valids = valids , min_data = 1L , learning_rate = 1.0 - , early_stopping_rounds = 5L + , early_stopping_rounds = 3L ) } diff --git a/R-package/man/lgb.unloader.Rd b/R-package/man/lgb.unloader.Rd index 758a831ee3dd..ca69c08b602c 100644 --- a/R-package/man/lgb.unloader.Rd +++ b/R-package/man/lgb.unloader.Rd @@ -26,7 +26,6 @@ Attempts to unload LightGBM packages so you can remove objects cleanly without apparent reason and you do not want to restart R to fix the lost object. } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) @@ -38,11 +37,10 @@ valids <- list(test = dtest) model <- lgb.train( params = params , data = dtrain - , nrounds = 10L + , nrounds = 5L , valids = valids , min_data = 1L , learning_rate = 1.0 - , early_stopping_rounds = 5L ) \dontrun{ diff --git a/R-package/man/lgb_shared_params.Rd b/R-package/man/lgb_shared_params.Rd index ae2f61a86256..a7143179495e 100644 --- a/R-package/man/lgb_shared_params.Rd +++ b/R-package/man/lgb_shared_params.Rd @@ -4,7 +4,8 @@ \alias{lgb_shared_params} \title{Shared parameter docs} \arguments{ -\item{callbacks}{List of callback functions that are applied at each iteration.} +\item{callbacks}{list of callback functions +List of callback functions that are applied at each iteration.} \item{data}{a \code{lgb.Dataset} object, used for training. Some functions, such as \code{\link{lgb.cv}}, may allow you to pass other types of data like \code{matrix} and then separately supply diff --git a/R-package/man/lightgbm.Rd b/R-package/man/lightgbm.Rd index 88d98d13525d..256a7dc6e8e9 100644 --- a/R-package/man/lightgbm.Rd +++ b/R-package/man/lightgbm.Rd @@ -45,7 +45,8 @@ If early stopping occurs, the model will have 'best_iter' field.} \item{init_model}{path of model file of \code{lgb.Booster} object, will continue training from this model} -\item{callbacks}{List of callback functions that are applied at each iteration.} +\item{callbacks}{list of callback functions +List of callback functions that are applied at each iteration.} \item{...}{Additional arguments passed to \code{\link{lgb.train}}. 
For example \itemize{ diff --git a/R-package/man/predict.lgb.Booster.Rd b/R-package/man/predict.lgb.Booster.Rd index 985cd763689a..40444cbff7be 100644 --- a/R-package/man/predict.lgb.Booster.Rd +++ b/R-package/man/predict.lgb.Booster.Rd @@ -52,7 +52,6 @@ For regression or binary classification, it returns a vector of length \code{nro Predicted values based on class \code{lgb.Booster} } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) @@ -64,11 +63,10 @@ valids <- list(test = dtest) model <- lgb.train( params = params , data = dtrain - , nrounds = 10L + , nrounds = 5L , valids = valids , min_data = 1L , learning_rate = 1.0 - , early_stopping_rounds = 5L ) preds <- predict(model, test$data) } diff --git a/R-package/man/readRDS.lgb.Booster.Rd b/R-package/man/readRDS.lgb.Booster.Rd index 809333aef0dc..be03fd1cfcb8 100644 --- a/R-package/man/readRDS.lgb.Booster.Rd +++ b/R-package/man/readRDS.lgb.Booster.Rd @@ -18,6 +18,7 @@ readRDS.lgb.Booster(file = "", refhook = NULL) Attempts to load a model stored in a \code{.rds} file, using \code{\link[base]{readRDS}} } \examples{ +\donttest{ library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train @@ -38,5 +39,5 @@ model <- lgb.train( ) saveRDS.lgb.Booster(model, "model.rds") new_model <- readRDS.lgb.Booster("model.rds") - +} } diff --git a/R-package/man/saveRDS.lgb.Booster.Rd b/R-package/man/saveRDS.lgb.Booster.Rd index 2d1fbb636a93..3e7bb368d878 100644 --- a/R-package/man/saveRDS.lgb.Booster.Rd +++ b/R-package/man/saveRDS.lgb.Booster.Rd @@ -42,6 +42,7 @@ Attempts to save a model using RDS. Has an additional parameter (\code{raw}) which decides whether to save the raw model or not. } \examples{ +\donttest{ library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train @@ -62,3 +63,4 @@ model <- lgb.train( ) saveRDS.lgb.Booster(model, "model.rds") } +} diff --git a/R-package/man/setinfo.Rd b/R-package/man/setinfo.Rd index 74d18673a8a1..344f79cc4621 100644 --- a/R-package/man/setinfo.Rd +++ b/R-package/man/setinfo.Rd @@ -38,7 +38,6 @@ The \code{name} field can be one of the following: } } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) diff --git a/R-package/man/slice.Rd b/R-package/man/slice.Rd index 6eaec3daeab2..90c837f222ab 100644 --- a/R-package/man/slice.Rd +++ b/R-package/man/slice.Rd @@ -24,7 +24,6 @@ Get a new \code{lgb.Dataset} containing the specified rows of original \code{lgb.Dataset} object } \examples{ -library(lightgbm) data(agaricus.train, package = "lightgbm") train <- agaricus.train dtrain <- lgb.Dataset(train$data, label = train$label) From 1024d9fd54d521841dc30e637d2c66f95c1e236e Mon Sep 17 00:00:00 2001 From: James Lamb Date: Mon, 13 Apr 2020 18:17:47 -0500 Subject: [PATCH 2/2] updated learning rates --- R-package/R/lgb.cv.R | 2 +- R-package/R/lgb.importance.R | 2 +- R-package/R/lgb.interprete.R | 2 +- R-package/R/lgb.plot.importance.R | 2 +- R-package/R/lgb.plot.interpretation.R | 2 +- R-package/man/lgb.cv.Rd | 2 +- R-package/man/lgb.importance.Rd | 2 +- R-package/man/lgb.interprete.Rd | 2 +- R-package/man/lgb.plot.importance.Rd | 2 +- R-package/man/lgb.plot.interpretation.Rd | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/R-package/R/lgb.cv.R b/R-package/R/lgb.cv.R index c0bcbb058f59..90e54773c786 100644 --- a/R-package/R/lgb.cv.R +++ 
b/R-package/R/lgb.cv.R @@ -66,7 +66,7 @@ CVBooster <- R6::R6Class( #' , nrounds = 5L #' , nfold = 3L #' , min_data = 1L -#' , learning_rate = 0.05 +#' , learning_rate = 1.0 #' ) #' @importFrom data.table data.table setorderv #' @export diff --git a/R-package/R/lgb.importance.R b/R-package/R/lgb.importance.R index b1540ba1114b..3064673f664a 100644 --- a/R-package/R/lgb.importance.R +++ b/R-package/R/lgb.importance.R @@ -19,7 +19,7 @@ #' #' params <- list( #' objective = "binary" -#' , learning_rate = 0.01 +#' , learning_rate = 0.1 #' , max_depth = -1L #' , min_data_in_leaf = 1L #' , min_sum_hessian_in_leaf = 1.0 diff --git a/R-package/R/lgb.interprete.R b/R-package/R/lgb.interprete.R index e2433719e958..e97fb1b590a1 100644 --- a/R-package/R/lgb.interprete.R +++ b/R-package/R/lgb.interprete.R @@ -26,7 +26,7 @@ #' #' params <- list( #' objective = "binary" -#' , learning_rate = 0.01 +#' , learning_rate = 0.1 #' , max_depth = -1L #' , min_data_in_leaf = 1L #' , min_sum_hessian_in_leaf = 1.0 diff --git a/R-package/R/lgb.plot.importance.R b/R-package/R/lgb.plot.importance.R index 548c4f7821b5..ec496c4213f3 100644 --- a/R-package/R/lgb.plot.importance.R +++ b/R-package/R/lgb.plot.importance.R @@ -24,7 +24,7 @@ #' #' params <- list( #' objective = "binary" -#' , learning_rate = 0.01 +#' , learning_rate = 0.1 #' , min_data_in_leaf = 1L #' , min_sum_hessian_in_leaf = 1.0 #' ) diff --git a/R-package/R/lgb.plot.interpretation.R b/R-package/R/lgb.plot.interpretation.R index 7fa5e72c50b5..0efd71ccc929 100644 --- a/R-package/R/lgb.plot.interpretation.R +++ b/R-package/R/lgb.plot.interpretation.R @@ -31,7 +31,7 @@ #' #' params <- list( #' objective = "binary" -#' , learning_rate = 0.01 +#' , learning_rate = 0.1 #' , max_depth = -1L #' , min_data_in_leaf = 1L #' , min_sum_hessian_in_leaf = 1.0 diff --git a/R-package/man/lgb.cv.Rd b/R-package/man/lgb.cv.Rd index 8ab61fc70074..673392f54568 100644 --- a/R-package/man/lgb.cv.Rd +++ b/R-package/man/lgb.cv.Rd @@ -110,6 +110,6 @@ model <- lgb.cv( , nrounds = 5L , nfold = 3L , min_data = 1L - , learning_rate = 0.05 + , learning_rate = 1.0 ) } diff --git a/R-package/man/lgb.importance.Rd b/R-package/man/lgb.importance.Rd index d4eba713f1c2..5a269407859f 100644 --- a/R-package/man/lgb.importance.Rd +++ b/R-package/man/lgb.importance.Rd @@ -30,7 +30,7 @@ dtrain <- lgb.Dataset(train$data, label = train$label) params <- list( objective = "binary" - , learning_rate = 0.01 + , learning_rate = 0.1 , max_depth = -1L , min_data_in_leaf = 1L , min_sum_hessian_in_leaf = 1.0 diff --git a/R-package/man/lgb.interprete.Rd b/R-package/man/lgb.interprete.Rd index 0793b338944d..86fb8ecb515b 100644 --- a/R-package/man/lgb.interprete.Rd +++ b/R-package/man/lgb.interprete.Rd @@ -39,7 +39,7 @@ test <- agaricus.test params <- list( objective = "binary" - , learning_rate = 0.01 + , learning_rate = 0.1 , max_depth = -1L , min_data_in_leaf = 1L , min_sum_hessian_in_leaf = 1.0 diff --git a/R-package/man/lgb.plot.importance.Rd b/R-package/man/lgb.plot.importance.Rd index 9bcea189b881..024077a08409 100644 --- a/R-package/man/lgb.plot.importance.Rd +++ b/R-package/man/lgb.plot.importance.Rd @@ -43,7 +43,7 @@ dtrain <- lgb.Dataset(train$data, label = train$label) params <- list( objective = "binary" - , learning_rate = 0.01 + , learning_rate = 0.1 , min_data_in_leaf = 1L , min_sum_hessian_in_leaf = 1.0 ) diff --git a/R-package/man/lgb.plot.interpretation.Rd b/R-package/man/lgb.plot.interpretation.Rd index 95b15c034e74..b8818eecdbc0 100644 --- a/R-package/man/lgb.plot.interpretation.Rd +++ 
b/R-package/man/lgb.plot.interpretation.Rd @@ -50,7 +50,7 @@ data(agaricus.test, package = "lightgbm") params <- list( objective = "binary" - , learning_rate = 0.01 + , learning_rate = 0.1 , max_depth = -1L , min_data_in_leaf = 1L , min_sum_hessian_in_leaf = 1.0
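Editor's reference, not part of either commit: for readers skimming the hunks, the snippet below is a minimal sketch of the feature-importance example as it stands after both patches are applied (nrounds cut to 5L in PATCH 1/2, learning_rate raised to 0.1 in PATCH 2/2). It is assembled only from calls that appear in the hunks above (lgb.Dataset, lgb.train, lgb.importance, lgb.plot.importance); the library() call is restored here so the sketch is self-contained.

library(lightgbm)

# binary classification on the bundled agaricus data
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)

# parameter values as of PATCH 2/2 (learning_rate = 0.1)
params <- list(
  objective = "binary"
  , learning_rate = 0.1
  , min_data_in_leaf = 1L
  , min_sum_hessian_in_leaf = 1.0
)

# five boosting rounds keep the documented example fast
model <- lgb.train(
  params = params
  , data = dtrain
  , nrounds = 5L
)

# rank and plot the top features by information gain
tree_imp <- lgb.importance(model, percentage = TRUE)
lgb.plot.importance(tree_imp, top_n = 5L, measure = "Gain")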