diff --git a/R-package/R/aliases.R b/R-package/R/aliases.R
index 5fcba4f46fe7..7cd3245727af 100644
--- a/R-package/R/aliases.R
+++ b/R-package/R/aliases.R
@@ -113,6 +113,7 @@
, "num_trees"
, "num_round"
, "num_rounds"
+ , "nrounds"
, "num_boost_round"
, "n_estimators"
, "max_iter"
diff --git a/R-package/tests/testthat/test_basic.R b/R-package/tests/testthat/test_basic.R
index 5aff8e2c2d2c..92b11650c3e2 100644
--- a/R-package/tests/testthat/test_basic.R
+++ b/R-package/tests/testthat/test_basic.R
@@ -188,6 +188,68 @@ test_that("lightgbm() rejects negative or 0 value passed to nrounds", {
}
})
+test_that("lightgbm() accepts nrounds as either a top-level argument or parameter", {
+ nrounds <- 15L
+
+ set.seed(708L)
+ top_level_bst <- lightgbm(
+ data = train$data
+ , label = train$label
+ , nrounds = nrounds
+ , params = list(
+ objective = "regression"
+ , metric = "l2"
+ , num_leaves = 5L
+ )
+ , save_name = tempfile(fileext = ".model")
+ )
+
+ set.seed(708L)
+ param_bst <- lightgbm(
+ data = train$data
+ , label = train$label
+ , params = list(
+ objective = "regression"
+ , metric = "l2"
+ , num_leaves = 5L
+ , nrounds = nrounds
+ )
+ , save_name = tempfile(fileext = ".model")
+ )
+
+ set.seed(708L)
+ both_customized <- lightgbm(
+ data = train$data
+ , label = train$label
+ , nrounds = 20L
+ , params = list(
+ objective = "regression"
+ , metric = "l2"
+ , num_leaves = 5L
+ , nrounds = nrounds
+ )
+ , save_name = tempfile(fileext = ".model")
+ )
+
+ top_level_l2 <- top_level_bst$eval_train()[[1L]][["value"]]
+ params_l2 <- param_bst$eval_train()[[1L]][["value"]]
+ both_l2 <- both_customized$eval_train()[[1L]][["value"]]
+
+ # check type just to be sure the subsetting didn't return a NULL
+ expect_true(is.numeric(top_level_l2))
+ expect_true(is.numeric(params_l2))
+ expect_true(is.numeric(both_l2))
+
+ # check that model produces identical performance
+ expect_identical(top_level_l2, params_l2)
+ expect_identical(both_l2, params_l2)
+
+ expect_identical(param_bst$current_iter(), top_level_bst$current_iter())
+ expect_identical(param_bst$current_iter(), both_customized$current_iter())
+ expect_identical(param_bst$current_iter(), nrounds)
+
+})
+
test_that("lightgbm() performs evaluation on validation sets if they are provided", {
set.seed(708L)
dvalid1 <- lgb.Dataset(
@@ -467,6 +529,73 @@ test_that("lgb.train() rejects negative or 0 value passed to nrounds", {
 }
})
+
+test_that("lgb.train() accepts nrounds as either a top-level argument or parameter", {
+ nrounds <- 15L
+
+ set.seed(708L)
+ top_level_bst <- lgb.train(
+ data = lgb.Dataset(
+ train$data
+ , label = train$label
+ )
+ , nrounds = nrounds
+ , params = list(
+ objective = "regression"
+ , metric = "l2"
+ , num_leaves = 5L
+ )
+ )
+
+ set.seed(708L)
+ param_bst <- lgb.train(
+ data = lgb.Dataset(
+ train$data
+ , label = train$label
+ )
+ , params = list(
+ objective = "regression"
+ , metric = "l2"
+ , num_leaves = 5L
+ , nrounds = nrounds
+ )
+ )
+
+ set.seed(708L)
+ both_customized <- lgb.train(
+ data = lgb.Dataset(
+ train$data
+ , label = train$label
+ )
+ , nrounds = 20L
+ , params = list(
+ objective = "regression"
+ , metric = "l2"
+ , num_leaves = 5L
+ , nrounds = nrounds
+ )
+ )
+
+ top_level_l2 <- top_level_bst$eval_train()[[1L]][["value"]]
+ params_l2 <- param_bst$eval_train()[[1L]][["value"]]
+ both_l2 <- both_customized$eval_train()[[1L]][["value"]]
+
+ # check type just to be sure the subsetting didn't return a NULL
+ expect_true(is.numeric(top_level_l2))
+ expect_true(is.numeric(params_l2))
+ expect_true(is.numeric(both_l2))
+
+ # check that model produces identical performance
+ expect_identical(top_level_l2, params_l2)
+ expect_identical(both_l2, params_l2)
+
+ expect_identical(param_bst$current_iter(), top_level_bst$current_iter())
+ expect_identical(param_bst$current_iter(), both_customized$current_iter())
+ expect_identical(param_bst$current_iter(), nrounds)
+
+})
+
+
test_that("lgb.train() throws an informative error if 'data' is not an lgb.Dataset", {
bad_values <- list(
4L
diff --git a/docs/Parameters.rst b/docs/Parameters.rst
index 088773d0690e..75bef7add9bc 100644
--- a/docs/Parameters.rst
+++ b/docs/Parameters.rst
@@ -153,7 +153,7 @@ Core Parameters
- **Note**: can be used only in CLI version
-- ``num_iterations`` :raw-html:`🔗︎`, default = ``100``, type = int, aliases: ``num_iteration``, ``n_iter``, ``num_tree``, ``num_trees``, ``num_round``, ``num_rounds``, ``num_boost_round``, ``n_estimators``, ``max_iter``, constraints: ``num_iterations >= 0``
+- ``num_iterations`` :raw-html:`🔗︎`, default = ``100``, type = int, aliases: ``num_iteration``, ``n_iter``, ``num_tree``, ``num_trees``, ``num_round``, ``num_rounds``, ``nrounds``, ``num_boost_round``, ``n_estimators``, ``max_iter``, constraints: ``num_iterations >= 0``
- number of boosting iterations
diff --git a/include/LightGBM/config.h b/include/LightGBM/config.h
index c94420645cd9..da43a5ec9782 100644
--- a/include/LightGBM/config.h
+++ b/include/LightGBM/config.h
@@ -161,7 +161,7 @@ struct Config {
// desc = **Note**: can be used only in CLI version
 std::vector<std::string> valid;
- // alias = num_iteration, n_iter, num_tree, num_trees, num_round, num_rounds, num_boost_round, n_estimators, max_iter
+ // alias = num_iteration, n_iter, num_tree, num_trees, num_round, num_rounds, nrounds, num_boost_round, n_estimators, max_iter
// check = >=0
// desc = number of boosting iterations
// desc = **Note**: internally, LightGBM constructs ``num_class * num_iterations`` trees for multi-class classification problems
diff --git a/python-package/lightgbm/basic.py b/python-package/lightgbm/basic.py
index 812fd82a5d97..569d9383f680 100644
--- a/python-package/lightgbm/basic.py
+++ b/python-package/lightgbm/basic.py
@@ -386,6 +386,7 @@ class _ConfigAliases:
"num_trees",
"num_round",
"num_rounds",
+ "nrounds",
"num_boost_round",
"n_estimators",
"max_iter"},
diff --git a/src/io/config_auto.cpp b/src/io/config_auto.cpp
index 4e3f000a88f5..cd24790b820c 100644
--- a/src/io/config_auto.cpp
+++ b/src/io/config_auto.cpp
@@ -33,6 +33,7 @@ const std::unordered_map<std::string, std::string>& Config::alias_table() {
{"num_trees", "num_iterations"},
{"num_round", "num_iterations"},
{"num_rounds", "num_iterations"},
+ {"nrounds", "num_iterations"},
{"num_boost_round", "num_iterations"},
{"n_estimators", "num_iterations"},
{"max_iter", "num_iterations"},