Skip to content

Commit

Permalink
[Unittest] Add lr_scheduler_cosine unit test
Browse files Browse the repository at this point in the history
Add lr_scheduler_cosine unit test cases
- property setting test
- finalize test
- getLearningRate test

**Self evaluation:**
1. Build test:	 [X]Passed [ ]Failed [ ]Skipped
2. Run test:	 [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Donghak PARK <donghak.park@samsung.com>
  • Loading branch information
DonghakPark committed Oct 21, 2024
1 parent 72ecd76 commit d29ad9f
Show file tree
Hide file tree
Showing 3 changed files with 70 additions and 2 deletions.
11 changes: 9 additions & 2 deletions api/ccapi/include/optimizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -143,8 +143,9 @@ SGD(const std::vector<std::string> &properties = {}) {
enum LearningRateSchedulerType {
CONSTANT = ML_TRAIN_LR_SCHEDULER_TYPE_CONSTANT, /**< constant */
EXPONENTIAL =
ML_TRAIN_LR_SCHEDULER_TYPE_EXPONENTIAL, /**< exponentially decay */
STEP = ML_TRAIN_LR_SCHEDULER_TYPE_STEP /**< step wise decay */
ML_TRAIN_LR_SCHEDULER_TYPE_EXPONENTIAL, /**< exponentially decay */
STEP = ML_TRAIN_LR_SCHEDULER_TYPE_STEP, /**< step wise decay */
COSINE = ML_TRAIN_LR_SCHEDULER_TYPE_COSINE /**< cosine annealing */
};

/**
Expand Down Expand Up @@ -251,6 +252,12 @@ Step(const std::vector<std::string> &properties = {}) {
properties);
}

/**
 * @brief Helper function to create a cosine-annealing learning rate scheduler.
 *
 * @param properties scheduler properties as "key=value" strings
 *        (e.g. "max_learning_rate=1.0", "min_learning_rate=0.1",
 *        "decay_steps=1" — names taken from the accompanying unit tests;
 *        confirm against the lr_scheduler_cosine implementation)
 * @return std::unique_ptr<LearningRateScheduler> owning the new scheduler
 */
inline std::unique_ptr<LearningRateScheduler>
Cosine(const std::vector<std::string> &properties = {}) {
  return createLearningRateScheduler(LearningRateSchedulerType::COSINE,
                                     properties);
}

} // namespace learning_rate
} // namespace optimizer

Expand Down
1 change: 1 addition & 0 deletions api/nntrainer-api-common.h
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ typedef enum {
ML_TRAIN_LR_SCHEDULER_TYPE_CONSTANT = 0, /**< Constant lr scheduler */
ML_TRAIN_LR_SCHEDULER_TYPE_EXPONENTIAL = 1, /**< Exponentially lr scheduler */
ML_TRAIN_LR_SCHEDULER_TYPE_STEP = 2, /**< Step lr scheduler */
ML_TRAIN_LR_SCHEDULER_TYPE_COSINE = 3, /**< Cosine lr scheduler */
ML_TRAIN_LR_SCHEDULER_TYPE_UNKNOWN = 999 /**< Unknown lr scheduler */
} ml_train_lr_scheduler_type_e;

Expand Down
60 changes: 60 additions & 0 deletions test/unittest/unittest_nntrainer_lr_scheduler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

#include <lr_scheduler.h>
#include <lr_scheduler_constant.h>
#include <lr_scheduler_cosine.h>
#include <lr_scheduler_exponential.h>
#include <nntrainer_error.h>

Expand Down Expand Up @@ -54,6 +55,14 @@ TEST(lr_constant, ctor_initializer_03_n) {
EXPECT_ANY_THROW(createLRS("random"));
}

/**
 * @brief test constructing cosine lr scheduler
 *
 * Positive case: "cosine" is a registered scheduler type string, so
 * creation must not throw (suffix is _p per the suite's naming convention;
 * the original _n suffix contradicted the EXPECT_NO_THROW body).
 */
TEST(lr_constant, ctor_initializer_cosine_p) {
  EXPECT_NO_THROW(createLRS("cosine"));
}

/**
* @brief test constructing lr scheduler
*
Expand Down Expand Up @@ -418,6 +427,57 @@ TEST(lr_step, get_learning_rate_02_p) {
EXPECT_FLOAT_EQ(lr->getLearningRate(1000), 0.001f);
}

/**
 * @brief Negative test: an unknown property key on the cosine scheduler
 *        must be rejected with std::invalid_argument.
 */
TEST(lr_cosine, prop_01_n) {
  auto lr = createLRS("cosine");
  EXPECT_THROW(lr->setProperty({"unknown=unknown"}), std::invalid_argument);
}

/**
 * @brief Negative test: a malformed property string ("key:value" instead of
 *        "key=value") must be rejected with std::invalid_argument.
 */
TEST(lr_cosine, prop_02_n) {
  auto lr = createLRS("cosine");
  EXPECT_THROW(lr->setProperty({"learning_rate:0.1"}), std::invalid_argument);
}

/**
 * @brief Positive test: max_learning_rate is a valid cosine scheduler
 *        property, so setting it must not throw (renamed _n -> _p: the
 *        body is a no-throw expectation, i.e. a positive case).
 */
TEST(lr_cosine, prop_03_p) {
  auto lr = createLRS("cosine");
  EXPECT_NO_THROW(lr->setProperty({"max_learning_rate=1.0"}));
}

/**
 * @brief Positive test: min_learning_rate is a valid cosine scheduler
 *        property, so setting it must not throw (renamed _n -> _p).
 */
TEST(lr_cosine, prop_04_p) {
  auto lr = createLRS("cosine");
  EXPECT_NO_THROW(lr->setProperty({"min_learning_rate=0.1"}));
}

/**
 * @brief Positive test: max_learning_rate and min_learning_rate may be set
 *        together without throwing (renamed _n -> _p).
 */
TEST(lr_cosine, prop_05_p) {
  auto lr = createLRS("cosine");
  EXPECT_NO_THROW(lr->setProperty({"max_learning_rate=1.0"}));
  EXPECT_NO_THROW(lr->setProperty({"min_learning_rate=0.1"}));
}

/**
 * @brief Positive test: decay_steps is a valid cosine scheduler property,
 *        so setting it must not throw (renamed _n -> _p).
 */
TEST(lr_cosine, prop_06_p) {
  auto lr = createLRS("cosine");
  EXPECT_NO_THROW(lr->setProperty({"decay_steps=1"}));
}

/**
 * @brief Negative test: finalizing a cosine scheduler without setting its
 *        required properties must throw std::invalid_argument.
 */
TEST(lr_cosine, finalize_01_n) {
  auto lr = createLRS("cosine");
  EXPECT_THROW(lr->finalize(), std::invalid_argument);
}

/**
 * @brief Positive test: once max_learning_rate, min_learning_rate and
 *        decay_steps are all set, finalize() must succeed
 *        (renamed _n -> _p: every expectation in the body is no-throw).
 */
TEST(lr_cosine, finalize_02_p) {
  auto lr = createLRS("cosine");
  EXPECT_NO_THROW(lr->setProperty({"max_learning_rate=1.0"}));
  EXPECT_NO_THROW(lr->setProperty({"min_learning_rate=0.1"}));
  EXPECT_NO_THROW(lr->setProperty({"decay_steps=1"}));
  EXPECT_NO_THROW(lr->finalize());
}
/**
 * @brief Positive test: at iteration 0 the cosine scheduler returns the
 *        configured max_learning_rate (renamed _n -> _p; literal written as
 *        1.0f for consistency with the suite's other float expectations).
 */
TEST(lr_cosine, getlearningrate_01_p) {
  auto lr = createLRS("cosine");
  EXPECT_NO_THROW(lr->setProperty({"max_learning_rate=1.0"}));
  EXPECT_NO_THROW(lr->setProperty({"min_learning_rate=0.1"}));
  EXPECT_NO_THROW(lr->setProperty({"decay_steps=1"}));
  EXPECT_NO_THROW(lr->finalize());
  EXPECT_FLOAT_EQ(lr->getLearningRate(0), 1.0f);
}
int main(int argc, char **argv) {
int result = -1;

Expand Down

0 comments on commit d29ad9f

Please sign in to comment.