From 68e7dab5ed2f99e8b380ec8a5f599d76952b7fc3 Mon Sep 17 00:00:00 2001 From: Jihoon Lee Date: Fri, 19 Feb 2021 15:58:24 +0900 Subject: [PATCH] [Clean/iniTest] add logic to erase ini after test This patch add logic to erase ini after test for better determinancy and cleaner build directory. v2: also deprecating config_str in favor of `ScopedIni` **Self evaluation:** 1. Build test: [X]Passed [ ]Failed [ ]Skipped 2. Run test: [X]Passed [ ]Failed [ ]Skipped Signed-off-by: Jihoon Lee --- test/ccapi/unittest_ccapi.cpp | 44 ++- test/include/nntrainer_test_util.h | 179 ++-------- test/tizen_capi/unittest_tizen_capi.cpp | 102 ++++-- test/unittest/unittest_databuffer_file.cpp | 3 +- .../unittest/unittest_nntrainer_modelfile.cpp | 324 ++++++++---------- 5 files changed, 263 insertions(+), 389 deletions(-) diff --git a/test/ccapi/unittest_ccapi.cpp b/test/ccapi/unittest_ccapi.cpp index 1360340540..f5f444f70e 100644 --- a/test/ccapi/unittest_ccapi.cpp +++ b/test/ccapi/unittest_ccapi.cpp @@ -156,18 +156,46 @@ TEST(ccapi_dataset, construct_02_p) { TEST(nntrainer_ccapi, train_with_config_01_p) { std::unique_ptr model; - std::string config_file = "./test_train_01_p.ini"; - RESET_CONFIG(config_file.c_str()); - - replaceString("Input_Shape = 1:1:62720", "Input_Shape=1:1:62720", config_file, - config_str); - replaceString("batch_size = 32", "batch_size = 16", config_file, config_str); - replaceString("BufferSize=100", "", config_file, config_str); + static IniSection model_base("Model", "Type = NeuralNetwork" + " | Learning_rate = 0.0001" + " | Decay_rate = 0.96" + " | Decay_steps = 1000" + " | Epochs = 1" + " | Optimizer = adam" + " | Loss = cross" + " | Weight_Regularizer = l2norm" + " | weight_regularizer_constant = 0.005" + " | Save_Path = 'model.bin'" + " | batch_size = 32" + " | beta1 = 0.9" + " | beta2 = 0.9999" + " | epsilon = 1e-7"); + + static IniSection dataset("Dataset", "BufferSize=100" + " | TrainData = trainingSet.dat" + " | ValidData = valSet.dat" + " | LabelData = label.dat"); + + static IniSection inputlayer("inputlayer", "Type = input" + "| Input_Shape = 1:1:62720" + "| bias_initializer = zeros" + "| Normalization = true" + "| Activation = sigmoid"); + + static IniSection outputlayer("outputlayer", "Type = fully_connected" + "| input_layers = inputlayer" + "| Unit = 10" + "| bias_initializer = zeros" + "| Activation = softmax"); + + ScopedIni s("test_train_01_p", + {model_base + "batch_size = 16", dataset + "-BufferSize", + inputlayer, outputlayer}); EXPECT_NO_THROW(model = ml::train::createModel(ml::train::ModelType::NEURAL_NET)); - EXPECT_EQ(model->loadFromConfig(config_file), ML_ERROR_NONE); + EXPECT_EQ(model->loadFromConfig(s.getIniName()), ML_ERROR_NONE); EXPECT_EQ(model->compile(), ML_ERROR_NONE); EXPECT_EQ(model->initialize(), ML_ERROR_NONE); EXPECT_NO_THROW(model->train()); diff --git a/test/include/nntrainer_test_util.h b/test/include/nntrainer_test_util.h index dc2ff32de0..c2f4d44529 100644 --- a/test/include/nntrainer_test_util.h +++ b/test/include/nntrainer_test_util.h @@ -213,7 +213,7 @@ class IniTestWrapper { * @brief erase ini * */ - void erase_ini() { remove(getIniName().c_str()); } + void erase_ini() noexcept { remove(getIniName().c_str()); } bool operator==(const IniTestWrapper &rhs) const { return name == rhs.name && sections == rhs.sections; @@ -290,150 +290,27 @@ class IniTestWrapper { Sections sections; }; -/// @todo: migrate this to datafile unittest -const std::string config_str = "[Model]" - "\n" - "Type = NeuralNetwork" - "\n" - "Learning_rate = 0.0001" - "\n" - 
"Decay_rate = 0.96" - "\n" - "Decay_steps = 1000" - "\n" - "Epochs = 1" - "\n" - "Optimizer = adam" - "\n" - "Loss = cross" - "\n" - "Weight_Regularizer = l2norm" - "\n" - "weight_regularizer_constant = 0.005" - "\n" - "Save_Path = 'model.bin'" - "\n" - "batch_size = 32" - "\n" - "beta1 = 0.9" - "\n" - "beta2 = 0.9999" - "\n" - "epsilon = 1e-7" - "\n" - "[DataSet]" - "\n" - "BufferSize=100" - "\n" - "TrainData = trainingSet.dat" - "\n" - "ValidData = valSet.dat" - "\n" - "LabelData = label.dat" - "\n" - "[inputlayer]" - "\n" - "Type = input" - "\n" - "Input_Shape = 1:1:62720" - "\n" - "bias_initializer = zeros" - "\n" - "Normalization = true" - "\n" - "Activation = sigmoid" - "\n" - "[outputlayer]" - "\n" - "Type = fully_connected" - "\n" - "input_layers = inputlayer" - "\n" - "Unit = 10" - "\n" - "bias_initializer = zeros" - "\n" - "Activation = softmax" - "\n"; - -const std::string config_str2 = "[Model]" - "\n" - "Type = NeuralNetwork" - "\n" - "Learning_rate = 0.0001" - "\n" - "Decay_rate = 0.96" - "\n" - "Decay_steps = 1000" - "\n" - "Epochs = 1" - "\n" - "Optimizer = adam" - "\n" - "Loss = cross" - "\n" - "Weight_Regularizer = l2norm" - "\n" - "weight_regularizer_constant = 0.005" - "\n" - "Model = 'model.bin'" - "\n" - "batch_size = 32" - "\n" - "beta1 = 0.9" - "\n" - "beta2 = 0.9999" - "\n" - "epsilon = 1e-7" - "\n" - "[DataSet]" - "\n" - "BufferSize=100" - "\n" - "TrainData = trainingSet.dat" - "\n" - "ValidData = valSet.dat" - "\n" - "LabelData = label.dat" - "\n" - "[conv2dlayer]" - "\n" - "Type = conv2d" - "\n" - "Input_Shape = 3:28:28" - "\n" - "bias_initializer = zeros" - "\n" - "Activation = sigmoid" - "\n" - "weight_regularizer=l2norm" - "\n" - "weight_regularizer_constant=0.005" - "\n" - "filters=6" - "\n" - "kernel_size=5,5" - "\n" - "stride=1,1" - "\n" - "padding=0,0" - "\n" - "weight_initializer=xavier_uniform" - "\n" - "flatten = false" - "\n" - "[outputlayer]" - "\n" - "Type = fully_connected" - "\n" - "input_layers = conv2dlayer" - "\n" - "Unit = 10" - "\n" - "bias_initializer = zeros" - "\n" - "Activation = softmax" - "\n"; +/** + * @brief This class wraps IniTestWrapper, this class must live longer than the + * IniTestWrapper contained inside + * + */ +class ScopedIni { +public: + ScopedIni(const IniTestWrapper &ini_) : ini(ini_) { ini.save_ini(); } + ScopedIni(const std::string &name_, + const IniTestWrapper::Sections §ions_) : + ini(name_, sections_) { + ini.save_ini(); + } + + std::string getIniName() { return ini.getIniName(); } + + ~ScopedIni() { ini.erase_ini(); } + +private: + IniTestWrapper ini; +}; #define GEN_TEST_INPUT(input, eqation_i_j_k_l) \ do { \ @@ -449,18 +326,6 @@ const std::string config_str2 = "[Model]" } \ } while (0) -#define RESET_CONFIG(conf_name) \ - do { \ - std::ifstream file_stream(conf_name, std::ifstream::in); \ - if (file_stream.good()) { \ - file_stream.close(); \ - if (std::remove(conf_name) != 0) \ - ml_loge("Error: Cannot delete file: %s", conf_name); \ - else \ - ml_logi("Info: deleteing file: %s", conf_name); \ - } \ - } while (0) - /** * @brief return a tensor filled with contant value with dimension */ diff --git a/test/tizen_capi/unittest_tizen_capi.cpp b/test/tizen_capi/unittest_tizen_capi.cpp index d743d81fc8..3512808114 100644 --- a/test/tizen_capi/unittest_tizen_capi.cpp +++ b/test/tizen_capi/unittest_tizen_capi.cpp @@ -28,6 +28,39 @@ static const std::string getTestResPath(const std::string &file) { return getResPath(file, {"test"}); } + +static IniSection model_base("Model", "Type = NeuralNetwork" + " | Learning_rate = 
0.0001" + " | Decay_rate = 0.96" + " | Decay_steps = 1000" + " | Epochs = 1" + " | Optimizer = adam" + " | Loss = cross" + " | Weight_Regularizer = l2norm" + " | weight_regularizer_constant = 0.005" + " | Save_Path = 'model.bin'" + " | batch_size = 32" + " | beta1 = 0.9" + " | beta2 = 0.9999" + " | epsilon = 1e-7"); + +static IniSection dataset("Dataset", "BufferSize=100" + " | TrainData = trainingSet.dat" + " | ValidData = valSet.dat" + " | LabelData = label.dat"); + +static IniSection inputlayer("inputlayer", "Type = input" + "| Input_Shape = 1:1:62720" + "| bias_initializer = zeros" + "| Normalization = true" + "| Activation = sigmoid"); + +static IniSection outputlayer("outputlayer", "Type = fully_connected" + "| input_layers = inputlayer" + "| Unit = 10" + "| bias_initializer = zeros" + "| Activation = softmax"); + /** * @brief Compare the training statistics */ @@ -101,11 +134,11 @@ TEST(nntrainer_capi_nnmodel, construct_destruct_03_n) { TEST(nntrainer_capi_nnmodel, compile_01_p) { ml_train_model_h handle = NULL; int status = ML_ERROR_NONE; - std::string config_file = "./test_compile_01_p.ini"; - RESET_CONFIG(config_file.c_str()); - replaceString("Layers = inputlayer outputlayer", - "Layers = inputlayer outputlayer", config_file, config_str); - status = ml_train_model_construct_with_conf(config_file.c_str(), &handle); + + ScopedIni s("test_compile_01_p", + {model_base, dataset, inputlayer, outputlayer}); + + status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle); EXPECT_EQ(status, ML_ERROR_NONE); status = ml_train_model_compile(handle, NULL); EXPECT_EQ(status, ML_ERROR_NONE); @@ -130,11 +163,12 @@ TEST(nntrainer_capi_nnmodel, construct_conf_01_n) { TEST(nntrainer_capi_nnmodel, construct_conf_02_n) { ml_train_model_h handle = NULL; int status = ML_ERROR_NONE; - std::string config_file = "./test_compile_03_n.ini"; - RESET_CONFIG(config_file.c_str()); - replaceString("Input_Shape = 1:1:62720", "Input_Shape=1:1:0", config_file, - config_str); - status = ml_train_model_construct_with_conf(config_file.c_str(), &handle); + + ScopedIni s( + "test_compile_03_n", + {model_base, dataset, inputlayer + "Input_Shape=1:1:0", outputlayer}); + + status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle); EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER); } @@ -299,14 +333,12 @@ TEST(nntrainer_capi_nnmodel, compile_06_n) { TEST(nntrainer_capi_nnmodel, train_01_p) { ml_train_model_h handle = NULL; int status = ML_ERROR_NONE; - std::string config_file = "./test_train_01_p.ini"; - RESET_CONFIG(config_file.c_str()); - replaceString("Input_Shape = 1:1:62720", "Input_Shape=1:1:62720", config_file, - config_str); - replaceString("batch_size = 32", "batch_size = 16", config_file, config_str); - replaceString("BufferSize=100", "", config_file, config_str); - status = ml_train_model_construct_with_conf(config_file.c_str(), &handle); + ScopedIni s("test_train_01_p", + {model_base + "batch_size = 16", dataset + "-BufferSize", + inputlayer, outputlayer}); + + status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle); EXPECT_EQ(status, ML_ERROR_NONE); status = ml_train_model_compile(handle, NULL); @@ -337,11 +369,11 @@ TEST(nntrainer_capi_nnmodel, train_02_n) { TEST(nntrainer_capi_nnmodel, train_03_n) { ml_train_model_h handle = NULL; int status = ML_ERROR_NONE; - std::string config_file = "./test_train_01_p.ini"; - RESET_CONFIG(config_file.c_str()); - replaceString("batch_size = 32", "batch_size = 16", config_file, config_str); - 
replaceString("BufferSize=100", "", config_file, config_str); - status = ml_train_model_construct_with_conf(config_file.c_str(), &handle); + ScopedIni s("test_train_01_p", + {model_base + "batch_size = 16", dataset + "-BufferSize", + inputlayer, outputlayer}); + + status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle); EXPECT_EQ(status, ML_ERROR_NONE); status = ml_train_model_compile(handle, NULL); EXPECT_EQ(status, ML_ERROR_NONE); @@ -487,12 +519,10 @@ TEST(nntrainer_capi_nnmodel, addLayer_05_n) { ml_train_model_h model = NULL; ml_train_layer_h layer = NULL; - std::string config_file = "./test_compile_01_p.ini"; - RESET_CONFIG(config_file.c_str()); - replaceString("Layers = inputlayer outputlayer", - "Layers = inputlayer outputlayer", config_file, config_str); + ScopedIni s("test_compile_01_p", + {model_base, dataset, inputlayer, outputlayer}); - status = ml_train_model_construct_with_conf(config_file.c_str(), &model); + status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &model); EXPECT_EQ(status, ML_ERROR_NONE); status = ml_train_model_compile(model, NULL); @@ -898,11 +928,10 @@ TEST(nntrainer_capi_nnmodel, train_with_generator_02_p) { TEST(nntrainer_capi_summary, summary_01_p) { ml_train_model_h handle = NULL; int status = ML_ERROR_NONE; - std::string config_file = "./test_compile_01_p.ini"; - RESET_CONFIG(config_file.c_str()); - replaceString("Layers = inputlayer outputlayer", - "Layers = inputlayer outputlayer", config_file, config_str); - status = ml_train_model_construct_with_conf(config_file.c_str(), &handle); + + ScopedIni s("test_compile_01_p", + {model_base, dataset, inputlayer, outputlayer}); + status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle); EXPECT_EQ(status, ML_ERROR_NONE); status = ml_train_model_compile(handle, NULL); EXPECT_EQ(status, ML_ERROR_NONE); @@ -925,11 +954,10 @@ TEST(nntrainer_capi_summary, summary_01_p) { TEST(nntrainer_capi_summary, summary_02_n) { ml_train_model_h handle = NULL; int status = ML_ERROR_NONE; - std::string config_file = "./test_compile_01_p.ini"; - RESET_CONFIG(config_file.c_str()); - replaceString("Layers = inputlayer outputlayer", - "Layers = inputlayer outputlayer", config_file, config_str); - status = ml_train_model_construct_with_conf(config_file.c_str(), &handle); + + ScopedIni s("test_compile_01_p", + {model_base, dataset, inputlayer, outputlayer}); + status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle); EXPECT_EQ(status, ML_ERROR_NONE); status = ml_train_model_compile(handle, NULL); EXPECT_EQ(status, ML_ERROR_NONE); diff --git a/test/unittest/unittest_databuffer_file.cpp b/test/unittest/unittest_databuffer_file.cpp index 7d421e104b..f0d90d9d26 100644 --- a/test/unittest/unittest_databuffer_file.cpp +++ b/test/unittest/unittest_databuffer_file.cpp @@ -41,7 +41,8 @@ TEST(nntrainer_DataBuffer, setFeatureSize_01_p) { dim.setTensorDim("32:1:1:62720"); status = data_buffer.setClassNum(10); EXPECT_EQ(status, ML_ERROR_NONE); - status = data_buffer.setDataFile(nntrainer::DATA_TRAIN, "./trainingSet.dat"); + status = data_buffer.setDataFile(nntrainer::DATA_TRAIN, + getTestResPath("trainingSet.dat")); EXPECT_EQ(status, ML_ERROR_NONE); status = data_buffer.setFeatureSize(dim); EXPECT_EQ(status, ML_ERROR_NONE); diff --git a/test/unittest/unittest_nntrainer_modelfile.cpp b/test/unittest/unittest_nntrainer_modelfile.cpp index 0344c3fb6d..1a1d62b7c4 100644 --- a/test/unittest/unittest_nntrainer_modelfile.cpp +++ b/test/unittest/unittest_nntrainer_modelfile.cpp @@ 
-33,16 +33,15 @@ class nntrainerIniTest std::tuple> { public: - static void save_ini(const char *filename, std::vector sections, - std::ios_base::openmode mode = std::ios_base::out) { - IniTestWrapper::save_ini(filename, sections, mode); - } - static void SetUpTestCase() { nntrainer::AppContext::Global().setWorkingDirectory( getResPath("", {"test"})); } + static void TearDownTestCase() { + nntrainer::AppContext::Global().setWorkingDirectory("."); + } + protected: virtual void SetUp() { name = std::string(std::get<0>(GetParam())); @@ -350,82 +349,64 @@ INSTANTIATE_TEST_CASE_P( * @brief Ini file unittest with backbone with wrong file */ TEST(nntrainerIniTest, backbone_n_01) { - const char *ini_name = "backbone_n1.ini"; - nntrainerIniTest::save_ini(ini_name, {nw_base, backbone_random}); + ScopedIni s{"backbone_n1", {nw_base, backbone_random}}; nntrainer::NeuralNetwork NN; - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_INVALID_PARAMETER); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_INVALID_PARAMETER); } /** * @brief Ini file unittest with backbone with empty backbone */ TEST(nntrainerIniTest, backbone_n_02) { - const char *ini_name = "backbone_n2.ini"; - nntrainerIniTest::save_ini("base.ini", {nw_base}); - nntrainerIniTest::save_ini(ini_name, {nw_base, backbone_valid}); + ScopedIni b{"base", {nw_base}}; + ScopedIni s{"backbone_n2", {nw_base, backbone_valid}}; nntrainer::NeuralNetwork NN; - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_INVALID_PARAMETER); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_INVALID_PARAMETER); } /** * @brief Ini file unittest with backbone with normal backbone */ TEST(nntrainerIniTest, backbone_p_03) { - const char *ini_name = "backbone_p3.ini"; - nntrainerIniTest::save_ini("base.ini", {nw_base, batch_normal}); - nntrainerIniTest::save_ini(ini_name, {nw_base, backbone_valid}); + ScopedIni b{"base", {nw_base, batch_normal}}; + ScopedIni s{"backbone_p3", {nw_base, backbone_valid}}; nntrainer::NeuralNetwork NN; - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); } /** * @brief Ini file unittest with backbone without model parameters */ TEST(nntrainerIniTest, backbone_p_04) { - const char *ini_name = "backbone_p4.ini"; - nntrainerIniTest::save_ini("base.ini", {flatten, conv2d}); - nntrainerIniTest::save_ini(ini_name, {nw_base, backbone_valid}); + ScopedIni b{"base", {flatten, conv2d}}; + ScopedIni s{"backbone_p4", {nw_base, backbone_valid}}; nntrainer::NeuralNetwork NN; - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); } /** * @brief Ini file unittest matching model with and without backbone */ TEST(nntrainerIniTest, backbone_p_05) { - const char *bb_use_ini_name = "backbone_made.ini"; - const char *direct_ini_name = "direct_made.ini"; /** Create a backbone.ini */ - nntrainerIniTest::save_ini("base.ini", {nw_adam, conv2d}); + ScopedIni b("base", {nw_adam, conv2d}); /** Create a model of 4 conv layers using backbone */ - std::string backbone_valid_orig_name = backbone_valid.getName(); - - nntrainerIniTest::save_ini( - bb_use_ini_name, - {nw_sgd, input2d, backbone_valid + "input_layers=inputlayer"}); - backbone_valid.rename("block2"); - nntrainerIniTest::save_ini(bb_use_ini_name, - {backbone_valid + "input_layers=block1"}, - std::ios_base::app); - backbone_valid.rename("block3"); - nntrainerIniTest::save_ini(bb_use_ini_name, - {backbone_valid + "input_layers=block2"}, - std::ios_base::app); - 
backbone_valid.rename("block4"); - nntrainerIniTest::save_ini(bb_use_ini_name, - {backbone_valid + "input_layers=block3"}, - std::ios_base::app); - - backbone_valid.rename(backbone_valid_orig_name); + ScopedIni backbone_made( + "backbone_made", + {nw_sgd, input2d, I("block1") + backbone_valid + "input_layers=inputlayer", + I("block2") + backbone_valid + "input_layers=block1", + I("block3") + backbone_valid + "input_layers=block2", + I("block4") + backbone_valid + "input_layers=block3"}); nntrainer::NeuralNetwork NN_backbone; - EXPECT_EQ(NN_backbone.loadFromConfig(bb_use_ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN_backbone.loadFromConfig(backbone_made.getIniName()), + ML_ERROR_NONE); EXPECT_EQ(NN_backbone.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_backbone.initialize(), ML_ERROR_NONE); @@ -436,32 +417,18 @@ TEST(nntrainerIniTest, backbone_p_05) { EXPECT_EQ(NN_backbone.getLearningRate(), 1); /** Create the same model directly without using backbone */ - std::string conv2d_orig_name = conv2d.getName(); - - nntrainerIniTest::save_ini(direct_ini_name, {nw_sgd, input2d}); - conv2d.rename("block1conv2d"); - nntrainerIniTest::save_ini( - direct_ini_name, {conv2d + "input_layers=inputlayer"}, std::ios_base::app); - conv2d.rename("block2conv2d"); - nntrainerIniTest::save_ini(direct_ini_name, - {conv2d + "input_layers=block1conv2d"}, - std::ios_base::app); - conv2d.rename("block3conv2d"); - nntrainerIniTest::save_ini(direct_ini_name, - {conv2d + "input_layers=block2conv2d"}, - std::ios_base::app); - conv2d.rename("block4conv2d"); - nntrainerIniTest::save_ini(direct_ini_name, - {conv2d + "input_layers=block3conv2d"}, - std::ios_base::app); - - conv2d.rename(conv2d_orig_name); + // std::string conv2d_orig_name = conv2d.getName(); + ScopedIni direct_made( + "direct_made", + {nw_sgd, input2d, I("block1conv2d") + conv2d + "input_layers=inputlayer", + I("block2conv2d") + conv2d + "input_layers=block1conv2d", + I("block3conv2d") + conv2d + "input_layers=block2conv2d", + I("block4conv2d") + conv2d + "input_layers=block3conv2d"}); nntrainer::NeuralNetwork NN_direct; - EXPECT_EQ(NN_direct.loadFromConfig(direct_ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN_direct.loadFromConfig(direct_made.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN_direct.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_direct.initialize(), ML_ERROR_NONE); - /** Summary of both the models must match precisely */ NN_backbone.printPreset(std::cout, ML_TRAIN_SUMMARY_MODEL); NN_direct.printPreset(std::cout, ML_TRAIN_SUMMARY_MODEL); @@ -489,12 +456,11 @@ TEST(nntrainerIniTest, backbone_p_05) { * @brief Ini file unittest matching model with and without trainable */ TEST(nntrainerIniTest, backbone_p_06) { - const char *ini_name = "backbone_p6.ini"; - nntrainerIniTest::save_ini("base.ini", {flatten, conv2d}); - nntrainerIniTest::save_ini(ini_name, {nw_base, backbone_valid}); + ScopedIni b("base", {flatten, conv2d}); + ScopedIni s("backbone_p6", {nw_base, backbone_valid}); nntrainer::NeuralNetwork NN; - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); /** default trainable is false */ auto graph = NN.getFlatGraph(); @@ -506,13 +472,11 @@ TEST(nntrainerIniTest, backbone_p_06) { * @brief Ini file unittest matching model with and without trainable */ TEST(nntrainerIniTest, backbone_p_07) { - const char *ini_name = "backbone_p7.ini"; - nntrainerIniTest::save_ini("base.ini", {conv2d}); - nntrainerIniTest::save_ini(ini_name, - {nw_base, backbone_notrain, backbone_train}); + ScopedIni b("base", {conv2d}); + 
ScopedIni s("backbone_p7", {nw_base, backbone_notrain, backbone_train}); nntrainer::NeuralNetwork NN; - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); /** trainable is set to false */ auto graph = NN.getFlatGraph(); @@ -524,16 +488,16 @@ TEST(nntrainerIniTest, backbone_p_07) { * @brief Ini file unittest with backbone with normal backbone */ TEST(nntrainerIniTest, backbone_n_08) { - const char *ini_name = "backbone_n8.ini"; - nntrainerIniTest::save_ini(ini_name, {nw_base, backbone_random_external}); + ScopedIni s("backbone_n8", {nw_base, backbone_random_external}); + nntrainer::NeuralNetwork NN; #if defined(ENABLE_NNSTREAMER_BACKBONE) || defined(ENABLE_TFLITE_BACKBONE) - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); EXPECT_THROW(NN.compile(), std::invalid_argument); EXPECT_EQ(NN.initialize(), ML_ERROR_NOT_SUPPORTED); #else - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NOT_SUPPORTED); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NOT_SUPPORTED); #endif } @@ -541,17 +505,16 @@ TEST(nntrainerIniTest, backbone_n_08) { * @brief Ini file unittest with backbone with normal backbone */ TEST(nntrainerIniTest, backbone_p_09) { - const char *ini_name = "backbone_p9.ini"; - nntrainerIniTest::save_ini( - ini_name, {nw_base_mse + "-batch_size", backbone_valid_external}); + ScopedIni s("backbone_p9", + {nw_base_mse + "-batch_size", backbone_valid_external}); nntrainer::NeuralNetwork NN; #if defined(ENABLE_NNSTREAMER_BACKBONE) || defined(ENABLE_TFLITE_BACKBONE) - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN.compile(), ML_ERROR_NONE); EXPECT_EQ(NN.initialize(), ML_ERROR_NONE); #else - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NOT_SUPPORTED); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NOT_SUPPORTED); #endif } @@ -560,17 +523,15 @@ TEST(nntrainerIniTest, backbone_p_09) { */ // Enable after sepearet memory assign and initialization of graph TEST(nntrainerIniTest, backbone_p_10) { - const char *ini_name = "backbone_p10.ini"; - nntrainerIniTest::save_ini(ini_name, - {nw_base_mse, backbone_valid_external_no_shape}); + ScopedIni s("backbone_p10", {nw_base_mse, backbone_valid_external_no_shape}); nntrainer::NeuralNetwork NN; #if defined(ENABLE_NNSTREAMER_BACKBONE) || defined(ENABLE_TFLITE_BACKBONE) - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); EXPECT_THROW(NN.compile(), std::invalid_argument); EXPECT_EQ(NN.initialize(), ML_ERROR_NOT_SUPPORTED); #else - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NOT_SUPPORTED); + EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NOT_SUPPORTED); #endif } @@ -580,23 +541,26 @@ TEST(nntrainerIniTest, backbone_p_10) { * layers */ TEST(nntrainerIniTest, backbone_p_11) { - const char *ini_name_scaled_half = "backbone_p11_scaled_half.ini"; - const char *ini_name_full = "backbone_p11_full.ini"; - nntrainerIniTest::save_ini( - "base.ini", {conv2d, batch_normal + "input_layers=conv2d", conv2d}); - nntrainerIniTest::save_ini( - ini_name_scaled_half, + + ScopedIni base("base", + {conv2d, batch_normal + "input_layers=conv2d", conv2d}); + + ScopedIni ini_scaled_half( + "backbone_p11_scaled_half", {nw_base_mse, input2d, backbone_scaled + "input_layers=inputlayer"}); - nntrainerIniTest::save_ini( - ini_name_full, + + ScopedIni ini_full( + "backbone_p11_full", 
{nw_base_mse, input2d, backbone_valid + "input_layers=inputlayer"}); + nntrainer::NeuralNetwork NN_scaled_half, NN_full; - EXPECT_EQ(NN_full.loadFromConfig(ini_name_full), ML_ERROR_NONE); + EXPECT_EQ(NN_full.loadFromConfig(ini_full.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN_full.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_full.initialize(), ML_ERROR_NONE); - EXPECT_EQ(NN_scaled_half.loadFromConfig(ini_name_scaled_half), ML_ERROR_NONE); + EXPECT_EQ(NN_scaled_half.loadFromConfig(ini_scaled_half.getIniName()), + ML_ERROR_NONE); EXPECT_EQ(NN_scaled_half.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_scaled_half.initialize(), ML_ERROR_NONE); @@ -613,23 +577,22 @@ TEST(nntrainerIniTest, backbone_p_11) { * layers */ TEST(nntrainerIniTest, backbone_p_12) { - const char *ini_name_scaled_half = "backbone_p12_scaled_half.ini"; - const char *ini_name_full = "backbone_p12_full.ini"; - nntrainerIniTest::save_ini("base.ini", - {out, batch_normal + "input_layers=fclayer"}); - nntrainerIniTest::save_ini( - ini_name_scaled_half, - {nw_base_mse, input, backbone_scaled + "input_layers=inputlayer"}); - nntrainerIniTest::save_ini( - ini_name_full, + ScopedIni b("base", {out, batch_normal + "input_layers=fclayer"}); + ScopedIni scaled_half( + "backbone_p12_scaled_half", + {{nw_base_mse, input, backbone_scaled + "input_layers=inputlayer"}}); + ScopedIni scaled_full( + "backbone_p12_scaled_full", {nw_base_mse, input, backbone_valid + "input_layers=inputlayer"}); + nntrainer::NeuralNetwork NN_scaled_half, NN_full; - EXPECT_EQ(NN_full.loadFromConfig(ini_name_full), ML_ERROR_NONE); + EXPECT_EQ(NN_full.loadFromConfig(scaled_full.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN_full.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_full.initialize(), ML_ERROR_NONE); - EXPECT_EQ(NN_scaled_half.loadFromConfig(ini_name_scaled_half), ML_ERROR_NONE); + EXPECT_EQ(NN_scaled_half.loadFromConfig(scaled_half.getIniName()), + ML_ERROR_NONE); EXPECT_EQ(NN_scaled_half.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_scaled_half.initialize(), ML_ERROR_NONE); @@ -645,24 +608,26 @@ TEST(nntrainerIniTest, backbone_p_12) { * @note Input shape from layers of backbone are striped off */ TEST(nntrainerIniTest, backbone_p_13) { - const char *ini_name_scaled_half = "backbone_p13_scaled_half.ini"; - const char *ini_name_full = "backbone_p13_full.ini"; - nntrainerIniTest::save_ini( - "base.ini", {conv2d_shape, batch_normal + "input_layers=conv2d_shape", - conv2d + "input_layers=bn"}); - nntrainerIniTest::save_ini( - ini_name_scaled_half, + ScopedIni base("base", + {conv2d_shape, batch_normal + "input_layers=conv2d_shape", + conv2d + "input_layers=bn"}); + + ScopedIni scaled_half( + "backbone_p13_scaled_half", {nw_base_mse, input2d, backbone_scaled + "input_layers=inputlayer"}); - nntrainerIniTest::save_ini( - ini_name_full, + + ScopedIni scaled_full( + "backbone_p13_full", {nw_base_mse, input2d, backbone_valid + "input_layers=inputlayer"}); + nntrainer::NeuralNetwork NN_scaled_half, NN_full; - EXPECT_EQ(NN_full.loadFromConfig(ini_name_full), ML_ERROR_NONE); + EXPECT_EQ(NN_full.loadFromConfig(scaled_full.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN_full.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_full.initialize(), ML_ERROR_NONE); - EXPECT_EQ(NN_scaled_half.loadFromConfig(ini_name_scaled_half), ML_ERROR_NONE); + EXPECT_EQ(NN_scaled_half.loadFromConfig(scaled_half.getIniName()), + ML_ERROR_NONE); EXPECT_EQ(NN_scaled_half.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_scaled_half.initialize(), ML_ERROR_NONE); @@ -678,23 +643,24 @@ TEST(nntrainerIniTest, backbone_p_13) { * @note Scaled size 
is at least 1 */ TEST(nntrainerIniTest, backbone_p_14) { - const char *ini_name_scaled_zero = "backbone_p14_scaled_zero.ini"; - const char *ini_name_full = "backbone_p14_full.ini"; - nntrainerIniTest::save_ini( - "base.ini", {conv2d_shape, conv2d + "input_layers=conv2d_shape"}); - nntrainerIniTest::save_ini( - ini_name_scaled_zero, + ScopedIni base("base", {conv2d_shape, conv2d + "input_layers=conv2d_shape"}); + + ScopedIni scaled_zero( + "backbone_p14_scaled_zero", {nw_base_mse, input2d, backbone_scaled_zero + "input_layers=inputlayer"}); - nntrainerIniTest::save_ini( - ini_name_full, + + ScopedIni scaled_full( + "backbone_p14_full", {nw_base_mse, input2d, backbone_valid + "input_layers=inputlayer"}); + nntrainer::NeuralNetwork NN_scaled_zero, NN_full; - EXPECT_EQ(NN_full.loadFromConfig(ini_name_full), ML_ERROR_NONE); + EXPECT_EQ(NN_full.loadFromConfig(scaled_full.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN_full.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_full.initialize(), ML_ERROR_NONE); - EXPECT_EQ(NN_scaled_zero.loadFromConfig(ini_name_scaled_zero), ML_ERROR_NONE); + EXPECT_EQ(NN_scaled_zero.loadFromConfig(scaled_zero.getIniName()), + ML_ERROR_NONE); EXPECT_EQ(NN_scaled_zero.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_scaled_zero.initialize(), ML_ERROR_NONE); @@ -708,21 +674,18 @@ TEST(nntrainerIniTest, backbone_p_14) { * @note Input shape is provided in model file */ TEST(nntrainerIniTest, backbone_n_15) { - const char *ini_name_scaled = "backbone_n15_scaled.ini"; - const char *ini_name_full = "backbone_n15_full.ini"; - nntrainer::NeuralNetwork NN_scaled, NN_full; - - nntrainerIniTest::save_ini("base.ini", {conv2d, conv2d}); + ScopedIni base("base", {conv2d, conv2d}); - nntrainerIniTest::save_ini(ini_name_full, {nw_base_mse, backbone_valid}); + ScopedIni full("backbone_n15_scaled", {nw_base_mse, backbone_valid}); - EXPECT_EQ(NN_full.loadFromConfig(ini_name_full), ML_ERROR_NONE); + nntrainer::NeuralNetwork NN_scaled, NN_full; + EXPECT_EQ(NN_full.loadFromConfig(full.getIniName()), ML_ERROR_NONE); EXPECT_THROW(NN_full.compile(), std::invalid_argument); EXPECT_EQ(NN_full.initialize(), ML_ERROR_NOT_SUPPORTED); - nntrainerIniTest::save_ini(ini_name_scaled, {nw_base_mse, backbone_scaled}); + ScopedIni scaled("backbone_n15_scaled", {nw_base_mse, backbone_scaled}); - EXPECT_EQ(NN_scaled.loadFromConfig(ini_name_scaled), ML_ERROR_NONE); + EXPECT_EQ(NN_scaled.loadFromConfig(scaled.getIniName()), ML_ERROR_NONE); EXPECT_THROW(NN_scaled.compile(), std::invalid_argument); EXPECT_EQ(NN_scaled.initialize(), ML_ERROR_NOT_SUPPORTED); } @@ -732,22 +695,19 @@ TEST(nntrainerIniTest, backbone_n_15) { * @note Input shape is striped from backbone and not provided in model file */ TEST(nntrainerIniTest, backbone_n_16) { - const char *ini_name_scaled = "backbone_n16_scaled.ini"; - const char *ini_name_full = "backbone_n16_full.ini"; nntrainer::NeuralNetwork NN_scaled, NN_full; - nntrainerIniTest::save_ini( - "base.ini", {conv2d_shape, conv2d + "input_layers=conv2d_shape"}); + ScopedIni base("base", {conv2d_shape, conv2d + "input_layers=conv2d_shape"}); - nntrainerIniTest::save_ini(ini_name_full, {nw_base_mse, backbone_valid}); + ScopedIni full("backbone_n16_full", {nw_base_mse, backbone_valid}); - EXPECT_EQ(NN_full.loadFromConfig(ini_name_full), ML_ERROR_NONE); + EXPECT_EQ(NN_full.loadFromConfig(full.getIniName()), ML_ERROR_NONE); EXPECT_THROW(NN_full.compile(), std::invalid_argument); EXPECT_EQ(NN_full.initialize(), ML_ERROR_NOT_SUPPORTED); - nntrainerIniTest::save_ini(ini_name_scaled, {nw_base_mse, backbone_scaled}); 
+ ScopedIni scaled("backbone_n16_full", {nw_base_mse, backbone_scaled}); - EXPECT_EQ(NN_scaled.loadFromConfig(ini_name_scaled), ML_ERROR_NONE); + EXPECT_EQ(NN_scaled.loadFromConfig(scaled.getIniName()), ML_ERROR_NONE); EXPECT_THROW(NN_scaled.compile(), std::invalid_argument); EXPECT_EQ(NN_scaled.initialize(), ML_ERROR_NOT_SUPPORTED); } @@ -756,26 +716,23 @@ TEST(nntrainerIniTest, backbone_n_16) { * @note Input shape is striped from backbone and not provided in model file */ TEST(nntrainerIniTest, backbone_p_17) { - const char *ini_name_scaled = "backbone_p17_scaled.ini"; - const char *ini_name_full = "backbone_p17_full.ini"; nntrainer::NeuralNetwork NN_scaled, NN_full; - nntrainerIniTest::save_ini( - "base.ini", {conv2d_shape, conv2d + "input_layers=conv2d_shape"}); + ScopedIni base("base", {conv2d_shape, conv2d + "input_layers=conv2d_shape"}); - nntrainerIniTest::save_ini( - ini_name_full, + ScopedIni full( + "backbone_p17_full", {nw_base_mse, input2d, backbone_valid + "input_layers=inputlayer"}); - EXPECT_EQ(NN_full.loadFromConfig(ini_name_full), ML_ERROR_NONE); + EXPECT_EQ(NN_full.loadFromConfig(full.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN_full.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_full.initialize(), ML_ERROR_NONE); - nntrainerIniTest::save_ini( - ini_name_scaled, + ScopedIni scaled( + "backbone_p17_scaled", {nw_base_mse, input2d, backbone_scaled + "input_layers=inputlayer"}); - EXPECT_EQ(NN_scaled.loadFromConfig(ini_name_scaled), ML_ERROR_NONE); + EXPECT_EQ(NN_scaled.loadFromConfig(scaled.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN_scaled.compile(), ML_ERROR_NONE); EXPECT_EQ(NN_scaled.initialize(), ML_ERROR_NONE); } @@ -785,17 +742,15 @@ TEST(nntrainerIniTest, backbone_p_17) { * @note Output layer name not found, epmty backbone */ TEST(nntrainerIniTest, backbone_n_18) { - const char *ini_name = "backbone_n18.ini"; nntrainer::NeuralNetwork NN; - nntrainerIniTest::save_ini("base.ini", - {input2d, conv2d + "input_layers=inputlayer", - flatten + "input_layers=conv2d"}); - nntrainerIniTest::save_ini( - ini_name, + ScopedIni base("base", {input2d, conv2d + "input_layers=inputlayer", + flatten + "input_layers=conv2d"}); + ScopedIni backbone( + "Backbone_n18", {nw_base_mse, input, backbone_valid_inout + "input_layers=inputlayer"}); - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(backbone.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN.compile(), ML_ERROR_NONE); EXPECT_EQ(NN.initialize(), ML_ERROR_NONE); @@ -807,17 +762,16 @@ TEST(nntrainerIniTest, backbone_n_18) { * @note Input layer name not found, epmty backbone */ TEST(nntrainerIniTest, backbone_n_19) { - const char *ini_name = "backbone_n19.ini"; nntrainer::NeuralNetwork NN; - nntrainerIniTest::save_ini("base.ini", - {input2d, conv2d + "input_layers=inputlayer", - batch_normal + "input_layers=conv2d"}); - nntrainerIniTest::save_ini( - ini_name, + ScopedIni base("base", {input2d, conv2d + "input_layers=inputlayer", + batch_normal + "input_layers=conv2d"}); + + ScopedIni backbone( + "backbone_n19", {nw_base_mse, input, backbone_valid_inout + "input_layers=inputlayer"}); - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(backbone.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN.compile(), ML_ERROR_NONE); EXPECT_EQ(NN.initialize(), ML_ERROR_NONE); @@ -829,17 +783,17 @@ TEST(nntrainerIniTest, backbone_n_19) { * @note input and output layer specified are found */ TEST(nntrainerIniTest, backbone_p_20) { - const char *ini_name = "backbone_p20.ini"; nntrainer::NeuralNetwork NN; 
- nntrainerIniTest::save_ini( - "base.ini", {input2d, conv2d + "input_layers=inputlayer", - flatten + "input_layers=conv2d", out + "input_layers=flat"}); - nntrainerIniTest::save_ini( - ini_name, + ScopedIni base("base", + {input2d, conv2d + "input_layers=inputlayer", + flatten + "input_layers=conv2d", out + "input_layers=flat"}); + + ScopedIni backbone( + "backbone_p20", {nw_base_mse, input, backbone_valid_inout + "input_layers=inputlayer"}); - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(backbone.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN.compile(), ML_ERROR_NONE); EXPECT_EQ(NN.initialize(), ML_ERROR_NONE); EXPECT_EQ(NN.getNetworkGraph().getSorted().size(), 6u); @@ -850,18 +804,16 @@ TEST(nntrainerIniTest, backbone_p_20) { * @note Input layer name not found, epmty backbone */ TEST(nntrainerIniTest, backbone_n_21) { - const char *ini_name = "backbone_n21.ini"; nntrainer::NeuralNetwork NN; - nntrainerIniTest::save_ini("base.ini", - {input2d, conv2d + "input_layers=inputlayer", - batch_normal + "input_layers=conv2d", - out + "input_layers=bn"}); - nntrainerIniTest::save_ini( - ini_name, + ScopedIni base("base", {input2d, conv2d + "input_layers=inputlayer", + batch_normal + "input_layers=conv2d", + out + "input_layers=bn"}); + ScopedIni backbone( + "backbone_n21", {nw_base_mse, input, backbone_valid_inout + "input_layers=inputlayer"}); - EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE); + EXPECT_EQ(NN.loadFromConfig(backbone.getIniName()), ML_ERROR_NONE); EXPECT_EQ(NN.compile(), ML_ERROR_NONE); EXPECT_EQ(NN.initialize(), ML_ERROR_NONE); EXPECT_EQ(NN.getNetworkGraph().getSorted().size(), 3u);
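Note on the pattern introduced above: `ScopedIni` is an RAII wrapper around `IniTestWrapper` that writes the ini file in its constructor and deletes it in its destructor, so every test cleans up after itself no matter how it exits. The sketch below is a minimal, standalone illustration of that idea only; `SimpleSection` and `SimpleScopedIni` are hypothetical stand-ins and not the actual nntrainer test utilities.

#include <cstdio>
#include <fstream>
#include <string>
#include <vector>

// Hypothetical stand-in for an ini section: a section name plus raw "key = value" lines.
struct SimpleSection {
  std::string name;
  std::vector<std::string> entries;
};

// RAII wrapper mirroring the ScopedIni idea from the patch: the ini file is
// written when the object is constructed and removed again when it goes out
// of scope, so a test never leaves stray *.ini files in the build directory.
class SimpleScopedIni {
public:
  SimpleScopedIni(const std::string &name,
                  const std::vector<SimpleSection> &sections) :
    path(name + ".ini") {
    std::ofstream out(path);
    for (const auto &sec : sections) {
      out << '[' << sec.name << "]\n";
      for (const auto &entry : sec.entries)
        out << entry << '\n';
    }
  }

  // Erasing in the destructor keeps cleanup deterministic on every exit path
  // from the test body, including early returns and thrown exceptions.
  ~SimpleScopedIni() { std::remove(path.c_str()); }

  const std::string &getIniName() const { return path; }

private:
  std::string path;
};

int main() {
  {
    SimpleScopedIni ini("example_model",
                        {{"Model", {"Type = NeuralNetwork", "Epochs = 1"}},
                         {"inputlayer", {"Type = input", "Input_Shape = 1:1:62720"}}});
    // A real test body would pass ini.getIniName() to loadFromConfig() here.
  } // example_model.ini is erased here, leaving the directory clean.
  return 0;
}

In the patch itself the sections are built once from shared `IniSection` objects and tweaked per test: appending a string such as `+ "batch_size = 16"` overrides a property, `+ "-BufferSize"` drops one (as the `dataset + "-BufferSize"` hunks show), and `I("block1") + backbone_valid + "input_layers=inputlayer"` reuses a section under a new name. That composition is what allows the deprecated `config_str`/`config_str2` blobs and the `RESET_CONFIG`/`replaceString` calls to be removed.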