diff --git a/test/tizen_capi/unittest_tizen_capi_layer.cpp b/test/tizen_capi/unittest_tizen_capi_layer.cpp
index 7bd07c9b4e..ce5e7dec42 100644
--- a/test/tizen_capi/unittest_tizen_capi_layer.cpp
+++ b/test/tizen_capi/unittest_tizen_capi_layer.cpp
@@ -26,7 +26,7 @@
 #include
 /**
- * @brief Neural Network Layer Create / Delete Test (possitive test)
+ * @brief Neural Network Layer Create / Delete Test (positive test)
  */
 TEST(nntrainer_capi_nnlayer, create_delete_01_p) {
   ml_train_layer_h handle;
@@ -38,7 +38,7 @@ TEST(nntrainer_capi_nnlayer, create_delete_01_p) {
 }
 /**
- * @brief Neural Network Layer Create / Delete Test (possitive test)
+ * @brief Neural Network Layer Create / Delete Test (positive test)
  */
 TEST(nntrainer_capi_nnlayer, create_delete_02_p) {
   ml_train_layer_h handle;
@@ -232,6 +232,58 @@ TEST(nntrainer_capi_nnlayer, setproperty_11_n) {
   EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
 }
 
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_12_n) {
+  ml_train_layer_h handle = nullptr;
+  int status;
+  /**
+   * If the property is set in an inappropriate way, an error is expected.
+   */
+  status = ml_train_layer_set_property(handle, "relu", NULL);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_13_n) {
+  ml_train_layer_h handle = nullptr;
+  int status;
+  /**
+   * If the property is set in an inappropriate way, an error is expected.
+   */
+  status = ml_train_layer_set_property(handle, "=relu", NULL);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_14_n) {
+  ml_train_layer_h handle = nullptr;
+  int status;
+  /**
+   * If the property is set in an inappropriate way, an error is expected.
+   */
+  status = ml_train_layer_set_property(handle, "=0.01", NULL);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_15_n) {
+  ml_train_layer_h handle = nullptr;
+  int status;
+  /**
+   * If the property is set in an inappropriate way, an error is expected.
+   */
+  status = ml_train_layer_set_property(handle, "activation:relu", NULL);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
 /**
  * @brief Neural Network Layer Set Property Test (positive test)
  */
@@ -280,6 +332,55 @@ TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_03_n) {
   EXPECT_EQ(status, ML_ERROR_NONE);
 }
 
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_04_n) {
+  ml_train_layer_h handle;
+  int status;
+  status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+  status = ml_train_layer_set_property_with_single_param(
+    handle, "input_shape=1:1:6270 / normalization=true / standardization=true");
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+
+  status = ml_train_layer_destroy(handle);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_05_n) {
+  ml_train_layer_h handle;
+  int status;
+  status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+  status = ml_train_layer_set_property_with_single_param(
+    handle,
+    "input_shape=1:1:6270 // normalization=true // standardization=true");
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+
+  status = ml_train_layer_destroy(handle);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_06_n) {
+  ml_train_layer_h handle;
+  int status;
+  status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+  status = ml_train_layer_set_property_with_single_param(
+    handle, "input_shape=1:1:6270 : normalization=true : standardization=true");
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+
+  status = ml_train_layer_destroy(handle);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
 /*** since tizen 6.5 ***/
 
 /**
diff --git a/test/unittest/compiler/unittest_realizer.cpp b/test/unittest/compiler/unittest_realizer.cpp
index 5759fa0612..b4fee235d5 100644
--- a/test/unittest/compiler/unittest_realizer.cpp
+++ b/test/unittest/compiler/unittest_realizer.cpp
@@ -942,5 +942,5 @@ TEST(LossRealizer, loss_realizer_p) {
   };
   LossRealizer r;
   std::vector<std::unique_ptr<GraphRealizer>> realizers;
-  compileAndRealizeAndEqual(r, realizers, before, after);
+  EXPECT_NO_THROW(compileAndRealizeAndEqual(r, realizers, before, after));
 }
diff --git a/test/unittest/datasets/unittest_iteration_queue.cpp b/test/unittest/datasets/unittest_iteration_queue.cpp
index 376687d701..808472f19b 100644
--- a/test/unittest/datasets/unittest_iteration_queue.cpp
+++ b/test/unittest/datasets/unittest_iteration_queue.cpp
@@ -575,7 +575,15 @@ GTEST_PARAMETER_TEST(IterQueue, IterQueueScenarios,
                      multi_slot_single_batch,
                      single_slot_single_batch));
 
-TEST(IterQueue, constructEmptySlots_n) {
+TEST(IterQueue, constructEmptySlots_01_n) {
+  EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {}, {}));
+}
+
+TEST(IterQueue, constructEmptySlots_02_n) {
+  EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {}, {{1}}));
+}
+
+TEST(IterQueue, constructEmptySlots_03_n) {
   EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {{1}}, {{1}}));
 }
 
@@ -583,17 +591,21 @@ TEST(IterQueue, constructEmptyInput_n) {
   EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {}, {{1}}));
 }
 
-TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_n) {
+TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_01_n) {
   EXPECT_ANY_THROW(
     nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {2, 1, 1, 10}}, {}));
 }
 
-TEST(IterQueue, constructNotConsistentBatchSizeInLabel_n) {
+TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_02_n) {
+  EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1}, {2, 1}}, {{1, 0}}));
+}
+
+TEST(IterQueue, constructNotConsistentBatchSizeInLabel_01_n) {
   EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {3, 1, 1, 10}},
                                              {{2, 1, 1, 10}}));
 }
 
-TEST(IterQueue, constructNotConsistentBatchSizeInLabel2_n) {
+TEST(IterQueue, constructNotConsistentBatchSizeInLabel_02_n) {
   EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {3, 1, 1, 10}},
                                              {{3, 1, 1, 10}, {2, 1, 1, 10}}));
 }
diff --git a/test/unittest/layers/layers_dependent_common_tests.cpp b/test/unittest/layers/layers_dependent_common_tests.cpp
index f1c87b2426..068fe4e232 100644
--- a/test/unittest/layers/layers_dependent_common_tests.cpp
+++ b/test/unittest/layers/layers_dependent_common_tests.cpp
@@ -39,12 +39,18 @@ TEST_P(LayerPropertySemantics, setPropertiesInvalid_n) {
   EXPECT_THROW(layer->setProperty({valid_properties}), std::invalid_argument);
 }
 
-TEST_P(LayerSemantics, setPropertiesInvalid_n) {
+TEST_P(LayerSemantics, setPropertiesInvalid_01_n) {
   auto lnode = nntrainer::createLayerNode(expected_type);
   /** must not crash */
   EXPECT_THROW(layer->setProperty({"unknown_props=2"}), std::invalid_argument);
 }
 
+TEST_P(LayerSemantics, setPropertiesInvalid_02_n) {
+  auto lnode = nntrainer::createLayerNode(expected_type);
+  /** must not crash */
+  EXPECT_THROW(layer->setProperty({"unknown_props:2"}), std::invalid_argument);
+}
+
 TEST_P(LayerSemantics, finalizeValidateLayerNode_p) {
   auto lnode = nntrainer::createLayerNode(expected_type);
   std::vector<std::string> props = {"name=test"};
diff --git a/test/unittest/layers/layers_golden_tests.cpp b/test/unittest/layers/layers_golden_tests.cpp
index 152f9e0934..464318f333 100644
--- a/test/unittest/layers/layers_golden_tests.cpp
+++ b/test/unittest/layers/layers_golden_tests.cpp
@@ -426,6 +426,4 @@ TEST_P(LayerGoldenTest, run) {
 
   compareRunContext(rc, golden_file, skip_calc_grad, skip_calc_deriv,
                     dropout_compare_60_percent, skip_cos_sim);
-
-  EXPECT_TRUE(true); // stub test for tcm
 }
diff --git a/test/unittest/memory/unittest_cache_pool.cpp b/test/unittest/memory/unittest_cache_pool.cpp
index 6f12e05e96..051ed3e6e8 100644
--- a/test/unittest/memory/unittest_cache_pool.cpp
+++ b/test/unittest/memory/unittest_cache_pool.cpp
@@ -67,11 +67,6 @@ class CachePoolTest : public ::testing::Test {
   MockCachePool *pool;
 };
 
-/**
- * @brief creation and destruction
- */
-TEST_F(CachePoolTest, create_destroy) {}
-
 /**
  * @brief get cache memory
  */
diff --git a/test/unittest/models/models_golden_test.cpp b/test/unittest/models/models_golden_test.cpp
index e755a902cc..58445b38f3 100644
--- a/test/unittest/models/models_golden_test.cpp
+++ b/test/unittest/models/models_golden_test.cpp
@@ -43,51 +43,6 @@ void nntrainerModelTest::validate(
   }
 }
 
-/**
- * @brief check given ini is failing/suceeding at unoptimized running
- */
-TEST_P(nntrainerModelTest, model_test) {
-  if (!shouldCompare()) {
-    std::cout << "[ SKIPPED ] option not enabled \n";
-    return;
-  }
-  /** Check model with all optimizations off */
-  compare(false);
-
-  /// add stub test for tcm
-  EXPECT_TRUE(true);
-}
-
-/**
- * @brief check given ini is failing/suceeding at optimized running
- */
-TEST_P(nntrainerModelTest, model_test_optimized) {
-  if (!shouldCompare()) {
-    std::cout << "[ SKIPPED ] option not enabled \n";
-    return;
-  }
-  /** Check model with all optimizations on */
-
-  compare(true);
-
-  /// add stub test for tcm
-  EXPECT_TRUE(true);
-}
-
-/**
- * @brief check given ini is failing/suceeding at validation
- */
-TEST_P(nntrainerModelTest, model_test_validate) {
-  if (!shouldValidate()) {
-    std::cout << "[ SKIPPED ] option not enabled \n";
-    return;
-  }
-
-  validate(true);
-  /// add stub test for tcm
-  EXPECT_TRUE(true);
-}
-
 TEST_P(nntrainerModelTest, model_test_save_load_compare) {
   if (!shouldSaveLoadIniTest() || !shouldCompare()) {
     std::cout << "[ SKIPPED ] option not enabled \n";
diff --git a/test/unittest/unittest_base_properties.cpp b/test/unittest/unittest_base_properties.cpp
index 56640c4748..e25c97386a 100644
--- a/test/unittest/unittest_base_properties.cpp
+++ b/test/unittest/unittest_base_properties.cpp
@@ -383,6 +383,21 @@ TEST(BasicProperty, setNotValid_03_n) {
   EXPECT_THROW(d.set({3, 3, 2, 4}), std::invalid_argument);
 }
 
+TEST(BasicProperty, setNotValid_04_n) {
+  DimensionOfBanana d;
+  EXPECT_THROW(d.set({1, 2, 3, 4, 5}), std::invalid_argument);
+}
+
+TEST(BasicProperty, setNotValid_05_n) {
+  DimensionOfBanana d;
+  EXPECT_THROW(d.set({0}), std::invalid_argument);
+}
+
+TEST(BasicProperty, setNotValid_06_n) {
+  DimensionOfBanana d;
+  EXPECT_THROW(d.set({0, 1}), std::invalid_argument);
+}
+
 TEST(BasicProperty, fromStringNotValid_01_n) {
   NumBanana b;
   EXPECT_THROW(nntrainer::from_string("not integer", b), std::invalid_argument);
diff --git a/test/unittest/unittest_common_properties.cpp b/test/unittest/unittest_common_properties.cpp
index 7da478a2a5..848bc18251 100644
--- a/test/unittest/unittest_common_properties.cpp
+++ b/test/unittest/unittest_common_properties.cpp
@@ -144,6 +144,27 @@ TEST(InputConnection, invalidFormat_n_06) {
                std::invalid_argument);
 }
 
+TEST(InputConnection, invalidFormat_n_07) {
+  using namespace nntrainer::props;
+  InputConnection actual;
+  EXPECT_THROW(nntrainer::from_string("name:layer0", actual),
+               std::invalid_argument);
+}
+
+TEST(InputConnection, invalidFormat_n_08) {
+  using namespace nntrainer::props;
+  InputConnection actual;
+  EXPECT_THROW(nntrainer::from_string("name(layer0)", actual),
+               std::invalid_argument);
+}
+
+TEST(InputConnection, invalidFormat_n_09) {
+  using namespace nntrainer::props;
+  InputConnection actual;
+  EXPECT_THROW(nntrainer::from_string("name==layer0", actual),
+               std::invalid_argument);
+}
+
 TEST(DropOutRate, dropout_01_n) {
   nntrainer::props::DropOutRate dropout;
   EXPECT_THROW(dropout.set(-0.5), std::invalid_argument);
diff --git a/test/unittest/unittest_nntrainer_internal.cpp b/test/unittest/unittest_nntrainer_internal.cpp
index b5dd84ff90..fc44549cea 100644
--- a/test/unittest/unittest_nntrainer_internal.cpp
+++ b/test/unittest/unittest_nntrainer_internal.cpp
@@ -53,7 +53,27 @@ TEST(nntrainer_Optimizer, create_02_n) {
 /**
  * @brief Optimizer create
  */
-TEST(nntrainer_Optimizer, setType_02_n) {
+TEST(nntrainer_Optimizer, create_03_n) {
+  std::unique_ptr<nntrainer::Optimizer> op;
+  auto &ac = nntrainer::AppContext::Global();
+  EXPECT_ANY_THROW(
+    op = ac.createObject<nntrainer::Optimizer>("adam", {"lr=0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_04_n) {
+  std::unique_ptr<nntrainer::Optimizer> op;
+  auto &ac = nntrainer::AppContext::Global();
+  EXPECT_ANY_THROW(
+    op = ac.createObject<nntrainer::Optimizer>("adam", {"learning_rate:0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_05_n) {
   std::unique_ptr<nntrainer::Optimizer> op;
   auto &ac = nntrainer::AppContext::Global();
   EXPECT_NO_THROW(op = ac.createObject<nntrainer::Optimizer>("sgd", {}));
@@ -62,7 +82,27 @@ TEST(nntrainer_Optimizer, setType_02_n) {
 /**
  * @brief Optimizer create
  */
-TEST(nntrainer_Optimizer, setType_03_n) {
+TEST(nntrainer_Optimizer, create_06_n) {
+  std::unique_ptr<nntrainer::Optimizer> op;
+  auto &ac = nntrainer::AppContext::Global();
+  EXPECT_ANY_THROW(
+    op = ac.createObject<nntrainer::Optimizer>("sgd", {"lr=0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_07_n) {
+  std::unique_ptr<nntrainer::Optimizer> op;
+  auto &ac = nntrainer::AppContext::Global();
+  EXPECT_ANY_THROW(
+    op = ac.createObject<nntrainer::Optimizer>("sgd", {"learning_rate:0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_08_n) {
   std::unique_ptr<nntrainer::Optimizer> op;
   auto &ac = nntrainer::AppContext::Global();
   EXPECT_ANY_THROW(op =
@@ -72,7 +112,7 @@ TEST(nntrainer_Optimizer, setType_03_n) {
 /**
  * @brief Optimizer create
  */
-TEST(nntrainer_Optimizer, setType_04_n) {
+TEST(nntrainer_Optimizer, create_09_n) {
   std::unique_ptr<nntrainer::Optimizer> op;
   auto &ac = nntrainer::AppContext::Global();
   EXPECT_ANY_THROW(
diff --git a/test/unittest/unittest_nntrainer_lr_scheduler.cpp b/test/unittest/unittest_nntrainer_lr_scheduler.cpp
index b990e18d8b..97fef1c4a0 100644
--- a/test/unittest/unittest_nntrainer_lr_scheduler.cpp
+++ b/test/unittest/unittest_nntrainer_lr_scheduler.cpp
@@ -64,6 +64,48 @@ TEST(lr_constant, ctor_initializer_04_n) {
                std::invalid_argument);
 }
 
+/**
+ * @brief test constructing lr scheduler
+ *
+ */
+TEST(lr_constant, ctor_initializer_05_n) {
+  EXPECT_THROW(nntrainer::createLearningRateScheduler<
+                 nntrainer::ConstantLearningRateScheduler>({"lr=0.1"}),
+               std::invalid_argument);
+}
+
+/**
+ * @brief test constructing lr scheduler
+ *
+ */
+TEST(lr_constant, ctor_initializer_06_n) {
+  EXPECT_THROW(
+    nntrainer::createLearningRateScheduler<
+      nntrainer::ConstantLearningRateScheduler>({"learning_rate:0.1"}),
+    std::invalid_argument);
+}
+
+/**
+ * @brief test constructing lr scheduler
+ *
+ */
+TEST(lr_constant, ctor_initializer_07_n) {
+  EXPECT_THROW(
+    nntrainer::createLearningRateScheduler<
+      nntrainer::ConstantLearningRateScheduler>({"learning_rate(0.1)"}),
+    std::invalid_argument);
+}
+
+/**
+ * @brief test constructing lr scheduler
+ *
+ */
+TEST(lr_constant, ctor_initializer_08_n) {
+  EXPECT_THROW(nntrainer::createLearningRateScheduler<
+                 nntrainer::ConstantLearningRateScheduler>({"0.1"}),
+               std::invalid_argument);
+}
+
 /**
  * @brief test set and get learning rate
  *
@@ -98,6 +140,24 @@ TEST(lr_constant, prop_03_p) {
   EXPECT_FLOAT_EQ(lr->getLearningRate(10), 1.0f);
 }
 
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_constant, prop_04_n) {
+  auto lr = createLRS("constant");
+  EXPECT_THROW(lr->setProperty({"learning_rate:0.1"}), std::invalid_argument);
+}
+
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_constant, prop_05_n) {
+  auto lr = createLRS("constant");
+  EXPECT_THROW(lr->setProperty({"learning_rate(0.1)"}), std::invalid_argument);
+}
+
 /**
  * @brief test set and get learning rate
  *
@@ -171,6 +231,14 @@ TEST(lr_exponential, prop_02_n) {
   auto lr = createLRS("exponential");
   EXPECT_THROW(lr->setProperty({"unknown=unknown"}), std::invalid_argument);
 }
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_exponential, prop_03_n) {
+  auto lr = createLRS("exponential");
+  EXPECT_THROW(lr->setProperty({"learning_rate:0.1"}), std::invalid_argument);
+}
 /**
  * @brief test finalize
  *
@@ -212,6 +280,14 @@ TEST(lr_step, prop_01_n) {
   auto lr = createLRS("step");
   EXPECT_THROW(lr->setProperty({"unknown=unknown"}), std::invalid_argument);
 }
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_step, prop_02_n) {
+  auto lr = createLRS("step");
+  EXPECT_THROW(lr->setProperty({"learning_rate:0.1"}), std::invalid_argument);
+}
 
 /**
  * @brief test finalize
diff --git a/test/unittest/unittest_nntrainer_tensor.cpp b/test/unittest/unittest_nntrainer_tensor.cpp
index 0d6d47be37..25461feb10 100644
--- a/test/unittest/unittest_nntrainer_tensor.cpp
+++ b/test/unittest/unittest_nntrainer_tensor.cpp
@@ -545,6 +545,13 @@ TEST(nntrainer_Tensor, multiply_i_broadcast_not_broadcastable_02_n) {
   EXPECT_EQ(target.multiply_i(target2), ML_ERROR_INVALID_PARAMETER);
 }
 
+TEST(nntrainer_Tensor, multiply_i_broadcast_not_broadcastable_03_n) {
+  nntrainer::Tensor target(1, 2, 1, 2);
+  nntrainer::Tensor target2(1, 2, 3, 1);
+
+  EXPECT_EQ(target.multiply_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
 TEST(nntrainer_Tensor, multiply_01_p) {
   int status = ML_ERROR_NONE;
   int batch = 3;
@@ -1191,6 +1198,13 @@ TEST(nntrainer_Tensor, divide_i_broadcast_not_broadcastable_02_n) {
   EXPECT_EQ(target.divide_i(target2), ML_ERROR_INVALID_PARAMETER);
 }
 
+TEST(nntrainer_Tensor, divide_i_broadcast_not_broadcastable_03_n) {
+  nntrainer::Tensor target(1, 2, 1, 2);
+  nntrainer::Tensor target2(1, 2, 3, 1);
+
+  EXPECT_EQ(target.divide_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
 TEST(nntrainer_Tensor, add_i_01_p) {
   int status = ML_ERROR_NONE;
   int batch = 3;
@@ -1496,6 +1510,13 @@ TEST(nntrainer_Tensor, add_i_broadcast_not_broadcastable_02_n) {
   EXPECT_EQ(target.add_i(target2), ML_ERROR_INVALID_PARAMETER);
 }
 
+TEST(nntrainer_Tensor, add_i_broadcast_not_broadcastable_03_n) {
+  nntrainer::Tensor target(1, 2, 1, 2);
+  nntrainer::Tensor target2(1, 2, 3, 1);
+
+  EXPECT_EQ(target.add_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
 TEST(nntrainer_Tensor, add_01_p) {
   int status = ML_ERROR_NONE;
   int batch = 3;
@@ -1660,6 +1681,27 @@ TEST(nntrainer_Tensor, pow_01_p) {
   EXPECT_EQ(actual, expected);
 }
 
+TEST(nntrainer_Tensor, subtract_i_broadcast_not_supported_01_n) {
+  nntrainer::Tensor target(3, 1, 3, 1);
+  nntrainer::Tensor target2(3, 1, 3, 3);
+
+  EXPECT_EQ(target.subtract_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
+TEST(nntrainer_Tensor, subtract_i_broadcast_not_broadcastable_02_n) {
+  nntrainer::Tensor target(3, 2, 4, 5);
+  nntrainer::Tensor target2(3, 2, 3, 1);
+
+  EXPECT_EQ(target.subtract_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
+TEST(nntrainer_Tensor, subtract_i_broadcast_not_broadcastable_03_n) {
+  nntrainer::Tensor target(1, 2, 1, 2);
+  nntrainer::Tensor target2(1, 2, 3, 1);
+
+  EXPECT_EQ(target.subtract_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
 TEST(nntrainer_Tensor, subtract_i_01_p) {
   int status = ML_ERROR_NONE;
   int batch = 3;
@@ -3278,12 +3320,6 @@ TEST(nntrainer_Tensor, fill_p) {
     EXPECT_EQ(target, original);
   }
 
-  /// same dimension, buffer size is different (not tested)
-  {
-    /// there is no way to make non contiguous tensor publicily yet
-    EXPECT_TRUE(true);
-  }
-
   /// uninitialized with initialized flag is true
   {
     nntrainer::Tensor target;
diff --git a/test/unittest/unittest_nntrainer_tensor_fp16.cpp b/test/unittest/unittest_nntrainer_tensor_fp16.cpp
index 619aa77f3a..1cdd9467c0 100644
--- a/test/unittest/unittest_nntrainer_tensor_fp16.cpp
+++ b/test/unittest/unittest_nntrainer_tensor_fp16.cpp
@@ -4848,12 +4848,6 @@ TEST(nntrainer_Tensor, fill_p) {
     EXPECT_EQ(target, original);
   }
 
-  /// same dimension, buffer size is different (not tested)
-  {
-    /// there is no way to make non contiguous tensor publicily yet
-    EXPECT_TRUE(true);
-  }
-
   /// uninitialized with initialized flag is true
   {
     nntrainer::Tensor target;
diff --git a/test/unittest/unittest_nntrainer_tensor_nhwc.cpp b/test/unittest/unittest_nntrainer_tensor_nhwc.cpp
index 6bb0cb8a12..bda5efd430 100644
--- a/test/unittest/unittest_nntrainer_tensor_nhwc.cpp
+++ b/test/unittest/unittest_nntrainer_tensor_nhwc.cpp
@@ -4692,13 +4692,55 @@ TEST(nntrainer_Tensor, transpose_nhwc_p) {
   }
 }
 
-TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_n) {
+TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_01_n) {
   nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
   nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
 
   EXPECT_THROW(a.transpose("0:1:2", b), std::invalid_argument);
 }
 
+TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_02_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("0:1", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_03_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("1:2:3:4", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_01_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("1<->4", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_02_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("2,0,1,3", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_03_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("2-0-1-3", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_04_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("2/0/1/3", b), std::invalid_argument);
+}
+
 // /**
 //  * @brief dequantize tensor with different format
 //  */