[unittest] add negative cases & delete meaningless cases #2755

Merged
merged 1 commit on Oct 18, 2024
105 changes: 103 additions & 2 deletions test/tizen_capi/unittest_tizen_capi_layer.cpp
@@ -26,7 +26,7 @@
#include <nntrainer_test_util.h>

/**
* @brief Neural Network Layer Create / Delete Test (possitive test)
* @brief Neural Network Layer Create / Delete Test (positive test)
*/
TEST(nntrainer_capi_nnlayer, create_delete_01_p) {
ml_train_layer_h handle;
@@ -38,7 +38,7 @@ TEST(nntrainer_capi_nnlayer, create_delete_01_p) {
}

/**
* @brief Neural Network Layer Create / Delete Test (possitive test)
* @brief Neural Network Layer Create / Delete Test (positive test)
*/
TEST(nntrainer_capi_nnlayer, create_delete_02_p) {
ml_train_layer_h handle;
@@ -232,6 +232,58 @@ TEST(nntrainer_capi_nnlayer, setproperty_11_n) {
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}

/**
* @brief Neural Network Set Property Test (negative test)
*/
TEST(nntrainer_capi_nnlayer, setproperty_12_n) {
ml_train_layer_h handle = nullptr;
int status;
/**
* If a property is set in an inappropriate way, an error is expected.
*/
status = ml_train_layer_set_property(handle, "relu", NULL);
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}

/**
* @brief Neural Network Set Property Test (negative test)
*/
TEST(nntrainer_capi_nnlayer, setproperty_13_n) {
ml_train_layer_h handle = nullptr;
int status;
/**
* If a property is set in an inappropriate way, an error is expected.
*/
status = ml_train_layer_set_property(handle, "=relu", NULL);
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}

/**
* @brief Neural Network Set Property Test (negative test)
*/
TEST(nntrainer_capi_nnlayer, setproperty_14_n) {
ml_train_layer_h handle = nullptr;
int status;
/**
* If a property is set in an inappropriate way, an error is expected.
*/
status = ml_train_layer_set_property(handle, "=0.01", NULL);
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}

/**
* @brief Neural Network Set Property Test (negative test)
*/
TEST(nntrainer_capi_nnlayer, setproperty_15_n) {
ml_train_layer_h handle = nullptr;
int status;
/**
* If a property is set in an inappropriate way, an error is expected.
*/
status = ml_train_layer_set_property(handle, "activation:relu", NULL);
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
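
For contrast, a minimal positive counterpart, sketched rather than taken from this commit: it assumes the NULL-terminated "key=value" calling convention used by the positive tests earlier in this file, and the test name and the ML_TRAIN_LAYER_TYPE_FC choice are illustrative only.

/**
 * @brief Neural Network Layer Set Property sketch (assumed positive counterpart)
 */
TEST(nntrainer_capi_nnlayer, setproperty_sketch_p) {
  ml_train_layer_h handle = nullptr;
  int status;
  status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_FC);
  EXPECT_EQ(status, ML_ERROR_NONE);
  /** "key=value" is the accepted form; "relu", "=relu", "=0.01" and
   *  "activation:relu" above each lack a key, a value, or the '=' */
  status = ml_train_layer_set_property(handle, "activation=relu", NULL);
  EXPECT_EQ(status, ML_ERROR_NONE);
  status = ml_train_layer_destroy(handle);
  EXPECT_EQ(status, ML_ERROR_NONE);
}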

/**
* @brief Neural Network Layer Set Property Test (positive test)
*/
@@ -280,6 +332,55 @@ TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_03_n) {
EXPECT_EQ(status, ML_ERROR_NONE);
}

/**
* @brief Neural Network Layer Set Property Test (negative test)
*/
TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_04_n) {
ml_train_layer_h handle;
int status;
status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_train_layer_set_property_with_single_param(
handle, "input_shape=1:1:6270 / normalization=true / standardization=true");
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);

status = ml_train_layer_destroy(handle);
EXPECT_EQ(status, ML_ERROR_NONE);
}

/**
* @brief Neural Network Layer Set Property Test (negative test)
*/
TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_05_n) {
ml_train_layer_h handle;
int status;
status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_train_layer_set_property_with_single_param(
handle,
"input_shape=1:1:6270 // normalization=true // standardization=true");
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);

status = ml_train_layer_destroy(handle);
EXPECT_EQ(status, ML_ERROR_NONE);
}

/**
* @brief Neural Network Layer Set Property Test (negative test)
*/
TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_06_n) {
ml_train_layer_h handle;
int status;
status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_train_layer_set_property_with_single_param(
handle, "input_shape=1:1:6270 : normalization=true : standardization=true");
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);

status = ml_train_layer_destroy(handle);
EXPECT_EQ(status, ML_ERROR_NONE);
}
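
Likewise, a hedged positive sketch for the single-param variant, assuming the '|' delimiter used by the existing positive single-param tests in this file; the test name is illustrative.

/**
 * @brief Neural Network Layer Set Property with single param sketch (assumed '|' delimiter)
 */
TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_sketch_p) {
  ml_train_layer_h handle;
  int status;
  status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
  EXPECT_EQ(status, ML_ERROR_NONE);
  /** '|' separates the properties; '/', '//' and ':' are rejected above */
  status = ml_train_layer_set_property_with_single_param(
    handle, "input_shape=1:1:6270 | normalization=true | standardization=true");
  EXPECT_EQ(status, ML_ERROR_NONE);
  status = ml_train_layer_destroy(handle);
  EXPECT_EQ(status, ML_ERROR_NONE);
}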

/*** since tizen 6.5 ***/

/**
2 changes: 1 addition & 1 deletion test/unittest/compiler/unittest_realizer.cpp
@@ -942,5 +942,5 @@ TEST(LossRealizer, loss_realizer_p) {
};
LossRealizer r;
std::vector<std::unique_ptr<nntrainer::GraphRealizer>> realizers;
compileAndRealizeAndEqual(r, realizers, before, after);
EXPECT_NO_THROW(compileAndRealizeAndEqual(r, realizers, before, after));
}
20 changes: 16 additions & 4 deletions test/unittest/datasets/unittest_iteration_queue.cpp
@@ -575,25 +575,37 @@ GTEST_PARAMETER_TEST(IterQueue, IterQueueScenarios,
multi_slot_single_batch,
single_slot_single_batch));

TEST(IterQueue, constructEmptySlots_n) {
TEST(IterQueue, constructEmptySlots_01_n) {
EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {}, {}));
}

TEST(IterQueue, constructEmptySlots_02_n) {
EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {}, {{1}}));
}

TEST(IterQueue, constructEmptySlots_03_n) {
EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {{1}}, {{1}}));
}

TEST(IterQueue, constructEmptyInput_n) {
EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {}, {{1}}));
}

TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_n) {
TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_01_n) {
EXPECT_ANY_THROW(
nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {2, 1, 1, 10}}, {}));
}

TEST(IterQueue, constructNotConsistentBatchSizeInLabel_n) {
TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_02_n) {
EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1}, {2, 1}}, {{1, 0}}));
}

TEST(IterQueue, constructNotConsistentBatchSizeInLabel_01_n) {
EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {3, 1, 1, 10}},
{{2, 1, 1, 10}}));
}

TEST(IterQueue, constructNotConsistentBatchSizeInLabel2_n) {
TEST(IterQueue, constructNotConsistentBatchSizeInLabel_02_n) {
EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {3, 1, 1, 10}},
{{3, 1, 1, 10}, {2, 1, 1, 10}}));
}
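
Read together, these negative cases imply the constructor rejects zero slots, empty inputs, and any disagreement in batch size (the leading dimension) across inputs and labels. A hedged sketch of a construction that should therefore succeed; the test name is illustrative:

TEST(IterQueue, constructConsistent_sketch_p) {
  /** one slot, two inputs and one label, all with batch size 3: assumed valid */
  EXPECT_NO_THROW(nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {3, 1, 1, 10}},
                                            {{3, 1, 1, 10}}));
}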
8 changes: 7 additions & 1 deletion test/unittest/layers/layers_dependent_common_tests.cpp
@@ -39,12 +39,18 @@ TEST_P(LayerPropertySemantics, setPropertiesInvalid_n) {
EXPECT_THROW(layer->setProperty({valid_properties}), std::invalid_argument);
}

TEST_P(LayerSemantics, setPropertiesInvalid_n) {
TEST_P(LayerSemantics, setPropertiesInvalid_01_n) {
auto lnode = nntrainer::createLayerNode(expected_type);
/** must not crash */
EXPECT_THROW(layer->setProperty({"unknown_props=2"}), std::invalid_argument);
}

TEST_P(LayerSemantics, setPropertiesInvalid_02_n) {
auto lnode = nntrainer::createLayerNode(expected_type);
/** must not crash */
EXPECT_THROW(layer->setProperty({"unknown_props:2"}), std::invalid_argument);
}

TEST_P(LayerSemantics, finalizeValidateLayerNode_p) {
auto lnode = nntrainer::createLayerNode(expected_type);
std::vector<std::string> props = {"name=test"};
2 changes: 0 additions & 2 deletions test/unittest/layers/layers_golden_tests.cpp
@@ -426,6 +426,4 @@ TEST_P(LayerGoldenTest, run) {

compareRunContext(rc, golden_file, skip_calc_grad, skip_calc_deriv,
dropout_compare_60_percent, skip_cos_sim);

EXPECT_TRUE(true); // stub test for tcm
}
5 changes: 0 additions & 5 deletions test/unittest/memory/unittest_cache_pool.cpp
@@ -67,11 +67,6 @@ class CachePoolTest : public ::testing::Test {
MockCachePool *pool;
};

/**
* @brief creation and destruction
*/
TEST_F(CachePoolTest, create_destroy) {}

/**
* @brief get cache memory
*/
45 changes: 0 additions & 45 deletions test/unittest/models/models_golden_test.cpp
@@ -43,51 +43,6 @@ void nntrainerModelTest::validate(
}
}

/**
* @brief check given ini is failing/suceeding at unoptimized running
*/
TEST_P(nntrainerModelTest, model_test) {
if (!shouldCompare()) {
std::cout << "[ SKIPPED ] option not enabled \n";
return;
}
/** Check model with all optimizations off */
compare(false);

/// add stub test for tcm
EXPECT_TRUE(true);
}

/**
* @brief check given ini is failing/suceeding at optimized running
*/
TEST_P(nntrainerModelTest, model_test_optimized) {
if (!shouldCompare()) {
std::cout << "[ SKIPPED ] option not enabled \n";
return;
}
/** Check model with all optimizations on */

compare(true);

/// add stub test for tcm
EXPECT_TRUE(true);
}

/**
* @brief check given ini is failing/suceeding at validation
*/
TEST_P(nntrainerModelTest, model_test_validate) {
if (!shouldValidate()) {
std::cout << "[ SKIPPED ] option not enabled \n";
return;
}

validate(true);
/// add stub test for tcm
EXPECT_TRUE(true);
}

TEST_P(nntrainerModelTest, model_test_save_load_compare) {
if (!shouldSaveLoadIniTest() || !shouldCompare()) {
std::cout << "[ SKIPPED ] option not enabled \n";
15 changes: 15 additions & 0 deletions test/unittest/unittest_base_properties.cpp
@@ -383,6 +383,21 @@ TEST(BasicProperty, setNotValid_03_n) {
EXPECT_THROW(d.set({3, 3, 2, 4}), std::invalid_argument);
}

TEST(BasicProperty, setNotValid_04_n) {
DimensionOfBanana d;
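/** five entries presumably exceed the 4D tensor dimension limit */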
EXPECT_THROW(d.set({1, 2, 3, 4, 5}), std::invalid_argument);
}

TEST(BasicProperty, setNotValid_05_n) {
DimensionOfBanana d;
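/** a zero-sized dimension is expected to be rejected */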
EXPECT_THROW(d.set({0}), std::invalid_argument);
}

TEST(BasicProperty, setNotValid_06_n) {
DimensionOfBanana d;
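/** a zero entry should be rejected even next to a valid one */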
EXPECT_THROW(d.set({0, 1}), std::invalid_argument);
}

TEST(BasicProperty, fromStringNotValid_01_n) {
NumBanana b;
EXPECT_THROW(nntrainer::from_string("not integer", b), std::invalid_argument);
21 changes: 21 additions & 0 deletions test/unittest/unittest_common_properties.cpp
@@ -144,6 +144,27 @@ TEST(InputConnection, invalidFormat_n_06) {
std::invalid_argument);
}

TEST(InputConnection, invalidFormat_n_07) {
using namespace nntrainer::props;
InputConnection actual;
EXPECT_THROW(nntrainer::from_string("name:layer0", actual),
std::invalid_argument);
}

TEST(InputConnection, invalidFormat_n_08) {
using namespace nntrainer::props;
InputConnection actual;
EXPECT_THROW(nntrainer::from_string("name(layer0)", actual),
std::invalid_argument);
}

TEST(InputConnection, invalidFormat_n_09) {
using namespace nntrainer::props;
InputConnection actual;
EXPECT_THROW(nntrainer::from_string("name==layer0", actual),
std::invalid_argument);
}
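
By elimination, the parser presumably accepts a bare layer name, with at most an optional integer index in parentheses; a hedged sketch (the test name and the accepted grammar are assumptions):

TEST(InputConnection, validFormat_sketch_p) {
  using namespace nntrainer::props;
  InputConnection actual;
  /** plain layer name: no ':', no '==', no non-integer index */
  EXPECT_NO_THROW(nntrainer::from_string("layer0", actual));
}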

TEST(DropOutRate, dropout_01_n) {
nntrainer::props::DropOutRate dropout;
EXPECT_THROW(dropout.set(-0.5), std::invalid_argument);
46 changes: 43 additions & 3 deletions test/unittest/unittest_nntrainer_internal.cpp
@@ -53,7 +53,27 @@ TEST(nntrainer_Optimizer, create_02_n) {
/**
* @brief Optimizer create
*/
TEST(nntrainer_Optimizer, setType_02_n) {
TEST(nntrainer_Optimizer, create_03_n) {
std::unique_ptr<nntrainer::Optimizer> op;
auto &ac = nntrainer::AppContext::Global();
EXPECT_ANY_THROW(op =
ac.createObject<nntrainer::Optimizer>("adam", {"lr=0.1"}));
}

/**
* @brief Optimizer create
*/
TEST(nntrainer_Optimizer, create_04_n) {
std::unique_ptr<nntrainer::Optimizer> op;
auto &ac = nntrainer::AppContext::Global();
EXPECT_ANY_THROW(
op = ac.createObject<nntrainer::Optimizer>("adam", {"learning_rate:0.1"}));
}

/**
* @brief Optimizer create
*/
TEST(nntrainer_Optimizer, create_05_n) {
std::unique_ptr<nntrainer::Optimizer> op;
auto &ac = nntrainer::AppContext::Global();
EXPECT_NO_THROW(op = ac.createObject<nntrainer::Optimizer>("sgd", {}));
@@ -62,7 +82,27 @@ TEST(nntrainer_Optimizer, setType_02_n) {
/**
* @brief Optimizer create
*/
TEST(nntrainer_Optimizer, setType_03_n) {
TEST(nntrainer_Optimizer, create_06_n) {
std::unique_ptr<nntrainer::Optimizer> op;
auto &ac = nntrainer::AppContext::Global();
EXPECT_ANY_THROW(op =
ac.createObject<nntrainer::Optimizer>("sgd", {"lr=0.1"}));
}

/**
* @brief Optimizer create
*/
TEST(nntrainer_Optimizer, create_07_n) {
std::unique_ptr<nntrainer::Optimizer> op;
auto &ac = nntrainer::AppContext::Global();
EXPECT_ANY_THROW(
op = ac.createObject<nntrainer::Optimizer>("sgd", {"learning_rate:0.1"}));
}
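
The rejected strings suggest the accepted property form is "learning_rate=<float>", with '=' as the separator and the full key name; a hedged positive sketch (test name illustrative):

/**
 * @brief Optimizer create sketch (assumed positive counterpart)
 */
TEST(nntrainer_Optimizer, create_sketch_p) {
  std::unique_ptr<nntrainer::Optimizer> op;
  auto &ac = nntrainer::AppContext::Global();
  /** full key and '=' separator, unlike "lr=0.1" and "learning_rate:0.1" above */
  EXPECT_NO_THROW(
    op = ac.createObject<nntrainer::Optimizer>("adam", {"learning_rate=0.1"}));
}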

/**
* @brief Optimizer create
*/
TEST(nntrainer_Optimizer, create_08_n) {
std::unique_ptr<nntrainer::Optimizer> op;
auto &ac = nntrainer::AppContext::Global();
EXPECT_ANY_THROW(op =
@@ -72,7 +112,7 @@ TEST(nntrainer_Optimizer, setType_03_n) {
/**
* @brief Optimizer create
*/
TEST(nntrainer_Optimizer, setType_04_n) {
TEST(nntrainer_Optimizer, create_09_n) {
std::unique_ptr<nntrainer::Optimizer> op;
auto &ac = nntrainer::AppContext::Global();
EXPECT_ANY_THROW(