BUG FIX: Concat GPU layer and CPU layer unittest case names overlapping.

Renamed the concat GPU test cases in unittest_layers_concat_cl to differentiate them from the concat CPU test case names.

**Self evaluation:**
1. Build test:   [X]Passed [ ]Failed [ ]Skipped
2. Run test:     [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Niket Agarwal <niket.a@samsung.com>
niket-agarwal authored and jijoongmoon committed Aug 1, 2024
1 parent 632c68a commit 8877d6a
Showing 1 changed file with 11 additions and 9 deletions: test/unittest/layers/unittest_layers_concat_cl.cpp
@@ -24,41 +24,43 @@ auto semantic_concat_gpu = LayerSemanticsParamType(
 GTEST_PARAMETER_TEST(ConcatGPU, LayerSemanticsGpu,
                      ::testing::Values(semantic_concat_gpu));
 
-auto concat_dim3 = LayerGoldenTestParamType(
+auto concat_dim3_gpu = LayerGoldenTestParamType(
   nntrainer::createLayer<nntrainer::ConcatLayerCl>, {"axis=3"},
   "2:3:3:2,2:3:3:3", "concat_dim3.nnlayergolden",
   LayerGoldenTestParamOptions::SKIP_CALC_DERIV, "nchw", "fp32", "fp32");
 
-auto concat_dim2 = LayerGoldenTestParamType(
+auto concat_dim2_gpu = LayerGoldenTestParamType(
   nntrainer::createLayer<nntrainer::ConcatLayerCl>, {"axis=2"},
   "2:3:2:3,2:3:3:3", "concat_dim2.nnlayergolden",
   LayerGoldenTestParamOptions::SKIP_CALC_DERIV, "nchw", "fp32", "fp32");
 
-auto concat_dim1 = LayerGoldenTestParamType(
+auto concat_dim1_gpu = LayerGoldenTestParamType(
   nntrainer::createLayer<nntrainer::ConcatLayerCl>, {"axis=1"},
   "2:2:3:3,2:3:3:3", "concat_dim1.nnlayergolden",
   LayerGoldenTestParamOptions::SKIP_CALC_DERIV, "nchw", "fp32", "fp32");
 
 GTEST_PARAMETER_TEST(ConcatGPU, LayerGoldenTest,
-                     ::testing::Values(concat_dim3, concat_dim2, concat_dim1));
+                     ::testing::Values(concat_dim3_gpu, concat_dim2_gpu,
+                                       concat_dim1_gpu));
 
 #ifdef ENABLE_FP16
-auto concat_dim3_w16a16 = LayerGoldenTestParamType(
+auto concat_dim3_w16a16_gpu = LayerGoldenTestParamType(
   nntrainer::createLayer<nntrainer::ConcatLayerCl>, {"axis=3"},
   "2:3:3:2,2:3:3:3", "concat_dim3_w16a16.nnlayergolden",
   LayerGoldenTestParamOptions::SKIP_CALC_DERIV, "nchw", "fp16", "fp16");
 
-auto concat_dim2_w16a16 = LayerGoldenTestParamType(
+auto concat_dim2_w16a16_gpu = LayerGoldenTestParamType(
   nntrainer::createLayer<nntrainer::ConcatLayerCl>, {"axis=2"},
   "2:3:2:3,2:3:3:3", "concat_dim2_w16a16.nnlayergolden",
   LayerGoldenTestParamOptions::SKIP_CALC_DERIV, "nchw", "fp16", "fp16");
 
-auto concat_dim1_w16a16 = LayerGoldenTestParamType(
+auto concat_dim1_w16a16_gpu = LayerGoldenTestParamType(
   nntrainer::createLayer<nntrainer::ConcatLayerCl>, {"axis=1"},
   "2:2:3:3,2:3:3:3", "concat_dim1_w16a16.nnlayergolden",
   LayerGoldenTestParamOptions::SKIP_CALC_DERIV, "nchw", "fp16", "fp16");
 
 GTEST_PARAMETER_TEST(ConcatGPU16, LayerGoldenTest,
-                     ::testing::Values(concat_dim3_w16a16, concat_dim2_w16a16,
-                                       concat_dim1_w16a16));
+                     ::testing::Values(concat_dim3_w16a16_gpu,
+                                       concat_dim2_w16a16_gpu,
+                                       concat_dim1_w16a16_gpu));
 #endif
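
For context: the CPU and GPU concat test sources are presumably compiled into the same unittest binary, so identically named file-scope parameter variables would clash between the two files; the "_gpu" suffix keeps the GPU set distinct. Below is a minimal, self-contained GoogleTest sketch of that naming convention only — the fixture, test, and parameter names are illustrative stand-ins, not the actual nntrainer fixtures, and the real tests use LayerGoldenTestParamType with golden files (link with gtest_main to run).

// Illustrative sketch of the "_gpu" suffix convention; not nntrainer code.
#include <gtest/gtest.h>
#include <string>

class ConcatGolden : public ::testing::TestWithParam<std::string> {};

TEST_P(ConcatGolden, RunsGolden) {
  // Stand-in check; the real golden tests compare layer outputs.
  EXPECT_FALSE(GetParam().empty());
}

// CPU-side parameter value, as it might appear in the CPU test file.
auto concat_dim3 = std::string("concat_dim3.nnlayergolden");

// GPU-side value carries the "_gpu" suffix so the two file-scope globals
// stay distinct when both test sources end up in one binary.
auto concat_dim3_gpu = std::string("concat_dim3.nnlayergolden");

INSTANTIATE_TEST_SUITE_P(ConcatCPU, ConcatGolden,
                         ::testing::Values(concat_dim3));
INSTANTIATE_TEST_SUITE_P(ConcatGPU, ConcatGolden,
                         ::testing::Values(concat_dim3_gpu));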
