[unittest/modelfile] Fix nntrainerIniTest
Fix nntrainer IniTest cases on basic valid scenarios
- fix old params to valid values
- uncomment TCs (a short sketch of both changes follows below)

**Self evaluation:**
1. Build test:	 [X]Passed [ ]Failed [ ]Skipped
2. Run test:	 [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Donghak PARK <[email protected]>
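
A minimal sketch of the two kinds of changes, using only identifiers that appear in the diff below (IniSection, mkIniTc, nw_base_cross, sgd, act_relu, SUCCESS); it summarizes the patch rather than serving as standalone code:

// (1) Replace an out-of-range property with a valid one: the
//     batch_normalization momentum moves from 1.2 to 0.99, i.e. back
//     inside the usual 0-1 range.
static nntrainer::IniSection batch_normal("bn",
                                          "Type = batch_normalization |"
                                          "momentum = 0.99"); // initializer properties unchanged, see diff

// (2) Re-enable a previously commented-out test case by composing the
//     INI sections and expecting SUCCESS:
mkIniTc("basic_bn_p", {nw_base_cross, sgd, input + "-Activation",
                       batch_normal + "input_layers=inputlayer",
                       act_relu + "input_layers=bn",
                       out + "input_layers=activation_relu"}, SUCCESS),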
DonghakPark committed Oct 24, 2024
1 parent 410b3ba commit a6272be
Showing 1 changed file with 60 additions and 68 deletions.
128 changes: 60 additions & 68 deletions test/unittest/unittest_nntrainer_modelfile.cpp
@@ -120,70 +120,70 @@ TEST_P(nntrainerIniTest, loadConfigTwice_n) {
* @brief check given ini is failing/succeeding at init
* @todo Fix : basic3_p, basic4_p, basic_act_p
*/
// TEST_P(nntrainerIniTest, init) {
// std::cout << std::get<0>(GetParam()) << std::endl;
// int status = ML_ERROR_NONE;
// try {
// status = NN.loadFromConfig(getIniName());
// } catch (...) {
// status = ML_ERROR_INVALID_PARAMETER;
// }
TEST_P(nntrainerIniTest, init) {
std::cout << std::get<0>(GetParam()) << std::endl;
int status = ML_ERROR_NONE;
try {
status = NN.loadFromConfig(getIniName());
} catch (...) {
status = ML_ERROR_INVALID_PARAMETER;
}

// try {
// status = NN.compile();
// } catch (...) {
// status = ML_ERROR_INVALID_PARAMETER;
// }
try {
status = NN.compile();
} catch (...) {
status = ML_ERROR_INVALID_PARAMETER;
}

// if (failAtComp()) {
// EXPECT_NE(status, ML_ERROR_NONE);
// } else {
// EXPECT_EQ(status, ML_ERROR_NONE);
// }
if (failAtComp()) {
EXPECT_NE(status, ML_ERROR_NONE);
} else {
EXPECT_EQ(status, ML_ERROR_NONE);
}

// try {
// status = NN.initialize();
// } catch (...) {
// status = ML_ERROR_INVALID_PARAMETER;
// }
try {
status = NN.initialize();
} catch (...) {
status = ML_ERROR_INVALID_PARAMETER;
}

// if (failAtInit()) {
// EXPECT_NE(status, ML_ERROR_NONE);
// } else {
// EXPECT_EQ(status, ML_ERROR_NONE);
// }
// }
if (failAtInit()) {
EXPECT_NE(status, ML_ERROR_NONE);
} else {
EXPECT_EQ(status, ML_ERROR_NONE);
}
}

/**
* @brief check given ini is failing/succeeding when init happens twice.
* this should fail at all time.
* @todo Fix basic3_p, basic4_p, basic_act_p
*/
// TEST_P(nntrainerIniTest, initTwice_n) {
// std::cout << std::get<0>(GetParam()) << std::endl;
// int status = NN.loadFromConfig(getIniName());

// try {
// status = NN.compile();
// } catch (...) {
// status = ML_ERROR_INVALID_PARAMETER;
// }
TEST_P(nntrainerIniTest, initTwice_n) {
std::cout << std::get<0>(GetParam()) << std::endl;
int status = NN.loadFromConfig(getIniName());

// if (failAtComp()) {
// EXPECT_NE(status, ML_ERROR_NONE);
// } else {
// EXPECT_EQ(status, ML_ERROR_NONE);
// }
try {
status = NN.compile();
} catch (...) {
status = ML_ERROR_INVALID_PARAMETER;
}

// try {
// status = NN.initialize();
// status = NN.initialize();
// } catch (...) {
// status = ML_ERROR_INVALID_PARAMETER;
// }
if (failAtComp()) {
EXPECT_NE(status, ML_ERROR_NONE);
} else {
EXPECT_EQ(status, ML_ERROR_NONE);
}

// EXPECT_NE(status, ML_ERROR_NONE);
// }
try {
status = NN.initialize();
status = NN.initialize();
} catch (...) {
status = ML_ERROR_INVALID_PARAMETER;
}

EXPECT_NE(status, ML_ERROR_NONE);
}

/**
* @brief check given ini is failing/succeeding when init happens three times.
@@ -230,14 +230,6 @@ static nntrainer::IniSection adam("Optimizer", "Type = adam |"
static nntrainer::IniSection sgd("Optimizer", "Type = sgd |"
"Learning_rate = 1");

// static nntrainer::IniSection nw_sgd = nw_base_cross + "Optimizer = sgd |"
// "Learning_rate = 1";

// static nntrainer::IniSection nw_adam = nw_base_cross + adam;

// static nntrainer::IniSection nw_adam_n = nw_base_cross + "Learning_rate =
// -1"; static nntrainer::IniSection adam_n = adam + "Learning_rate = -1";

static nntrainer::IniSection dataset("DataSet", "BufferSize = 100 |"
"TrainData = trainingSet.dat | "
"TestData = testSet.dat |"
@@ -263,7 +255,7 @@ static nntrainer::IniSection loss_mse("loss", "Type = mse");

static nntrainer::IniSection batch_normal("bn",
"Type = batch_normalization |"
"momentum = 1.2 |"
"momentum = 0.99 |"
"moving_mean_initializer = zeros |"
"moving_variance_initializer = ones |"
"gamma_initializer = zeros |"
@@ -358,13 +350,13 @@ GTEST_PARAMETER_TEST(
/**< positive: basic valid scenarios (2 positive and 3 negative cases) */
mkIniTc("basic_p", {nw_base_mse, adam, input + "-Activation", out+"input_layers=inputlayer" + "-Activation"}, SUCCESS),
mkIniTc("basic2_p", {nw_base_mse, sgd, input + "-Activation", out+"input_layers=inputlayer" + "-Activation"}, SUCCESS),
mkIniTc("basic3_p", {nw_base + "loss=cross", adam, input + "-Activation", out+"input_layers=inputlayer" + "-Activation"}, SUCCESS),
mkIniTc("basic4_p", {nw_base + "loss=cross", adam, input + "-Activation", out+"input_layers=inputlayer" + "-Activation"}, SUCCESS),
mkIniTc("basic3_p", {nw_base + "loss=cross", adam, input + "-Activation", out+"input_layers=inputlayer"}, SUCCESS),
mkIniTc("basic4_p", {nw_base + "loss=cross", adam, input, out+"input_layers=inputlayer"}, SUCCESS),
mkIniTc("basic5_p", {nw_base_cross, adam, input, out+"input_layers=inputlayer"}, SUCCESS),
mkIniTc("basic6_p", {nw_base_cross, sgd, input, out+"input_layers=inputlayer"}, SUCCESS),
mkIniTc("basic_act_p", {nw_base_cross, sgd, input + "-Activation", act_relu+"input_layers=inputlayer", out+"input_layers=activation_relu" }, SUCCESS),
// mkIniTc("basic_bn_p", {nw_base_cross, sgd, input + "-Activation", batch_normal+"input_layers=inputlayer", act_relu+"input_layers=bn", out+"input_layers=relu" }, SUCCESS),
// mkIniTc("basic_bn2_p", {nw_base_cross, sgd, input + "-Activation", batch_normal + "Activation = relu"+"input_layers=inputlayer", out+"input_layers=bn" }, SUCCESS),
mkIniTc("basic_bn_p", {nw_base_cross, sgd, input + "-Activation", batch_normal+"input_layers=inputlayer", act_relu+"input_layers=bn", out+"input_layers=activation_relu" }, SUCCESS),
mkIniTc("basic_bn2_p", {nw_base_cross, sgd, input + "-Activation", batch_normal + "Activation = relu"+"input_layers=inputlayer", out+"input_layers=bn" }, SUCCESS),
/**
* @todo Fail on Android loadConfig Test
*/
@@ -383,9 +375,9 @@ GTEST_PARAMETER_TEST(
/**
* @todo Fail on Android loadConfig Test
*/
// mkIniTc("no_testSet_p", {nw_base_cross, adam, dataset + "-TestData", input, out+"input_layers=inputlayer"}, SUCCESS),
// mkIniTc("no_validSet_p", {nw_base_cross, adam, dataset + "-ValidData", input, out+"input_layers=inputlayer"}, SUCCESS),
// mkIniTc("no_bufferSize_p", {nw_base_cross, adam, dataset + "-BufferSize", input, out+"input_layers=inputlayer"}, SUCCESS),
mkIniTc("no_testSet_p", {nw_base_cross, adam, dataset + "-TestData", input, out+"input_layers=inputlayer"}, SUCCESS),
mkIniTc("no_validSet_p", {nw_base_cross, adam, dataset + "-ValidData", input, out+"input_layers=inputlayer"}, SUCCESS),
mkIniTc("no_bufferSize_p", {nw_base_cross, adam, dataset + "-BufferSize", input, out+"input_layers=inputlayer"}, SUCCESS),
/**
* @todo Fail at Android loadConfig Test
*/
@@ -465,7 +457,7 @@ TEST(nntrainerIniTest, backbone_01_n) {
* @brief Ini file unittest with backbone with empty backbone
*/
TEST(nntrainerIniTest, backbone_02_n) {
ScopedIni b{"base", {nw_base_cross}};
ScopedIni b{"base", {}};
ScopedIni s{"backbone_02_n", {nw_base_cross, adam, backbone_valid}};
nntrainer::NeuralNetwork NN;

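The re-enabled scenarios all run through the nntrainerIniTest parameterized suite, so they can be exercised selectively with GoogleTest's --gtest_filter option (for example --gtest_filter='*nntrainerIniTest*') on the test binary built from unittest_nntrainer_modelfile.cpp; the exact binary name and invocation depend on the project's build setup.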
