[Layer] add "subtract layer"
- added "subtract layer"
- added a "model unit test" for sub layer.

Many people gave great feedback, so I've improved the structure accordingly:
- A base class called "OperationLayer" was added to reduce redundant code.
- Based on the number of input tensors, "OperationLayer" is specialized into two variants, unary and binary operations (see the sketch after this list).
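For reference, the refactor could be factored roughly as below. operation_layer.h is not among the hunks shown on this page, so the class and member names in this sketch are assumptions inferred from subtract_layer.h/.cpp in this commit, not the actual implementation:

#include <layer_devel.h>
#include <layer_context.h>
#include <tensor.h>

namespace nntrainer {

// Hypothetical sketch of the shared base for elementwise operation layers.
class OperationLayer : public Layer {};

// Binary variant: derived layers implement only the elementwise math,
// while the base supplies the common two-input forwarding plumbing.
// A UnaryOperationLayer analog would do the same over a single input.
class BinaryOperationLayer : public OperationLayer {
public:
  void forwarding(RunLayerContext &context, bool training) override {
    const Tensor &input0 = context.getInput(0);
    const Tensor &input1 = context.getInput(1);
    Tensor &hidden = context.getOutput(0);
    forwarding_operation(input0, input1, hidden);
  }

  // Each concrete layer (add, subtract, ...) supplies this one function.
  virtual void forwarding_operation(const Tensor &input0, const Tensor &input1,
                                    Tensor &hidden) = 0;
};

} // namespace nntrainer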

**Self evaluation:**
1. Build test:   [X]Passed [ ]Failed [ ]Skipped
2. Run test:     [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Seungbaek Hong <[email protected]>
baek2sm authored and jijoongmoon committed Nov 11, 2024
1 parent eb7cf07 commit 6a06ec6
Showing 11 changed files with 441 additions and 141 deletions.
15 changes: 12 additions & 3 deletions api/ccapi/include/layer.h
@@ -35,9 +35,10 @@ namespace train {
* @brief Enumeration of layer type
*/
enum LayerType {
-  LAYER_IN = ML_TRAIN_LAYER_TYPE_INPUT,     /**< Input Layer type */
-  LAYER_WEIGHT = ML_TRAIN_LAYER_TYPE_WEIGHT, /**< Weight Layer type */
-  LAYER_ADD = ML_TRAIN_LAYER_TYPE_ADD,       /**< Add Layer type */
+  LAYER_IN = ML_TRAIN_LAYER_TYPE_INPUT,           /**< Input Layer type */
+  LAYER_WEIGHT = ML_TRAIN_LAYER_TYPE_WEIGHT,      /**< Weight Layer type */
+  LAYER_ADD = ML_TRAIN_LAYER_TYPE_ADD,            /**< Add Layer type */
+  LAYER_SUBTRACT = ML_TRAIN_LAYER_TYPE_SUBTRACT,  /**< Subtract Layer type */
LAYER_FC = ML_TRAIN_LAYER_TYPE_FC, /**< Fully Connected Layer type */
LAYER_SWIGLU = ML_TRAIN_LAYER_TYPE_SWIGLU, /**< Swiglu Layer type */
LAYER_BN = ML_TRAIN_LAYER_TYPE_BN, /**< Batch Normalization Layer type */
@@ -314,6 +315,14 @@ AddLayer(const std::vector<std::string> &properties = {}) {
return createLayer(LayerType::LAYER_ADD, properties);
}

/**
* @brief Helper function to create subtract layer
*/
inline std::unique_ptr<Layer>
SubtractLayer(const std::vector<std::string> &properties = {}) {
return createLayer(LayerType::LAYER_SUBTRACT, properties);
}

/**
* @brief Helper function to create fully connected layer
*/
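As with the existing AddLayer helper, the new SubtractLayer helper can be called directly from the ccapi. A minimal usage sketch (the "name=sub0" property is illustrative, not taken from this commit):

#include <layer.h>
#include <memory>

int main() {
  // Create a subtract layer through the new helper; properties are optional.
  std::unique_ptr<ml::train::Layer> sub =
    ml::train::layer::SubtractLayer({"name=sub0"});
  // The type string registered for this layer should be "subtract".
  return sub->getType() == "subtract" ? 0 : 1;
}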
1 change: 1 addition & 0 deletions api/nntrainer-api-common.h
@@ -66,6 +66,7 @@ typedef enum {
ML_TRAIN_LAYER_TYPE_SWIGLU = 30, /**< Swiglu Layer type */
ML_TRAIN_LAYER_TYPE_WEIGHT = 31, /**< Weight Layer type (Since 9.0)*/
ML_TRAIN_LAYER_TYPE_ADD = 32, /**< Add Layer type (Since 9.0)*/
ML_TRAIN_LAYER_TYPE_SUBTRACT = 33, /**< Subtract Layer type (Since 9.0)*/
ML_TRAIN_LAYER_TYPE_PREPROCESS_FLIP =
300, /**< Preprocess flip Layer (Since 6.5) */
ML_TRAIN_LAYER_TYPE_PREPROCESS_TRANSLATE =
3 changes: 3 additions & 0 deletions nntrainer/app_context.cpp
@@ -74,6 +74,7 @@
#include <rnn.h>
#include <rnncell.h>
#include <split_layer.h>
#include <subtract_layer.h>
#include <time_dist.h>
#include <upsample2d_layer.h>
#include <weight_layer.h>
@@ -256,6 +257,8 @@ static void add_default_object(AppContext &ac) {
LayerType::LAYER_WEIGHT);
ac.registerFactory(nntrainer::createLayer<AddLayer>, AddLayer::type,
LayerType::LAYER_ADD);
ac.registerFactory(nntrainer::createLayer<SubtractLayer>, SubtractLayer::type,
LayerType::LAYER_SUBTRACT);
ac.registerFactory(nntrainer::createLayer<FullyConnectedLayer>,
FullyConnectedLayer::type, LayerType::LAYER_FC);
ac.registerFactory(nntrainer::createLayer<BatchNormalizationLayer>,
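Once registered here, the layer is also reachable by its type string through the string-based factory (a sketch, assuming the createLayer(const std::string &, ...) overload declared in api/ccapi/include/layer.h):

// Resolve "subtract" via the factory registration above.
auto sub = ml::train::createLayer("subtract");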
1 change: 1 addition & 0 deletions nntrainer/layers/meson.build
@@ -6,6 +6,7 @@ layer_sources = [
'activation_layer.cpp',
'weight_layer.cpp',
'add_layer.cpp',
'subtract_layer.cpp',
'addition_layer.cpp',
'attention_layer.cpp',
'mol_attention_layer.cpp',
49 changes: 49 additions & 0 deletions nntrainer/layers/subtract_layer.cpp
@@ -0,0 +1,49 @@
// SPDX-License-Identifier: Apache-2.0
/**
* Copyright (C) 2024 SeungBaek Hong <[email protected]>
*
* @file subtract_layer.cpp
* @date 10 Oct 2024
* @see https://github.com/nnstreamer/nntrainer
* @author SeungBaek Hong <[email protected]>
* @bug No known bugs except for NYI items
* @brief This is subtract layer class (operation layer)
*
*/

#include <nntrainer_error.h>
#include <nntrainer_log.h>
#include <node_exporter.h>
#include <subtract_layer.h>
#include <util_func.h>

#include <layer_context.h>

namespace nntrainer {

void SubtractLayer::finalize(InitLayerContext &context) {
context.setOutputDimensions({context.getInputDimensions()[0]});
}

void SubtractLayer::forwarding_operation(const Tensor &input0,
const Tensor &input1, Tensor &hidden) {
input0.subtract(input1, hidden);
}

void SubtractLayer::calcDerivative(RunLayerContext &context) {
context.getOutgoingDerivative(0).copy(
context.getIncomingDerivative(SINGLE_INOUT_IDX));

context.getOutgoingDerivative(1).copy(
context.getIncomingDerivative(SINGLE_INOUT_IDX).multiply(-1));
}

void SubtractLayer::setProperty(const std::vector<std::string> &values) {
auto remain_props = loadProperties(values, subtract_props);
if (!remain_props.empty()) {
std::string msg = "[SubtractLayer] Unknown Layer Properties count " +
std::to_string(remain_props.size());
throw exception::not_supported(msg);
}
}
} /* namespace nntrainer */
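The derivative pass above is exactly the chain rule for elementwise subtraction: for $y = x_0 - x_1$,

$$\frac{\partial L}{\partial x_0} = \frac{\partial L}{\partial y}, \qquad \frac{\partial L}{\partial x_1} = -\frac{\partial L}{\partial y},$$

which is why the first outgoing derivative is a plain copy of the incoming derivative and the second is its negation (the multiply(-1) call).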
102 changes: 102 additions & 0 deletions nntrainer/layers/subtract_layer.h
@@ -0,0 +1,102 @@
// SPDX-License-Identifier: Apache-2.0
/**
* Copyright (C) 2024 SeungBaek Hong <[email protected]>
*
* @file subtract_layer.h
* @date 10 Oct 2024
* @see https://github.com/nnstreamer/nntrainer
* @author SeungBaek Hong <[email protected]>
* @bug No known bugs except for NYI items
* @brief This is subtract layer class (operation layer)
*
*/

#ifndef __SUBTRACT_LAYER_H__
#define __SUBTRACT_LAYER_H__
#ifdef __cplusplus

#include <common_properties.h>
#include <layer_devel.h>
#include <operation_layer.h>

namespace nntrainer {

/**
* @class Subtract Layer
* @brief Subtract Layer
*/
class SubtractLayer : public BinaryOperationLayer {
public:
/**
* @brief Constructor of Subtract Layer
*/
SubtractLayer() : BinaryOperationLayer(), subtract_props(props::Print()) {}

/**
* @brief Destructor of Subtract Layer
*/
~SubtractLayer(){};

/**
* @brief Move constructor of Subtract Layer.
* @param[in] SubtractLayer &&
*/
SubtractLayer(SubtractLayer &&rhs) noexcept = default;

/**
* @brief Move assignment operator.
* @param[in] rhs SubtractLayer to be moved.
*/
SubtractLayer &operator=(SubtractLayer &&rhs) = default;

/**
* @copydoc Layer::finalize(InitLayerContext &context)
*/
void finalize(InitLayerContext &context) final;

/**
* @brief forwarding operation for subtract
*
* @param input0 input tensor 0
* @param input1 input tensor 1
* @param hidden tensor to store the result of subtraction
*/
void forwarding_operation(const Tensor &input0, const Tensor &input1,
Tensor &hidden) final;

/**
* @copydoc Layer::calcDerivative(RunLayerContext &context)
*/
void calcDerivative(RunLayerContext &context) final;

/**
* @copydoc bool supportBackwarding() const
*/
bool supportBackwarding() const final { return true; };

/**
* @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
* method)
*/
void exportTo(Exporter &exporter,
const ml::train::ExportMethods &method) const final {}

/**
* @copydoc Layer::setProperty(const std::vector<std::string> &values)
*/
void setProperty(const std::vector<std::string> &values) final;

/**
* @copydoc Layer::getType()
*/
const std::string getType() const final { return SubtractLayer::type; };

std::tuple<props::Print> subtract_props; /**< subtract layer properties */

inline static const std::string type = "subtract";
};

} // namespace nntrainer

#endif /* __cplusplus */
#endif /* __SUBTRACT_LAYER_H__ */
3 changes: 3 additions & 0 deletions test/ccapi/unittest_ccapi.cpp
@@ -67,6 +67,9 @@ TEST(ccapi_layer, construct_02_p) {
EXPECT_NO_THROW(layer = ml::train::layer::AddLayer());
EXPECT_EQ(layer->getType(), "add");

EXPECT_NO_THROW(layer = ml::train::layer::SubtractLayer());
EXPECT_EQ(layer->getType(), "subtract");

EXPECT_NO_THROW(layer = ml::train::layer::FullyConnected());
EXPECT_EQ(layer->getType(), "fully_connected");

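The model unit test mentioned in the commit message is among the files not expanded on this page. A minimal tensor-level check of the same forwarding math might look like the sketch below (assumes nntrainer's Tensor(batch, channel, height, width) constructor and the setValue/getValue helpers; not code from this commit):

#include <gtest/gtest.h>
#include <tensor.h>

// Sketch: verify the elementwise math behind SubtractLayer::forwarding_operation.
TEST(subtract_layer, forwarding_math_p) {
  nntrainer::Tensor a(1, 1, 1, 4), b(1, 1, 1, 4), out(1, 1, 1, 4);
  a.setValue(3.0f);   // fill every element of a with 3
  b.setValue(1.0f);   // fill every element of b with 1
  a.subtract(b, out); // out = a - b, as in the layer's forward pass
  for (unsigned int i = 0; i < 4; ++i)
    EXPECT_FLOAT_EQ(out.getValue(0, 0, 0, i), 2.0f);
}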