COMPMID-1522: Add ElementWiseOperation node in the graph API
Change-Id: Icb428bf3b5d3634fdddc57562cce670776e7f7a3
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/145814
Tested-by: Jenkins <bsgcomp@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
diff --git a/examples/graph_inception_v3.cpp b/examples/graph_inception_v3.cpp
index 168a506..80e771b 100644
--- a/examples/graph_inception_v3.cpp
+++ b/examples/graph_inception_v3.cpp
@@ -230,7 +230,7 @@
Stream graph;
private:
- BranchLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+ ConcatLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
unsigned int a_filt,
std::tuple<unsigned int, unsigned int> b_filters,
std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
@@ -355,10 +355,10 @@
.set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
<< ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
- return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+ return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
}
- BranchLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+ ConcatLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
unsigned int a_filt,
std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
{
@@ -426,10 +426,10 @@
SubStream i_c(graph);
i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
- return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
+ return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
}
- BranchLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+ ConcatLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
unsigned int a_filt,
std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
@@ -585,10 +585,10 @@
.set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
<< ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
- return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+ return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
}
- BranchLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+ ConcatLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
std::tuple<unsigned int, unsigned int> a_filters,
std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
{
@@ -684,10 +684,10 @@
SubStream i_c(graph);
i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
- return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c));
+ return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
}
- BranchLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
+ ConcatLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
unsigned int a_filt,
std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
@@ -767,7 +767,7 @@
<< ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");
// Merge b1 and b2
- i_b << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
+ i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
SubStream i_c(graph);
i_c << ConvolutionLayer(
@@ -832,7 +832,7 @@
<< ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");
// Merge i_c1 and i_c2
- i_c << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
+ i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
SubStream i_d(graph);
i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
@@ -851,7 +851,7 @@
.set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
<< ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
- return BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
+ return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
}
};