/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#ifdef ARM_COMPUTE_CL
#include "arm_compute/runtime/CL/Utils.h"
#endif /* ARM_COMPUTE_CL */
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute;
using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

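// Typical invocation (a sketch; the exact option names come from utils/CommonGraphOptions.h and may
// differ between library versions):
//   ./graph_inception_v4 --target=CL --data=/path/to/cnn_data --image=/path/to/image.ppm --labels=/path/to/labels.txt
// If no data path or image is supplied, the accessors fall back to dummy/random data, which is handy
// for exercising or benchmarking the graph structure on its own.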
/** Example demonstrating how to implement InceptionV4's network using the Compute Library's graph API */
class InceptionV4Example final : public Example
{
public:
    InceptionV4Example() : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV4")
    {
    }
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if (common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Print parameter values
        std::cout << common_params << std::endl;

        // Get trainable parameters data path
        std::string data_path = common_params.data_path;

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>();

        // Create input descriptor
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape =
            permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
        TensorDescriptor input_descriptor =
            TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);

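        // Note: InceptionV4 expects 299x299 RGB inputs; the shape above is specified in NCHW order and
        // permuted to whatever layout was requested on the command line.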
        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

        graph << common_params.target << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(
                     3U, 3U, 32U,
                     get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_weights.npy",
                                          weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                     .set_name("Conv2d_1a_3x3/Conv2D")
              << BatchNormalizationLayer(
                     get_weights_accessor(data_path,
                                          "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                     get_weights_accessor(data_path,
                                          "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                     get_random_accessor(1.f, 1.f),
                     get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                     0.001f)
                     .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                     .set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(
                     3U, 3U, 32U,
                     get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_weights.npy",
                                          weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                     .set_name("Conv2d_2a_3x3/Conv2D")
              << BatchNormalizationLayer(
                     get_weights_accessor(data_path,
                                          "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                     get_weights_accessor(data_path,
                                          "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                     get_random_accessor(1.f, 1.f),
                     get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                     0.001f)
                     .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                     .set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(
                     3U, 3U, 64U,
                     get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_weights.npy",
                                          weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
                     .set_name("Conv2d_2b_3x3/Conv2D")
              << BatchNormalizationLayer(
                     get_weights_accessor(data_path,
                                          "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                     get_weights_accessor(data_path,
                                          "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                     get_random_accessor(1.f, 1.f),
                     get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                     0.001f)
                     .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                     .set_name("Conv2d_2b_3x3/Relu");

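        // The rest of the network is assembled from the helper functions below: a stack of Inception and
        // reduction blocks, each returned as a ConcatLayer that depth-concatenates its parallel branches.
        // Note that the BatchNorm gamma is not loaded from file; get_random_accessor(1.f, 1.f) feeds a
        // constant 1.0 instead, presumably because the reference model does not use a learned scale.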
        graph << get_mixed_3a(data_path, weights_layout).set_name("Mixed_3a/concat");
        graph << get_mixed_4a(data_path, weights_layout).set_name("Mixed_4a/concat");
        graph << get_mixed_5a(data_path, weights_layout).set_name("Mixed_5a/concat");
        // 4 inception A blocks
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5b").set_name("Mixed_5b/concat");
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5c").set_name("Mixed_5c/concat");
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5d").set_name("Mixed_5d/concat");
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5e").set_name("Mixed_5e/concat");
        // reduction A block
        graph << get_reductionA_block(data_path, weights_layout).set_name("Mixed_6a/concat");
        // 7 inception B blocks
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6b").set_name("Mixed_6b/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6c").set_name("Mixed_6c/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6d").set_name("Mixed_6d/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6e").set_name("Mixed_6e/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6f").set_name("Mixed_6f/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6g").set_name("Mixed_6g/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6h").set_name("Mixed_6h/concat");
        // reduction B block
        graph << get_reductionB_block(data_path, weights_layout).set_name("Mixed_7a/concat");
        // 3 inception C blocks
        graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7b").set_name("Mixed_7b/concat");
        graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7c").set_name("Mixed_7c/concat");
        graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7d").set_name("Mixed_7d/concat");
        graph
            << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a/AvgPool")
            << FlattenLayer().set_name("Logits/Flatten")
            << FullyConnectedLayer(
                   1001U,
                   get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Logits_Logits_weights.npy",
                                        weights_layout),
                   get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Logits_Logits_biases.npy"))
                   .set_name("Logits/MatMul")
            << SoftmaxLayer().set_name("Logits/Predictions") << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
        GraphConfig config;
        config.num_threads        = common_params.threads;
        config.use_tuner          = common_params.enable_tuner;
        config.tuner_mode         = common_params.tuner_mode;
        config.tuner_file         = common_params.tuner_file;
        config.mlgo_file          = common_params.mlgo_file;
        config.use_synthetic_type = arm_compute::is_data_type_quantized(common_params.data_type);
        config.synthetic_type     = common_params.data_type;

        // Load the precompiled OpenCL kernels from a file into the kernel library so that they do not
        // have to be recompiled the next time they are needed.
        if (common_params.enable_cl_cache)
        {
#ifdef ARM_COMPUTE_CL
            restore_program_cache_from_file();
#endif /* ARM_COMPUTE_CL */
        }

        graph.finalize(common_params.target, config);

        // Save the OpenCL kernels to a file
        if (common_params.enable_cl_cache)
        {
#ifdef ARM_COMPUTE_CL
            save_program_cache_to_file();
#endif /* ARM_COMPUTE_CL */
        }

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    Stream             graph;

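// Helper functions. Each one builds a single block of the network: the parallel branches are created as
// SubStreams forked from the main Stream, filled with their convolution/batch-norm/activation chains, and
// merged back together with a ConcatLayer (depth concatenation), mirroring the InceptionV4 topology.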
215private:
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100216 ConcatLayer get_mixed_3a(const std::string &data_path, DataLayout weights_layout)
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000217 {
218 std::string total_path = "/cnn_data/inceptionv4_model/Mixed_3a_";
219
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000220 SubStream i_a(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100221 i_a << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout,
222 PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true))
223 .set_name("Mixed_3a/Branch_0/MaxPool_0a_3x3/MaxPool");
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000224
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000225 SubStream i_b(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100226 i_b << ConvolutionLayer(
227 3U, 3U, 96U,
228 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_weights.npy", weights_layout),
229 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
230 .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/Conv2D")
231 << BatchNormalizationLayer(
232 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_moving_mean.npy"),
233 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_moving_variance.npy"),
234 get_random_accessor(1.f, 1.f),
235 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_beta.npy"), 0.001f)
236 .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/BatchNorm")
237 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
238 .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/Relu");
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000239
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100240 return ConcatLayer(std::move(i_a), std::move(i_b));
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000241 }
242
    ConcatLayer get_mixed_4a(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_4a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                   1U, 1U, 64U,
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 96U,
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                   1U, 1U, 64U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   7U, 1U, 64U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
                   .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/Relu")
            << ConvolutionLayer(
                   1U, 7U, 64U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
                   .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 96U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b));
    }

    ConcatLayer get_mixed_5a(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_5a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                   3U, 3U, 192U,
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                   .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout,
                                             PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true))
                   .set_name("Mixed_5a/Branch_1/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b));
    }

    ConcatLayer get_inceptionA_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                   1U, 1U, 96U,
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                   1U, 1U, 64U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 96U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
                   .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu");

        SubStream i_c(graph);
        i_c << ConvolutionLayer(
                   1U, 1U, 64U,
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 96U,
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
                   .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(
                   3U, 3U, 96U,
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
                   .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");

        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout,
                                             PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
                   .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
            << ConvolutionLayer(
                   1U, 1U, 96U,
                   get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }

    ConcatLayer get_reductionA_block(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_6a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                   3U, 3U, 384U,
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                   .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                   1U, 1U, 192U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 224U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
                   .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(
                   3U, 3U, 256U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                   .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout,
                                             PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true))
                   .set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
    }

    ConcatLayer get_inceptionB_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                   1U, 1U, 384U,
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                   1U, 1U, 192U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   7U, 1U, 224U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
                   .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
            << ConvolutionLayer(
                   1U, 7U, 256U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
                   .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu");

        SubStream i_c(graph);
        i_c << ConvolutionLayer(
                   1U, 1U, 192U,
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   1U, 7U, 192U,
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
                   .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
            << ConvolutionLayer(
                   7U, 1U, 224U,
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
                   .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
            << ConvolutionLayer(
                   1U, 7U, 224U,
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
                   .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
            << ConvolutionLayer(
                   7U, 1U, 256U,
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
                   .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");

        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout,
                                             PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
                   .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
            << ConvolutionLayer(
                   1U, 1U, 128U,
                   get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }

    ConcatLayer get_reductionB_block(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_7a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                   1U, 1U, 192U,
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 192U,
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                   .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                   1U, 1U, 256U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   7U, 1U, 256U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
                   .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/Relu")
            << ConvolutionLayer(
                   1U, 7U, 320U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
                   .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 320U,
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                   .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"), 0.001f)
                   .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout,
                                             PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true))
                   .set_name("Mixed_7a/Branch_2/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
    }

Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100778 ConcatLayer get_inceptionC_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000779 {
780 std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";
781
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000782 SubStream i_a(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100783 i_a << ConvolutionLayer(
784 1U, 1U, 256U,
785 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
786 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
787 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D")
788 << BatchNormalizationLayer(
789 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
790 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
791 get_random_accessor(1.f, 1.f),
792 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
793 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm")
794 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
795 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000796
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000797 SubStream i_b(graph);
798 i_b << ConvolutionLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100799 1U, 1U, 384U,
800 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
801 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
802 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D")
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000803 << BatchNormalizationLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100804 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
805 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
806 get_random_accessor(1.f, 1.f),
807 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
808 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm")
809 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
810 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000811
Georgios Pinitas772e17f2018-07-13 12:25:33 +0100812 SubStream i_b1(i_b);
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000813 i_b1 << ConvolutionLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100814 3U, 1U, 256U,
815 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
816 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 0))
817 .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Conv2D")
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000818 << BatchNormalizationLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100819 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
820 get_weights_accessor(data_path,
821 total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
822 get_random_accessor(1.f, 1.f),
823 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"), 0.001f)
824 .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm")
825 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
826 .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000827
Georgios Pinitas772e17f2018-07-13 12:25:33 +0100828 SubStream i_b2(i_b);
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000829 i_b2 << ConvolutionLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100830 1U, 3U, 256U,
831 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
832 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 1))
833 .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/Conv2D")
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000834 << BatchNormalizationLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100835 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
836 get_weights_accessor(data_path,
837 total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
838 get_random_accessor(1.f, 1.f),
839 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"), 0.001f)
840 .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/BatchNorm")
841 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
842 .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/Relu");
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000843
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000844 // Merge i_b1 and i_b2
Georgios Pinitas62c36392019-01-31 12:53:10 +0000845 i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000846
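        // Branch_2: a 1x1 bottleneck followed by 3x1 and 1x3 convolutions (384 -> 448 -> 512 channels)
        // before splitting again into two asymmetric sub-branches.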
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000847 SubStream i_c(graph);
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000848 i_c << ConvolutionLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100849 1U, 1U, 384U,
850 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
851 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
852 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D")
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000853 << BatchNormalizationLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100854 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
855 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
856 get_random_accessor(1.f, 1.f),
857 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"), 0.001f)
858 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm")
859 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
860 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000861 << ConvolutionLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100862 1U, 3U, 448U,
863 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_weights.npy", weights_layout),
864 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 1))
865 .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/Conv2D")
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000866 << BatchNormalizationLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100867 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_moving_mean.npy"),
868 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_moving_variance.npy"),
869 get_random_accessor(1.f, 1.f),
870 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_beta.npy"), 0.001f)
871 .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/BatchNorm")
872 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
873 .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/Relu")
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000874 << ConvolutionLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100875 3U, 1U, 512U,
876 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout),
877 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 0))
878 .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Conv2D")
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000879 << BatchNormalizationLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100880 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
881 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
882 get_random_accessor(1.f, 1.f),
883 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"), 0.001f)
884 .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm")
885 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
886 .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000887
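        // The 512-channel output of Branch_2 is split into parallel 1x3 (i_c1) and 3x1 (i_c2) convolutions,
        // each producing 256 channels; the two results are concatenated below.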
Georgios Pinitas772e17f2018-07-13 12:25:33 +0100888 SubStream i_c1(i_c);
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000889 i_c1 << ConvolutionLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100890 3U, 1U, 256U,
891 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_weights.npy", weights_layout),
892 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 0))
893 .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/Conv2D")
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000894 << BatchNormalizationLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100895 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_moving_mean.npy"),
896 get_weights_accessor(data_path,
897 total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_moving_variance.npy"),
898 get_random_accessor(1.f, 1.f),
899 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_beta.npy"), 0.001f)
900 .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/BatchNorm")
901 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
902 .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/Relu");
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000903
Georgios Pinitas772e17f2018-07-13 12:25:33 +0100904 SubStream i_c2(i_c);
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000905 i_c2 << ConvolutionLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100906 1U, 3U, 256U,
907 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_weights.npy", weights_layout),
908 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 1))
909 .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/Conv2D")
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000910 << BatchNormalizationLayer(
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100911 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_moving_mean.npy"),
912 get_weights_accessor(data_path,
913 total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_moving_variance.npy"),
914 get_random_accessor(1.f, 1.f),
915 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_beta.npy"), 0.001f)
916 .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/BatchNorm")
917 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
918 .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/Relu");
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000919
920 // Merge i_c1 and i_c2
Georgios Pinitas62c36392019-01-31 12:53:10 +0000921 i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000922
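        // Branch_3: 3x3 average pooling with stride 1 and padding 1 (the trailing 'true' excludes the padded
        // border from the average), followed by a 1x1 projection to 256 channels.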
923 SubStream i_d(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100924 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout,
925 PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
926 .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
927 << ConvolutionLayer(
928 1U, 1U, 256U,
929 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
930 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
931 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D")
932 << BatchNormalizationLayer(
933 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
934 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
935 get_random_accessor(1.f, 1.f),
936 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"), 0.001f)
937 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm")
938 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
939 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000940
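        // Concatenate the four branch sub-streams (by default along the channel axis) to form the block output.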
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100941 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000942 }
943};
944
945/** Main program for Inception V4
946 *
Georgios Pinitasbdbbbe82018-11-07 16:06:47 +0000947 * Model is based on:
948 * https://arxiv.org/abs/1602.07261
949 * "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
950 * Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
951 *
Georgios Pinitas588ebc52018-12-21 13:39:07 +0000952 * Provenance: download.tensorflow.org/models/inception_v4_2016_09_09.tar.gz
953 *
Georgios Pinitas9f28b392018-07-18 20:01:53 +0100954 * @note To list all the possible arguments, run the binary with the --help option
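 *       For example, to print the help menu (assuming the example binary is built as graph_inception_v4):
 *       ./graph_inception_v4 --help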
955 *
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000956 * @param[in] argc Number of arguments
Georgios Pinitas12be7ab2018-07-03 12:06:23 +0100957 * @param[in] argv Arguments
Georgios Pinitas240cfa62018-02-26 19:58:04 +0000958 */
959int main(int argc, char **argv)
960{
961 return arm_compute::utils::run_example<InceptionV4Example>(argc, argv);
962}