blob: 43e31ee14bae98c553dd8a9c359472c35acba973 [file] [log] [blame]
Georgios Pinitasbe2772a2018-08-17 15:33:39 +01001/*
SiCong Li4841c972021-02-03 12:17:35 +00002 * Copyright (c) 2018-2021 Arm Limited.
Georgios Pinitasbe2772a2018-08-17 15:33:39 +01003 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
24#include "arm_compute/graph.h"
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +010025
Georgios Pinitasbe2772a2018-08-17 15:33:39 +010026#include "support/ToolchainSupport.h"
27#include "utils/CommonGraphOptions.h"
28#include "utils/GraphUtils.h"
29#include "utils/Utils.h"
30
31using namespace arm_compute::utils;
32using namespace arm_compute::graph::frontend;
33using namespace arm_compute::graph_utils;
34
/** Example demonstrating how to implement InceptionResNetV2's network using the Compute Library's graph API */
Georgios Pinitasbe2772a2018-08-17 15:33:39 +010036class InceptionResNetV2Example final : public Example
37{
38public:
    /** Default constructor: registers the common graph options with the command line parser
     *  and creates the stream the network will be built into (target id 0, named "InceptionResNetV2").
     */
    InceptionResNetV2Example() : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionResNetV2")
    {
    }
    /** Parse the command line, construct the InceptionResNetV2 graph and finalize it.
     *
     * @param[in] argc Number of command line arguments
     * @param[in] argv Command line arguments
     *
     * @return False when only the help menu was requested; true when the graph was built and finalized
     */
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if (common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Set default layout if needed: on NEON default to NCHW unless the user explicitly set a layout
        if (!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }

        // Checks: this example ships float weights only, so asymmetric-quantized types are rejected up front
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type),
                                "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;

        // Create model path (weights live under <data_path>/cnn_data/inception_resnet_v2_model/)
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v2_model/";
        if (!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object (TF-style preprocessing with min/max range 0..1)
        std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>(0.f, 1.f);

        // Create input descriptor: 299x299x3 input, shape specified in NCHW and permuted
        // to the operation layout chosen on the command line
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape =
            permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
        TensorDescriptor input_descriptor =
            TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);

        // Set weights trained layout: the .npy weight files were exported in NCHW
        const DataLayout weights_layout = DataLayout::NCHW;

        // Network stem: Conv2d_1a -> Conv2d_2a -> Conv2d_2b -> MaxPool_3a -> Conv2d_3b -> Conv2d_4a -> MaxPool_5a.
        // Each convolution is unbiased (null bias accessor) and followed by BatchNorm + ReLU.
        graph << common_params.target << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(
                     3U, 3U, 32U, get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                     .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
                     .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                     .set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(
                     3U, 3U, 32U, get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                     .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
                     .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                     .set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(
                     3U, 3U, 64U, get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
                     .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
                     .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                     .set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout,
                                               PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true))
                     .set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(
                     1U, 1U, 80U, get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                     .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
                     .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                     .set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(
                     3U, 3U, 192U, get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                     .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
                     .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                     .set_name("Conv2d_4a_3x3/Relu")
              // MaxPool_5a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0), true))
                     .set_name("MaxPool_5a_3x3/MaxPool");

        // Inception-ResNet body: Mixed_5b, 10x block35, reduction Mixed_6a, 20x block17,
        // reduction Mixed_7a, 9 scaled block8 units (scale 0.2, with activation) plus one
        // final unscaled block8 unit without activation.
        block_mixed_5b(data_path, weights_layout);
        block35_repeat(data_path, weights_layout, 10);
        block_mixed_6a(data_path, weights_layout);
        block17_repeat(data_path, weights_layout, 20);
        block_mixed_7a(data_path, weights_layout);
        block8_repeat(data_path, weights_layout, 9, 0.2f, true);
        block8_repeat(data_path, weights_layout, 1, 1.f, false);

        // Conv2d_7b_1x1 and the classification head (global average pool -> flatten -> 1001-way logits -> softmax)
        graph << ConvolutionLayer(
                     1U, 1U, 1536U, get_weights_accessor(data_path, "Conv2d_7b_1x1_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                     .set_name("Conv2d_7b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
                     .set_name("Conv2d_7b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                     .set_name("Conv2d_7b_1x1/Relu")
              << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(1001U,
                                     get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                                     get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
                     .set_name("Logits/Logits")
              << SoftmaxLayer().set_name("Logits/Predictions") << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph: forward tuner/threading options from the command line into the graph config
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;
        config.mlgo_file   = common_params.mlgo_file;

        graph.finalize(common_params.target, config);

        return true;
    }
208
    /** Run the finalized graph (one inference pass over the configured input source). */
    void do_run() override
    {
        graph.run();
    }
213
private:
    CommandLineParser  cmd_parser;    // Command line parser
    CommonGraphOptions common_opts;   // Options shared by all graph examples (target, layout, tuner, ...)
    CommonGraphParams  common_params; // Parsed values of the common options
    Stream             graph;         // Graph stream the network is built into

private:
221 void block_mixed_5b(const std::string &data_path, DataLayout weights_layout)
222 {
223 // Branch 0
224 SubStream i_a(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100225 i_a << ConvolutionLayer(
226 1U, 1U, 96U,
227 get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_weights.npy", weights_layout),
228 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
229 .set_name("Mixed_5b/Branch_0/Conv2d_1x1/convolution")
230 << BatchNormalizationLayer(
231 get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
232 get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
233 get_random_accessor(1.f, 1.f),
234 get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
235 0.0010000000474974513f)
236 .set_name("Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm")
237 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
238 .set_name("Mixed_5b/Branch_0/Conv2d_1x1/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100239
240 // Branch 1
241 SubStream i_b(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100242 i_b << ConvolutionLayer(
243 1U, 1U, 48U,
244 get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
245 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
246 .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/convolution")
247 << BatchNormalizationLayer(
248 get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
249 get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
250 get_random_accessor(1.f, 1.f),
251 get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
252 0.0010000000474974513f)
253 .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm")
254 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
255 .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/Relu")
256 << ConvolutionLayer(
257 5U, 5U, 64U,
258 get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_weights.npy", weights_layout),
259 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 2, 2))
260 .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/convolution")
261 << BatchNormalizationLayer(
262 get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_mean.npy"),
263 get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_variance.npy"),
264 get_random_accessor(1.f, 1.f),
265 get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_beta.npy"),
266 0.0010000000474974513f)
267 .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm")
268 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
269 .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100270
271 // Branch 2
272 SubStream i_c(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100273 i_c << ConvolutionLayer(
274 1U, 1U, 64U,
275 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
276 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
277 .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/convolution")
278 << BatchNormalizationLayer(
279 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
280 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
281 get_random_accessor(1.f, 1.f),
282 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
283 0.0010000000474974513f)
284 .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm")
285 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
286 .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/Relu")
287 << ConvolutionLayer(
288 3U, 3U, 96U,
289 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
290 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
291 .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/convolution")
292 << BatchNormalizationLayer(
293 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
294 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
295 get_random_accessor(1.f, 1.f),
296 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
297 0.0010000000474974513f)
298 .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm")
299 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
300 .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/Relu")
301 << ConvolutionLayer(
302 3U, 3U, 96U,
303 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
304 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
305 .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/convolution")
306 << BatchNormalizationLayer(
307 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
308 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
309 get_random_accessor(1.f, 1.f),
310 get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
311 0.0010000000474974513f)
312 .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm")
313 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
314 .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100315
316 // Branch 3
317 SubStream i_d(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100318 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout,
319 PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true))
320 .set_name("Mixed_5b/Branch_3/AvgPool_0a_3x3")
321 << ConvolutionLayer(
322 1U, 1U, 64U,
323 get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
324 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
325 .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/convolution")
326 << BatchNormalizationLayer(
327 get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
328 get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
329 get_random_accessor(1.f, 1.f),
330 get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
331 0.0010000000474974513f)
332 .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm")
333 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
334 .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100335
336 // Concatenate
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100337 graph
338 << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5a/concat");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100339 }
340
    /** Build the Mixed_6a reduction block: three parallel branches concatenated (downsamples spatially by 2).
     *
     * Branch 0: strided 3x3 conv (384).  Branch 1: 1x1 conv (256) -> 3x3 conv (256) -> strided 3x3 conv (384).
     * Branch 2: strided 3x3 max-pool (weight-free).
     * All convolutions are unbiased (null bias accessor) and followed by BatchNorm + ReLU.
     *
     * @param[in] data_path      Path to the folder containing the .npy weight files
     * @param[in] weights_layout Layout the weight files were trained/exported in
     */
    void block_mixed_6a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                   3U, 3U, 384U,
                   get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                   .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                   1U, 1U, 256U,
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 256U,
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
                   .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(
                   3U, 3U, 384U,
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                   .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(
                   PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true))
                   .set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
    }
414
    /** Build the Mixed_7a reduction block: four parallel branches concatenated (downsamples spatially by 2).
     *
     * Branch 0: 1x1 conv (256) -> strided 3x3 conv (384).
     * Branch 1: 1x1 conv (256) -> strided 3x3 conv (288).
     * Branch 2: 1x1 conv (256) -> 3x3 conv (288) -> strided 3x3 conv (320).
     * Branch 3: strided 3x3 max-pool (weight-free).
     * All convolutions are unbiased (null bias accessor) and followed by BatchNorm + ReLU.
     *
     * @param[in] data_path      Path to the folder containing the .npy weight files
     * @param[in] weights_layout Layout the weight files were trained/exported in
     */
    void block_mixed_7a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                   1U, 1U, 256U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 384U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                   .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                   1U, 1U, 256U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 288U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                   .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(
                   1U, 1U, 256U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                   .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                   3U, 3U, 288U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
                   .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(
                   3U, 3U, 320U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
                   .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
                   .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                   .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout,
                                             PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true))
                   .set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph
            << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
    }
534
535 void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
536 {
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100537 for (unsigned int i = 0; i < num_blocks; ++i)
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100538 {
539 std::stringstream unit_path_ss;
540 unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
541 std::stringstream unit_name_ss;
542 unit_name_ss << "Repeat/block35_" << (i + 1) << "/";
543
544 std::string unit_path = unit_path_ss.str();
545 std::string unit_name = unit_name_ss.str();
546
547 // Create left and write substreams
548 SubStream i_l(graph);
549 SubStream i_r(graph);
550
551 // Branch 0
552 SubStream i_la(i_l);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100553 i_la << ConvolutionLayer(
554 1U, 1U, 32U,
555 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
556 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
557 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
558 << BatchNormalizationLayer(
559 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
560 get_weights_accessor(data_path,
561 unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
562 get_random_accessor(1.f, 1.f),
563 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
564 0.0010000000474974513f)
565 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
566 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
567 .set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100568
569 // Branch 1
570 SubStream i_lb(i_l);
571 i_lb << ConvolutionLayer(1U, 1U, 32U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100572 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy",
573 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100574 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
575 PadStrideInfo(1, 1, 0, 0))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100576 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
577 << BatchNormalizationLayer(
578 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
579 get_weights_accessor(data_path,
580 unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
581 get_random_accessor(1.f, 1.f),
582 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
583 0.0010000000474974513f)
584 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
585 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
586 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100587 << ConvolutionLayer(3U, 3U, 32U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100588 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy",
589 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100590 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
591 PadStrideInfo(1, 1, 1, 1))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100592 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
593 << BatchNormalizationLayer(
594 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
595 get_weights_accessor(data_path,
596 unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
597 get_random_accessor(1.f, 1.f),
598 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
599 0.0010000000474974513f)
600 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
601 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
602 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100603
604 // Branch 2
605 SubStream i_lc(i_l);
606 i_lc << ConvolutionLayer(1U, 1U, 32U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100607 get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy",
608 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100609 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
610 PadStrideInfo(1, 1, 0, 0))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100611 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
612 << BatchNormalizationLayer(
613 get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
614 get_weights_accessor(data_path,
615 unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
616 get_random_accessor(1.f, 1.f),
617 get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
618 0.0010000000474974513f)
619 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
620 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
621 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100622 << ConvolutionLayer(3U, 3U, 48U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100623 get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy",
624 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100625 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
626 PadStrideInfo(1, 1, 1, 1))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100627 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
628 << BatchNormalizationLayer(
629 get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
630 get_weights_accessor(data_path,
631 unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
632 get_random_accessor(1.f, 1.f),
633 get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
634 0.0010000000474974513f)
635 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
636 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
637 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100638 << ConvolutionLayer(3U, 3U, 64U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100639 get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy",
640 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100641 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
642 PadStrideInfo(1, 1, 1, 1))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100643 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
644 << BatchNormalizationLayer(
645 get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
646 get_weights_accessor(data_path,
647 unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
648 get_random_accessor(1.f, 1.f),
649 get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
650 0.0010000000474974513f)
651 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
652 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
653 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100654
655 // Concatenate
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100656 i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100657 << ConvolutionLayer(
658 1U, 1U, 320U,
659 get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
660 get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
661 PadStrideInfo(1, 1, 0, 0))
662 .set_name(unit_name + "Conv2d_1x1/convolution")
663 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f))
664 .set_name(unit_name + "mul");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100665
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100666 graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100667 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
668 .set_name(unit_name + "Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100669 }
670 }
671
672 void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
673 {
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100674 for (unsigned int i = 0; i < num_blocks; ++i)
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100675 {
676 std::stringstream unit_path_ss;
677 unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
678 std::stringstream unit_name_ss;
679 unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";
680
681 std::string unit_path = unit_path_ss.str();
682 std::string unit_name = unit_name_ss.str();
683
684 // Create left and write substreams
685 SubStream i_l(graph);
686 SubStream i_r(graph);
687
688 // Branch 0
689 SubStream i_la(i_l);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100690 i_la << ConvolutionLayer(
691 1U, 1U, 192U,
692 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
693 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
694 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
695 << BatchNormalizationLayer(
696 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
697 get_weights_accessor(data_path,
698 unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
699 get_random_accessor(1.f, 1.f),
700 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
701 0.0010000000474974513f)
702 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
703 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
704 .set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100705
706 // Branch 1
707 SubStream i_lb(i_l);
708 i_lb << ConvolutionLayer(1U, 1U, 128U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100709 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy",
710 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100711 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
712 PadStrideInfo(1, 1, 0, 0))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100713 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
714 << BatchNormalizationLayer(
715 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
716 get_weights_accessor(data_path,
717 unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
718 get_random_accessor(1.f, 1.f),
719 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
720 0.0010000000474974513f)
721 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
722 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
723 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100724 << ConvolutionLayer(7U, 1U, 160U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100725 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy",
726 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100727 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
728 PadStrideInfo(1, 1, 3, 0))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100729 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
730 << BatchNormalizationLayer(
731 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
732 get_weights_accessor(data_path,
733 unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
734 get_random_accessor(1.f, 1.f),
735 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
736 0.0010000000474974513f)
737 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
738 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
739 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100740 << ConvolutionLayer(1U, 7U, 192U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100741 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy",
742 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100743 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
744 PadStrideInfo(1, 1, 0, 3))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100745 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
746 << BatchNormalizationLayer(
747 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
748 get_weights_accessor(data_path,
749 unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
750 get_random_accessor(1.f, 1.f),
751 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
752 0.0010000000474974513f)
753 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
754 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
755 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100756
757 // Concatenate
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100758 i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100759 << ConvolutionLayer(
760 1U, 1U, 1088U,
761 get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
762 get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
763 PadStrideInfo(1, 1, 0, 0))
764 .set_name(unit_name + "Conv2d_1x1/convolution")
765 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f))
766 .set_name(unit_name + "mul");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100767
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100768 graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100769 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
770 .set_name(unit_name + "Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100771 }
772 }
773
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100774 void block8_repeat(const std::string &data_path,
775 DataLayout weights_layout,
776 unsigned int num_blocks,
777 float scale,
778 bool has_activation)
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100779 {
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100780 for (unsigned int i = 0; i < num_blocks; ++i)
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100781 {
782 std::stringstream unit_path_ss;
783 std::stringstream unit_name_ss;
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100784 if (num_blocks != 1)
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100785 {
786 unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
787 unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
788 }
789 else
790 {
791 unit_path_ss << "Block8_";
792 unit_name_ss << "Block8/";
793 }
794
795 std::string unit_path = unit_path_ss.str();
796 std::string unit_name = unit_name_ss.str();
797
798 // Create left and write substreams
799 SubStream i_l(graph);
800 SubStream i_r(graph);
801
802 // Branch 0
803 SubStream i_la(i_l);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100804 i_la << ConvolutionLayer(
805 1U, 1U, 192U,
806 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
807 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
808 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
809 << BatchNormalizationLayer(
810 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
811 get_weights_accessor(data_path,
812 unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
813 get_random_accessor(1.f, 1.f),
814 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
815 0.0010000000474974513f)
816 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
817 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
818 .set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100819
820 // Branch 1
821 SubStream i_lb(i_l);
822 i_lb << ConvolutionLayer(1U, 1U, 192U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100823 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy",
824 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100825 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
826 PadStrideInfo(1, 1, 0, 0))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100827 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
828 << BatchNormalizationLayer(
829 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
830 get_weights_accessor(data_path,
831 unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
832 get_random_accessor(1.f, 1.f),
833 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
834 0.0010000000474974513f)
835 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
836 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
837 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100838 << ConvolutionLayer(3U, 1U, 224U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100839 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy",
840 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100841 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
842 PadStrideInfo(1, 1, 1, 0))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100843 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
844 << BatchNormalizationLayer(
845 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
846 get_weights_accessor(data_path,
847 unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
848 get_random_accessor(1.f, 1.f),
849 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
850 0.0010000000474974513f)
851 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
852 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
853 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100854 << ConvolutionLayer(1U, 3U, 256U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100855 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy",
856 weights_layout),
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100857 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
858 PadStrideInfo(1, 1, 0, 1))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100859 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
860 << BatchNormalizationLayer(
861 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
862 get_weights_accessor(data_path,
863 unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
864 get_random_accessor(1.f, 1.f),
865 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
866 0.0010000000474974513f)
867 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
868 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
869 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100870
871 // Concatenate
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100872 i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100873 << ConvolutionLayer(
874 1U, 1U, 2080U,
875 get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
876 get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
877 PadStrideInfo(1, 1, 0, 0))
878 .set_name(unit_name + "Conv2d_1x1/convolution");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100879
880 // Scale result
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100881 if (scale != 1.f)
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100882 {
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100883 i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f))
884 .set_name(unit_name + "mul");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100885 }
886
887 // Residual add
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100888 graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100889
890 // Apply activation if needed
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100891 if (has_activation)
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100892 {
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100893 graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
894 .set_name(unit_name + "Relu");
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100895 }
896 }
897 }
898};
899
900/** Main program for Inception ResNet V2
901 *
Georgios Pinitasbdbbbe82018-11-07 16:06:47 +0000902 * Model is based on:
903 * https://arxiv.org/abs/1602.07261
904 * "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
905 * Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
906 *
Georgios Pinitas588ebc52018-12-21 13:39:07 +0000907 * Provenance: download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz
908 *
Georgios Pinitasbe2772a2018-08-17 15:33:39 +0100909 * @note To list all the possible arguments execute the binary appended with the --help option
910 *
911 * @param[in] argc Number of arguments
912 * @param[in] argv Arguments
913 */
int main(int argc, char **argv)
{
    // Delegate argument parsing, graph construction and execution to the
    // common example runner, instantiated with the Inception-ResNet-v2 example.
    return arm_compute::utils::run_example<InceptionResNetV2Example>(argc, argv);
}