/*
 * Copyright (c) 2018-2019 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

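// Batch normalization epsilon: 1e-3 (TensorFlow's default) written out as its
// exact float32 value so results match checkpoints trained with TensorFlow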
const float batch_norm_epsilon = 0.0010000000474974513f;

/** Example demonstrating how to implement the Inception ResNet V1 network using the Compute Library's graph API */
class InceptionResNetV1Example final : public Example
{
public:
    InceptionResNetV1Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), model_input_width(nullptr), model_input_height(nullptr), graph(0, "InceptionResNetV1")
    {
        model_input_width  = cmd_parser.add_option<SimpleOption<unsigned int>>("image-width", 512);
        model_input_height = cmd_parser.add_option<SimpleOption<unsigned int>>("image-height", 512);

        // Add help strings for the input size options
        model_input_width->set_help("Input image width.");
        model_input_height->set_help("Input image height.");
    }
    InceptionResNetV1Example(const InceptionResNetV1Example &) = delete;
    InceptionResNetV1Example &operator=(const InceptionResNetV1Example &) = delete;
    InceptionResNetV1Example(InceptionResNetV1Example &&) = default; // NOLINT
    InceptionResNetV1Example &operator=(InceptionResNetV1Example &&) = default; // NOLINT
    ~InceptionResNetV1Example() override = default;
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }
        // Get input image width and height
        const unsigned int image_width  = model_input_width->value();
        const unsigned int image_height = model_input_height->value();

        // Set default layout if needed
        if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;
        std::cout << "Image width: " << image_width << std::endl;
        std::cout << "Image height: " << image_height << std::endl;

        // Create model path
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v1_model/";
        if(!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>(0.f, 1.f);
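        // (TFPreproccessor, spelled as in the library, is assumed to rescale the
        // [0, 255] input pixels into the [0.f, 1.f] range given above.)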

        // Create input descriptor
        const TensorShape tensor_shape     = permute_shape(TensorShape(image_width, image_height, 3U, 1U), DataLayout::NCHW, common_params.data_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
              // Conv2d_4b_3x3
              << ConvolutionLayer(3U, 3U, 256U,
                                  get_weights_accessor(data_path, "Conv2d_4b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_4b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_4b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4b_3x3/Relu");

        // 5 x Inception-resnet-A
        block35_repeat(data_path, weights_layout, 5);
        // Reduction-A
        reduction_a(data_path, weights_layout);
        // 10 x Inception-Resnet-B
        block17_repeat(data_path, weights_layout, 10);
        // Reduction-B
        reduction_b(data_path, weights_layout);
        // 5 x Inception-resnet-C
        block8_repeat(data_path, weights_layout, 5, 0.2f, true);

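        // Final Block8 unit: unscaled (scale = 1) and without a trailing ReLU,
        // as in the reference implementation's last block8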
        block8_repeat(data_path, weights_layout, 1, 1.f, false);

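        // The tail below collapses each final feature map with global average
        // pooling, then produces a 128-element descriptor through a fully
        // connected layer; DummyAccessor(0) simply discards the output.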
        // Logits tail
        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                  128U,
                  get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
              .set_name("Logits/Logits")
              << OutputLayer(arm_compute::support::cpp14::make_unique<DummyAccessor>(0));

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;
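        // Note: the tuner options only take effect when the graph runs on an
        // OpenCL target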

        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    SimpleOption<unsigned int> *model_input_width{ nullptr };
    SimpleOption<unsigned int> *model_input_height{ nullptr };
    Stream graph;

private:
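    /** Appends @p num_blocks Inception-ResNet-A (block35) units to the graph.
     *
     * Each unit runs three parallel branches (1x1, 1x1+3x3, 1x1+3x3+3x3) over the
     * same input, concatenates them, projects the result back to 256 channels with
     * a 1x1 convolution, scales it by 0.17 and adds it to the unit's input.
     */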
    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 256U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");

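            // Residual scaling: the LINEAR activation above computes a * x + b with
            // a = 0.17 and b = 0, i.e. it multiplies the branch output by 0.17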
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

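    /** Appends @p num_blocks Inception-ResNet-B (block17) units to the graph.
     *
     * Each unit runs two parallel branches (1x1 and 1x1+1x7+7x1) over the same
     * input, concatenates them, projects the result back to 896 channels, scales
     * it by 0.10 and adds it to the unit's input.
     */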
    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(7U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 3, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 << ConvolutionLayer(1U, 7U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 3))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 896U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");

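            // Residual add, with the branch output scaled by 0.10 through the
            // LINEAR activation above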
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

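    /** Appends @p num_blocks Inception-ResNet-C (block8) units to the graph.
     *
     * Each unit runs two parallel branches (1x1 and 1x1+1x3+3x1) over the same
     * input, concatenates them and projects the result back to 1792 channels.
     * The branch is scaled by @p scale (skipped when it is 1) before the residual
     * add; @p has_activation controls whether a ReLU follows the add. With
     * num_blocks == 1 this emits the network's final standalone "Block8" unit.
     */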
    void block8_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            std::stringstream unit_name_ss;
            if(num_blocks != 1)
            {
                unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
                unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
            }
            else
            {
                unit_path_ss << "Block8_";
                unit_name_ss << "Block8/";
            }

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
                 << ConvolutionLayer(1U, 3U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 1792U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution");

            // Scale result
            if(scale != 1.f)
            {
                i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul");
            }

            // Residual add
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");

            // Apply activation if needed
            if(has_activation)
            {
                graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
            }
        }
    }

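    /** Reduction-A block (Mixed_6a): a stride-2 3x3 convolution, a 1x1+3x3+3x3
     *  convolution chain ending in a stride-2 3x3, and a stride-2 3x3 max pool,
     *  concatenated to halve the spatial dimensions. */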
    void reduction_a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 192U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 192U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
    }

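    /** Reduction-B block (Mixed_7a): three convolution branches, each ending in a
     *  stride-2 3x3 convolution, plus a stride-2 3x3 max pool, concatenated to
     *  halve the spatial dimensions again. */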
    void reduction_b(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
    }
};

/** Main program for Inception ResNet V1
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * @note To list all the possible arguments execute the binary appended with the --help option
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionResNetV1Example>(argc, argv);
}