/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

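// 0.001f written out to full float precision; this matches the TensorFlow
// batch-normalization default epsilon of 1e-3 used when the model was trained.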
const float batch_norm_epsilon = 0.0010000000474974513f;

/** Example demonstrating how to implement the Inception ResNet V1 network using the Compute Library's graph API */
class InceptionResNetV1Example final : public Example
{
public:
    InceptionResNetV1Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), model_input_width(nullptr), model_input_height(nullptr), graph(0, "InceptionResNetV1")
    {
        model_input_width  = cmd_parser.add_option<SimpleOption<unsigned int>>("image-width", 512);
        model_input_height = cmd_parser.add_option<SimpleOption<unsigned int>>("image-height", 512);

        // Set help messages for the input dimension options
        model_input_width->set_help("Input image width.");
        model_input_height->set_help("Input image height.");
    }
    InceptionResNetV1Example(const InceptionResNetV1Example &) = delete;
    InceptionResNetV1Example &operator=(const InceptionResNetV1Example &) = delete;
    ~InceptionResNetV1Example() override = default;
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }
        // Get input image width and height
        const unsigned int image_width  = model_input_width->value();
        const unsigned int image_height = model_input_height->value();

        // Set default layout if needed
        if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;
        std::cout << "Image width: " << image_width << std::endl;
        std::cout << "Image height: " << image_height << std::endl;

        // Create model path
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v1_model/";
        if(!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object
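        // (TFPreproccessor is the class name as spelled in the library; constructed
        // with a (0.f, 1.f) range it rescales the [0, 255] input pixels to [0, 1].)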
        std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>(0.f, 1.f);

        // Create input descriptor
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape     = permute_shape(TensorShape(image_width, image_height, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);

        // Set weights trained layout
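        // (The .npy weight files were exported in NCHW; passing weights_layout to the
        // accessors lets the graph framework permute them when running in NHWC.)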
        const DataLayout weights_layout = DataLayout::NCHW;

        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
              // Conv2d_4b_3x3
              << ConvolutionLayer(3U, 3U, 256U,
                                  get_weights_accessor(data_path, "Conv2d_4b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_4b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_4b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4b_3x3/Relu");

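        // Inception-ResNet-v1 body. The repeated blocks are named after the
        // feature-map grid they operate on for the canonical 299x299 input
        // (35x35, 17x17 and 8x8); see the paper referenced at the bottom of this file.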
        // 5 x Inception-ResNet-A
        block35_repeat(data_path, weights_layout, 5);
        // Reduction-A
        reduction_a(data_path, weights_layout);
        // 10 x Inception-ResNet-B
        block17_repeat(data_path, weights_layout, 10);
        // Reduction-B
        reduction_b(data_path, weights_layout);
        // 5 x Inception-ResNet-C
        block8_repeat(data_path, weights_layout, 5, 0.2f, true);

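        // Final Block8 unit: no residual scaling (scale = 1) and no ReLU after the
        // residual add, as in the reference model.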
        block8_repeat(data_path, weights_layout, 1, 1.f, false);

        // Logits tail
        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                  128U,
                  get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
              .set_name("Logits/Logits")
              << OutputLayer(std::make_unique<DummyAccessor>(0));

        // Finalize graph
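        // (The tuner and MLGO settings below take effect only on the OpenCL backend.)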
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;
        config.mlgo_file   = common_params.mlgo_file;

        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser           cmd_parser;
    CommonGraphOptions          common_opts;
    CommonGraphParams           common_params;
    SimpleOption<unsigned int> *model_input_width{ nullptr };
    SimpleOption<unsigned int> *model_input_height{ nullptr };
    Stream                      graph;

private:
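    /** Append @p num_blocks Inception-ResNet-A (Block35) units to the graph.
     *
     * Each unit concatenates three parallel branches, projects the result back to
     * 256 channels with a 1x1 convolution, scales it by 0.17 and adds it to the
     * unit's input (residual connection) before a final ReLU.
     */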
    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 256U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");
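            // (A LINEAR activation computes a * x + b, so (0.17f, 0.f) implements the
            // paper's residual scaling by 0.17.)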

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

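    /** Append @p num_blocks Inception-ResNet-B (Block17) units to the graph.
     *
     * Each unit concatenates a 1x1 branch with a branch of factorised 1x7 and 7x1
     * convolutions, projects the result back to 896 channels, scales it by 0.10
     * and adds it to the unit's input before a final ReLU.
     */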
    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(7U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 3, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 << ConvolutionLayer(1U, 7U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 3))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 896U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

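    /** Append @p num_blocks Inception-ResNet-C (Block8) units to the graph.
     *
     * Each unit concatenates a 1x1 branch with a branch of factorised 1x3 and 3x1
     * convolutions and projects the result back to 1792 channels. The residual is
     * scaled by @p scale (skipped when it equals 1) and the final ReLU is applied
     * only when @p has_activation is true.
     */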
    void block8_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            std::stringstream unit_name_ss;
            if(num_blocks != 1)
            {
                unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
                unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
            }
            else
            {
                unit_path_ss << "Block8_";
                unit_name_ss << "Block8/";
            }

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
                 << ConvolutionLayer(1U, 3U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 1792U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution");

            // Scale result
            if(scale != 1.f)
            {
                i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul");
            }

            // Residual add
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");

            // Apply activation if needed
            if(has_activation)
            {
                graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
            }
        }
    }

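    /** Append the Reduction-A (Mixed_6a) grid-size reduction to the graph.
     *
     * Concatenates three stride-2 branches: a 3x3 convolution, a 1x1 -> 3x3 -> 3x3
     * convolution chain and a 3x3 max pool, halving the spatial dimensions.
     */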
    void reduction_a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 192U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 192U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
    }

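    /** Append the Reduction-B (Mixed_7a) grid-size reduction to the graph.
     *
     * Concatenates four stride-2 branches: three convolution chains and a 3x3 max
     * pool, again halving the spatial dimensions.
     */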
    void reduction_b(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
    }
};

/** Main program for Inception ResNet V1
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * @note To list all possible arguments, run the binary with the --help option
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
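//
// Example invocation (the --target/--threads/--data flags come from
// CommonGraphOptions; the binary name assumes the standard examples build):
//
//   ./graph_inception_resnet_v1 --target=NEON --threads=4 \
//       --data=/path/to/assets --image-width=160 --image-height=160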
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionResNetV1Example>(argc, argv);
}