blob: a54a0f78063b1b6873b8543f80c6066ed1e39c13 [file] [log] [blame]
Georgios Pinitasdacd3de2018-12-04 17:25:48 +00001/*
SiCong Li4841c972021-02-03 12:17:35 +00002 * Copyright (c) 2018-2021 Arm Limited.
Georgios Pinitasdacd3de2018-12-04 17:25:48 +00003 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
24#include "arm_compute/graph.h"
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +010025
Georgios Pinitasdacd3de2018-12-04 17:25:48 +000026#include "support/ToolchainSupport.h"
27#include "utils/CommonGraphOptions.h"
28#include "utils/GraphUtils.h"
29#include "utils/Utils.h"
30
using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

// Epsilon used by every BatchNormalizationLayer in this network.
// The literal is the exact float32 representation of 0.001 (presumably the value
// the TensorFlow model was trained with — confirm against the exported model).
const float batch_norm_epsilon = 0.0010000000474974513f;
36
37/** Example demonstrating how to implement Inception ResNet V1 network using the Compute Library's graph API */
38class InceptionResNetV1Example final : public Example
39{
40public:
41 InceptionResNetV1Example()
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +010042 : cmd_parser(),
43 common_opts(cmd_parser),
44 common_params(),
45 model_input_width(nullptr),
46 model_input_height(nullptr),
47 graph(0, "InceptionResNetV1")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +000048 {
49 model_input_width = cmd_parser.add_option<SimpleOption<unsigned int>>("image-width", 512);
50 model_input_height = cmd_parser.add_option<SimpleOption<unsigned int>>("image-height", 512);
51
52 // Add model id option
53 model_input_width->set_help("Input image width.");
54 model_input_height->set_help("Input image height.");
55 }
    /** Prevent copies: the class holds raw pointers into cmd_parser's options */
    InceptionResNetV1Example(const InceptionResNetV1Example &) = delete;
    /** Prevent copy assignment for the same reason */
    InceptionResNetV1Example &operator=(const InceptionResNetV1Example &) = delete;
    /** Default destructor */
    ~InceptionResNetV1Example() override = default;
Georgios Pinitasdacd3de2018-12-04 17:25:48 +000059 bool do_setup(int argc, char **argv) override
60 {
61 // Parse arguments
62 cmd_parser.parse(argc, argv);
Georgios Pinitascd60a5f2019-08-21 17:06:54 +010063 cmd_parser.validate();
Georgios Pinitasdacd3de2018-12-04 17:25:48 +000064
65 // Consume common parameters
66 common_params = consume_common_graph_parameters(common_opts);
67
68 // Return when help menu is requested
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +010069 if (common_params.help)
Georgios Pinitasdacd3de2018-12-04 17:25:48 +000070 {
71 cmd_parser.print_help(argv[0]);
72 return false;
73 }
74 // Get input image width and height
75 const unsigned int image_width = model_input_width->value();
76 const unsigned int image_height = model_input_height->value();
77
78 // Set default layout if needed
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +010079 if (!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
Georgios Pinitasdacd3de2018-12-04 17:25:48 +000080 {
81 common_params.data_layout = DataLayout::NCHW;
82 }
83
84 // Checks
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +010085 ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type),
86 "QASYMM8 not supported for this graph");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +000087
88 // Print parameter values
89 std::cout << common_params << std::endl;
90 std::cout << "Image width: " << image_width << std::endl;
91 std::cout << "Image height: " << image_height << std::endl;
92
93 // Create model path
94 std::string data_path = common_params.data_path;
95 std::string model_path = "/cnn_data/inception_resnet_v1_model/";
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +010096 if (!data_path.empty())
Georgios Pinitasdacd3de2018-12-04 17:25:48 +000097 {
98 data_path += model_path;
99 }
100
101 // Create a preprocessor object
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000102 std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>(0.f, 1.f);
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000103
104 // Create input descriptor
Sang-Hoon Park11fedda2020-01-15 14:44:04 +0000105 const auto operation_layout = common_params.data_layout;
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100106 const TensorShape tensor_shape = permute_shape(
107 TensorShape(image_width, image_height, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
108 TensorDescriptor input_descriptor =
109 TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000110
111 // Set weights trained layout
112 const DataLayout weights_layout = DataLayout::NCHW;
113
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100114 graph << common_params.target << common_params.fast_math_hint
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000115 << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
116 // Conv2d_1a_3x3
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100117 << ConvolutionLayer(
118 3U, 3U, 32U, get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
119 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
120 .set_name("Conv2d_1a_3x3/convolution")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000121 << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
122 get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
123 get_random_accessor(1.f, 1.f),
124 get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
125 batch_norm_epsilon)
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100126 .set_name("Conv2d_1a_3x3/BatchNorm")
127 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
128 .set_name("Conv2d_1a_3x3/Relu")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000129 // Conv2d_2a_3x3
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100130 << ConvolutionLayer(
131 3U, 3U, 32U, get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
132 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
133 .set_name("Conv2d_2a_3x3/convolution")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000134 << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
135 get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
136 get_random_accessor(1.f, 1.f),
137 get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
138 batch_norm_epsilon)
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100139 .set_name("Conv2d_2a_3x3/BatchNorm")
140 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
141 .set_name("Conv2d_2a_3x3/Relu")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000142 // Conv2d_2b_3x3
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100143 << ConvolutionLayer(
144 3U, 3U, 64U, get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
145 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
146 .set_name("Conv2d_2b_3x3/convolution")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000147 << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
148 get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
149 get_random_accessor(1.f, 1.f),
150 get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
151 batch_norm_epsilon)
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100152 .set_name("Conv2d_2b_3x3/BatchNorm")
153 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
154 .set_name("Conv2d_2b_3x3/Relu")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000155 // MaxPool_3a_3x3
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100156 << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout,
157 PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true))
158 .set_name("MaxPool_3a_3x3/MaxPool")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000159 // Conv2d_3b_1x1
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100160 << ConvolutionLayer(
161 1U, 1U, 80U, get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
162 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
163 .set_name("Conv2d_3b_1x1/convolution")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000164 << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
165 get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
166 get_random_accessor(1.f, 1.f),
167 get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
168 batch_norm_epsilon)
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100169 .set_name("Conv2d_3b_1x1/BatchNorm")
170 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
171 .set_name("Conv2d_3b_1x1/Relu")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000172 // Conv2d_4a_3x3
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100173 << ConvolutionLayer(
174 3U, 3U, 192U, get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
175 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
176 .set_name("Conv2d_4a_3x3/convolution")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000177 << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
178 get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
179 get_random_accessor(1.f, 1.f),
180 get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
181 batch_norm_epsilon)
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100182 .set_name("Conv2d_4a_3x3/BatchNorm")
183 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
184 .set_name("Conv2d_4a_3x3/Relu")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000185 // Conv2d_4b_3x3
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100186 << ConvolutionLayer(
187 3U, 3U, 256U, get_weights_accessor(data_path, "Conv2d_4b_3x3_weights.npy", weights_layout),
188 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
189 .set_name("Conv2d_4a_3x3/convolution")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000190 << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_mean.npy"),
191 get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_variance.npy"),
192 get_random_accessor(1.f, 1.f),
193 get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_beta.npy"),
194 batch_norm_epsilon)
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100195 .set_name("Conv2d_4b_3x3/BatchNorm")
196 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
197 .set_name("Conv2d_4b_3x3/Relu");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000198
199 // 5 x Inception-resnet-A
200 block35_repeat(data_path, weights_layout, 5);
201 // Reduction-A
202 reduction_a(data_path, weights_layout);
203 // 10 x Inception-Resnet-B
204 block17_repeat(data_path, weights_layout, 10);
205 // Reduction-B
206 reduction_b(data_path, weights_layout);
207 // 5 x Inception-resnet-C
208 block8_repeat(data_path, weights_layout, 5, 0.2f, true);
209
210 block8_repeat(data_path, weights_layout, 1, 1.f, false);
211
212 // Logits tail
Sang-Hoon Park11fedda2020-01-15 14:44:04 +0000213 graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a_8x8")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000214 << FlattenLayer().set_name("Logits/Flatten")
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100215 << FullyConnectedLayer(128U, get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
216 get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
217 .set_name("Logits/Logits")
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000218 << OutputLayer(std::make_unique<DummyAccessor>(0));
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000219
220 // Finalize graph
221 GraphConfig config;
222 config.num_threads = common_params.threads;
223 config.use_tuner = common_params.enable_tuner;
Vidhya Sudhan Loganathan050471e2019-04-25 09:27:24 +0100224 config.tuner_mode = common_params.tuner_mode;
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000225 config.tuner_file = common_params.tuner_file;
SiCong Li4841c972021-02-03 12:17:35 +0000226 config.mlgo_file = common_params.mlgo_file;
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000227
228 graph.finalize(common_params.target, config);
229
230 return true;
231 }
232
    /** Run the finalized graph once */
    void do_run() override
    {
        graph.run();
    }
237
private:
    CommandLineParser cmd_parser;   // Parses the example's command line options
    CommonGraphOptions common_opts; // Options shared by all graph examples (registered on cmd_parser)
    CommonGraphParams common_params; // Values consumed from common_opts in do_setup()
    // Pointers returned by cmd_parser.add_option() in the constructor; not owned here
    SimpleOption<unsigned int> *model_input_width{nullptr};  // "image-width" option
    SimpleOption<unsigned int> *model_input_height{nullptr}; // "image-height" option
    Stream graph; // Main graph stream the whole network is built into

private:
    /** Append @p num_blocks Inception-ResNet-A ("block35") residual units to the main graph stream.
     *
     * Each unit splits into three convolutional branches plus an identity branch,
     * concatenates the branches, projects back to 256 channels, scales the result
     * by 0.17 (LINEAR activation) and adds it onto the identity branch, then ReLUs.
     *
     * @param[in] data_path      Path to the folder containing the trained .npy weight files
     * @param[in] weights_layout Data layout the weights were trained in
     * @param[in] num_blocks     Number of block35 units to append
     */
    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for (unsigned int i = 0; i < num_blocks; ++i)
        {
            // Per-unit weight-file prefix and node-name prefix (units are 1-indexed)
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams: i_l carries the convolutional
            // branches, i_r is the untouched identity (residual) path
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0: 1x1 conv, 32 filters
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(
                        1U, 1U, 32U,
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                        .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path,
                                             unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                        batch_norm_epsilon)
                        .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                        .set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1: 1x1 conv then 3x3 conv, 32 filters each
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy",
                                                          weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                        .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path,
                                             unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                        batch_norm_epsilon)
                        .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                        .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy",
                                                          weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                        .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path,
                                             unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                        batch_norm_epsilon)
                        .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                        .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2: 1x1 conv then two 3x3 convs, 32 filters each
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy",
                                                          weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                        .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path,
                                             unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                        batch_norm_epsilon)
                        .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                        .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy",
                                                          weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                        .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path,
                                             unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                        batch_norm_epsilon)
                        .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                        .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy",
                                                          weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                        .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path,
                                             unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                        batch_norm_epsilon)
                        .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                        .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate the three branches and project back to 256 channels;
            // the LINEAR activation (a=0.17, b=0) implements the residual scaling
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(
                       1U, 1U, 256U,
                       get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                       get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                       PadStrideInfo(1, 1, 0, 0))
                       .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f))
                       .set_name(unit_name + "mul");

            // Residual add followed by ReLU
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                         .set_name(unit_name + "Relu");
        }
    }
383
    /** Append @p num_blocks Inception-ResNet-B ("block17") residual units to the main graph stream.
     *
     * Each unit has a 1x1 branch and a 1x1 -> 1x7 -> 7x1 branch, concatenates them,
     * projects back to 896 channels, scales the result by 0.10 (LINEAR activation)
     * and adds it onto the identity branch, then ReLUs.
     *
     * @param[in] data_path      Path to the folder containing the trained .npy weight files
     * @param[in] weights_layout Data layout the weights were trained in
     * @param[in] num_blocks     Number of block17 units to append
     */
    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for (unsigned int i = 0; i < num_blocks; ++i)
        {
            // Per-unit weight-file prefix and node-name prefix (units are 1-indexed)
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams: i_l carries the convolutional
            // branches, i_r is the untouched identity (residual) path
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0: 1x1 conv, 128 filters
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(
                        1U, 1U, 128U,
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
                        .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path,
                                             unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                        batch_norm_epsilon)
                        .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                        .set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1: 1x1 conv, then factorized 1x7 and 7x1 convs, 128 filters each
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy",
                                                          weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                        .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path,
                                             unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                        batch_norm_epsilon)
                        .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                        .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 // 1x7 conv: kernel width 7, height 1, horizontal padding 3
                 << ConvolutionLayer(7U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy",
                                                          weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 3, 0))
                        .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path,
                                             unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                        batch_norm_epsilon)
                        .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                        .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 // 7x1 conv: kernel width 1, height 7, vertical padding 3
                 << ConvolutionLayer(1U, 7U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy",
                                                          weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 3))
                        .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path,
                                             unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                        batch_norm_epsilon)
                        .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                        .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");

            // Concatenate the two branches and project back to 896 channels;
            // the LINEAR activation (a=0.10, b=0) implements the residual scaling
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(
                       1U, 1U, 896U,
                       get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                       get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                       PadStrideInfo(1, 1, 0, 0))
                       .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f))
                       .set_name(unit_name + "mul");

            // Residual add followed by ReLU
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
                         .set_name(unit_name + "Relu");
        }
    }
485
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100486 void block8_repeat(const std::string &data_path,
487 DataLayout weights_layout,
488 unsigned int num_blocks,
489 float scale,
490 bool has_activation)
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000491 {
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100492 for (unsigned int i = 0; i < num_blocks; ++i)
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000493 {
494 std::stringstream unit_path_ss;
495 std::stringstream unit_name_ss;
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100496 if (num_blocks != 1)
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000497 {
498 unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
499 unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
500 }
501 else
502 {
503 unit_path_ss << "Block8_";
504 unit_name_ss << "Block8/";
505 }
506
507 std::string unit_path = unit_path_ss.str();
508 std::string unit_name = unit_name_ss.str();
509
510 // Create left and write substreams
511 SubStream i_l(graph);
512 SubStream i_r(graph);
513
514 // Branch 0
515 SubStream i_la(i_l);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100516 i_la << ConvolutionLayer(
517 1U, 1U, 192U,
518 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
519 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
520 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
521 << BatchNormalizationLayer(
522 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
523 get_weights_accessor(data_path,
524 unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
525 get_random_accessor(1.f, 1.f),
526 get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
527 batch_norm_epsilon)
528 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
529 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
530 .set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000531
532 // Branch 1
533 SubStream i_lb(i_l);
534 i_lb << ConvolutionLayer(1U, 1U, 192U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100535 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy",
536 weights_layout),
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000537 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
538 PadStrideInfo(1, 1, 0, 0))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100539 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
540 << BatchNormalizationLayer(
541 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
542 get_weights_accessor(data_path,
543 unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
544 get_random_accessor(1.f, 1.f),
545 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
546 batch_norm_epsilon)
547 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
548 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
549 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000550 << ConvolutionLayer(3U, 1U, 192U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100551 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy",
552 weights_layout),
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000553 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
554 PadStrideInfo(1, 1, 1, 0))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100555 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
556 << BatchNormalizationLayer(
557 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
558 get_weights_accessor(data_path,
559 unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
560 get_random_accessor(1.f, 1.f),
561 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
562 batch_norm_epsilon)
563 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
564 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
565 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000566 << ConvolutionLayer(1U, 3U, 192U,
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100567 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy",
568 weights_layout),
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000569 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
570 PadStrideInfo(1, 1, 0, 1))
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100571 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
572 << BatchNormalizationLayer(
573 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
574 get_weights_accessor(data_path,
575 unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
576 get_random_accessor(1.f, 1.f),
577 get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
578 batch_norm_epsilon)
579 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
580 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
581 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000582
583 // Concatenate
584 i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100585 << ConvolutionLayer(
586 1U, 1U, 1792U,
587 get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
588 get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
589 PadStrideInfo(1, 1, 0, 0))
590 .set_name(unit_name + "Conv2d_1x1/convolution");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000591
592 // Scale result
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100593 if (scale != 1.f)
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000594 {
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100595 i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f))
596 .set_name(unit_name + "mul");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000597 }
598
599 // Residual add
600 graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");
601
602 // Apply activation if needed
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100603 if (has_activation)
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000604 {
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100605 graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
606 .set_name(unit_name + "Relu");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000607 }
608 }
609 }
610
611 void reduction_a(const std::string &data_path, DataLayout weights_layout)
612 {
613 // Branch 0
614 SubStream i_a(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100615 i_a << ConvolutionLayer(
616 3U, 3U, 384U,
617 get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
618 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
619 .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
620 << BatchNormalizationLayer(
621 get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
622 get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
623 get_random_accessor(1.f, 1.f),
624 get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
625 batch_norm_epsilon)
626 .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
627 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
628 .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000629
630 // Branch 1
631 SubStream i_b(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100632 i_b << ConvolutionLayer(
633 1U, 1U, 192U,
634 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
635 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
636 .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
637 << BatchNormalizationLayer(
638 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
639 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
640 get_random_accessor(1.f, 1.f),
641 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
642 batch_norm_epsilon)
643 .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
644 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
645 .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
646 << ConvolutionLayer(
647 3U, 3U, 192U,
648 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
649 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
650 .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
651 << BatchNormalizationLayer(
652 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
653 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
654 get_random_accessor(1.f, 1.f),
655 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
656 batch_norm_epsilon)
657 .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
658 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
659 .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
660 << ConvolutionLayer(
661 3U, 3U, 256U,
662 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
663 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
664 .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
665 << BatchNormalizationLayer(
666 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
667 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
668 get_random_accessor(1.f, 1.f),
669 get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
670 batch_norm_epsilon)
671 .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
672 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
673 .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000674
675 // Branch 2
676 SubStream i_c(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100677 i_c << PoolingLayer(
678 PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true))
679 .set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000680
681 // Concatenate
682 graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
683 }
684
685 void reduction_b(const std::string &data_path, DataLayout weights_layout)
686 {
687 // Branch 0
688 SubStream i_a(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100689 i_a << ConvolutionLayer(
690 1U, 1U, 256U,
691 get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
692 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
693 .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
694 << BatchNormalizationLayer(
695 get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
696 get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
697 get_random_accessor(1.f, 1.f),
698 get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
699 batch_norm_epsilon)
700 .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
701 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
702 .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
703 << ConvolutionLayer(
704 3U, 3U, 384U,
705 get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
706 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
707 .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
708 << BatchNormalizationLayer(
709 get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
710 get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
711 get_random_accessor(1.f, 1.f),
712 get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
713 batch_norm_epsilon)
714 .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
715 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
716 .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000717
718 // Branch 1
719 SubStream i_b(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100720 i_b << ConvolutionLayer(
721 1U, 1U, 256U,
722 get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
723 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
724 .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
725 << BatchNormalizationLayer(
726 get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
727 get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
728 get_random_accessor(1.f, 1.f),
729 get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
730 batch_norm_epsilon)
731 .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
732 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
733 .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
734 << ConvolutionLayer(
735 3U, 3U, 256U,
736 get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
737 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
738 .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
739 << BatchNormalizationLayer(
740 get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
741 get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
742 get_random_accessor(1.f, 1.f),
743 get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
744 batch_norm_epsilon)
745 .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
746 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
747 .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000748
749 // Branch 2
750 SubStream i_c(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100751 i_c << ConvolutionLayer(
752 1U, 1U, 256U,
753 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
754 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
755 .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
756 << BatchNormalizationLayer(
757 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
758 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
759 get_random_accessor(1.f, 1.f),
760 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
761 batch_norm_epsilon)
762 .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
763 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
764 .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
765 << ConvolutionLayer(
766 3U, 3U, 256U,
767 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
768 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
769 .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
770 << BatchNormalizationLayer(
771 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
772 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
773 get_random_accessor(1.f, 1.f),
774 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
775 batch_norm_epsilon)
776 .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
777 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
778 .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
779 << ConvolutionLayer(
780 3U, 3U, 256U,
781 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
782 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
783 .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
784 << BatchNormalizationLayer(
785 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
786 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
787 get_random_accessor(1.f, 1.f),
788 get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
789 batch_norm_epsilon)
790 .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
791 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
792 .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000793
794 // Branch 3
795 SubStream i_d(graph);
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100796 i_d << PoolingLayer(
797 PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true))
798 .set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000799
800 // Concatenate
Felix Thomasmathibalanafd38f02023-09-27 17:46:17 +0100801 graph
802 << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
Georgios Pinitasdacd3de2018-12-04 17:25:48 +0000803 }
804};
805
806/** Main program for Inception ResNet V1
807 *
808 * Model is based on:
809 * https://arxiv.org/abs/1602.07261
810 * "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
811 * Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
812 *
813 * @note To list all the possible arguments execute the binary appended with the --help option
814 *
815 * @param[in] argc Number of arguments
816 * @param[in] argv Arguments
817 */
int main(int argc, char **argv)
{
    // Delegate argument parsing, graph construction and execution to the common example runner
    return arm_compute::utils::run_example<InceptionResNetV1Example>(argc, argv);
}