/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement Inception ResNet V2's network using the Compute Library's graph API */
class InceptionResNetV2Example final : public Example
{
public:
    InceptionResNetV2Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionResNetV2")
    {
    }
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Set default layout if needed
        if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;

        // Create model path
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v2_model/";
        if(!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object
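        // TFPreproccessor scales the raw input pixel values into the requested
        // floating point range, here [0.f, 1.f].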
        std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>(0.f, 1.f);

        // Create input descriptor
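        // The network expects 299x299x3 inputs; permute_shape re-orders the
        // NCHW-specified shape to match the chosen operating layout (e.g. NHWC).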
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
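              // No batch-norm gamma (scale) tensors are loaded for this model;
              // get_random_accessor(1.f, 1.f) supplies a constant gamma of 1 to
              // every BatchNormalizationLayer below.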
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
              // MaxPool_5a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("MaxPool_5a_3x3/MaxPool");

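        // Inception-ResNet-v2 body: Mixed_5b, 10 x block35 (Inception-ResNet-A),
        // the Mixed_6a reduction, 20 x block17 (Inception-ResNet-B), the Mixed_7a
        // reduction, then 9 scaled block8 units (Inception-ResNet-C) followed by
        // one final unscaled block8 unit without activation.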
        block_mixed_5b(data_path, weights_layout);
        block35_repeat(data_path, weights_layout, 10);
        block_mixed_6a(data_path, weights_layout);
        block17_repeat(data_path, weights_layout, 20);
        block_mixed_7a(data_path, weights_layout);
        block8_repeat(data_path, weights_layout, 9, 0.2f, true);
        block8_repeat(data_path, weights_layout, 1, 1.f, false);

        // Conv2d_7b_1x1
        graph << ConvolutionLayer(1U, 1U, 1536U,
                                  get_weights_accessor(data_path, "Conv2d_7b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_7b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_7b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_7b_1x1/Relu")
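              // Classifier head: global average pool, flatten, a 1001-way fully
              // connected layer (the TF checkpoint's 1000 ImageNet classes plus a
              // background class) and softmax; the top 5 predictions are reported.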
              << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                  1001U,
                  get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
              .set_name("Logits/Logits")
              << SoftmaxLayer().set_name("Logits/Predictions")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;
        config.mlgo_file   = common_params.mlgo_file;

        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    Stream             graph;

private:
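    // Mixed_5b: four parallel branches (1x1; 1x1 -> 5x5; 1x1 -> 3x3 -> 3x3;
    // avg-pool -> 1x1). Each SubStream forks from the current tail of the main
    // graph, so all branches share the same input before being concatenated.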
    void block_mixed_5b(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_0/Conv2d_1x1/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 48U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(5U, 5U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 2, 2))
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name("Mixed_5b/Branch_3/AvgPool_0a_3x3")
            << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/Relu");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5b/concat");
    }

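    // Mixed_6a: stride-2 grid reduction with a 3x3 conv branch, a
    // 1x1 -> 3x3 -> 3x3 branch and a max-pool branch.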
    void block_mixed_6a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
    }

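    // Mixed_7a: stride-2 grid reduction with three convolutional branches and a
    // max-pool branch.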
    void block_mixed_7a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 320U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
    }

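    // block35 (Inception-ResNet-A) units: three branches (1x1; 1x1 -> 3x3;
    // 1x1 -> 3x3 -> 3x3) are concatenated, projected to 320 channels with a 1x1
    // convolution, scaled by 0.17 through a LINEAR activation (f(x) = a*x + b)
    // and added back to the unit's input.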
    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left (transform) and right (identity shortcut) substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 48U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 64U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 320U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

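    // block17 (Inception-ResNet-B) units: a 1x1 branch and a factorised
    // 1x1 -> 1x7 -> 7x1 branch, projected to 1088 channels and scaled by 0.10
    // before the residual add.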
    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left (transform) and right (identity shortcut) substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(7U, 1U, 160U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 3, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 << ConvolutionLayer(1U, 7U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 3))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 1088U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

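    // block8 (Inception-ResNet-C) units: a 1x1 branch and a factorised
    // 1x1 -> 1x3 -> 3x1 branch, projected to 2080 channels; the caller controls
    // the residual scale and whether a trailing ReLU is applied (the final unit
    // uses neither).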
    void block8_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            std::stringstream unit_name_ss;
            if(num_blocks != 1)
            {
                unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
                unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
            }
            else
            {
                unit_path_ss << "Block8_";
                unit_name_ss << "Block8/";
            }

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left (transform) and right (identity shortcut) substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 1U, 224U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
                 << ConvolutionLayer(1U, 3U, 256U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 2080U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution");

            // Scale result
            if(scale != 1.f)
            {
                i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul");
            }

            // Residual add
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");

            // Apply activation if needed
            if(has_activation)
            {
                graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
            }
        }
    }
};

/** Main program for Inception ResNet V2
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * Provenance: download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz
 *
 * @note To list all possible arguments, run the binary with the --help option
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionResNetV2Example>(argc, argv);
}