/*
 * Copyright (c) 2018-2019 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement the InceptionResNetV2 network using the Compute Library's graph API */
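// Network assembly overview (this mirrors the do_setup() body below):
//   stem (Conv2d_1a .. MaxPool_5a) -> Mixed_5b -> 10 x block35 -> Mixed_6a
//   -> 20 x block17 -> Mixed_7a -> 9 x block8 (scaled, ReLU) -> block8 (unscaled, no activation)
//   -> Conv2d_7b_1x1 -> average pool -> fully connected -> softmax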
class InceptionResNetV2Example final : public Example
{
public:
    InceptionResNetV2Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionResNetV2")
    {
    }
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Set default layout if needed
        if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }
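        // Defaulting to NCHW on NEON presumably reflects the layout those kernels were
        // tuned for at the time; an explicit data-layout option from CommonGraphOptions
        // still takes precedence.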

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;

        // Create model path
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v2_model/";
        if(!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>(0.f, 1.f);
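        // The two TFPreproccessor arguments are taken here to be the min/max of the range
        // the 0-255 input pixels are rescaled to, i.e. [0, 1] for this model.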

        // Create input descriptor
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, 1U), DataLayout::NCHW, common_params.data_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

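        // Conventions used throughout this graph:
        //  - ConvolutionLayer takes (kernel_width, kernel_height, num_output_feature_maps) and
        //    PadStrideInfo takes (stride_x, stride_y, pad_x, pad_y), so e.g. a TensorFlow "1x7"
        //    convolution appears below as ConvolutionLayer(7U, 1U, ...) with pad_x = 3.
        //  - Convolutions get a nullptr bias accessor because each one is followed by batch
        //    normalisation.
        //  - get_random_accessor(1.f, 1.f) yields a constant 1, standing in for the BatchNorm
        //    gamma and effectively fixing it at 1.
        //  - 0.0010000000474974513f is the float32 representation of the 0.001 BatchNorm epsilon.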
        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
              // MaxPool_5a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("MaxPool_5a_3x3/MaxPool");

        block_mixed_5b(data_path, weights_layout);
        block35_repeat(data_path, weights_layout, 10);
        block_mixed_6a(data_path, weights_layout);
        block17_repeat(data_path, weights_layout, 20);
        block_mixed_7a(data_path, weights_layout);
        block8_repeat(data_path, weights_layout, 9, 0.2f, true);
        block8_repeat(data_path, weights_layout, 1, 1.f, false);
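        // block8 is instantiated in two calls on purpose: nine residual units scaled by 0.2
        // and followed by a ReLU, then the final Block8 with unit scale and no activation.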

        // Conv2d_7b_1x1
        graph << ConvolutionLayer(1U, 1U, 1536U,
                                  get_weights_accessor(data_path, "Conv2d_7b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_7b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_7b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_7b_1x1/Relu")
              << PoolingLayer(PoolingLayerInfo(PoolingType::AVG)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                  1001U,
                  get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
              .set_name("Logits/Logits")
              << SoftmaxLayer().set_name("Logits/Predictions")
              << OutputLayer(get_output_accessor(common_params, 5));
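        // PoolingLayerInfo(PoolingType::AVG) with no window pools over the whole 8x8 feature
        // map (global average pooling); the 1001 outputs are the 1000 ImageNet classes plus
        // TensorFlow's background class, and the output accessor prints the top-5 predictions.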

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;

        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    Stream             graph;

private:
    void block_mixed_5b(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_0/Conv2d_1x1/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 48U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(5U, 5U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 2, 2))
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name("Mixed_5b/Branch_3/AvgPool_0a_3x3")
            << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/Relu");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5b/concat");
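        // Depth concatenation of the four branches: 96 + 64 + 96 + 64 = 320 output channels.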
    }

    void block_mixed_6a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
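        // Depth concatenation: 384 (conv) + 384 (conv tower) + 320 (max-pooled input) = 1088 channels.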
    }

    void block_mixed_7a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 320U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
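        // Depth concatenation: 384 + 288 + 320 + 1088 (max-pooled input) = 2080 channels.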
    }

    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 48U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 64U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 320U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");

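            // The "mul" LINEAR activation above computes a*x + b, i.e. it scales the branch
            // output by 0.17 before the element-wise add with the untouched right substream.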
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(7U, 1U, 160U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 3, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 << ConvolutionLayer(1U, 7U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 3))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");
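            // The 1x7 followed by 7x1 pair factorizes a 7x7 convolution; concatenating the
            // two branches gives 192 + 192 = 384 channels, projected back to 1088 below.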

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 1088U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

    void block8_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            std::stringstream unit_name_ss;
            if(num_blocks != 1)
            {
                unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
                unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
            }
            else
            {
                unit_path_ss << "Block8_";
                unit_name_ss << "Block8/";
            }

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 1U, 224U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
                 << ConvolutionLayer(1U, 3U, 256U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 2080U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution");

            // Scale result
            if(scale != 1.f)
            {
                i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul");
            }
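            // The final Block8 is built with scale == 1.f, so the multiply above is skipped
            // and only the residual add (and, optionally, the ReLU) below applies.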

            // Residual add
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");

            // Apply activation if needed
            if(has_activation)
            {
                graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
            }
        }
    }
};

/** Main program for Inception ResNet V2
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * Provenance: download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz
 *
 * @note To list all the possible arguments execute the binary appended with the --help option
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionResNetV2Example>(argc, argv);
}