/*
 * Copyright (c) 2018-2020 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement the ResNet12 network using the Compute Library's graph API */
class GraphResNet12Example : public Example
{
public:
    GraphResNet12Example()
        : cmd_parser(), common_opts(cmd_parser), model_input_width(nullptr), model_input_height(nullptr), common_params(), graph(0, "ResNet12")
    {
        model_input_width  = cmd_parser.add_option<SimpleOption<unsigned int>>("image-width", 192);
        model_input_height = cmd_parser.add_option<SimpleOption<unsigned int>>("image-height", 128);

        // Set help messages for the input size options
        model_input_width->set_help("Input image width.");
        model_input_height->set_help("Input image height.");
    }
    GraphResNet12Example(const GraphResNet12Example &) = delete;
    GraphResNet12Example &operator=(const GraphResNet12Example &) = delete;
    ~GraphResNet12Example() override = default;
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Get input image width and height
        const unsigned int image_width  = model_input_width->value();
        const unsigned int image_height = model_input_height->value();

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;
        std::cout << "Image width: " << image_width << std::endl;
        std::cout << "Image height: " << image_height << std::endl;

        // Get trainable parameters data path
        const std::string data_path  = common_params.data_path;
        const std::string model_path = "/cnn_data/resnet12_model/";

        // Create a preprocessor object
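        // Note: TFPreproccessor applies TensorFlow-style preprocessing; with its default
        // constructor arguments it is expected to rescale the input pixel values into the
        // [-1, 1] range before they are fed to the network.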
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>();

        // Create input descriptor
        const TensorShape tensor_shape     = permute_shape(TensorShape(image_width, image_height, 3U, 1U), DataLayout::NCHW, common_params.data_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false /* Do not convert to BGR */))
              << ConvolutionLayer(
                  9U, 9U, 64U,
                  get_weights_accessor(data_path, "conv1_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "conv1_biases.npy", weights_layout),
                  PadStrideInfo(1, 1, 4, 4))
              .set_name("conv1/convolution")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv1/Relu");

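        // Stack four residual blocks. Each block is built by add_residual_block() below:
        // two 3x3 convolution + batch normalisation + ReLU stages on one branch, merged
        // with an identity shortcut through an element-wise addition.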
        add_residual_block(data_path, "block1", weights_layout);
        add_residual_block(data_path, "block2", weights_layout);
        add_residual_block(data_path, "block3", weights_layout);
        add_residual_block(data_path, "block4", weights_layout);

        graph << ConvolutionLayer(
                  3U, 3U, 64U,
                  get_weights_accessor(data_path, "conv10_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "conv10_biases.npy"),
                  PadStrideInfo(1, 1, 1, 1))
              .set_name("conv10/convolution")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv10/Relu")
              << ConvolutionLayer(
                  3U, 3U, 64U,
                  get_weights_accessor(data_path, "conv11_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "conv11_biases.npy"),
                  PadStrideInfo(1, 1, 1, 1))
              .set_name("conv11/convolution")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("conv11/Relu")
              << ConvolutionLayer(
                  9U, 9U, 3U,
                  get_weights_accessor(data_path, "conv12_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "conv12_biases.npy"),
                  PadStrideInfo(1, 1, 4, 4))
              .set_name("conv12/convolution")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::TANH)).set_name("conv12/Tanh")
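              // The linear activation below computes 0.58f * x + 0.5f, rescaling the tanh
              // output (in [-1, 1]) back towards the [0, 1] image range before the output layer.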
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.58f, 0.5f)).set_name("conv12/Linear")
              << OutputLayer(arm_compute::support::cpp14::make_unique<DummyAccessor>(0));

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;

        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        // Run graph
        graph.run();
    }

private:
    CommandLineParser           cmd_parser;
    CommonGraphOptions          common_opts;
    SimpleOption<unsigned int> *model_input_width{ nullptr };
    SimpleOption<unsigned int> *model_input_height{ nullptr };
    CommonGraphParams           common_params;
    Stream                      graph;

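    /** Append one residual block to the graph.
     *
     * Each block places two 3x3 convolution + batch normalisation + ReLU stages on one
     * branch and an identity shortcut on the other, then merges the two branches with an
     * element-wise addition.
     *
     * @param[in] data_path      Path to the trainable parameters (.npy files)
     * @param[in] name           Block name, used to prefix weight file names and layer names
     * @param[in] weights_layout Data layout the weights were trained with
     */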
    void add_residual_block(const std::string &data_path, const std::string &name, DataLayout weights_layout)
    {
        std::stringstream unit_path_ss;
        unit_path_ss << data_path << name << "_";
        std::stringstream unit_name_ss;
        unit_name_ss << name << "/";

        std::string unit_path = unit_path_ss.str();
        std::string unit_name = unit_name_ss.str();

        SubStream left(graph);
        SubStream right(graph);

        right << ConvolutionLayer(
                  3U, 3U, 64U,
                  get_weights_accessor(data_path, unit_path + "conv1_weights.npy", weights_layout),
                  get_weights_accessor(data_path, unit_path + "conv1_biases.npy", weights_layout),
                  PadStrideInfo(1, 1, 1, 1))
              .set_name(unit_name + "conv1/convolution")
              << BatchNormalizationLayer(
                  get_weights_accessor(data_path, unit_path + "conv1_BatchNorm_moving_mean.npy"),
                  get_weights_accessor(data_path, unit_path + "conv1_BatchNorm_moving_variance.npy"),
                  get_weights_accessor(data_path, unit_path + "conv1_BatchNorm_gamma.npy"),
                  get_weights_accessor(data_path, unit_path + "conv1_BatchNorm_beta.npy"),
                  0.0000100099996416f)
              .set_name(unit_name + "conv1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "conv1/Relu")

              << ConvolutionLayer(
                  3U, 3U, 64U,
                  get_weights_accessor(data_path, unit_path + "conv2_weights.npy", weights_layout),
                  get_weights_accessor(data_path, unit_path + "conv2_biases.npy", weights_layout),
                  PadStrideInfo(1, 1, 1, 1))
              .set_name(unit_name + "conv2/convolution")
              << BatchNormalizationLayer(
                  get_weights_accessor(data_path, unit_path + "conv2_BatchNorm_moving_mean.npy"),
                  get_weights_accessor(data_path, unit_path + "conv2_BatchNorm_moving_variance.npy"),
                  get_weights_accessor(data_path, unit_path + "conv2_BatchNorm_gamma.npy"),
                  get_weights_accessor(data_path, unit_path + "conv2_BatchNorm_beta.npy"),
                  0.0000100099996416f)
              .set_name(unit_name + "conv2/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "conv2/Relu");

        graph << EltwiseLayer(std::move(left), std::move(right), EltwiseOperation::Add).set_name(unit_name + "add");
    }
};

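/* Example invocations (a minimal sketch; the binary name below is a placeholder that
 * depends on how the examples are built):
 *
 *   ./graph_resnet12 --help
 *   ./graph_resnet12 --image-width=192 --image-height=128
 *
 * The image-width and image-height options are defined by this example; all remaining
 * options come from CommonGraphOptions.
 */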
/** Main program for ResNet12
 *
 * Model is based on:
 *      https://arxiv.org/pdf/1709.01118.pdf
 *      "WESPE: Weakly Supervised Photo Enhancer for Digital Cameras"
 *      Andrey Ignatov, Nikolay Kobyshev, Kenneth Vanhoey, Radu Timofte, Luc Van Gool
 *
 * @note To list all the possible arguments execute the binary appended with the --help option
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<GraphResNet12Example>(argc, argv);
}