/*
 * Copyright (c) 2019-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph/mutators/SyntheticDataTypeMutator.h"

#include "arm_compute/graph/GraphBuilder.h"
#include "arm_compute/graph/ITensorAccessor.h"
#include "arm_compute/graph/Logger.h"
#include "arm_compute/graph/Utils.h"
#include "arm_compute/graph/nodes/Nodes.h"

#include "support/Cast.h"

#include <set>

namespace arm_compute
{
namespace graph
{
namespace
{
/** Empty accessor class */
class EmptyAccessor final : public graph::ITensorAccessor
{
public:
    /** Default Constructor */
    EmptyAccessor() = default;

    // Inherited methods overridden:
    bool access_tensor(ITensor &tensor) override
    {
        ARM_COMPUTE_UNUSED(tensor);
        return true;
    }
};

/** Check if the mutation pass can be applied
 *
 * @param[in] g Graph the mutation pass needs to be applied on
 *
 * @return True if the pass can be applied, false otherwise
 */
bool is_mutation_supported(Graph &g)
{
    const std::set<NodeType> unsupported_node_types = { NodeType::DetectionOutputLayer,
                                                        NodeType::NormalizationLayer,
                                                        NodeType::PriorBoxLayer
                                                      };

    for(const auto &utype : unsupported_node_types)
    {
        if(!g.nodes(utype).empty())
        {
            return false;
        }
    }
    return true;
}

/** Remove nodes that get optimized out during conversion
 *
 * @param[in, out] g Graph to remove the nodes from.
 */
void remove_optimized_nodes(Graph &g)
{
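    // Implementation note: each node of an "optimized out" type (currently only batch
    // normalization) is removed from the graph and its consumers are re-attached directly
    // to the node that produced its input, so the surrounding connectivity is preserved.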
    const std::set<NodeType> optimized_node_types = { NodeType::BatchNormalizationLayer };

    for(const auto &opt_type : optimized_node_types)
    {
        const std::vector<NodeID> opt_nodes_ids = g.nodes(opt_type);
        for(const auto &node_id : opt_nodes_ids)
        {
            INode *node = g.node(node_id);

            // Get input edge
            Edge *input_edge = node->input_edge(0);
            ARM_COMPUTE_ERROR_ON(input_edge == nullptr);

            // Get producer node
            INode       *producer         = input_edge->producer();
            const EdgeID producer_edge_id = input_edge->producer_idx();
            ARM_COMPUTE_ERROR_ON(producer == nullptr);

            // Get driving nodes
            std::vector<NodeIdxPair> driving_nodes = get_driving_nodes(*node);

            // Remove node
            g.remove_node(node->id());

            // Update connections
            for(auto &driving_node : driving_nodes)
            {
                g.add_connection(producer->id(), producer_edge_id, driving_node.node_id, driving_node.index);
            }
        }
    }
}

/** Convert tensor meta-data
 *
 * @param[in,out] g         Graph to convert tensors of.
 * @param[in]     data_type Data type to convert the tensors to.
 */
void convert_tensors(Graph &g, DataType data_type)
{
    auto &tensors = g.tensors();
    for(auto &tensor : tensors)
    {
        if(tensor != nullptr)
        {
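            // The (scale, offset) pair below is a synthetic placeholder: this mutator merely
            // forces the graph into the requested quantized data type (e.g. for benchmarking),
            // so the quantization parameters are not derived from any trained model.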
            switch(data_type)
            {
                case DataType::QASYMM8:
                case DataType::QASYMM8_SIGNED:
                {
                    tensor->desc().quant_info = QuantizationInfo(0.125f, -10);
                    break;
                }
                default:
                {
                    ARM_COMPUTE_ERROR("Unsupported mutation type");
                    break;
                }
            }
            tensor->desc().data_type = data_type;
        }
    }
}

/** Convert special node
 *
 * @param[in,out] g Graph to convert tensors of.
 * @param[in]     f Conversion function called on each node of type @p NT and its output tensor.
 */
template <typename NT>
void convert_special_node(Graph &g, std::function<bool(INode *, Tensor *)> const &f)
{
    const std::vector<NodeID> nodes_ids = g.nodes(NT::node_type);
    for(const auto &nodes_id : nodes_ids)
    {
        INode *node = arm_compute::utils::cast::polymorphic_downcast<NT *>(g.node(nodes_id));
        ARM_COMPUTE_ERROR_ON(node == nullptr);

        Tensor *output_tensor = node->output(0);
        ARM_COMPUTE_ERROR_ON(output_tensor == nullptr);

        f(node, output_tensor);
    }
}

/** Converts special tensors
 *
 * @param[in,out] g Graph to convert tensors of.
 */
void convert_special_tensors(Graph &g)
{
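    // Softmax outputs lie in [0, 1], so the fixed quantization below maps that range onto the
    // full 8-bit grid: scale 1/256 with zero-point 0 for QASYMM8 and -128 for QASYMM8_SIGNED.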
    auto softmax_func = [](INode * node, Tensor * tensor)
    {
        ARM_COMPUTE_UNUSED(node);
        if(tensor->desc().data_type == DataType::QASYMM8)
        {
            tensor->desc().quant_info = QuantizationInfo(1.f / 256.f, 0);
        }
        else if(tensor->desc().data_type == DataType::QASYMM8_SIGNED)
        {
            tensor->desc().quant_info = QuantizationInfo(1.f / 256.f, -128);
        }
        return true;
    };

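    // Bounded activations get matching fixed quantization: tanh outputs lie in [-1, 1]
    // (scale 1/128, zero-point 128 unsigned / 0 signed) and logistic outputs lie in [0, 1]
    // (scale 1/256, zero-point 0 unsigned / -128 signed).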
    auto act_func = [](INode * node, Tensor * tensor)
    {
        auto *act_node = arm_compute::utils::cast::polymorphic_downcast<ActivationLayerNode *>(node);
        if(tensor->desc().data_type == DataType::QASYMM8)
        {
            if(act_node->activation_info().activation() == ActivationLayerInfo::ActivationFunction::TANH)
            {
                tensor->desc().quant_info = QuantizationInfo(1.f / 128.f, 128);
            }
            else if(act_node->activation_info().activation() == ActivationLayerInfo::ActivationFunction::LOGISTIC)
            {
                tensor->desc().quant_info = QuantizationInfo(1.f / 256.f, 0);
            }
        }
        else if(tensor->desc().data_type == DataType::QASYMM8_SIGNED)
        {
            if(act_node->activation_info().activation() == ActivationLayerInfo::ActivationFunction::TANH)
            {
                tensor->desc().quant_info = QuantizationInfo(1.f / 128.f, 0);
            }
            else if(act_node->activation_info().activation() == ActivationLayerInfo::ActivationFunction::LOGISTIC)
            {
                tensor->desc().quant_info = QuantizationInfo(1.f / 256.f, -128);
            }
        }
        return true;
    };

    convert_special_node<ActivationLayerNode>(g, act_func);
    convert_special_node<SoftmaxLayerNode>(g, softmax_func);
}

/** Handle nodes with bias
 *
 * @note Special tensors are, for now, biases whose data type differs from the rest of the graph
 *
 * @param[in,out] g Graph to convert tensors of.
 */
void handle_nodes_with_bias(Graph &g)
{
    const std::set<NodeType> special_node_types = { NodeType::ConvolutionLayer,
                                                    NodeType::DeconvolutionLayer,
                                                    NodeType::DepthwiseConvolutionLayer,
                                                    NodeType::FullyConnectedLayer
                                                  };

    for(const auto &spc_type : special_node_types)
    {
        const std::vector<NodeID> scp_nodes_ids = g.nodes(spc_type);
        for(const auto &node_id : scp_nodes_ids)
        {
            INode *node = g.node(node_id);
            if(node != nullptr)
            {
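                // A bias that is already present (input index 2) is forced to S32, matching the
                // 32-bit integer accumulators used by quantized convolution and fully-connected
                // kernels. If no bias exists, a constant bias node is created below, sized to the
                // number of output channels taken from the weights tensor, and connected at
                // input index 2.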
                Tensor *tensor = node->input(2);
                if(tensor != nullptr)
                {
                    tensor->desc().data_type = DataType::S32;
                }
                else
                {
                    auto params = node->common_node_params();
                    params.name = params.name.empty() ? "" : params.name + "Bias";

                    TensorDescriptor b_desc = node->input(1)->desc();
                    auto             depth  = b_desc.shape[get_dimension_idx(b_desc.layout, DataLayoutDimension::BATCHES)];
                    b_desc.shape            = TensorShape(depth);

                    auto accessor = std::make_unique<EmptyAccessor>();
                    auto b_nid    = GraphBuilder::add_const_node(g, params, b_desc, std::move(accessor));
                    g.add_connection(b_nid, 0, node_id, 2);
                }
            }
        }
    }
}
} // namespace

SyntheticDataTypeMutator::SyntheticDataTypeMutator(DataType mutate_type)
    : _mutate_type{ mutate_type }
{
}

const char *SyntheticDataTypeMutator::name()
{
    return "SyntheticDataTypeMutator";
}

IGraphMutator::MutationType SyntheticDataTypeMutator::type() const
{
    return IGraphMutator::MutationType::IR;
}

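// Usage sketch (illustrative, not part of this file): the mutator is typically run through the
// graph pass infrastructure before the graph is configured, e.g.
//
//   PassManager pm;
//   pm.append(std::make_unique<SyntheticDataTypeMutator>(DataType::QASYMM8));
//   pm.run_all(graph);
//
// The PassManager calls above assume the standard arm_compute::graph pass interface.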
void SyntheticDataTypeMutator::mutate(Graph &g)
{
    if(is_mutation_supported(g))
    {
        // Remove nodes that get optimized out (e.g. BatchNorm)
        remove_optimized_nodes(g);

        // Convert tensors
        convert_tensors(g, _mutate_type);
        convert_special_tensors(g);

        // Handle special nodes
        handle_nodes_with_bias(g);
    }
    else
    {
        ARM_COMPUTE_LOG_GRAPH_VERBOSE("Synthetic data type mutator couldn't be applied" << std::endl);
    }
}
} // namespace graph
} // namespace arm_compute