//
// Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <armnn/backends/OptimizationViews.hpp>
#include <armnn/utility/Assert.hpp>

#include <aclCommon/ArmComputeUtils.hpp>
namespace armnn
{

namespace
{

19//
20// this helper only works if all layers where the inputs connect to are not selected
21//
22SubgraphView::InputSlots CreateInputsFrom(const std::vector<Layer*>& layers)
23{
24 SubgraphView::InputSlots result;
25 for (auto&& layer : layers)
26 {
27 for (auto&& it = layer->BeginInputSlots(); it != layer->EndInputSlots(); ++it)
28 {
29 result.push_back(&(*it));
30 }
31 }
32 return result;
33}
34
35//
36// this helper only works if all layers where the outputs connect to are not selected
37//
38SubgraphView::OutputSlots CreateOutputsFrom(const std::vector<Layer*>& layers)
39{
40 SubgraphView::OutputSlots result;
41 for (auto&& layer : layers)
42 {
43 for (auto&& it = layer->BeginOutputSlots(); it != layer->EndOutputSlots(); ++it)
44 {
45 result.push_back(&(*it));
46 }
47 }
48 return result;
49}
50
Teresa Charlind672f5d2021-01-18 18:07:57 +000051bool checkDataTypeInputandOutput(const Layer& layer)
52{
53 auto inputInfo = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
54 auto outputInfo = layer.GetOutputSlot(0).GetTensorInfo();
55 bool sameDataType = (inputInfo.GetDataType() == outputInfo.GetDataType());
56
57 // Check is same quantization info (same scale and offset)
58 if (sameDataType)
59 {
60 if (IsQuantizedType(inputInfo.GetDataType()))
61 {
62 bool sameScale = (inputInfo.GetQuantizationScale() == outputInfo.GetQuantizationScale());
63 bool sameOffset = (inputInfo.GetQuantizationOffset() == outputInfo.GetQuantizationOffset());
64
65 return (sameScale && sameOffset);
66 }
67 else
68 {
69 return true;
70 }
71 }
72 else
73 {
74 return false;
75 }
76}
77
} // namespace

Mike Kelly1ac690a2020-11-17 11:41:38 +000080inline void ReportUntouchedLayers(OptimizationViews& optimizationViews, std::map<LayerGuid, Layer*> untouched)
Mike Kelly07810fc2020-11-12 10:58:48 +000081{
Mike Kelly1ac690a2020-11-17 11:41:38 +000082 std::vector<Layer*> untouchedVector;
83 for (const auto& pair : untouched)
Mike Kelly07810fc2020-11-12 10:58:48 +000084 {
Mike Kelly1ac690a2020-11-17 11:41:38 +000085 Layer* layer = pair.second;
86 SubgraphView subgraphView(CreateInputsFrom({layer}),
87 CreateOutputsFrom({layer}),
88 {layer});
89 optimizationViews.AddUntouchedSubgraph(std::move(subgraphView));
Mike Kelly07810fc2020-11-12 10:58:48 +000090 }
Mike Kelly07810fc2020-11-12 10:58:48 +000091}
92
93template<typename LayerType>
94LayerType* FuseLayerWithoutParameters(OptimizationViews& optimizationViews,
95 LayerType* baseLayer,
96 ActivationLayer* activationLayer,
97 ActivationDescriptor& activationDesc,
98 std::string name)
99{
100 LayerType* replacementLayer = optimizationViews.GetGraph().AddLayer<LayerType>(name.c_str());
101
102 replacementLayer->SetAdditionalInfoForObject(std::make_shared<ActivationDescriptor>(activationDesc));
103
104 SubgraphView substitutionSubgraph(CreateInputsFrom({baseLayer}),
105 CreateOutputsFrom({activationLayer}),
106 {baseLayer, activationLayer});
107 SubgraphView replacementSubgraph(replacementLayer);
108
109 optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph});
110 return replacementLayer;
111}
112
113template<typename LayerType>
114LayerType* FuseLayerWithParameters(OptimizationViews& optimizationViews,
115 LayerType* baseLayer,
116 ActivationLayer* activationLayer,
117 ActivationDescriptor& activationDesc,
118 std::string name)
119{
120 LayerType* replacementLayer = optimizationViews.GetGraph().AddLayer<LayerType>(baseLayer->GetParameters(),
121 name.c_str());
122
123 replacementLayer->SetAdditionalInfoForObject(std::make_shared<ActivationDescriptor>(activationDesc));
124
125 SubgraphView substitutionSubgraph(CreateInputsFrom({baseLayer}),
126 CreateOutputsFrom({activationLayer}),
127 {baseLayer, activationLayer});
128 SubgraphView replacementSubgraph(replacementLayer);
129
130 optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph});
131 return replacementLayer;
132}
133
134template<typename LayerType>
135LayerType* FuseLayerWithWeightsAndBiases(OptimizationViews& optimizationViews,
136 LayerType* baseLayer,
137 ActivationLayer* activationLayer,
138 ActivationDescriptor& activationDesc,
139 std::string name)
140{
141 LayerType* replacementLayer = FuseLayerWithParameters(optimizationViews,
142 baseLayer,
143 activationLayer,
144 activationDesc,
145 name);
146
147 replacementLayer->m_Weight = std::move(baseLayer->m_Weight);
148 replacementLayer->m_Bias = std::move(baseLayer->m_Bias);
149
150 return replacementLayer;
151}
152
Matthew Sloyan5fc0fd62021-05-03 12:22:03 +0100153//
154// If reduce layer has multiple axes, add new layer for each axis to simulate the same behaviour
155// as currently only one axis is supported.
156//
157template<typename LayerType>
158std::vector<Layer*> ChainReduceLayers(OptimizationViews& optimizationViews,
159 LayerType* baseLayer,
160 ReduceDescriptor& desc)
161{
162 // Vector of new chained layers, used for substitution.
163 std::vector<Layer*> layers;
164
165 // Vector of axes so each layer is reshaped correctly.
166 std::vector<uint32_t> axes;
167 unsigned int recalulatedAxis = 0;
168
169 for (unsigned int i = 0; i != desc.m_vAxis.size(); ++i)
170 {
171 // Get TensorInfo from base layer and reduce shape using axis.
172 TensorInfo layerInfo = baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo();
173
174 axes.emplace_back(desc.m_vAxis[i]);
175
176 const TensorInfo& reducedTensorInfo = ComputeReductionTensorShape(layerInfo,
177 axes,
178 desc.m_KeepDims);
179
180 // Create a vector for the single axis to be assigned to the descriptor.
181 // Update axis if keepDims is set reduce layers correctly.
182 std::vector<uint32_t> singleAxis(1, desc.m_vAxis[i] - recalulatedAxis);
183
184 // Create a descriptor and assign single axis.
185 ReduceDescriptor newReduceDescriptor = baseLayer->GetParameters();
186 newReduceDescriptor.m_vAxis.assign(singleAxis.begin(), singleAxis.end());
187
188 // Add new layer to graph.
189 std::string layerName = "reduce_layer_" + std::to_string(i);
190 Layer* replacementLayer = optimizationViews.GetGraph().AddLayer<LayerType>(newReduceDescriptor,
191 layerName.c_str());
192 // Connect previous layer with new layer.
193 // The first and last layer will be connected when the subgraph is replaced.
194 if (!layers.empty())
195 {
196 layers[i - 1]->GetOutputSlot(0).Connect(replacementLayer->GetInputSlot(0));
197 }
198
199 // Set updated tensorInfo for new layer.
200 replacementLayer->GetOutputSlot(0).SetTensorInfo(reducedTensorInfo);
201
202 if (!desc.m_KeepDims)
203 {
204 recalulatedAxis++;
205 }
206
207 layers.emplace_back(replacementLayer);
208 }
209
210 // Check if the TensorInfo from the last layer equals the inferred output from the original layer.
211 ARMNN_ASSERT(baseLayer->GetOutputSlot(0).GetTensorInfo() == layers.back()->GetOutputSlot().GetTensorInfo());
212
213 return layers;
214}
215
216//
217// Substitute baseLayer with new subgraph
218//
219template<typename LayerType>
220void ReplaceLayers(OptimizationViews& optimizationViews,
221 LayerType* baseLayer,
222 std::vector<Layer*>& layers)
223{
224 std::list<Layer*> replacementLayers(layers.begin(), layers.end());
225
226 SubgraphView substitutionSubgraph(baseLayer);
227 SubgraphView replacementSubgraph(CreateInputsFrom({replacementLayers.front()}),
228 CreateOutputsFrom({replacementLayers.back()}),
229 std::move(replacementLayers));
230
231 optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph});
232}

} // namespace armnn