Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 1 | // |
| 2 | // Copyright © 2020 Arm Ltd and Contributors. All rights reserved. |
| 3 | // SPDX-License-Identifier: MIT |
| 4 | // |
| 5 | |
| 6 | #pragma once |
| 7 | |
| 8 | #include <armnn/backends/OptimizationViews.hpp> |
Matthew Sloyan | 5fc0fd6 | 2021-05-03 12:22:03 +0100 | [diff] [blame] | 9 | #include <armnn/utility/Assert.hpp> |
| 10 | |
| 11 | #include <aclCommon/ArmComputeUtils.hpp> |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 12 | |
| 13 | namespace armnn |
| 14 | { |
| 15 | |
| 16 | namespace |
| 17 | { |
| 18 | |
| 19 | // |
| 20 | // this helper only works if all layers where the inputs connect to are not selected |
| 21 | // |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 22 | |
| 23 | SubgraphView::IInputSlots CreateIInputsFrom(const std::vector<armnn::IConnectableLayer*>& layers) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 24 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 25 | SubgraphView::IInputSlots result; |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 26 | for (auto&& layer : layers) |
| 27 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 28 | for (unsigned int i = 0 ; i < layer->GetNumInputSlots(); ++i) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 29 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 30 | result.push_back(&(layer->GetInputSlot(i))); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 31 | } |
| 32 | } |
| 33 | return result; |
| 34 | } |
| 35 | |
| 36 | // |
| 37 | // this helper only works if all layers where the outputs connect to are not selected |
| 38 | // |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 39 | |
| 40 | SubgraphView::IOutputSlots CreateIOutputsFrom(const std::vector<armnn::IConnectableLayer*>& layers) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 41 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 42 | SubgraphView::IOutputSlots result; |
| 43 | for (auto &&layer: layers) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 44 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 45 | for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 46 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 47 | result.push_back(&(layer->GetOutputSlot(i))); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 48 | } |
| 49 | } |
| 50 | return result; |
| 51 | } |
| 52 | |
Teresa Charlin | d672f5d | 2021-01-18 18:07:57 +0000 | [diff] [blame] | 53 | bool checkDataTypeInputandOutput(const Layer& layer) |
| 54 | { |
| 55 | auto inputInfo = layer.GetInputSlot(0).GetConnection()->GetTensorInfo(); |
| 56 | auto outputInfo = layer.GetOutputSlot(0).GetTensorInfo(); |
| 57 | bool sameDataType = (inputInfo.GetDataType() == outputInfo.GetDataType()); |
| 58 | |
| 59 | // Check is same quantization info (same scale and offset) |
| 60 | if (sameDataType) |
| 61 | { |
| 62 | if (IsQuantizedType(inputInfo.GetDataType())) |
| 63 | { |
| 64 | bool sameScale = (inputInfo.GetQuantizationScale() == outputInfo.GetQuantizationScale()); |
| 65 | bool sameOffset = (inputInfo.GetQuantizationOffset() == outputInfo.GetQuantizationOffset()); |
| 66 | |
| 67 | return (sameScale && sameOffset); |
| 68 | } |
| 69 | else |
| 70 | { |
| 71 | return true; |
| 72 | } |
| 73 | } |
| 74 | else |
| 75 | { |
| 76 | return false; |
| 77 | } |
| 78 | } |
| 79 | |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 80 | } // namespace |
| 81 | |
Mike Kelly | 1ac690a | 2020-11-17 11:41:38 +0000 | [diff] [blame] | 82 | inline void ReportUntouchedLayers(OptimizationViews& optimizationViews, std::map<LayerGuid, Layer*> untouched) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 83 | { |
Mike Kelly | 1ac690a | 2020-11-17 11:41:38 +0000 | [diff] [blame] | 84 | std::vector<Layer*> untouchedVector; |
| 85 | for (const auto& pair : untouched) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 86 | { |
Mike Kelly | 1ac690a | 2020-11-17 11:41:38 +0000 | [diff] [blame] | 87 | Layer* layer = pair.second; |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 88 | SubgraphView subgraphView({layer}, |
| 89 | CreateIInputsFrom({layer}), |
| 90 | CreateIOutputsFrom({layer})); |
Mike Kelly | 1ac690a | 2020-11-17 11:41:38 +0000 | [diff] [blame] | 91 | optimizationViews.AddUntouchedSubgraph(std::move(subgraphView)); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 92 | } |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 93 | } |
| 94 | |
| 95 | template<typename LayerType> |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 96 | LayerType* FuseLayer(OptimizationViews& optimizationViews, |
| 97 | LayerType* baseLayer, |
| 98 | LayerType* replacementLayer, |
| 99 | ActivationLayer* activationLayer, |
| 100 | ActivationDescriptor& activationDesc) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 101 | { |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 102 | replacementLayer->SetAdditionalInfoForObject( |
| 103 | std::make_shared<ActivationDescriptor>(activationDesc)); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 104 | |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 105 | SubgraphView substitutionSubgraph({baseLayer, activationLayer}, |
| 106 | CreateIInputsFrom({baseLayer}), |
| 107 | CreateIOutputsFrom({activationLayer})); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 108 | SubgraphView replacementSubgraph(replacementLayer); |
| 109 | |
| 110 | optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph}); |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 111 | |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 112 | return replacementLayer; |
| 113 | } |
| 114 | |
| 115 | template<typename LayerType> |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 116 | LayerType* FuseAdditionLayer(OptimizationViews& optimizationViews, |
| 117 | LayerType* baseLayer, |
| 118 | ActivationLayer* activationLayer, |
| 119 | ActivationDescriptor& activationDesc, |
| 120 | std::string name) |
| 121 | { |
| 122 | IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddAdditionLayer(name.c_str()); |
| 123 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 124 | |
| 125 | FuseLayer(optimizationViews, |
| 126 | baseLayer, |
| 127 | replacementLayer, |
| 128 | activationLayer, |
| 129 | activationDesc); |
| 130 | |
| 131 | return replacementLayer; |
| 132 | } |
| 133 | |
| 134 | template<typename LayerType> |
| 135 | LayerType* FuseSubtractionLayer(OptimizationViews& optimizationViews, |
| 136 | LayerType* baseLayer, |
| 137 | ActivationLayer* activationLayer, |
| 138 | ActivationDescriptor& activationDesc, |
| 139 | std::string name) |
| 140 | { |
| 141 | IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddSubtractionLayer(name.c_str()); |
| 142 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 143 | |
| 144 | FuseLayer(optimizationViews, |
| 145 | baseLayer, |
| 146 | replacementLayer, |
| 147 | activationLayer, |
| 148 | activationDesc); |
| 149 | |
| 150 | return replacementLayer; |
| 151 | } |
| 152 | |
| 153 | template<typename LayerType> |
| 154 | LayerType* FuseDivisionLayer(OptimizationViews& optimizationViews, |
| 155 | LayerType* baseLayer, |
| 156 | ActivationLayer* activationLayer, |
| 157 | ActivationDescriptor& activationDesc, |
| 158 | std::string name) |
| 159 | { |
| 160 | IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddDivisionLayer(name.c_str()); |
| 161 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 162 | |
| 163 | FuseLayer(optimizationViews, |
| 164 | baseLayer, |
| 165 | replacementLayer, |
| 166 | activationLayer, |
| 167 | activationDesc); |
| 168 | |
| 169 | return replacementLayer; |
| 170 | } |
| 171 | |
| 172 | template<typename LayerType> |
| 173 | LayerType* FuseMultiplicationLayer(OptimizationViews& optimizationViews, |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 174 | LayerType* baseLayer, |
| 175 | ActivationLayer* activationLayer, |
| 176 | ActivationDescriptor& activationDesc, |
| 177 | std::string name) |
| 178 | { |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 179 | IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddMultiplicationLayer(name.c_str()); |
| 180 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 181 | |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 182 | FuseLayer(optimizationViews, |
| 183 | baseLayer, |
| 184 | replacementLayer, |
| 185 | activationLayer, |
| 186 | activationDesc); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 187 | |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 188 | return replacementLayer; |
| 189 | } |
| 190 | |
| 191 | template<typename LayerType> |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 192 | LayerType* FuseBatchNormalizationLayer(OptimizationViews& optimizationViews, |
| 193 | LayerType* baseLayer, |
| 194 | ActivationLayer* activationLayer, |
| 195 | ActivationDescriptor& activationDesc, |
| 196 | std::string name) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 197 | { |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 198 | IConnectableLayer* replacement = |
| 199 | optimizationViews.GetINetwork()->AddBatchNormalizationLayer(baseLayer->GetParameters(), |
| 200 | ConstTensor(), |
| 201 | ConstTensor(), |
| 202 | ConstTensor(), |
| 203 | ConstTensor(), |
| 204 | name.c_str()); |
| 205 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 206 | |
| 207 | FuseLayer(optimizationViews, |
| 208 | baseLayer, |
| 209 | replacementLayer, |
| 210 | activationLayer, |
| 211 | activationDesc); |
| 212 | |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 213 | SubgraphView substitutionSubgraph({baseLayer, activationLayer}, |
| 214 | CreateIInputsFrom({baseLayer}), |
| 215 | CreateIOutputsFrom({activationLayer})); |
| 216 | SubgraphView replacementSubgraph(replacementLayer); |
| 217 | |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 218 | return replacementLayer; |
| 219 | } |
| 220 | |
| 221 | template<typename LayerType> |
| 222 | LayerType* FuseConvolution2dLayer(OptimizationViews& optimizationViews, |
| 223 | LayerType* baseLayer, |
| 224 | ActivationLayer* activationLayer, |
| 225 | ActivationDescriptor& activationDesc, |
| 226 | std::string name) |
| 227 | { |
Keith Davis | b4dd5cc | 2022-04-07 11:32:00 +0100 | [diff] [blame] | 228 | IConnectableLayer* replacement = optimizationViews.GetINetwork() |
| 229 | ->AddConvolution2dLayer(baseLayer->GetParameters(), name.c_str()); |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 230 | |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 231 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 232 | |
Keith Davis | b4dd5cc | 2022-04-07 11:32:00 +0100 | [diff] [blame] | 233 | replacementLayer->m_Weight = std::move(baseLayer->m_Weight); |
| 234 | replacementLayer->m_Bias = std::move(baseLayer->m_Bias); |
| 235 | |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 236 | FuseLayer(optimizationViews, |
| 237 | baseLayer, |
| 238 | replacementLayer, |
| 239 | activationLayer, |
| 240 | activationDesc); |
| 241 | |
| 242 | return replacementLayer; |
| 243 | } |
| 244 | |
| 245 | template<typename LayerType> |
| 246 | LayerType* FuseDepthwiseConvolution2dLayer(OptimizationViews& optimizationViews, |
| 247 | LayerType* baseLayer, |
| 248 | ActivationLayer* activationLayer, |
| 249 | ActivationDescriptor& activationDesc, |
| 250 | std::string name) |
| 251 | { |
Keith Davis | b4dd5cc | 2022-04-07 11:32:00 +0100 | [diff] [blame] | 252 | IConnectableLayer* replacement = |
| 253 | optimizationViews.GetINetwork()->AddDepthwiseConvolution2dLayer(baseLayer->GetParameters(), name.c_str()); |
| 254 | |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 255 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 256 | |
Cathal Corbett | 0690265 | 2022-04-14 17:55:11 +0100 | [diff] [blame] | 257 | replacementLayer->m_Weight = std::move(baseLayer->m_Weight); |
| 258 | replacementLayer->m_Bias = std::move(baseLayer->m_Bias); |
| 259 | |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 260 | FuseLayer(optimizationViews, |
| 261 | baseLayer, |
| 262 | replacementLayer, |
| 263 | activationLayer, |
| 264 | activationDesc); |
| 265 | |
| 266 | return replacementLayer; |
| 267 | } |
| 268 | |
| 269 | template<typename LayerType> |
| 270 | LayerType* FuseFullyConnectedLayer(OptimizationViews& optimizationViews, |
| 271 | LayerType* baseLayer, |
| 272 | ActivationLayer* activationLayer, |
| 273 | ActivationDescriptor& activationDesc, |
| 274 | std::string name) |
| 275 | { |
| 276 | IConnectableLayer* replacement = |
| 277 | optimizationViews.GetINetwork()->AddFullyConnectedLayer(baseLayer->GetParameters(), |
| 278 | name.c_str()); |
| 279 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 280 | |
| 281 | FuseLayer(optimizationViews, |
| 282 | baseLayer, |
| 283 | replacementLayer, |
| 284 | activationLayer, |
| 285 | activationDesc); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 286 | |
| 287 | replacementLayer->m_Weight = std::move(baseLayer->m_Weight); |
| 288 | replacementLayer->m_Bias = std::move(baseLayer->m_Bias); |
| 289 | |
| 290 | return replacementLayer; |
| 291 | } |
| 292 | |
//
// If reduce layer has multiple axes, add new layer for each axis to simulate the same behaviour
// as currently only one axis is supported.
//
// Returns the chain of single-axis Reduce layers, in execution order. Interior
// layers are connected here; the chain's first input and last output are wired
// up later when the subgraph substitution is applied (see ReplaceLayers).
template<typename LayerType>
std::vector<IConnectableLayer*> ChainReduceLayers(OptimizationViews& optimizationViews,
                                                  LayerType* baseLayer,
                                                  ReduceDescriptor& desc)
{
    // Vector of new chained layers, used for substitution.
    std::vector<IConnectableLayer*> layers;

    // Vector of axes so each layer is reshaped correctly.
    std::vector<uint32_t> axes;
    // Number of dimensions already removed by earlier links in the chain;
    // used to translate an axis of the ORIGINAL shape into an axis of the
    // progressively reduced intermediate shape.
    unsigned int recalulatedAxis = 0;

    for (unsigned int i = 0; i != desc.m_vAxis.size(); ++i)
    {
        // Get TensorInfo from base layer and reduce shape using axis.
        // NOTE(review): this always reads the BASE layer's input shape and
        // re-applies all axes gathered so far (axes[0..i]) to compute the
        // cumulative reduced shape for this link.
        TensorInfo layerInfo = baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo();

        axes.emplace_back(desc.m_vAxis[i]);

        const TensorInfo& reducedTensorInfo = ComputeReductionTensorShape(layerInfo,
                                                                          axes,
                                                                          desc.m_KeepDims);

        // Create a vector for the single axis to be assigned to the descriptor.
        // Update axis if keepDims is set reduce layers correctly.
        // When KeepDims is false, earlier reductions removed dimensions, so the
        // original axis index must be shifted down by the count removed so far.
        std::vector<uint32_t> singleAxis(1, desc.m_vAxis[i] - recalulatedAxis);

        // Create a descriptor and assign single axis.
        ReduceDescriptor newReduceDescriptor = baseLayer->GetParameters();
        newReduceDescriptor.m_vAxis.assign(singleAxis.begin(), singleAxis.end());

        // Add new layer to graph.
        std::string layerName = "reduce_layer_" + std::to_string(i);

        Layer* replacementLayer = PolymorphicDowncast<Layer*>(
            optimizationViews.GetINetwork()->AddReduceLayer(newReduceDescriptor,
                                                            layerName.c_str()));

        // Connect previous layer with new layer.
        // The first and last layer will be connected when the subgraph is replaced.
        if (!layers.empty())
        {
            layers[i - 1]->GetOutputSlot(0).Connect(replacementLayer->GetInputSlot(0));
        }

        // Set updated tensorInfo for new layer.
        replacementLayer->GetOutputSlot(0).SetTensorInfo(reducedTensorInfo);

        // KeepDims retains reduced dimensions as size 1, so no axis shift is
        // needed in that case.
        if (!desc.m_KeepDims)
        {
            recalulatedAxis++;
        }

        layers.emplace_back(replacementLayer);
    }

    // Check if the TensorInfo from the last layer equals the inferred output from the original layer.
    ARMNN_ASSERT(baseLayer->GetOutputSlot(0).GetTensorInfo() ==
                 PolymorphicDowncast<Layer*>(layers.back())->GetOutputSlot().GetTensorInfo());

    return layers;
}
| 359 | |
| 360 | // |
| 361 | // Substitute baseLayer with new subgraph |
| 362 | // |
| 363 | template<typename LayerType> |
| 364 | void ReplaceLayers(OptimizationViews& optimizationViews, |
| 365 | LayerType* baseLayer, |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 366 | std::vector<IConnectableLayer*>& layers) |
Matthew Sloyan | 5fc0fd6 | 2021-05-03 12:22:03 +0100 | [diff] [blame] | 367 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 368 | std::list<IConnectableLayer*> replacementLayers(layers.begin(), layers.end()); |
Matthew Sloyan | 5fc0fd6 | 2021-05-03 12:22:03 +0100 | [diff] [blame] | 369 | |
| 370 | SubgraphView substitutionSubgraph(baseLayer); |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 371 | SubgraphView replacementSubgraph(std::move(replacementLayers), |
| 372 | CreateIInputsFrom({replacementLayers.front()}), |
| 373 | CreateIOutputsFrom({replacementLayers.back()})); |
Matthew Sloyan | 5fc0fd6 | 2021-05-03 12:22:03 +0100 | [diff] [blame] | 374 | |
| 375 | optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph}); |
| 376 | } |
| 377 | |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 378 | } // namespace armnn |