Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 1 | // |
| 2 | // Copyright © 2020 Arm Ltd and Contributors. All rights reserved. |
| 3 | // SPDX-License-Identifier: MIT |
| 4 | // |
| 5 | |
| 6 | #pragma once |
| 7 | |
| 8 | #include <armnn/backends/OptimizationViews.hpp> |
Matthew Sloyan | 5fc0fd6 | 2021-05-03 12:22:03 +0100 | [diff] [blame] | 9 | #include <armnn/utility/Assert.hpp> |
| 10 | |
| 11 | #include <aclCommon/ArmComputeUtils.hpp> |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 12 | |
| 13 | namespace armnn |
| 14 | { |
| 15 | |
| 16 | namespace |
| 17 | { |
| 18 | |
| 19 | // |
| 20 | // this helper only works if all layers where the inputs connect to are not selected |
| 21 | // |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 22 | |
| 23 | SubgraphView::IInputSlots CreateIInputsFrom(const std::vector<armnn::IConnectableLayer*>& layers) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 24 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 25 | SubgraphView::IInputSlots result; |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 26 | for (auto&& layer : layers) |
| 27 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 28 | for (unsigned int i = 0 ; i < layer->GetNumInputSlots(); ++i) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 29 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 30 | result.push_back(&(layer->GetInputSlot(i))); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 31 | } |
| 32 | } |
| 33 | return result; |
| 34 | } |
| 35 | |
| 36 | // |
| 37 | // this helper only works if all layers where the outputs connect to are not selected |
| 38 | // |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 39 | |
| 40 | SubgraphView::IOutputSlots CreateIOutputsFrom(const std::vector<armnn::IConnectableLayer*>& layers) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 41 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 42 | SubgraphView::IOutputSlots result; |
| 43 | for (auto &&layer: layers) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 44 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 45 | for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 46 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 47 | result.push_back(&(layer->GetOutputSlot(i))); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 48 | } |
| 49 | } |
| 50 | return result; |
| 51 | } |
| 52 | |
Teresa Charlin | d672f5d | 2021-01-18 18:07:57 +0000 | [diff] [blame] | 53 | bool checkDataTypeInputandOutput(const Layer& layer) |
| 54 | { |
| 55 | auto inputInfo = layer.GetInputSlot(0).GetConnection()->GetTensorInfo(); |
| 56 | auto outputInfo = layer.GetOutputSlot(0).GetTensorInfo(); |
| 57 | bool sameDataType = (inputInfo.GetDataType() == outputInfo.GetDataType()); |
| 58 | |
| 59 | // Check is same quantization info (same scale and offset) |
| 60 | if (sameDataType) |
| 61 | { |
| 62 | if (IsQuantizedType(inputInfo.GetDataType())) |
| 63 | { |
| 64 | bool sameScale = (inputInfo.GetQuantizationScale() == outputInfo.GetQuantizationScale()); |
| 65 | bool sameOffset = (inputInfo.GetQuantizationOffset() == outputInfo.GetQuantizationOffset()); |
| 66 | |
| 67 | return (sameScale && sameOffset); |
| 68 | } |
| 69 | else |
| 70 | { |
| 71 | return true; |
| 72 | } |
| 73 | } |
| 74 | else |
| 75 | { |
| 76 | return false; |
| 77 | } |
| 78 | } |
| 79 | |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 80 | } // namespace |
| 81 | |
Mike Kelly | 1ac690a | 2020-11-17 11:41:38 +0000 | [diff] [blame] | 82 | inline void ReportUntouchedLayers(OptimizationViews& optimizationViews, std::map<LayerGuid, Layer*> untouched) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 83 | { |
Mike Kelly | 1ac690a | 2020-11-17 11:41:38 +0000 | [diff] [blame] | 84 | std::vector<Layer*> untouchedVector; |
| 85 | for (const auto& pair : untouched) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 86 | { |
Mike Kelly | 1ac690a | 2020-11-17 11:41:38 +0000 | [diff] [blame] | 87 | Layer* layer = pair.second; |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 88 | SubgraphView subgraphView({layer}, |
| 89 | CreateIInputsFrom({layer}), |
| 90 | CreateIOutputsFrom({layer})); |
Mike Kelly | 1ac690a | 2020-11-17 11:41:38 +0000 | [diff] [blame] | 91 | optimizationViews.AddUntouchedSubgraph(std::move(subgraphView)); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 92 | } |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 93 | } |
| 94 | |
| 95 | template<typename LayerType> |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 96 | LayerType* FuseLayer(OptimizationViews& optimizationViews, |
| 97 | LayerType* baseLayer, |
| 98 | LayerType* replacementLayer, |
| 99 | ActivationLayer* activationLayer, |
| 100 | ActivationDescriptor& activationDesc) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 101 | { |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 102 | replacementLayer->SetAdditionalInfoForObject( |
| 103 | std::make_shared<ActivationDescriptor>(activationDesc)); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 104 | |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 105 | SubgraphView substitutionSubgraph({baseLayer, activationLayer}, |
| 106 | CreateIInputsFrom({baseLayer}), |
| 107 | CreateIOutputsFrom({activationLayer})); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 108 | SubgraphView replacementSubgraph(replacementLayer); |
| 109 | |
| 110 | optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph}); |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 111 | |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 112 | return replacementLayer; |
| 113 | } |
| 114 | |
| 115 | template<typename LayerType> |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 116 | LayerType* FuseAdditionLayer(OptimizationViews& optimizationViews, |
| 117 | LayerType* baseLayer, |
| 118 | ActivationLayer* activationLayer, |
| 119 | ActivationDescriptor& activationDesc, |
| 120 | std::string name) |
| 121 | { |
| 122 | IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddAdditionLayer(name.c_str()); |
| 123 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 124 | |
| 125 | FuseLayer(optimizationViews, |
| 126 | baseLayer, |
| 127 | replacementLayer, |
| 128 | activationLayer, |
| 129 | activationDesc); |
| 130 | |
| 131 | return replacementLayer; |
| 132 | } |
| 133 | |
| 134 | template<typename LayerType> |
| 135 | LayerType* FuseSubtractionLayer(OptimizationViews& optimizationViews, |
| 136 | LayerType* baseLayer, |
| 137 | ActivationLayer* activationLayer, |
| 138 | ActivationDescriptor& activationDesc, |
| 139 | std::string name) |
| 140 | { |
| 141 | IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddSubtractionLayer(name.c_str()); |
| 142 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 143 | |
| 144 | FuseLayer(optimizationViews, |
| 145 | baseLayer, |
| 146 | replacementLayer, |
| 147 | activationLayer, |
| 148 | activationDesc); |
| 149 | |
| 150 | return replacementLayer; |
| 151 | } |
| 152 | |
| 153 | template<typename LayerType> |
| 154 | LayerType* FuseDivisionLayer(OptimizationViews& optimizationViews, |
| 155 | LayerType* baseLayer, |
| 156 | ActivationLayer* activationLayer, |
| 157 | ActivationDescriptor& activationDesc, |
| 158 | std::string name) |
| 159 | { |
| 160 | IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddDivisionLayer(name.c_str()); |
| 161 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 162 | |
| 163 | FuseLayer(optimizationViews, |
| 164 | baseLayer, |
| 165 | replacementLayer, |
| 166 | activationLayer, |
| 167 | activationDesc); |
| 168 | |
| 169 | return replacementLayer; |
| 170 | } |
| 171 | |
| 172 | template<typename LayerType> |
| 173 | LayerType* FuseMultiplicationLayer(OptimizationViews& optimizationViews, |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 174 | LayerType* baseLayer, |
| 175 | ActivationLayer* activationLayer, |
| 176 | ActivationDescriptor& activationDesc, |
| 177 | std::string name) |
| 178 | { |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 179 | IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddMultiplicationLayer(name.c_str()); |
| 180 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 181 | |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 182 | FuseLayer(optimizationViews, |
| 183 | baseLayer, |
| 184 | replacementLayer, |
| 185 | activationLayer, |
| 186 | activationDesc); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 187 | |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 188 | return replacementLayer; |
| 189 | } |
| 190 | |
| 191 | template<typename LayerType> |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 192 | LayerType* FuseBatchNormalizationLayer(OptimizationViews& optimizationViews, |
| 193 | LayerType* baseLayer, |
| 194 | ActivationLayer* activationLayer, |
| 195 | ActivationDescriptor& activationDesc, |
| 196 | std::string name) |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 197 | { |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 198 | IConnectableLayer* replacement = |
| 199 | optimizationViews.GetINetwork()->AddBatchNormalizationLayer(baseLayer->GetParameters(), |
| 200 | ConstTensor(), |
| 201 | ConstTensor(), |
| 202 | ConstTensor(), |
| 203 | ConstTensor(), |
| 204 | name.c_str()); |
| 205 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 206 | |
| 207 | FuseLayer(optimizationViews, |
| 208 | baseLayer, |
| 209 | replacementLayer, |
| 210 | activationLayer, |
| 211 | activationDesc); |
| 212 | |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 213 | SubgraphView substitutionSubgraph({baseLayer, activationLayer}, |
| 214 | CreateIInputsFrom({baseLayer}), |
| 215 | CreateIOutputsFrom({activationLayer})); |
| 216 | SubgraphView replacementSubgraph(replacementLayer); |
| 217 | |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 218 | return replacementLayer; |
| 219 | } |
| 220 | |
| 221 | template<typename LayerType> |
| 222 | LayerType* FuseConvolution2dLayer(OptimizationViews& optimizationViews, |
| 223 | LayerType* baseLayer, |
| 224 | ActivationLayer* activationLayer, |
| 225 | ActivationDescriptor& activationDesc, |
| 226 | std::string name) |
| 227 | { |
| 228 | std::shared_ptr<ConstTensorHandle> weightHandle = baseLayer->m_Weight; |
| 229 | TensorInfo weightInfo = weightHandle->GetTensorInfo(); |
| 230 | |
| 231 | std::shared_ptr<ConstTensorHandle> biasHandle = baseLayer->m_Bias; |
| 232 | ConstTensor biasTensor; |
| 233 | if (!biasHandle) |
| 234 | { |
| 235 | biasTensor = ConstTensor(); |
| 236 | } |
| 237 | else |
| 238 | { |
| 239 | biasTensor = ConstTensor(biasHandle->GetTensorInfo(), biasHandle->Map(true)); |
| 240 | } |
| 241 | |
| 242 | IConnectableLayer* replacement = |
| 243 | optimizationViews.GetINetwork()-> |
| 244 | AddConvolution2dLayer(baseLayer->GetParameters(), |
| 245 | ConstTensor(weightInfo, weightHandle->Map(true)), |
| 246 | Optional<ConstTensor>(biasTensor), |
| 247 | name.c_str()); |
| 248 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 249 | |
| 250 | FuseLayer(optimizationViews, |
| 251 | baseLayer, |
| 252 | replacementLayer, |
| 253 | activationLayer, |
| 254 | activationDesc); |
| 255 | |
| 256 | return replacementLayer; |
| 257 | } |
| 258 | |
| 259 | template<typename LayerType> |
| 260 | LayerType* FuseDepthwiseConvolution2dLayer(OptimizationViews& optimizationViews, |
| 261 | LayerType* baseLayer, |
| 262 | ActivationLayer* activationLayer, |
| 263 | ActivationDescriptor& activationDesc, |
| 264 | std::string name) |
| 265 | { |
| 266 | std::shared_ptr<ConstTensorHandle> weightHandle = baseLayer->m_Weight; |
| 267 | TensorInfo weightInfo = weightHandle->GetTensorInfo(); |
| 268 | |
| 269 | std::shared_ptr<ConstTensorHandle> biasHandle = baseLayer->m_Bias; |
| 270 | ConstTensor biasTensor; |
| 271 | if (!biasHandle) |
| 272 | { |
| 273 | biasTensor = ConstTensor(); |
| 274 | } |
| 275 | else |
| 276 | { |
| 277 | biasTensor = ConstTensor(biasHandle->GetTensorInfo(), biasHandle->Map(true)); |
| 278 | } |
| 279 | |
| 280 | IConnectableLayer* replacement = |
| 281 | optimizationViews.GetINetwork()-> |
| 282 | AddDepthwiseConvolution2dLayer(baseLayer->GetParameters(), |
| 283 | ConstTensor(weightInfo, weightHandle->Map(true)), |
| 284 | Optional<ConstTensor>(biasTensor), |
| 285 | name.c_str()); |
| 286 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 287 | |
| 288 | FuseLayer(optimizationViews, |
| 289 | baseLayer, |
| 290 | replacementLayer, |
| 291 | activationLayer, |
| 292 | activationDesc); |
| 293 | |
| 294 | return replacementLayer; |
| 295 | } |
| 296 | |
| 297 | template<typename LayerType> |
| 298 | LayerType* FuseFullyConnectedLayer(OptimizationViews& optimizationViews, |
| 299 | LayerType* baseLayer, |
| 300 | ActivationLayer* activationLayer, |
| 301 | ActivationDescriptor& activationDesc, |
| 302 | std::string name) |
| 303 | { |
| 304 | IConnectableLayer* replacement = |
| 305 | optimizationViews.GetINetwork()->AddFullyConnectedLayer(baseLayer->GetParameters(), |
| 306 | name.c_str()); |
| 307 | LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement); |
| 308 | |
| 309 | FuseLayer(optimizationViews, |
| 310 | baseLayer, |
| 311 | replacementLayer, |
| 312 | activationLayer, |
| 313 | activationDesc); |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 314 | |
| 315 | replacementLayer->m_Weight = std::move(baseLayer->m_Weight); |
| 316 | replacementLayer->m_Bias = std::move(baseLayer->m_Bias); |
| 317 | |
| 318 | return replacementLayer; |
| 319 | } |
| 320 | |
//
// If reduce layer has multiple axes, add new layer for each axis to simulate the same behaviour
// as currently only one axis is supported.
//
/// Splits a multi-axis Reduce layer into a chain of single-axis Reduce layers.
/// Returns the new layers in chain order (first..last); the caller is expected
/// to substitute them for 'baseLayer' (see ReplaceLayers). Intermediate layers
/// are connected to each other here; the chain's first input and last output
/// are connected when the subgraph substitution is applied.
template<typename LayerType>
std::vector<IConnectableLayer*> ChainReduceLayers(OptimizationViews& optimizationViews,
                                                  LayerType* baseLayer,
                                                  ReduceDescriptor& desc)
{
    // Vector of new chained layers, used for substitution.
    std::vector<IConnectableLayer*> layers;

    // Vector of axes so each layer is reshaped correctly.
    // 'axes' accumulates all axes processed so far, so ComputeReductionTensorShape
    // yields the shape after reducing axes [0..i] of the ORIGINAL input.
    std::vector<uint32_t> axes;
    // Number of dimensions already removed by earlier links of the chain;
    // used to shift each original axis index when KeepDims is false.
    unsigned int recalulatedAxis = 0;

    for (unsigned int i = 0; i != desc.m_vAxis.size(); ++i)
    {
        // Get TensorInfo from base layer and reduce shape using axis.
        TensorInfo layerInfo = baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo();

        axes.emplace_back(desc.m_vAxis[i]);

        // Shape of the intermediate output after reducing all axes seen so far.
        const TensorInfo& reducedTensorInfo = ComputeReductionTensorShape(layerInfo,
                                                                          axes,
                                                                          desc.m_KeepDims);

        // Create a vector for the single axis to be assigned to the descriptor.
        // Update axis if keepDims is set reduce layers correctly.
        // When dims were dropped by earlier links, the original axis index must be
        // shifted left by the number of dims removed so far. NOTE(review): this
        // presumes desc.m_vAxis is sorted ascending — confirm with callers.
        std::vector<uint32_t> singleAxis(1, desc.m_vAxis[i] - recalulatedAxis);

        // Create a descriptor and assign single axis.
        ReduceDescriptor newReduceDescriptor = baseLayer->GetParameters();
        newReduceDescriptor.m_vAxis.assign(singleAxis.begin(), singleAxis.end());

        // Add new layer to graph.
        std::string layerName = "reduce_layer_" + std::to_string(i);

        Layer* replacementLayer = PolymorphicDowncast<Layer*>(
            optimizationViews.GetINetwork()->AddReduceLayer(newReduceDescriptor,
                                                            layerName.c_str()));

        // Connect previous layer with new layer.
        // The first and last layer will be connected when the subgraph is replaced.
        if (!layers.empty())
        {
            layers[i - 1]->GetOutputSlot(0).Connect(replacementLayer->GetInputSlot(0));
        }

        // Set updated tensorInfo for new layer.
        replacementLayer->GetOutputSlot(0).SetTensorInfo(reducedTensorInfo);

        // Each reduced axis removes one dimension unless KeepDims preserves it.
        if (!desc.m_KeepDims)
        {
            recalulatedAxis++;
        }

        layers.emplace_back(replacementLayer);
    }

    // Check if the TensorInfo from the last layer equals the inferred output from the original layer.
    // NOTE: assumes desc.m_vAxis is non-empty, so 'layers' has at least one element here.
    ARMNN_ASSERT(baseLayer->GetOutputSlot(0).GetTensorInfo() ==
                 PolymorphicDowncast<Layer*>(layers.back())->GetOutputSlot().GetTensorInfo());

    return layers;
}
| 387 | |
| 388 | // |
| 389 | // Substitute baseLayer with new subgraph |
| 390 | // |
| 391 | template<typename LayerType> |
| 392 | void ReplaceLayers(OptimizationViews& optimizationViews, |
| 393 | LayerType* baseLayer, |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 394 | std::vector<IConnectableLayer*>& layers) |
Matthew Sloyan | 5fc0fd6 | 2021-05-03 12:22:03 +0100 | [diff] [blame] | 395 | { |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 396 | std::list<IConnectableLayer*> replacementLayers(layers.begin(), layers.end()); |
Matthew Sloyan | 5fc0fd6 | 2021-05-03 12:22:03 +0100 | [diff] [blame] | 397 | |
| 398 | SubgraphView substitutionSubgraph(baseLayer); |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 399 | SubgraphView replacementSubgraph(std::move(replacementLayers), |
| 400 | CreateIInputsFrom({replacementLayers.front()}), |
| 401 | CreateIOutputsFrom({replacementLayers.back()})); |
Matthew Sloyan | 5fc0fd6 | 2021-05-03 12:22:03 +0100 | [diff] [blame] | 402 | |
| 403 | optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph}); |
| 404 | } |
| 405 | |
Mike Kelly | 07810fc | 2020-11-12 10:58:48 +0000 | [diff] [blame] | 406 | } // namespace armnn |