blob: 16185b6bc74996db5a200a400f574a9b7ebd2043 [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00005
6#include "CpuTensorHandle.hpp"
Derek Lambertia9cca6a2019-03-25 15:41:58 +00007#include "WorkloadFactory.hpp"
8
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00009#include <Layer.hpp>
10#include <LayersFwd.hpp>
David Beckdcb751f2018-10-03 11:42:42 +010011
David Beckb4540be2018-09-24 13:18:27 +010012#include <armnn/Types.hpp>
13#include <armnn/LayerSupport.hpp>
David Beck111b5d92018-11-12 14:59:37 +000014#include <armnn/ILayerSupport.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +000015#include <armnn/BackendRegistry.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000016
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000017#include <backendsCommon/WorkloadFactory.hpp>
David Beck111b5d92018-11-12 14:59:37 +000018#include <backendsCommon/IBackendInternal.hpp>
Francis Murtagh46c09d02019-05-28 08:15:28 +010019#include <backendsCommon/test/WorkloadTestUtils.hpp>
telsoa014fcda012018-03-09 14:13:49 +000020
21#include <boost/cast.hpp>
telsoa014fcda012018-03-09 14:13:49 +000022#include <boost/iterator/transform_iterator.hpp>
23
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000024#include <cstring>
David Beck111b5d92018-11-12 14:59:37 +000025#include <sstream>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000026
telsoa014fcda012018-03-09 14:13:49 +000027namespace armnn
28{
29
telsoa01c577f2c2018-08-31 09:22:23 +010030namespace
31{
telsoa01c577f2c2018-08-31 09:22:23 +010032
David Beck29c75de2018-10-23 13:35:58 +010033const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
34{
35 if (!type)
36 {
37 return info;
telsoa01c577f2c2018-08-31 09:22:23 +010038 }
39
David Beck29c75de2018-10-23 13:35:58 +010040 return TensorInfo(info.GetShape(), type.value(), info.GetQuantizationScale(), info.GetQuantizationOffset());
telsoa01c577f2c2018-08-31 09:22:23 +010041}
42
David Beck29c75de2018-10-23 13:35:58 +010043} // anonymous namespace
44
David Beck33f0ae02018-10-18 15:13:56 +010045bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
David Beckdcb751f2018-10-03 11:42:42 +010046 const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +010047 Optional<DataType> dataType,
David Beckdcb751f2018-10-03 11:42:42 +010048 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +000049{
David Beck33f0ae02018-10-18 15:13:56 +010050 Optional<std::string&> reason = outReasonIfUnsupported;
telsoa014fcda012018-03-09 14:13:49 +000051 bool result;
David Beckdcb751f2018-10-03 11:42:42 +010052 const Layer& layer = *(boost::polymorphic_downcast<const Layer*>(&connectableLayer));
53
David Beck111b5d92018-11-12 14:59:37 +000054 auto const& backendRegistry = BackendRegistryInstance();
55 if (!backendRegistry.IsBackendRegistered(backendId))
56 {
57 std::stringstream ss;
58 ss << connectableLayer.GetName() << " is not supported on " << backendId
59 << " because this backend is not registered.";
60
61 outReasonIfUnsupported = ss.str();
62 return false;
63 }
64
65 auto backendFactory = backendRegistry.GetFactory(backendId);
66 auto backendObject = backendFactory();
67 auto layerSupportObject = backendObject->GetLayerSupport();
David Beck33f0ae02018-10-18 15:13:56 +010068
telsoa014fcda012018-03-09 14:13:49 +000069 switch(layer.GetType())
70 {
Kevin May868eb142019-09-04 17:29:31 +010071 case LayerType::Abs:
72 {
73 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
74 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
75 result = layerSupportObject->IsAbsSupported(OverrideDataType(input, dataType),
76 OverrideDataType(output, dataType),
77 reason);
78 break;
79 }
telsoa014fcda012018-03-09 14:13:49 +000080 case LayerType::Activation:
81 {
82 auto cLayer = boost::polymorphic_downcast<const ActivationLayer*>(&layer);
83 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +010084 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010085 result = layerSupportObject->IsActivationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010086 OverrideDataType(input, dataType),
87 OverrideDataType(output, dataType),
88 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +010089 reason);
telsoa014fcda012018-03-09 14:13:49 +000090 break;
91 }
92 case LayerType::Addition:
93 {
94 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
95 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
96 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010097 result = layerSupportObject->IsAdditionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010098 OverrideDataType(input0, dataType),
99 OverrideDataType(input1, dataType),
100 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100101 reason);
telsoa014fcda012018-03-09 14:13:49 +0000102 break;
103 }
Nikhil Rajee391d52019-09-05 17:50:44 +0100104 case LayerType::ArgMinMax:
105 {
106 auto cLayer = boost::polymorphic_downcast<const ArgMinMaxLayer*>(&layer);
107 const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();
108
109 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
110 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
111 result = layerSupportObject->IsArgMinMaxSupported(
112 OverrideDataType(input, dataType),
Narumol Prangnawaratd1f57732019-10-31 14:24:02 +0000113 OverrideDataType(output, DataType::Signed32),
Nikhil Rajee391d52019-09-05 17:50:44 +0100114 descriptor,
115 reason);
116 break;
117 }
telsoa014fcda012018-03-09 14:13:49 +0000118 case LayerType::BatchNormalization:
119 {
120 auto cLayer = boost::polymorphic_downcast<const BatchNormalizationLayer*>(&layer);
121 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100122 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
123 const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
124 const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
125 const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
126 const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100127 result = layerSupportObject->IsBatchNormalizationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100128 OverrideDataType(input, dataType),
129 OverrideDataType(output, dataType),
130 OverrideDataType(mean, dataType),
131 OverrideDataType(var, dataType),
132 OverrideDataType(beta, dataType),
133 OverrideDataType(gamma, dataType),
134 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100135 reason);
telsoa014fcda012018-03-09 14:13:49 +0000136 break;
137 }
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000138 case LayerType::BatchToSpaceNd:
139 {
140 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
141 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
142 auto cLayer = boost::polymorphic_downcast<const BatchToSpaceNdLayer*>(&layer);
143
144 result = layerSupportObject->IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
145 OverrideDataType(output, dataType),
146 cLayer->GetParameters(),
147 reason);
148 break;
149 }
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100150 case LayerType::Comparison:
151 {
152 auto cLayer = boost::polymorphic_downcast<const ComparisonLayer*>(&layer);
153
154 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
155 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
156 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
157
158 result = layerSupportObject->IsComparisonSupported(OverrideDataType(input0, dataType),
159 OverrideDataType(input1, dataType),
160 OverrideDataType(output, DataType::Boolean),
161 cLayer->GetParameters(),
162 reason);
163 break;
164 }
telsoa014fcda012018-03-09 14:13:49 +0000165 case LayerType::Constant:
166 {
167 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100168 result = layerSupportObject->IsConstantSupported(OverrideDataType(output, dataType), reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100169 break;
170 }
171 case LayerType::ConvertFp16ToFp32:
172 {
173 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
174 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100175 result = layerSupportObject->IsConvertFp16ToFp32Supported(input, output, reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100176 break;
177 }
178 case LayerType::ConvertFp32ToFp16:
179 {
180 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
181 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100182 result = layerSupportObject->IsConvertFp32ToFp16Supported(input, output, reason);
telsoa014fcda012018-03-09 14:13:49 +0000183 break;
184 }
185 case LayerType::Convolution2d:
186 {
187 auto cLayer = boost::polymorphic_downcast<const Convolution2dLayer*>(&layer);
arovir01a6824102018-08-28 17:40:45 +0100188
189 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
190 dataType);
telsoa01c577f2c2018-08-31 09:22:23 +0100191 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
surmeh013537c2c2018-05-18 16:31:43 +0100192 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
193
arovir01a6824102018-08-28 17:40:45 +0100194 const Convolution2dDescriptor& descriptor = cLayer->GetParameters();
surmeh013537c2c2018-05-18 16:31:43 +0100195
arovir01a6824102018-08-28 17:40:45 +0100196 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100197 Optional<TensorInfo> biases;
surmeh013537c2c2018-05-18 16:31:43 +0100198 if (descriptor.m_BiasEnabled)
199 {
David Beck5eec11d2018-10-04 15:43:17 +0100200 biases =
201 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
surmeh013537c2c2018-05-18 16:31:43 +0100202 }
203
David Beck33f0ae02018-10-18 15:13:56 +0100204 result = layerSupportObject->IsConvolution2dSupported(
surmeh013537c2c2018-05-18 16:31:43 +0100205 input,
206 output,
207 descriptor,
telsoa01c577f2c2018-08-31 09:22:23 +0100208 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100209 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100210 reason);
telsoa014fcda012018-03-09 14:13:49 +0000211 break;
212 }
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000213 case LayerType::Debug:
214 {
215 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
216 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
217
218 result = layerSupportObject->IsDebugSupported(OverrideDataType(input, dataType),
219 OverrideDataType(output, dataType),
220 reason);
221 break;
222 }
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100223 case LayerType::DepthToSpace:
224 {
225 auto cLayer = boost::polymorphic_downcast<const DepthToSpaceLayer*>(&layer);
226
227 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
228 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
229
230 result = layerSupportObject->IsDepthToSpaceSupported(OverrideDataType(input, dataType),
231 OverrideDataType(output, dataType),
232 cLayer->GetParameters(),
233 reason);
234 break;
235 }
telsoa014fcda012018-03-09 14:13:49 +0000236 case LayerType::DepthwiseConvolution2d:
237 {
238 auto cLayer = boost::polymorphic_downcast<const DepthwiseConvolution2dLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100239 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
240 dataType);
241 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
242 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
243
telsoa01c577f2c2018-08-31 09:22:23 +0100244 const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();
arovir01a6824102018-08-28 17:40:45 +0100245
246 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100247 Optional<TensorInfo> biases;
telsoa01c577f2c2018-08-31 09:22:23 +0100248 if (descriptor.m_BiasEnabled)
249 {
David Beck5eec11d2018-10-04 15:43:17 +0100250 biases =
251 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
telsoa01c577f2c2018-08-31 09:22:23 +0100252 }
telsoa01c577f2c2018-08-31 09:22:23 +0100253
David Beck33f0ae02018-10-18 15:13:56 +0100254 result = layerSupportObject->IsDepthwiseConvolutionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100255 input,
256 output,
257 descriptor,
258 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100259 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100260 reason);
telsoa014fcda012018-03-09 14:13:49 +0000261 break;
262 }
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000263 case LayerType::Dequantize:
264 {
265 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
266 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
267
Aron Virginas-Tar87972be2019-11-13 15:16:28 +0000268 result = layerSupportObject->IsDequantizeSupported(input,
269 OverrideDataType(output, dataType),
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000270 reason);
271 break;
272 }
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000273 case LayerType::DetectionPostProcess:
274 {
275 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
276 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
277 auto cLayer = boost::polymorphic_downcast<const DetectionPostProcessLayer*>(&layer);
278 const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
279 result = layerSupportObject->IsDetectionPostProcessSupported(input0,
280 input1,
281 descriptor,
282 reason);
283 break;
284 }
telsoa014fcda012018-03-09 14:13:49 +0000285 case LayerType::FakeQuantization:
286 {
287 auto cLayer = boost::polymorphic_downcast<const FakeQuantizationLayer*>(&layer);
288 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100289 result = layerSupportObject->IsFakeQuantizationSupported(OverrideDataType(input, dataType),
290 cLayer->GetParameters(),
291 reason);
telsoa014fcda012018-03-09 14:13:49 +0000292 break;
293 }
294 case LayerType::Floor:
295 {
296 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
297 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100298 result = layerSupportObject->IsFloorSupported(OverrideDataType(input, dataType),
299 OverrideDataType(output, dataType),
300 reason);
telsoa014fcda012018-03-09 14:13:49 +0000301 break;
302 }
303 case LayerType::FullyConnected:
304 {
305 auto cLayer = boost::polymorphic_downcast<const FullyConnectedLayer*>(&layer);
306 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100307 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
308 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
309
310 TensorInfo biasInfo;
311 const TensorInfo * biasInfoPtr = nullptr;
312 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
313 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
314 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
315
316 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
317 if (descriptor.m_BiasEnabled)
318 {
319 BOOST_ASSERT(cLayer->m_Bias.get() != nullptr);
320 biasInfo = OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
321 biasInfoPtr = &biasInfo;
322 }
323 else
324 {
325 // If biases are not enabled pass a dummy tensorinfo for the validation
326 switch(input.GetDataType())
327 {
328 case DataType::Float16:
329 {
330 biasInfoPtr = &dummyFloat16Bias;
331 break;
332 }
333 case DataType::Float32:
334 {
335 biasInfoPtr = &dummyFloat32Bias;
336 break;
337 }
338 case DataType::QuantisedAsymm8:
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +0100339 case DataType::QuantisedSymm16:
telsoa01c577f2c2018-08-31 09:22:23 +0100340 {
341 biasInfoPtr = &dummyQA8Bias;
342 break;
343 }
344 default:
345 {
346 BOOST_ASSERT_MSG(false, "Unexpected bias type");
347 }
348 }
349 }
350
David Beck33f0ae02018-10-18 15:13:56 +0100351 result = layerSupportObject->IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100352 OverrideDataType(input, dataType),
353 OverrideDataType(output, dataType),
354 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
355 *biasInfoPtr,
356 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100357 reason);
telsoa014fcda012018-03-09 14:13:49 +0000358 break;
359 }
narpra01b89b05f2019-01-16 09:53:09 +0000360 case LayerType::Gather:
361 {
362 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
363 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
364 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
365 result = layerSupportObject->IsGatherSupported(OverrideDataType(input0, dataType),
Ellen Norris-Thompsone0dbedf2019-06-24 09:23:38 +0100366 input1,
narpra01b89b05f2019-01-16 09:53:09 +0000367 OverrideDataType(output, dataType),
368 reason);
369 break;
370 }
telsoa014fcda012018-03-09 14:13:49 +0000371 case LayerType::Input:
372 {
373 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100374 result = layerSupportObject->IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000375 break;
376 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100377 case LayerType::InstanceNormalization:
378 {
379 auto cLayer = boost::polymorphic_downcast<const InstanceNormalizationLayer*>(&layer);
380 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
381
382 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
383 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
384
385 result = layerSupportObject->IsInstanceNormalizationSupported(
386 OverrideDataType(input, dataType),
387 OverrideDataType(output, dataType),
388 descriptor,
389 reason);
390 break;
391 }
telsoa014fcda012018-03-09 14:13:49 +0000392 case LayerType::L2Normalization:
393 {
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100394 auto cLayer = boost::polymorphic_downcast<const L2NormalizationLayer*>(&layer);
395 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
396
telsoa014fcda012018-03-09 14:13:49 +0000397 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100398 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100399
David Beck33f0ae02018-10-18 15:13:56 +0100400 result = layerSupportObject->IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100401 OverrideDataType(input, dataType),
402 OverrideDataType(output, dataType),
403 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100404 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100405 break;
406 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100407 case LayerType::LogSoftmax:
408 {
409 auto cLayer = boost::polymorphic_downcast<const LogSoftmaxLayer*>(&layer);
410
411 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
412 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
413
414 result = layerSupportObject->IsLogSoftmaxSupported(OverrideDataType(input, dataType),
415 OverrideDataType(output, dataType),
416 cLayer->GetParameters(),
417 reason);
418 break;
419 }
telsoa01c577f2c2018-08-31 09:22:23 +0100420 case LayerType::Lstm:
421 {
422 auto cLayer = boost::polymorphic_downcast<const LstmLayer*>(&layer);
423 const LstmDescriptor& descriptor = cLayer->GetParameters();
424
425 // All inputs.
426 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
427 dataType);
428 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
429 dataType);
430 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
431 dataType);
432 // All outputs
433 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
434 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
435 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
436 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
437
438 // Basic parameters
439 const TensorInfo& inputToForgetWeights
440 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
441 const TensorInfo& inputToCellWeights
442 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
443 const TensorInfo& inputToOutputWeights
444 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
445 const TensorInfo& recurrentToForgetWeights
446 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
447 const TensorInfo& recurrentToCellWeights
448 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
449 const TensorInfo& recurrentToOutputWeights
450 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
451 const TensorInfo& forgetGateBias
452 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
453 const TensorInfo& cellBias
454 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
455 const TensorInfo& outputGateBias
456 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
457
Jan Eilersd01a83c2019-07-03 18:20:40 +0100458 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100459
Jan Eilersd01a83c2019-07-03 18:20:40 +0100460 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
461 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
462 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
463 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
464 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
465 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
466 paramsInfo.m_ForgetGateBias = &forgetGateBias;
467 paramsInfo.m_CellBias = &cellBias;
468 paramsInfo.m_OutputGateBias = &outputGateBias;
469
470
471 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100472 TensorInfo optInputToInputWeights;
473 TensorInfo optRecurrentToInputWeights;
474 TensorInfo optCellToInputWeights;
475 TensorInfo optInputGateBias;
476 TensorInfo optProjectionWeights;
477 TensorInfo optProjectionBias;
478 TensorInfo optCellToForgetWeights;
479 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100480 TensorInfo optInputLayerNormWeights;
481 TensorInfo optForgetLayerNormWeights;
482 TensorInfo optCellLayerNormWeights;
483 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100484
485 if(!descriptor.m_CifgEnabled)
486 {
487 optInputToInputWeights =
488 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100489 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100490
491 optRecurrentToInputWeights =
492 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100493 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100494 if (cLayer->m_CifgParameters.m_CellToInputWeights != nullptr)
495 {
496 optCellToInputWeights =
497 OverrideDataType(cLayer->m_CifgParameters.m_CellToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100498 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100499 }
500 optInputGateBias =
501 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100502 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100503 }
504
505 if(descriptor.m_ProjectionEnabled)
506 {
507 optProjectionWeights =
508 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100509 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100510 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
511 {
512 optProjectionBias =
513 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100514 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100515 }
516 }
517
518 if(descriptor.m_PeepholeEnabled)
519 {
520 optCellToForgetWeights =
521 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100522 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100523 optCellToOutputWeights =
524 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100525 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100526 }
527
Jan Eilers38e05bd2019-06-26 13:10:09 +0100528 if(descriptor.m_LayerNormEnabled)
529 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100530 if (!descriptor.m_CifgEnabled)
531 {
532 optInputLayerNormWeights = OverrideDataType(
533 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
534 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
535 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100536
537 optForgetLayerNormWeights = OverrideDataType(
538 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100539 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100540
541 optCellLayerNormWeights = OverrideDataType(
542 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100543 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100544
545 optOutputLayerNormWeights = OverrideDataType(
546 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100547 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100548 }
549
David Beck33f0ae02018-10-18 15:13:56 +0100550 result = layerSupportObject->IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100551 input,
552 outputStateIn,
553 cellStateIn,
554 scratchBuffer,
555 outputStateOut,
556 cellStateOut,
557 output,
558 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100559 paramsInfo,
560 reason);
telsoa014fcda012018-03-09 14:13:49 +0000561 break;
562 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000563 case LayerType::Maximum:
564 {
565 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
566 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
567 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
568
569 result = layerSupportObject->IsMaximumSupported(OverrideDataType(input0, dataType),
570 OverrideDataType(input1, dataType),
571 OverrideDataType(output, dataType),
572 reason);
573 break;
574 }
narpra01b89b05f2019-01-16 09:53:09 +0000575 case LayerType::MemCopy:
576 {
577 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
578 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000579
narpra01b89b05f2019-01-16 09:53:09 +0000580 result = layerSupportObject->IsMemCopySupported(OverrideDataType(input, dataType),
581 OverrideDataType(output, dataType),
582 reason);
583 break;
584 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100585 case LayerType::MemImport:
586 {
587 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
588 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
589
590 result = layerSupportObject->IsMemImportSupported(OverrideDataType(input, dataType),
591 OverrideDataType(output, dataType),
592 reason);
593 break;
594 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100595 case LayerType::Merge:
596 {
597 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
598 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
599 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
600
601 result = layerSupportObject->IsMergeSupported(OverrideDataType(input0, dataType),
602 OverrideDataType(input1, dataType),
603 OverrideDataType(output, dataType),
604 reason);
605 break;
606 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100607 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000608 {
Jim Flynne242f2d2019-05-22 14:24:13 +0100609 auto cLayer = boost::polymorphic_downcast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000610
telsoa01c577f2c2018-08-31 09:22:23 +0100611 // Get vector of all inputs.
612 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000613 {
telsoa01c577f2c2018-08-31 09:22:23 +0100614 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000615 };
telsoa01c577f2c2018-08-31 09:22:23 +0100616 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
617 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
618 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000619
telsoa01c577f2c2018-08-31 09:22:23 +0100620 auto getTensorInfoPtr = [](const TensorInfo& info)
621 {
622 return &info;
623 };
624 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
625 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
626 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000627
Nikhil Raj8599a412018-11-19 14:51:07 +0000628 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
629
Jim Flynne242f2d2019-05-22 14:24:13 +0100630 result = layerSupportObject->IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
631
632
telsoa014fcda012018-03-09 14:13:49 +0000633 break;
634 }
635 case LayerType::Multiplication:
636 {
637 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
638 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100639 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100640 result = layerSupportObject->IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100641 OverrideDataType(input0, dataType),
642 OverrideDataType(input1, dataType),
643 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100644 reason);
telsoa014fcda012018-03-09 14:13:49 +0000645 break;
646 }
647 case LayerType::Normalization:
648 {
649 auto cLayer = boost::polymorphic_downcast<const NormalizationLayer*>(&layer);
650 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
651 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100652 result = layerSupportObject->IsNormalizationSupported(OverrideDataType(input, dataType),
653 OverrideDataType(output, dataType),
654 cLayer->GetParameters(),
655 reason);
telsoa014fcda012018-03-09 14:13:49 +0000656 break;
657 }
658 case LayerType::Output:
659 {
660 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100661 result = layerSupportObject->IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000662 break;
663 }
664 case LayerType::Permute:
665 {
666 auto cLayer = boost::polymorphic_downcast<const PermuteLayer*>(&layer);
667 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
668 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100669 result = layerSupportObject->IsPermuteSupported(OverrideDataType(input, dataType),
670 OverrideDataType(output, dataType),
671 cLayer->GetParameters(),
672 reason);
telsoa014fcda012018-03-09 14:13:49 +0000673 break;
674 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100675 case LayerType::Pad:
676 {
677 auto cLayer = boost::polymorphic_downcast<const PadLayer*>(&layer);
678 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
679 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100680 result = layerSupportObject->IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100681 OverrideDataType(input, dataType),
682 OverrideDataType(output, dataType),
683 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100684 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100685 break;
686 }
telsoa014fcda012018-03-09 14:13:49 +0000687 case LayerType::Pooling2d:
688 {
689 auto cLayer = boost::polymorphic_downcast<const Pooling2dLayer*>(&layer);
690 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
691 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100692 result = layerSupportObject->IsPooling2dSupported(OverrideDataType(input, dataType),
693 OverrideDataType(output, dataType),
694 cLayer->GetParameters(),
695 reason);
telsoa014fcda012018-03-09 14:13:49 +0000696 break;
697 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000698 case LayerType::PreCompiled:
699 {
700 auto cLayer = boost::polymorphic_downcast<const PreCompiledLayer*>(&layer);
701 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
702 result = layerSupportObject->IsPreCompiledSupported(OverrideDataType(input, dataType),
703 cLayer->GetParameters(),
704 reason);
705 break;
706 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000707 case LayerType::Quantize:
708 {
709 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
710 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
711 result = layerSupportObject->IsQuantizeSupported(input, output, reason);
712 break;
713 }
James Conroyee18dc82019-07-17 11:27:46 +0100714 case LayerType::QuantizedLstm:
715 {
716 auto cLayer = boost::polymorphic_downcast<const QuantizedLstmLayer*>(&layer);
717
718 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100719 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
720 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
721 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100722
723 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100724 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
725 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100726
727 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +0100728 QuantizedLstmInputParamsInfo paramsInfo;
729
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100730 paramsInfo.m_InputToInputWeights =
731 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
732 paramsInfo.m_InputToForgetWeights =
733 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
734 paramsInfo.m_InputToCellWeights =
735 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
736 paramsInfo.m_InputToOutputWeights =
737 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100738
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100739 paramsInfo.m_RecurrentToInputWeights =
740 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
741 paramsInfo.m_RecurrentToForgetWeights =
742 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
743 paramsInfo.m_RecurrentToCellWeights =
744 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
745 paramsInfo.m_RecurrentToOutputWeights =
746 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100747
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100748 paramsInfo.m_InputGateBias =
749 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
750 paramsInfo.m_ForgetGateBias =
751 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
752 paramsInfo.m_CellBias =
753 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
754 paramsInfo.m_OutputGateBias =
755 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +0100756
757 result = layerSupportObject->IsQuantizedLstmSupported(input,
758 previousCellStateIn,
759 previousOutputIn,
760 cellStateOut,
761 output,
762 paramsInfo,
763 reason);
764 break;
765 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100766 case LayerType::Division:
767 {
768 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
769 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
770 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100771 result = layerSupportObject->IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100772 OverrideDataType(input0, dataType),
773 OverrideDataType(input1, dataType),
774 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100775 reason);
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100776 break;
777 }
telsoa014fcda012018-03-09 14:13:49 +0000778 case LayerType::Reshape:
779 {
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000780 auto cLayer = boost::polymorphic_downcast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000781 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Kevin May93e023b2019-12-12 17:28:05 +0000782 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000783 result = layerSupportObject->IsReshapeSupported(OverrideDataType(input, dataType),
Kevin May93e023b2019-12-12 17:28:05 +0000784 OverrideDataType(output, dataType),
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000785 cLayer->GetParameters(),
786 reason);
telsoa014fcda012018-03-09 14:13:49 +0000787 break;
788 }
Teresa Charlina9075df2019-06-27 15:41:57 +0100789 case LayerType::Resize:
790 {
791 auto cLayer = boost::polymorphic_downcast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100792 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +0100793 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
794 result = layerSupportObject->IsResizeSupported(OverrideDataType(input, dataType),
795 OverrideDataType(output, dataType),
796 cLayer->GetParameters(),
797 reason);
798 break;
799 }
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +0000800 case LayerType::Rsqrt:
801 {
802 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
803 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
804 result = layerSupportObject->IsRsqrtSupported(OverrideDataType(input, dataType),
805 OverrideDataType(output, dataType),
806 reason);
807 break;
808 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100809 case LayerType::Slice:
810 {
811 auto cLayer = boost::polymorphic_downcast<const SliceLayer*>(&layer);
812
813 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
814 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
815
816 result = layerSupportObject->IsSliceSupported(OverrideDataType(input, dataType),
817 OverrideDataType(output, dataType),
818 cLayer->GetParameters(),
819 reason);
820 break;
821 }
telsoa014fcda012018-03-09 14:13:49 +0000822 case LayerType::Softmax:
823 {
824 auto cLayer = boost::polymorphic_downcast<const SoftmaxLayer*>(&layer);
825 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100826 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100827 result = layerSupportObject->IsSoftmaxSupported(OverrideDataType(input, dataType),
828 OverrideDataType(output, dataType),
829 cLayer->GetParameters(),
830 reason);
telsoa014fcda012018-03-09 14:13:49 +0000831 break;
832 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +0000833 case LayerType::SpaceToBatchNd:
834 {
835 auto cLayer = boost::polymorphic_downcast<const SpaceToBatchNdLayer*>(&layer);
836 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
837 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
838 result = layerSupportObject->IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
839 OverrideDataType(output, dataType),
840 cLayer->GetParameters(),
841 reason);
842 break;
843 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +0100844 case LayerType::SpaceToDepth:
845 {
846 auto cLayer = boost::polymorphic_downcast<const SpaceToDepthLayer*>(&layer);
847
848 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
849 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
850
851 result = layerSupportObject->IsSpaceToDepthSupported(OverrideDataType(input, dataType),
852 OverrideDataType(output, dataType),
853 cLayer->GetParameters(),
854 reason);
855 break;
856 }
telsoa014fcda012018-03-09 14:13:49 +0000857 case LayerType::Splitter:
858 {
859 auto cLayer = boost::polymorphic_downcast<const SplitterLayer*>(&layer);
860 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +0100861
862 // Get vector of all outputs.
863 auto getTensorInfo = [&dataType](const OutputSlot& slot)
864 {
865 return OverrideDataType(slot.GetTensorInfo(), dataType);
866 };
867 auto beginI = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfo);
868 auto endI = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfo);
869 std::vector<TensorInfo> outputs(beginI, endI);
870
871 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
872
David Beck33f0ae02018-10-18 15:13:56 +0100873 result = layerSupportObject->IsSplitterSupported(OverrideDataType(input, dataType),
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +0100874 outputPtrs,
David Beck33f0ae02018-10-18 15:13:56 +0100875 cLayer->GetParameters(),
876 reason);
telsoa014fcda012018-03-09 14:13:49 +0000877 break;
878 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +0100879 case LayerType::Stack:
880 {
881 auto cLayer = boost::polymorphic_downcast<const StackLayer*>(&layer);
882
883 // Get vector of all inputs.
884 auto getTensorInfo = [&dataType](const InputSlot& slot)
885 {
886 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
887 };
888 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
889 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
890 std::vector<TensorInfo> inputs(beginI, endI);
891
892 auto getTensorInfoPtr = [](const TensorInfo& info)
893 {
894 return &info;
895 };
896 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
897 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
898 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
899
900 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
901
902 result = layerSupportObject->IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
903
904 break;
905 }
Derek Lamberti013c3902019-10-21 10:46:16 +0100906 case LayerType::StandIn:
907 {
908 auto cLayer = boost::polymorphic_downcast<const StandInLayer*>(&layer);
909
910 // Get vector of all inputs.
911 auto getTensorInfoIn = [&dataType](const InputSlot& slot)
912 {
913 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
914 };
915 auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
916 {
917 return OverrideDataType(slot.GetTensorInfo(), dataType);
918 };
919 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfoIn);
920 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfoIn);
921 std::vector<TensorInfo> inputs(beginI, endI);
922
923 auto beginO = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
924 auto endO = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfoOut);
925 std::vector<TensorInfo> outputs(beginO, endO);
926
927
928 auto getTensorInfoPtr = [](const TensorInfo& info)
929 {
930 return &info;
931 };
932 auto beginPtrI = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
933 auto endPtrI = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
934 std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
935
936 auto beginPtrO = boost::make_transform_iterator(outputs.begin(), getTensorInfoPtr);
937 auto endPtrO = boost::make_transform_iterator(outputs.end(), getTensorInfoPtr);
938 std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
939
940
941 result = layerSupportObject->IsStandInSupported(inputPtrs,
942 outputPtrs,
943 cLayer->GetParameters(),
944 reason);
945 break;
946 }
Conor Kennedy430b5d82018-11-14 15:28:28 +0000947 case LayerType::StridedSlice:
948 {
949 auto cLayer = boost::polymorphic_downcast<const StridedSliceLayer*>(&layer);
950 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
951 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
952 result = layerSupportObject->IsStridedSliceSupported(OverrideDataType(input, dataType),
953 OverrideDataType(output, dataType),
954 cLayer->GetParameters(),
955 reason);
956 break;
957 }
David Beckc2044fe2018-09-05 15:00:38 +0100958 case LayerType::Subtraction:
959 {
960 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
961 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
962 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100963 result = layerSupportObject->IsSubtractionSupported(
David Beckc2044fe2018-09-05 15:00:38 +0100964 OverrideDataType(input0, dataType),
965 OverrideDataType(input1, dataType),
966 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100967 reason);
David Beckc2044fe2018-09-05 15:00:38 +0100968 break;
969 }
Sadik Armaganeff363d2019-04-05 15:25:46 +0100970 case LayerType::Switch:
971 {
972 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
973 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
974 const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
975 const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
976 result = layerSupportObject->IsSwitchSupported(OverrideDataType(input0, dataType),
977 OverrideDataType(input1, dataType),
978 OverrideDataType(output0, dataType),
979 OverrideDataType(output1, dataType),
980 reason);
981 break;
982 }
narpra0132b90462018-09-13 11:07:48 +0100983 case LayerType::Mean:
984 {
985 auto cLayer = boost::polymorphic_downcast<const MeanLayer*>(&layer);
986 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
987 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100988 result = layerSupportObject->IsMeanSupported(
narpra0132b90462018-09-13 11:07:48 +0100989 OverrideDataType(input, dataType),
990 OverrideDataType(output, dataType),
991 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100992 reason);
narpra0132b90462018-09-13 11:07:48 +0100993 break;
994 }
kevmay0190539692018-11-29 08:40:19 +0000995 case LayerType::Minimum:
996 {
997 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
998 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
999 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1000 result = layerSupportObject->IsMinimumSupported(OverrideDataType(input0, dataType),
1001 OverrideDataType(input1, dataType),
1002 OverrideDataType(output, dataType),
1003 reason);
1004 break;
1005 }
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001006 case LayerType::Prelu:
1007 {
1008 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1009 const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1010 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1011 result = layerSupportObject->IsPreluSupported(OverrideDataType(input, dataType),
1012 OverrideDataType(alpha, dataType),
1013 OverrideDataType(output, dataType),
1014 reason);
1015 break;
1016 }
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001017 case LayerType::TransposeConvolution2d:
1018 {
1019 auto cLayer = boost::polymorphic_downcast<const TransposeConvolution2dLayer*>(&layer);
1020
1021 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1022 dataType);
1023 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1024
1025 const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();
1026
1027 Optional<TensorInfo> biases;
1028 if (descriptor.m_BiasEnabled)
1029 {
1030 BOOST_ASSERT(cLayer->m_Bias.get() != nullptr);
1031 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
1032 GetBiasTypeFromWeightsType(dataType));
1033 }
1034
1035 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
1036 const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);
1037
1038 result = layerSupportObject->IsTransposeConvolution2dSupported(input,
1039 output,
1040 descriptor,
1041 weights,
1042 biases,
1043 reason);
1044
1045 break;
1046 }
telsoa014fcda012018-03-09 14:13:49 +00001047 default:
1048 {
1049 BOOST_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
David Beck33f0ae02018-10-18 15:13:56 +01001050 reason.value() = "Unrecognised layer type";
telsoa014fcda012018-03-09 14:13:49 +00001051 result = false;
1052 break;
1053 }
1054 }
telsoa014fcda012018-03-09 14:13:49 +00001055 return result;
1056}
1057
David Beckdcb751f2018-10-03 11:42:42 +01001058bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +01001059 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +01001060 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +00001061{
David Beckdcb751f2018-10-03 11:42:42 +01001062 auto layer = boost::polymorphic_downcast<const Layer*>(&connectableLayer);
David Beck33f0ae02018-10-18 15:13:56 +01001063 return IsLayerSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
telsoa014fcda012018-03-09 14:13:49 +00001064}
1065
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001066// Default Implementations
Kevin May868eb142019-09-04 17:29:31 +01001067std::unique_ptr<IWorkload> IWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
1068 const WorkloadInfo& info) const
1069{
1070 return std::unique_ptr<IWorkload>();
1071}
1072
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001073std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
1074 const WorkloadInfo& info) const
1075{
1076 return std::unique_ptr<IWorkload>();
1077}
1078
1079std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
1080 const WorkloadInfo& info) const
1081{
1082 return std::unique_ptr<IWorkload>();
1083}
1084
Nikhil Rajee391d52019-09-05 17:50:44 +01001085std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
1086 const WorkloadInfo& info) const
1087{
1088 return std::unique_ptr<IWorkload>();
1089}
1090
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001091std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
1092 const BatchNormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
1093{
1094 return std::unique_ptr<IWorkload>();
1095}
1096
1097std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
1098 const WorkloadInfo& Info) const
1099{
1100 return std::unique_ptr<IWorkload>();
1101}
1102
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001103std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
1104 const WorkloadInfo& info) const
1105{
1106 return std::unique_ptr<IWorkload>();
1107}
1108
Jim Flynne242f2d2019-05-22 14:24:13 +01001109std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
Jim Flynn4ed6c832019-05-20 11:02:46 +01001110 const WorkloadInfo& info) const
1111{
1112 return std::unique_ptr<IWorkload>();
1113}
1114
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001115std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
1116 const WorkloadInfo& info) const
1117{
1118 return std::unique_ptr<IWorkload>();
1119}
1120
1121std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& descriptor,
1122 const WorkloadInfo& info) const
1123{
1124 return std::unique_ptr<IWorkload>();
1125}
1126
1127std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& descriptor,
1128 const WorkloadInfo& info) const
1129{
1130 return std::unique_ptr<IWorkload>();
1131}
1132
1133std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
1134 const WorkloadInfo& info) const
1135{
1136 return std::unique_ptr<IWorkload>();
1137}
1138
1139std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
1140 const WorkloadInfo& info) const
1141{
1142 return std::unique_ptr<IWorkload>();
1143}
1144
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01001145std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
1146 const WorkloadInfo& info) const
1147{
1148 return std::unique_ptr<IWorkload>();
1149}
1150
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001151std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
1152 const DepthwiseConvolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
1153{
1154 return std::unique_ptr<IWorkload>();
1155}
1156
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00001157std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
1158 const DequantizeQueueDescriptor& descriptor, const WorkloadInfo& info) const
1159{
1160 return std::unique_ptr<IWorkload>();
1161}
1162
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001163std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
1164 const DetectionPostProcessQueueDescriptor& descriptor, const WorkloadInfo& info) const
1165{
1166 return std::unique_ptr<IWorkload>();
1167}
1168
1169std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
1170 const WorkloadInfo& info) const
1171{
1172 return std::unique_ptr<IWorkload>();
1173}
1174
1175std::unique_ptr<IWorkload> IWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
1176 const WorkloadInfo& Info) const
1177{
1178 return std::unique_ptr<IWorkload>();
1179}
1180
1181std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
1182 const WorkloadInfo& info) const
1183{
1184 return std::unique_ptr<IWorkload>();
1185}
1186
1187std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
1188 const WorkloadInfo& info) const
1189{
1190 return std::unique_ptr<IWorkload>();
1191}
1192
1193std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& descriptor,
1194 const WorkloadInfo& info) const
1195{
1196 return std::unique_ptr<IWorkload>();
1197}
1198
1199std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
1200 const WorkloadInfo& info) const
1201{
1202 return std::unique_ptr<IWorkload>();
1203}
1204
1205std::unique_ptr<IWorkload> IWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
1206 const WorkloadInfo& info) const
1207{
1208 return std::unique_ptr<IWorkload>();
1209}
1210
Kevin Mayce5045a2019-10-02 14:07:47 +01001211std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
1212 const InstanceNormalizationQueueDescriptor& descriptor,
1213 const WorkloadInfo& info) const
1214{
1215 return std::unique_ptr<IWorkload>();
1216}
1217
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001218std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
1219 const WorkloadInfo& info) const
1220{
1221 return std::unique_ptr<IWorkload>();
1222}
1223
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01001224std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
1225 const WorkloadInfo& info) const
1226{
1227 return std::unique_ptr<IWorkload>();
1228}
1229
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001230std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
1231 const WorkloadInfo& info) const
1232{
1233 return std::unique_ptr<IWorkload>();
1234}
1235
1236std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
1237 const WorkloadInfo& info) const
1238{
1239 return std::unique_ptr<IWorkload>();
1240}
1241
1242std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
1243 const WorkloadInfo& Info) const
1244{
1245 return std::unique_ptr<IWorkload>();
1246}
1247
1248std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
1249 const WorkloadInfo& info) const
1250{
1251 return std::unique_ptr<IWorkload>();
1252}
1253
Derek Lambertif674aa02019-08-01 15:56:25 +01001254std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
1255 const WorkloadInfo& info) const
1256{
1257 return std::unique_ptr<IWorkload>();
1258}
1259
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01001260std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& descriptor,
1261 const WorkloadInfo& info) const
1262{
1263 return std::unique_ptr<IWorkload>();
1264}
1265
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001266std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
1267 const WorkloadInfo& info) const
1268{
1269 return std::unique_ptr<IWorkload>();
1270}
1271
1272std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
1273 const WorkloadInfo& info) const
1274{
1275 return std::unique_ptr<IWorkload>();
1276}
1277
1278std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
1279 const WorkloadInfo& info) const
1280{
1281 return std::unique_ptr<IWorkload>();
1282}
1283
1284std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
1285 const WorkloadInfo& info) const
1286{
1287 return std::unique_ptr<IWorkload>();
1288}
1289
1290std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
1291 const WorkloadInfo& info) const
1292{
1293 return std::unique_ptr<IWorkload>();
1294}
1295
1296std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
1297 const WorkloadInfo& Info) const
1298{
1299 return std::unique_ptr<IWorkload>();
1300}
1301
1302std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
1303 const WorkloadInfo& info) const
1304{
1305 return std::unique_ptr<IWorkload>();
1306}
1307
1308std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
1309 const WorkloadInfo& info) const
1310{
1311 return std::unique_ptr<IWorkload>();
1312}
1313
1314std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
1315 const WorkloadInfo& info) const
1316{
1317 return std::unique_ptr<IWorkload>();
1318}
1319
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001320std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor &descriptor,
1321 const WorkloadInfo &info) const
1322{
1323 return std::unique_ptr<IWorkload>();
1324}
1325
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001326std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
1327 const WorkloadInfo& Info) const
1328{
1329 return std::unique_ptr<IWorkload>();
1330}
1331
James Conroyee18dc82019-07-17 11:27:46 +01001332std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
1333 const WorkloadInfo& info) const
1334{
1335 return std::unique_ptr<IWorkload>();
1336}
1337
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001338std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
1339 const WorkloadInfo& info) const
1340{
1341 return std::unique_ptr<IWorkload>();
1342}
1343
1344std::unique_ptr<IWorkload> IWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
1345 const WorkloadInfo& info) const
1346{
1347 return std::unique_ptr<IWorkload>();
1348}
1349
Teresa Charlina9075df2019-06-27 15:41:57 +01001350std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
1351 const WorkloadInfo& info) const
1352{
1353 return std::unique_ptr<IWorkload>();
1354}
1355
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001356std::unique_ptr<IWorkload> IWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
1357 const WorkloadInfo& info) const
1358{
1359 return std::unique_ptr<IWorkload>();
1360}
1361
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001362std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
1363 const WorkloadInfo& info) const
1364{
1365 return std::unique_ptr<IWorkload>();
1366}
1367
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001368std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
1369 const WorkloadInfo& info) const
1370{
1371 return std::unique_ptr<IWorkload>();
1372}
1373
1374std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
1375 const WorkloadInfo& info) const
1376{
1377 return std::unique_ptr<IWorkload>();
1378}
1379
1380std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
1381 const WorkloadInfo& info) const
1382{
1383 return std::unique_ptr<IWorkload>();
1384}
1385
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001386std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
1387 const WorkloadInfo& info) const
1388{
1389 return std::unique_ptr<IWorkload>();
1390}
1391
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001392std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
1393 const WorkloadInfo& info) const
1394{
1395 return std::unique_ptr<IWorkload>();
1396}
1397
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001398std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
1399 const WorkloadInfo& Info) const
1400{
1401 return std::unique_ptr<IWorkload>();
1402}
1403
1404std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
1405 const WorkloadInfo& info) const
1406{
1407 return std::unique_ptr<IWorkload>();
1408}
1409
Sadik Armaganeff363d2019-04-05 15:25:46 +01001410std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& descriptor,
1411 const WorkloadInfo& info) const
1412{
1413 return std::unique_ptr<IWorkload>();
1414}
1415
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001416std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
1417 const TransposeConvolution2dQueueDescriptor& descriptor,
1418 const WorkloadInfo& info) const
1419{
1420 return std::unique_ptr<IWorkload>();
surmeh013537c2c2018-05-18 16:31:43 +01001421}
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001422
} // namespace armnn