blob: 9901dcb7c1457ab8ebb4079cb52ed0b36439fca6 [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00005
6#include "CpuTensorHandle.hpp"
Derek Lambertia9cca6a2019-03-25 15:41:58 +00007#include "WorkloadFactory.hpp"
8
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00009#include <Layer.hpp>
10#include <LayersFwd.hpp>
David Beckdcb751f2018-10-03 11:42:42 +010011
David Beckb4540be2018-09-24 13:18:27 +010012#include <armnn/Types.hpp>
13#include <armnn/LayerSupport.hpp>
David Beck111b5d92018-11-12 14:59:37 +000014#include <armnn/ILayerSupport.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +000015#include <armnn/BackendRegistry.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000016
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000017#include <backendsCommon/WorkloadFactory.hpp>
David Beck111b5d92018-11-12 14:59:37 +000018#include <backendsCommon/IBackendInternal.hpp>
Francis Murtagh46c09d02019-05-28 08:15:28 +010019#include <backendsCommon/test/WorkloadTestUtils.hpp>
telsoa014fcda012018-03-09 14:13:49 +000020
21#include <boost/cast.hpp>
telsoa014fcda012018-03-09 14:13:49 +000022#include <boost/iterator/transform_iterator.hpp>
23
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000024#include <cstring>
David Beck111b5d92018-11-12 14:59:37 +000025#include <sstream>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000026
telsoa014fcda012018-03-09 14:13:49 +000027namespace armnn
28{
29
telsoa01c577f2c2018-08-31 09:22:23 +010030namespace
31{
telsoa01c577f2c2018-08-31 09:22:23 +010032
David Beck29c75de2018-10-23 13:35:58 +010033const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
34{
35 if (!type)
36 {
37 return info;
telsoa01c577f2c2018-08-31 09:22:23 +010038 }
39
David Beck29c75de2018-10-23 13:35:58 +010040 return TensorInfo(info.GetShape(), type.value(), info.GetQuantizationScale(), info.GetQuantizationOffset());
telsoa01c577f2c2018-08-31 09:22:23 +010041}
42
David Beck29c75de2018-10-23 13:35:58 +010043} // anonymous namespace
44
David Beck33f0ae02018-10-18 15:13:56 +010045bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
David Beckdcb751f2018-10-03 11:42:42 +010046 const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +010047 Optional<DataType> dataType,
David Beckdcb751f2018-10-03 11:42:42 +010048 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +000049{
David Beck33f0ae02018-10-18 15:13:56 +010050 Optional<std::string&> reason = outReasonIfUnsupported;
telsoa014fcda012018-03-09 14:13:49 +000051 bool result;
David Beckdcb751f2018-10-03 11:42:42 +010052 const Layer& layer = *(boost::polymorphic_downcast<const Layer*>(&connectableLayer));
53
David Beck111b5d92018-11-12 14:59:37 +000054 auto const& backendRegistry = BackendRegistryInstance();
55 if (!backendRegistry.IsBackendRegistered(backendId))
56 {
57 std::stringstream ss;
58 ss << connectableLayer.GetName() << " is not supported on " << backendId
59 << " because this backend is not registered.";
60
61 outReasonIfUnsupported = ss.str();
62 return false;
63 }
64
65 auto backendFactory = backendRegistry.GetFactory(backendId);
66 auto backendObject = backendFactory();
67 auto layerSupportObject = backendObject->GetLayerSupport();
David Beck33f0ae02018-10-18 15:13:56 +010068
telsoa014fcda012018-03-09 14:13:49 +000069 switch(layer.GetType())
70 {
Kevin May868eb142019-09-04 17:29:31 +010071 case LayerType::Abs:
72 {
73 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
74 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
75 result = layerSupportObject->IsAbsSupported(OverrideDataType(input, dataType),
76 OverrideDataType(output, dataType),
77 reason);
78 break;
79 }
telsoa014fcda012018-03-09 14:13:49 +000080 case LayerType::Activation:
81 {
82 auto cLayer = boost::polymorphic_downcast<const ActivationLayer*>(&layer);
83 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +010084 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010085 result = layerSupportObject->IsActivationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010086 OverrideDataType(input, dataType),
87 OverrideDataType(output, dataType),
88 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +010089 reason);
telsoa014fcda012018-03-09 14:13:49 +000090 break;
91 }
92 case LayerType::Addition:
93 {
94 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
95 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
96 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010097 result = layerSupportObject->IsAdditionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010098 OverrideDataType(input0, dataType),
99 OverrideDataType(input1, dataType),
100 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100101 reason);
telsoa014fcda012018-03-09 14:13:49 +0000102 break;
103 }
Nikhil Rajee391d52019-09-05 17:50:44 +0100104 case LayerType::ArgMinMax:
105 {
106 auto cLayer = boost::polymorphic_downcast<const ArgMinMaxLayer*>(&layer);
107 const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();
108
109 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
110 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
111 result = layerSupportObject->IsArgMinMaxSupported(
112 OverrideDataType(input, dataType),
Narumol Prangnawaratd1f57732019-10-31 14:24:02 +0000113 OverrideDataType(output, DataType::Signed32),
Nikhil Rajee391d52019-09-05 17:50:44 +0100114 descriptor,
115 reason);
116 break;
117 }
telsoa014fcda012018-03-09 14:13:49 +0000118 case LayerType::BatchNormalization:
119 {
120 auto cLayer = boost::polymorphic_downcast<const BatchNormalizationLayer*>(&layer);
121 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100122 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
123 const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
124 const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
125 const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
126 const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100127 result = layerSupportObject->IsBatchNormalizationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100128 OverrideDataType(input, dataType),
129 OverrideDataType(output, dataType),
130 OverrideDataType(mean, dataType),
131 OverrideDataType(var, dataType),
132 OverrideDataType(beta, dataType),
133 OverrideDataType(gamma, dataType),
134 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100135 reason);
telsoa014fcda012018-03-09 14:13:49 +0000136 break;
137 }
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000138 case LayerType::BatchToSpaceNd:
139 {
140 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
141 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
142 auto cLayer = boost::polymorphic_downcast<const BatchToSpaceNdLayer*>(&layer);
143
144 result = layerSupportObject->IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
145 OverrideDataType(output, dataType),
146 cLayer->GetParameters(),
147 reason);
148 break;
149 }
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100150 case LayerType::Comparison:
151 {
152 auto cLayer = boost::polymorphic_downcast<const ComparisonLayer*>(&layer);
153
154 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
155 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
156 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
157
158 result = layerSupportObject->IsComparisonSupported(OverrideDataType(input0, dataType),
159 OverrideDataType(input1, dataType),
160 OverrideDataType(output, DataType::Boolean),
161 cLayer->GetParameters(),
162 reason);
163 break;
164 }
telsoa014fcda012018-03-09 14:13:49 +0000165 case LayerType::Constant:
166 {
167 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100168 result = layerSupportObject->IsConstantSupported(OverrideDataType(output, dataType), reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100169 break;
170 }
171 case LayerType::ConvertFp16ToFp32:
172 {
173 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
174 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100175 result = layerSupportObject->IsConvertFp16ToFp32Supported(input, output, reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100176 break;
177 }
178 case LayerType::ConvertFp32ToFp16:
179 {
180 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
181 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100182 result = layerSupportObject->IsConvertFp32ToFp16Supported(input, output, reason);
telsoa014fcda012018-03-09 14:13:49 +0000183 break;
184 }
185 case LayerType::Convolution2d:
186 {
187 auto cLayer = boost::polymorphic_downcast<const Convolution2dLayer*>(&layer);
arovir01a6824102018-08-28 17:40:45 +0100188
189 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
190 dataType);
telsoa01c577f2c2018-08-31 09:22:23 +0100191 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
surmeh013537c2c2018-05-18 16:31:43 +0100192 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
193
arovir01a6824102018-08-28 17:40:45 +0100194 const Convolution2dDescriptor& descriptor = cLayer->GetParameters();
surmeh013537c2c2018-05-18 16:31:43 +0100195
arovir01a6824102018-08-28 17:40:45 +0100196 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100197 Optional<TensorInfo> biases;
surmeh013537c2c2018-05-18 16:31:43 +0100198 if (descriptor.m_BiasEnabled)
199 {
David Beck5eec11d2018-10-04 15:43:17 +0100200 biases =
201 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
surmeh013537c2c2018-05-18 16:31:43 +0100202 }
203
David Beck33f0ae02018-10-18 15:13:56 +0100204 result = layerSupportObject->IsConvolution2dSupported(
surmeh013537c2c2018-05-18 16:31:43 +0100205 input,
206 output,
207 descriptor,
telsoa01c577f2c2018-08-31 09:22:23 +0100208 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100209 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100210 reason);
telsoa014fcda012018-03-09 14:13:49 +0000211 break;
212 }
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000213 case LayerType::Debug:
214 {
215 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
216 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
217
218 result = layerSupportObject->IsDebugSupported(OverrideDataType(input, dataType),
219 OverrideDataType(output, dataType),
220 reason);
221 break;
222 }
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100223 case LayerType::DepthToSpace:
224 {
225 auto cLayer = boost::polymorphic_downcast<const DepthToSpaceLayer*>(&layer);
226
227 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
228 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
229
230 result = layerSupportObject->IsDepthToSpaceSupported(OverrideDataType(input, dataType),
231 OverrideDataType(output, dataType),
232 cLayer->GetParameters(),
233 reason);
234 break;
235 }
telsoa014fcda012018-03-09 14:13:49 +0000236 case LayerType::DepthwiseConvolution2d:
237 {
238 auto cLayer = boost::polymorphic_downcast<const DepthwiseConvolution2dLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100239 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
240 dataType);
241 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
242 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
243
telsoa01c577f2c2018-08-31 09:22:23 +0100244 const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();
arovir01a6824102018-08-28 17:40:45 +0100245
246 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100247 Optional<TensorInfo> biases;
telsoa01c577f2c2018-08-31 09:22:23 +0100248 if (descriptor.m_BiasEnabled)
249 {
David Beck5eec11d2018-10-04 15:43:17 +0100250 biases =
251 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
telsoa01c577f2c2018-08-31 09:22:23 +0100252 }
telsoa01c577f2c2018-08-31 09:22:23 +0100253
David Beck33f0ae02018-10-18 15:13:56 +0100254 result = layerSupportObject->IsDepthwiseConvolutionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100255 input,
256 output,
257 descriptor,
258 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100259 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100260 reason);
telsoa014fcda012018-03-09 14:13:49 +0000261 break;
262 }
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000263 case LayerType::Dequantize:
264 {
265 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
266 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
267
Aron Virginas-Tar87972be2019-11-13 15:16:28 +0000268 result = layerSupportObject->IsDequantizeSupported(input,
269 OverrideDataType(output, dataType),
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000270 reason);
271 break;
272 }
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000273 case LayerType::DetectionPostProcess:
274 {
275 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
276 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
277 auto cLayer = boost::polymorphic_downcast<const DetectionPostProcessLayer*>(&layer);
278 const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
279 result = layerSupportObject->IsDetectionPostProcessSupported(input0,
280 input1,
281 descriptor,
282 reason);
283 break;
284 }
telsoa014fcda012018-03-09 14:13:49 +0000285 case LayerType::FakeQuantization:
286 {
287 auto cLayer = boost::polymorphic_downcast<const FakeQuantizationLayer*>(&layer);
288 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100289 result = layerSupportObject->IsFakeQuantizationSupported(OverrideDataType(input, dataType),
290 cLayer->GetParameters(),
291 reason);
telsoa014fcda012018-03-09 14:13:49 +0000292 break;
293 }
294 case LayerType::Floor:
295 {
296 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
297 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100298 result = layerSupportObject->IsFloorSupported(OverrideDataType(input, dataType),
299 OverrideDataType(output, dataType),
300 reason);
telsoa014fcda012018-03-09 14:13:49 +0000301 break;
302 }
303 case LayerType::FullyConnected:
304 {
305 auto cLayer = boost::polymorphic_downcast<const FullyConnectedLayer*>(&layer);
306 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100307 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
308 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
309
310 TensorInfo biasInfo;
311 const TensorInfo * biasInfoPtr = nullptr;
312 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
313 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
314 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
315
316 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
317 if (descriptor.m_BiasEnabled)
318 {
319 BOOST_ASSERT(cLayer->m_Bias.get() != nullptr);
320 biasInfo = OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
321 biasInfoPtr = &biasInfo;
322 }
323 else
324 {
325 // If biases are not enabled pass a dummy tensorinfo for the validation
326 switch(input.GetDataType())
327 {
328 case DataType::Float16:
329 {
330 biasInfoPtr = &dummyFloat16Bias;
331 break;
332 }
333 case DataType::Float32:
334 {
335 biasInfoPtr = &dummyFloat32Bias;
336 break;
337 }
338 case DataType::QuantisedAsymm8:
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +0100339 case DataType::QuantisedSymm16:
telsoa01c577f2c2018-08-31 09:22:23 +0100340 {
341 biasInfoPtr = &dummyQA8Bias;
342 break;
343 }
344 default:
345 {
346 BOOST_ASSERT_MSG(false, "Unexpected bias type");
347 }
348 }
349 }
350
David Beck33f0ae02018-10-18 15:13:56 +0100351 result = layerSupportObject->IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100352 OverrideDataType(input, dataType),
353 OverrideDataType(output, dataType),
354 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
355 *biasInfoPtr,
356 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100357 reason);
telsoa014fcda012018-03-09 14:13:49 +0000358 break;
359 }
narpra01b89b05f2019-01-16 09:53:09 +0000360 case LayerType::Gather:
361 {
362 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
363 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
364 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
365 result = layerSupportObject->IsGatherSupported(OverrideDataType(input0, dataType),
Ellen Norris-Thompsone0dbedf2019-06-24 09:23:38 +0100366 input1,
narpra01b89b05f2019-01-16 09:53:09 +0000367 OverrideDataType(output, dataType),
368 reason);
369 break;
370 }
telsoa014fcda012018-03-09 14:13:49 +0000371 case LayerType::Input:
372 {
373 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100374 result = layerSupportObject->IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000375 break;
376 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100377 case LayerType::InstanceNormalization:
378 {
379 auto cLayer = boost::polymorphic_downcast<const InstanceNormalizationLayer*>(&layer);
380 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
381
382 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
383 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
384
385 result = layerSupportObject->IsInstanceNormalizationSupported(
386 OverrideDataType(input, dataType),
387 OverrideDataType(output, dataType),
388 descriptor,
389 reason);
390 break;
391 }
telsoa014fcda012018-03-09 14:13:49 +0000392 case LayerType::L2Normalization:
393 {
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100394 auto cLayer = boost::polymorphic_downcast<const L2NormalizationLayer*>(&layer);
395 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
396
telsoa014fcda012018-03-09 14:13:49 +0000397 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100398 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100399
David Beck33f0ae02018-10-18 15:13:56 +0100400 result = layerSupportObject->IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100401 OverrideDataType(input, dataType),
402 OverrideDataType(output, dataType),
403 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100404 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100405 break;
406 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100407 case LayerType::LogSoftmax:
408 {
409 auto cLayer = boost::polymorphic_downcast<const LogSoftmaxLayer*>(&layer);
410
411 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
412 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
413
414 result = layerSupportObject->IsLogSoftmaxSupported(OverrideDataType(input, dataType),
415 OverrideDataType(output, dataType),
416 cLayer->GetParameters(),
417 reason);
418 break;
419 }
telsoa01c577f2c2018-08-31 09:22:23 +0100420 case LayerType::Lstm:
421 {
422 auto cLayer = boost::polymorphic_downcast<const LstmLayer*>(&layer);
423 const LstmDescriptor& descriptor = cLayer->GetParameters();
424
425 // All inputs.
426 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
427 dataType);
428 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
429 dataType);
430 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
431 dataType);
432 // All outputs
433 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
434 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
435 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
436 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
437
438 // Basic parameters
439 const TensorInfo& inputToForgetWeights
440 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
441 const TensorInfo& inputToCellWeights
442 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
443 const TensorInfo& inputToOutputWeights
444 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
445 const TensorInfo& recurrentToForgetWeights
446 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
447 const TensorInfo& recurrentToCellWeights
448 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
449 const TensorInfo& recurrentToOutputWeights
450 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
451 const TensorInfo& forgetGateBias
452 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
453 const TensorInfo& cellBias
454 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
455 const TensorInfo& outputGateBias
456 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
457
Jan Eilersd01a83c2019-07-03 18:20:40 +0100458 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100459
Jan Eilersd01a83c2019-07-03 18:20:40 +0100460 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
461 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
462 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
463 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
464 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
465 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
466 paramsInfo.m_ForgetGateBias = &forgetGateBias;
467 paramsInfo.m_CellBias = &cellBias;
468 paramsInfo.m_OutputGateBias = &outputGateBias;
469
470
471 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100472 TensorInfo optInputToInputWeights;
473 TensorInfo optRecurrentToInputWeights;
474 TensorInfo optCellToInputWeights;
475 TensorInfo optInputGateBias;
476 TensorInfo optProjectionWeights;
477 TensorInfo optProjectionBias;
478 TensorInfo optCellToForgetWeights;
479 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100480 TensorInfo optInputLayerNormWeights;
481 TensorInfo optForgetLayerNormWeights;
482 TensorInfo optCellLayerNormWeights;
483 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100484
485 if(!descriptor.m_CifgEnabled)
486 {
487 optInputToInputWeights =
488 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100489 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100490
491 optRecurrentToInputWeights =
492 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100493 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100494 if (cLayer->m_CifgParameters.m_CellToInputWeights != nullptr)
495 {
496 optCellToInputWeights =
497 OverrideDataType(cLayer->m_CifgParameters.m_CellToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100498 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100499 }
500 optInputGateBias =
501 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100502 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100503 }
504
505 if(descriptor.m_ProjectionEnabled)
506 {
507 optProjectionWeights =
508 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100509 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100510 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
511 {
512 optProjectionBias =
513 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100514 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100515 }
516 }
517
518 if(descriptor.m_PeepholeEnabled)
519 {
520 optCellToForgetWeights =
521 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100522 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100523 optCellToOutputWeights =
524 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100525 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100526 }
527
Jan Eilers38e05bd2019-06-26 13:10:09 +0100528 if(descriptor.m_LayerNormEnabled)
529 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100530 if (!descriptor.m_CifgEnabled)
531 {
532 optInputLayerNormWeights = OverrideDataType(
533 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
534 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
535 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100536
537 optForgetLayerNormWeights = OverrideDataType(
538 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100539 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100540
541 optCellLayerNormWeights = OverrideDataType(
542 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100543 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100544
545 optOutputLayerNormWeights = OverrideDataType(
546 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100547 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100548 }
549
David Beck33f0ae02018-10-18 15:13:56 +0100550 result = layerSupportObject->IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100551 input,
552 outputStateIn,
553 cellStateIn,
554 scratchBuffer,
555 outputStateOut,
556 cellStateOut,
557 output,
558 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100559 paramsInfo,
560 reason);
telsoa014fcda012018-03-09 14:13:49 +0000561 break;
562 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000563 case LayerType::Maximum:
564 {
565 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
566 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
567 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
568
569 result = layerSupportObject->IsMaximumSupported(OverrideDataType(input0, dataType),
570 OverrideDataType(input1, dataType),
571 OverrideDataType(output, dataType),
572 reason);
573 break;
574 }
narpra01b89b05f2019-01-16 09:53:09 +0000575 case LayerType::MemCopy:
576 {
577 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
578 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000579
narpra01b89b05f2019-01-16 09:53:09 +0000580 result = layerSupportObject->IsMemCopySupported(OverrideDataType(input, dataType),
581 OverrideDataType(output, dataType),
582 reason);
583 break;
584 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100585 case LayerType::MemImport:
586 {
587 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
588 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
589
590 result = layerSupportObject->IsMemImportSupported(OverrideDataType(input, dataType),
591 OverrideDataType(output, dataType),
592 reason);
593 break;
594 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100595 case LayerType::Merge:
596 {
597 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
598 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
599 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
600
601 result = layerSupportObject->IsMergeSupported(OverrideDataType(input0, dataType),
602 OverrideDataType(input1, dataType),
603 OverrideDataType(output, dataType),
604 reason);
605 break;
606 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100607 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000608 {
Jim Flynne242f2d2019-05-22 14:24:13 +0100609 auto cLayer = boost::polymorphic_downcast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000610
telsoa01c577f2c2018-08-31 09:22:23 +0100611 // Get vector of all inputs.
612 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000613 {
telsoa01c577f2c2018-08-31 09:22:23 +0100614 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000615 };
telsoa01c577f2c2018-08-31 09:22:23 +0100616 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
617 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
618 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000619
telsoa01c577f2c2018-08-31 09:22:23 +0100620 auto getTensorInfoPtr = [](const TensorInfo& info)
621 {
622 return &info;
623 };
624 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
625 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
626 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000627
Nikhil Raj8599a412018-11-19 14:51:07 +0000628 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
629
Jim Flynne242f2d2019-05-22 14:24:13 +0100630 result = layerSupportObject->IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
631
632
telsoa014fcda012018-03-09 14:13:49 +0000633 break;
634 }
635 case LayerType::Multiplication:
636 {
637 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
638 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100639 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100640 result = layerSupportObject->IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100641 OverrideDataType(input0, dataType),
642 OverrideDataType(input1, dataType),
643 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100644 reason);
telsoa014fcda012018-03-09 14:13:49 +0000645 break;
646 }
647 case LayerType::Normalization:
648 {
649 auto cLayer = boost::polymorphic_downcast<const NormalizationLayer*>(&layer);
650 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
651 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100652 result = layerSupportObject->IsNormalizationSupported(OverrideDataType(input, dataType),
653 OverrideDataType(output, dataType),
654 cLayer->GetParameters(),
655 reason);
telsoa014fcda012018-03-09 14:13:49 +0000656 break;
657 }
658 case LayerType::Output:
659 {
660 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100661 result = layerSupportObject->IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000662 break;
663 }
664 case LayerType::Permute:
665 {
666 auto cLayer = boost::polymorphic_downcast<const PermuteLayer*>(&layer);
667 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
668 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100669 result = layerSupportObject->IsPermuteSupported(OverrideDataType(input, dataType),
670 OverrideDataType(output, dataType),
671 cLayer->GetParameters(),
672 reason);
telsoa014fcda012018-03-09 14:13:49 +0000673 break;
674 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100675 case LayerType::Pad:
676 {
677 auto cLayer = boost::polymorphic_downcast<const PadLayer*>(&layer);
678 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
679 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100680 result = layerSupportObject->IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100681 OverrideDataType(input, dataType),
682 OverrideDataType(output, dataType),
683 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100684 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100685 break;
686 }
telsoa014fcda012018-03-09 14:13:49 +0000687 case LayerType::Pooling2d:
688 {
689 auto cLayer = boost::polymorphic_downcast<const Pooling2dLayer*>(&layer);
690 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
691 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100692 result = layerSupportObject->IsPooling2dSupported(OverrideDataType(input, dataType),
693 OverrideDataType(output, dataType),
694 cLayer->GetParameters(),
695 reason);
telsoa014fcda012018-03-09 14:13:49 +0000696 break;
697 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000698 case LayerType::PreCompiled:
699 {
700 auto cLayer = boost::polymorphic_downcast<const PreCompiledLayer*>(&layer);
701 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
702 result = layerSupportObject->IsPreCompiledSupported(OverrideDataType(input, dataType),
703 cLayer->GetParameters(),
704 reason);
705 break;
706 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000707 case LayerType::Quantize:
708 {
709 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
710 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
711 result = layerSupportObject->IsQuantizeSupported(input, output, reason);
712 break;
713 }
James Conroyee18dc82019-07-17 11:27:46 +0100714 case LayerType::QuantizedLstm:
715 {
716 auto cLayer = boost::polymorphic_downcast<const QuantizedLstmLayer*>(&layer);
717
718 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100719 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
720 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
721 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100722
723 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100724 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
725 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100726
727 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +0100728 QuantizedLstmInputParamsInfo paramsInfo;
729
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100730 paramsInfo.m_InputToInputWeights =
731 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
732 paramsInfo.m_InputToForgetWeights =
733 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
734 paramsInfo.m_InputToCellWeights =
735 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
736 paramsInfo.m_InputToOutputWeights =
737 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100738
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100739 paramsInfo.m_RecurrentToInputWeights =
740 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
741 paramsInfo.m_RecurrentToForgetWeights =
742 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
743 paramsInfo.m_RecurrentToCellWeights =
744 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
745 paramsInfo.m_RecurrentToOutputWeights =
746 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100747
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100748 paramsInfo.m_InputGateBias =
749 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
750 paramsInfo.m_ForgetGateBias =
751 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
752 paramsInfo.m_CellBias =
753 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
754 paramsInfo.m_OutputGateBias =
755 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +0100756
757 result = layerSupportObject->IsQuantizedLstmSupported(input,
758 previousCellStateIn,
759 previousOutputIn,
760 cellStateOut,
761 output,
762 paramsInfo,
763 reason);
764 break;
765 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100766 case LayerType::Division:
767 {
768 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
769 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
770 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100771 result = layerSupportObject->IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100772 OverrideDataType(input0, dataType),
773 OverrideDataType(input1, dataType),
774 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100775 reason);
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100776 break;
777 }
telsoa014fcda012018-03-09 14:13:49 +0000778 case LayerType::Reshape:
779 {
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000780 auto cLayer = boost::polymorphic_downcast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000781 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000782 result = layerSupportObject->IsReshapeSupported(OverrideDataType(input, dataType),
783 cLayer->GetParameters(),
784 reason);
telsoa014fcda012018-03-09 14:13:49 +0000785 break;
786 }
Teresa Charlina9075df2019-06-27 15:41:57 +0100787 case LayerType::Resize:
788 {
789 auto cLayer = boost::polymorphic_downcast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100790 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +0100791 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
792 result = layerSupportObject->IsResizeSupported(OverrideDataType(input, dataType),
793 OverrideDataType(output, dataType),
794 cLayer->GetParameters(),
795 reason);
796 break;
797 }
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +0000798 case LayerType::Rsqrt:
799 {
800 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
801 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
802 result = layerSupportObject->IsRsqrtSupported(OverrideDataType(input, dataType),
803 OverrideDataType(output, dataType),
804 reason);
805 break;
806 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100807 case LayerType::Slice:
808 {
809 auto cLayer = boost::polymorphic_downcast<const SliceLayer*>(&layer);
810
811 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
812 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
813
814 result = layerSupportObject->IsSliceSupported(OverrideDataType(input, dataType),
815 OverrideDataType(output, dataType),
816 cLayer->GetParameters(),
817 reason);
818 break;
819 }
telsoa014fcda012018-03-09 14:13:49 +0000820 case LayerType::Softmax:
821 {
822 auto cLayer = boost::polymorphic_downcast<const SoftmaxLayer*>(&layer);
823 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100824 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100825 result = layerSupportObject->IsSoftmaxSupported(OverrideDataType(input, dataType),
826 OverrideDataType(output, dataType),
827 cLayer->GetParameters(),
828 reason);
telsoa014fcda012018-03-09 14:13:49 +0000829 break;
830 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +0000831 case LayerType::SpaceToBatchNd:
832 {
833 auto cLayer = boost::polymorphic_downcast<const SpaceToBatchNdLayer*>(&layer);
834 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
835 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
836 result = layerSupportObject->IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
837 OverrideDataType(output, dataType),
838 cLayer->GetParameters(),
839 reason);
840 break;
841 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +0100842 case LayerType::SpaceToDepth:
843 {
844 auto cLayer = boost::polymorphic_downcast<const SpaceToDepthLayer*>(&layer);
845
846 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
847 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
848
849 result = layerSupportObject->IsSpaceToDepthSupported(OverrideDataType(input, dataType),
850 OverrideDataType(output, dataType),
851 cLayer->GetParameters(),
852 reason);
853 break;
854 }
telsoa014fcda012018-03-09 14:13:49 +0000855 case LayerType::Splitter:
856 {
857 auto cLayer = boost::polymorphic_downcast<const SplitterLayer*>(&layer);
858 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +0100859
860 // Get vector of all outputs.
861 auto getTensorInfo = [&dataType](const OutputSlot& slot)
862 {
863 return OverrideDataType(slot.GetTensorInfo(), dataType);
864 };
865 auto beginI = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfo);
866 auto endI = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfo);
867 std::vector<TensorInfo> outputs(beginI, endI);
868
869 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
870
David Beck33f0ae02018-10-18 15:13:56 +0100871 result = layerSupportObject->IsSplitterSupported(OverrideDataType(input, dataType),
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +0100872 outputPtrs,
David Beck33f0ae02018-10-18 15:13:56 +0100873 cLayer->GetParameters(),
874 reason);
telsoa014fcda012018-03-09 14:13:49 +0000875 break;
876 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +0100877 case LayerType::Stack:
878 {
879 auto cLayer = boost::polymorphic_downcast<const StackLayer*>(&layer);
880
881 // Get vector of all inputs.
882 auto getTensorInfo = [&dataType](const InputSlot& slot)
883 {
884 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
885 };
886 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
887 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
888 std::vector<TensorInfo> inputs(beginI, endI);
889
890 auto getTensorInfoPtr = [](const TensorInfo& info)
891 {
892 return &info;
893 };
894 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
895 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
896 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
897
898 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
899
900 result = layerSupportObject->IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
901
902 break;
903 }
Derek Lamberti013c3902019-10-21 10:46:16 +0100904 case LayerType::StandIn:
905 {
906 auto cLayer = boost::polymorphic_downcast<const StandInLayer*>(&layer);
907
908 // Get vector of all inputs.
909 auto getTensorInfoIn = [&dataType](const InputSlot& slot)
910 {
911 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
912 };
913 auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
914 {
915 return OverrideDataType(slot.GetTensorInfo(), dataType);
916 };
917 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfoIn);
918 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfoIn);
919 std::vector<TensorInfo> inputs(beginI, endI);
920
921 auto beginO = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
922 auto endO = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfoOut);
923 std::vector<TensorInfo> outputs(beginO, endO);
924
925
926 auto getTensorInfoPtr = [](const TensorInfo& info)
927 {
928 return &info;
929 };
930 auto beginPtrI = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
931 auto endPtrI = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
932 std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
933
934 auto beginPtrO = boost::make_transform_iterator(outputs.begin(), getTensorInfoPtr);
935 auto endPtrO = boost::make_transform_iterator(outputs.end(), getTensorInfoPtr);
936 std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
937
938
939 result = layerSupportObject->IsStandInSupported(inputPtrs,
940 outputPtrs,
941 cLayer->GetParameters(),
942 reason);
943 break;
944 }
Conor Kennedy430b5d82018-11-14 15:28:28 +0000945 case LayerType::StridedSlice:
946 {
947 auto cLayer = boost::polymorphic_downcast<const StridedSliceLayer*>(&layer);
948 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
949 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
950 result = layerSupportObject->IsStridedSliceSupported(OverrideDataType(input, dataType),
951 OverrideDataType(output, dataType),
952 cLayer->GetParameters(),
953 reason);
954 break;
955 }
David Beckc2044fe2018-09-05 15:00:38 +0100956 case LayerType::Subtraction:
957 {
958 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
959 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
960 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100961 result = layerSupportObject->IsSubtractionSupported(
David Beckc2044fe2018-09-05 15:00:38 +0100962 OverrideDataType(input0, dataType),
963 OverrideDataType(input1, dataType),
964 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100965 reason);
David Beckc2044fe2018-09-05 15:00:38 +0100966 break;
967 }
Sadik Armaganeff363d2019-04-05 15:25:46 +0100968 case LayerType::Switch:
969 {
970 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
971 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
972 const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
973 const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
974 result = layerSupportObject->IsSwitchSupported(OverrideDataType(input0, dataType),
975 OverrideDataType(input1, dataType),
976 OverrideDataType(output0, dataType),
977 OverrideDataType(output1, dataType),
978 reason);
979 break;
980 }
narpra0132b90462018-09-13 11:07:48 +0100981 case LayerType::Mean:
982 {
983 auto cLayer = boost::polymorphic_downcast<const MeanLayer*>(&layer);
984 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
985 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100986 result = layerSupportObject->IsMeanSupported(
narpra0132b90462018-09-13 11:07:48 +0100987 OverrideDataType(input, dataType),
988 OverrideDataType(output, dataType),
989 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100990 reason);
narpra0132b90462018-09-13 11:07:48 +0100991 break;
992 }
kevmay0190539692018-11-29 08:40:19 +0000993 case LayerType::Minimum:
994 {
995 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
996 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
997 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
998 result = layerSupportObject->IsMinimumSupported(OverrideDataType(input0, dataType),
999 OverrideDataType(input1, dataType),
1000 OverrideDataType(output, dataType),
1001 reason);
1002 break;
1003 }
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001004 case LayerType::Prelu:
1005 {
1006 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1007 const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1008 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1009 result = layerSupportObject->IsPreluSupported(OverrideDataType(input, dataType),
1010 OverrideDataType(alpha, dataType),
1011 OverrideDataType(output, dataType),
1012 reason);
1013 break;
1014 }
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001015 case LayerType::TransposeConvolution2d:
1016 {
1017 auto cLayer = boost::polymorphic_downcast<const TransposeConvolution2dLayer*>(&layer);
1018
1019 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1020 dataType);
1021 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1022
1023 const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();
1024
1025 Optional<TensorInfo> biases;
1026 if (descriptor.m_BiasEnabled)
1027 {
1028 BOOST_ASSERT(cLayer->m_Bias.get() != nullptr);
1029 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
1030 GetBiasTypeFromWeightsType(dataType));
1031 }
1032
1033 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
1034 const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);
1035
1036 result = layerSupportObject->IsTransposeConvolution2dSupported(input,
1037 output,
1038 descriptor,
1039 weights,
1040 biases,
1041 reason);
1042
1043 break;
1044 }
telsoa014fcda012018-03-09 14:13:49 +00001045 default:
1046 {
1047 BOOST_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
David Beck33f0ae02018-10-18 15:13:56 +01001048 reason.value() = "Unrecognised layer type";
telsoa014fcda012018-03-09 14:13:49 +00001049 result = false;
1050 break;
1051 }
1052 }
telsoa014fcda012018-03-09 14:13:49 +00001053 return result;
1054}
1055
David Beckdcb751f2018-10-03 11:42:42 +01001056bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +01001057 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +01001058 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +00001059{
David Beckdcb751f2018-10-03 11:42:42 +01001060 auto layer = boost::polymorphic_downcast<const Layer*>(&connectableLayer);
David Beck33f0ae02018-10-18 15:13:56 +01001061 return IsLayerSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
telsoa014fcda012018-03-09 14:13:49 +00001062}
1063
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001064// Default Implementations
Kevin May868eb142019-09-04 17:29:31 +01001065std::unique_ptr<IWorkload> IWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
1066 const WorkloadInfo& info) const
1067{
1068 return std::unique_ptr<IWorkload>();
1069}
1070
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001071std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
1072 const WorkloadInfo& info) const
1073{
1074 return std::unique_ptr<IWorkload>();
1075}
1076
1077std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
1078 const WorkloadInfo& info) const
1079{
1080 return std::unique_ptr<IWorkload>();
1081}
1082
Nikhil Rajee391d52019-09-05 17:50:44 +01001083std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
1084 const WorkloadInfo& info) const
1085{
1086 return std::unique_ptr<IWorkload>();
1087}
1088
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001089std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
1090 const BatchNormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
1091{
1092 return std::unique_ptr<IWorkload>();
1093}
1094
1095std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
1096 const WorkloadInfo& Info) const
1097{
1098 return std::unique_ptr<IWorkload>();
1099}
1100
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001101std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
1102 const WorkloadInfo& info) const
1103{
1104 return std::unique_ptr<IWorkload>();
1105}
1106
Jim Flynne242f2d2019-05-22 14:24:13 +01001107std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
Jim Flynn4ed6c832019-05-20 11:02:46 +01001108 const WorkloadInfo& info) const
1109{
1110 return std::unique_ptr<IWorkload>();
1111}
1112
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001113std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
1114 const WorkloadInfo& info) const
1115{
1116 return std::unique_ptr<IWorkload>();
1117}
1118
1119std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& descriptor,
1120 const WorkloadInfo& info) const
1121{
1122 return std::unique_ptr<IWorkload>();
1123}
1124
1125std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& descriptor,
1126 const WorkloadInfo& info) const
1127{
1128 return std::unique_ptr<IWorkload>();
1129}
1130
1131std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
1132 const WorkloadInfo& info) const
1133{
1134 return std::unique_ptr<IWorkload>();
1135}
1136
1137std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
1138 const WorkloadInfo& info) const
1139{
1140 return std::unique_ptr<IWorkload>();
1141}
1142
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01001143std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
1144 const WorkloadInfo& info) const
1145{
1146 return std::unique_ptr<IWorkload>();
1147}
1148
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001149std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
1150 const DepthwiseConvolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
1151{
1152 return std::unique_ptr<IWorkload>();
1153}
1154
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00001155std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
1156 const DequantizeQueueDescriptor& descriptor, const WorkloadInfo& info) const
1157{
1158 return std::unique_ptr<IWorkload>();
1159}
1160
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001161std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
1162 const DetectionPostProcessQueueDescriptor& descriptor, const WorkloadInfo& info) const
1163{
1164 return std::unique_ptr<IWorkload>();
1165}
1166
1167std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
1168 const WorkloadInfo& info) const
1169{
1170 return std::unique_ptr<IWorkload>();
1171}
1172
1173std::unique_ptr<IWorkload> IWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
1174 const WorkloadInfo& Info) const
1175{
1176 return std::unique_ptr<IWorkload>();
1177}
1178
1179std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
1180 const WorkloadInfo& info) const
1181{
1182 return std::unique_ptr<IWorkload>();
1183}
1184
1185std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
1186 const WorkloadInfo& info) const
1187{
1188 return std::unique_ptr<IWorkload>();
1189}
1190
1191std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& descriptor,
1192 const WorkloadInfo& info) const
1193{
1194 return std::unique_ptr<IWorkload>();
1195}
1196
1197std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
1198 const WorkloadInfo& info) const
1199{
1200 return std::unique_ptr<IWorkload>();
1201}
1202
1203std::unique_ptr<IWorkload> IWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
1204 const WorkloadInfo& info) const
1205{
1206 return std::unique_ptr<IWorkload>();
1207}
1208
Kevin Mayce5045a2019-10-02 14:07:47 +01001209std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
1210 const InstanceNormalizationQueueDescriptor& descriptor,
1211 const WorkloadInfo& info) const
1212{
1213 return std::unique_ptr<IWorkload>();
1214}
1215
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001216std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
1217 const WorkloadInfo& info) const
1218{
1219 return std::unique_ptr<IWorkload>();
1220}
1221
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01001222std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
1223 const WorkloadInfo& info) const
1224{
1225 return std::unique_ptr<IWorkload>();
1226}
1227
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001228std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
1229 const WorkloadInfo& info) const
1230{
1231 return std::unique_ptr<IWorkload>();
1232}
1233
1234std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
1235 const WorkloadInfo& info) const
1236{
1237 return std::unique_ptr<IWorkload>();
1238}
1239
1240std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
1241 const WorkloadInfo& Info) const
1242{
1243 return std::unique_ptr<IWorkload>();
1244}
1245
1246std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
1247 const WorkloadInfo& info) const
1248{
1249 return std::unique_ptr<IWorkload>();
1250}
1251
Derek Lambertif674aa02019-08-01 15:56:25 +01001252std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
1253 const WorkloadInfo& info) const
1254{
1255 return std::unique_ptr<IWorkload>();
1256}
1257
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01001258std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& descriptor,
1259 const WorkloadInfo& info) const
1260{
1261 return std::unique_ptr<IWorkload>();
1262}
1263
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001264std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
1265 const WorkloadInfo& info) const
1266{
1267 return std::unique_ptr<IWorkload>();
1268}
1269
1270std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
1271 const WorkloadInfo& info) const
1272{
1273 return std::unique_ptr<IWorkload>();
1274}
1275
1276std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
1277 const WorkloadInfo& info) const
1278{
1279 return std::unique_ptr<IWorkload>();
1280}
1281
1282std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
1283 const WorkloadInfo& info) const
1284{
1285 return std::unique_ptr<IWorkload>();
1286}
1287
1288std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
1289 const WorkloadInfo& info) const
1290{
1291 return std::unique_ptr<IWorkload>();
1292}
1293
1294std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
1295 const WorkloadInfo& Info) const
1296{
1297 return std::unique_ptr<IWorkload>();
1298}
1299
1300std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
1301 const WorkloadInfo& info) const
1302{
1303 return std::unique_ptr<IWorkload>();
1304}
1305
1306std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
1307 const WorkloadInfo& info) const
1308{
1309 return std::unique_ptr<IWorkload>();
1310}
1311
1312std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
1313 const WorkloadInfo& info) const
1314{
1315 return std::unique_ptr<IWorkload>();
1316}
1317
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001318std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor &descriptor,
1319 const WorkloadInfo &info) const
1320{
1321 return std::unique_ptr<IWorkload>();
1322}
1323
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001324std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
1325 const WorkloadInfo& Info) const
1326{
1327 return std::unique_ptr<IWorkload>();
1328}
1329
James Conroyee18dc82019-07-17 11:27:46 +01001330std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
1331 const WorkloadInfo& info) const
1332{
1333 return std::unique_ptr<IWorkload>();
1334}
1335
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001336std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
1337 const WorkloadInfo& info) const
1338{
1339 return std::unique_ptr<IWorkload>();
1340}
1341
1342std::unique_ptr<IWorkload> IWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
1343 const WorkloadInfo& info) const
1344{
1345 return std::unique_ptr<IWorkload>();
1346}
1347
Teresa Charlina9075df2019-06-27 15:41:57 +01001348std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
1349 const WorkloadInfo& info) const
1350{
1351 return std::unique_ptr<IWorkload>();
1352}
1353
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001354std::unique_ptr<IWorkload> IWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
1355 const WorkloadInfo& info) const
1356{
1357 return std::unique_ptr<IWorkload>();
1358}
1359
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001360std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
1361 const WorkloadInfo& info) const
1362{
1363 return std::unique_ptr<IWorkload>();
1364}
1365
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001366std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
1367 const WorkloadInfo& info) const
1368{
1369 return std::unique_ptr<IWorkload>();
1370}
1371
1372std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
1373 const WorkloadInfo& info) const
1374{
1375 return std::unique_ptr<IWorkload>();
1376}
1377
1378std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
1379 const WorkloadInfo& info) const
1380{
1381 return std::unique_ptr<IWorkload>();
1382}
1383
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001384std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
1385 const WorkloadInfo& info) const
1386{
1387 return std::unique_ptr<IWorkload>();
1388}
1389
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001390std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
1391 const WorkloadInfo& info) const
1392{
1393 return std::unique_ptr<IWorkload>();
1394}
1395
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001396std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
1397 const WorkloadInfo& Info) const
1398{
1399 return std::unique_ptr<IWorkload>();
1400}
1401
1402std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
1403 const WorkloadInfo& info) const
1404{
1405 return std::unique_ptr<IWorkload>();
1406}
1407
Sadik Armaganeff363d2019-04-05 15:25:46 +01001408std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& descriptor,
1409 const WorkloadInfo& info) const
1410{
1411 return std::unique_ptr<IWorkload>();
1412}
1413
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001414std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
1415 const TransposeConvolution2dQueueDescriptor& descriptor,
1416 const WorkloadInfo& info) const
1417{
1418 return std::unique_ptr<IWorkload>();
surmeh013537c2c2018-05-18 16:31:43 +01001419}
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001420
} // namespace armnn