//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <armnn/backends/TensorHandleFwd.hpp>
#include <armnn/backends/ITensorHandle.hpp>

#include <InternalTypes.hpp>

#include <armnn/Deprecated.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/Types.hpp>
#include <armnn/Tensor.hpp>

#include <backendsCommon/WorkloadInfo.hpp>

namespace armnn
{

// A helper function that returns the bias data type required for a given input data type.
DataType GetBiasDataType(DataType inputDataType);
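
// Example (illustrative sketch, assuming the usual mapping of quantized inputs to 32-bit integer biases):
//
//     DataType biasType  = GetBiasDataType(DataType::QAsymmU8); // expected: DataType::Signed32
//     DataType floatBias = GetBiasDataType(DataType::Float32);  // expected: DataType::Float32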

struct WorkloadInfo;

struct QueueDescriptor
{
    std::vector<ITensorHandle*> m_Inputs;
    std::vector<ITensorHandle*> m_Outputs;
    void* m_AdditionalInfoObject;

    void ValidateInputsOutputs(const std::string& descName,
                               unsigned int numExpectedIn,
                               unsigned int numExpectedOut) const;

    template<typename T>
    const T* GetAdditionalInformation() const
    {
        return static_cast<T*>(m_AdditionalInfoObject);
    }

protected:
    ~QueueDescriptor() = default;
    QueueDescriptor()
        : m_AdditionalInfoObject(nullptr)
    {}
    QueueDescriptor(QueueDescriptor const&) = default;
    QueueDescriptor& operator=(QueueDescriptor const&) = default;
};
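
// Example (illustrative sketch only): SomeQueueDescriptor stands for any concrete descriptor below,
// MyExtraInfo is a hypothetical backend-specific type, and the tensor handle pointers are assumed to
// come from the backend's tensor handle factory:
//
//     MyExtraInfo extraInfo;
//     SomeQueueDescriptor data;
//     data.m_Inputs.push_back(inputHandle);    // ITensorHandle*
//     data.m_Outputs.push_back(outputHandle);  // ITensorHandle*
//     data.m_AdditionalInfoObject = &extraInfo;
//     data.ValidateInputsOutputs("SomeWorkload", 1, 1);
//     const MyExtraInfo* info = data.GetAdditionalInformation<MyExtraInfo>();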

// Base class for queue descriptors which contain parameters.
template <typename LayerDescriptor>
struct QueueDescriptorWithParameters : public QueueDescriptor
{
    LayerDescriptor m_Parameters;

protected:
    ~QueueDescriptorWithParameters() = default;
    QueueDescriptorWithParameters() = default;
    QueueDescriptorWithParameters(QueueDescriptorWithParameters const&) = default;
    QueueDescriptorWithParameters& operator=(QueueDescriptorWithParameters const&) = default;
};
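
// Example (illustrative): a concrete descriptor such as SoftmaxQueueDescriptor (declared below)
// inherits m_Inputs/m_Outputs from QueueDescriptor and exposes its layer descriptor via m_Parameters:
//
//     SoftmaxQueueDescriptor softmaxData;
//     softmaxData.m_Parameters.m_Beta = 1.0f;  // member of SoftmaxDescriptor
//     softmaxData.m_Parameters.m_Axis = -1;    // member of SoftmaxDescriptor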

struct MapQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct UnmapQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct MemCopyQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

using InputQueueDescriptor = MemCopyQueueDescriptor;
using OutputQueueDescriptor = MemCopyQueueDescriptor;

struct MemImportQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct MemSyncQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Softmax layer workload data.
struct SoftmaxQueueDescriptor : QueueDescriptorWithParameters<SoftmaxDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Splitter layer workload data.
struct SplitterQueueDescriptor : QueueDescriptorWithParameters<ViewsDescriptor>
{
    struct ViewOrigin
    {
        ViewOrigin() {}
        ViewOrigin(std::vector<unsigned int> const& origin) : m_Origin(origin) {}

        // View origin (the size of the vector is the same as the number of dimensions of the view).
        std::vector<unsigned int> m_Origin;
    };

    // A view defines a tensor that will be carved from the input tensor.
    // View origins are stored here; the extents are defined by the sizes of the output tensors.
    std::vector<ViewOrigin> m_ViewOrigins;

    void Validate(const WorkloadInfo& workloadInfo) const;
};
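
// Example (illustrative): splitting a [4, 6] input into two [2, 6] outputs along dimension 0 would
// use one view origin per output, each with as many coordinates as the input has dimensions:
//
//     SplitterQueueDescriptor splitterData;
//     splitterData.m_ViewOrigins.push_back(SplitterQueueDescriptor::ViewOrigin({0, 0})); // first output
//     splitterData.m_ViewOrigins.push_back(SplitterQueueDescriptor::ViewOrigin({2, 0})); // second output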

// Concat layer workload data.
struct ConcatQueueDescriptor : QueueDescriptorWithParameters<OriginsDescriptor>
{
    struct ViewOrigin
    {
        ViewOrigin() {}
        ViewOrigin(const std::vector<unsigned int>& origin) : m_Origin(origin) {}

        // View origin (the size of the vector is the same as the number of dimensions of the view).
        std::vector<unsigned int> m_Origin;
    };

    // A view defines a sub-area of the output tensor that will be filled with the corresponding input tensor.
    // View origins are stored here; the extents are defined by the sizes of the input tensors.
    std::vector<ViewOrigin> m_ViewOrigins;

    void Validate(const WorkloadInfo& workloadInfo) const;
};
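
// Example (illustrative): concatenating two [2, 6] inputs into one [4, 6] output along dimension 0
// mirrors the splitter case; each input gets an origin inside the output tensor:
//
//     ConcatQueueDescriptor concatData;
//     concatData.m_ViewOrigins.push_back(ConcatQueueDescriptor::ViewOrigin({0, 0})); // first input
//     concatData.m_ViewOrigins.push_back(ConcatQueueDescriptor::ViewOrigin({2, 0})); // second input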

// Deprecated. Use ConcatQueueDescriptor instead
using MergerQueueDescriptor = ConcatQueueDescriptor;

// Stack layer workload data.
struct StackQueueDescriptor : QueueDescriptorWithParameters<StackDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Activation layer workload data.
struct ActivationQueueDescriptor : QueueDescriptorWithParameters<ActivationDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ArgMinMaxQueueDescriptor : QueueDescriptorWithParameters<ArgMinMaxDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct CastQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Fill layer workload data.
struct FillQueueDescriptor : QueueDescriptorWithParameters<FillDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Fully connected layer workload data.
struct FullyConnectedQueueDescriptor : QueueDescriptorWithParameters<FullyConnectedDescriptor>
{
    FullyConnectedQueueDescriptor()
        : m_Weight(nullptr)
        , m_Bias(nullptr)
    {
    }

    const ConstTensorHandle* m_Weight;
    const ConstTensorHandle* m_Bias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Permute layer workload data.
struct PermuteQueueDescriptor : QueueDescriptorWithParameters<PermuteDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Pooling 2D layer workload data.
struct Pooling2dQueueDescriptor : QueueDescriptorWithParameters<Pooling2dDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Convolution 2D layer workload data.
struct Convolution2dQueueDescriptor : QueueDescriptorWithParameters<Convolution2dDescriptor>
{
    Convolution2dQueueDescriptor()
        : m_Weight(nullptr)
        , m_Bias(nullptr)
    {
    }

    const ConstTensorHandle* m_Weight;
    const ConstTensorHandle* m_Bias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Convolution 3D layer workload data.
struct Convolution3dQueueDescriptor : QueueDescriptorWithParameters<Convolution3dDescriptor>
{
    Convolution3dQueueDescriptor()
        : m_Weight(nullptr)
        , m_Bias(nullptr)
    {
    }

    const ConstTensorHandle* m_Weight;
    const ConstTensorHandle* m_Bias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

/// Depthwise Convolution 2D layer workload data.
///
/// @note
/// The weights are in the format [1, H, W, I*M], where I is the input channel size, M the depthwise multiplier and
/// H, W are the height and width of the filter kernel. If per-channel quantization is applied,
/// the weights are quantized along the last dimension/axis (I*M), which corresponds to the output channel size.
/// In that case the weights tensor has I*M scales, one for each element along the quantization axis.
/// You have to be aware of this when reshaping the weights tensor.
/// Splitting the I*M axis, e.g. [1, H, W, I*M] --> [H, W, I, M], won't work without taking care of the
/// corresponding quantization scales.
/// If no per-channel quantization is applied, reshaping the weights tensor won't cause any issues. There are
/// preconfigured permutation functions available @link WorkloadUtils.hpp here.
///
struct DepthwiseConvolution2dQueueDescriptor : QueueDescriptorWithParameters<DepthwiseConvolution2dDescriptor>
{
    DepthwiseConvolution2dQueueDescriptor()
        : m_Weight(nullptr)
        , m_Bias(nullptr)
    {
    }

    const ConstTensorHandle* m_Weight;
    const ConstTensorHandle* m_Bias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};
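
// Example (illustrative sketch, per-channel quantization assumed): with I = 2 input channels and a
// depthwise multiplier M = 3 the weights have I*M = 6 output channels, so the tensor carries 6 scales
// on its last axis:
//
//     TensorInfo weightsInfo(TensorShape({1, 3, 3, 6}),            // [1, H, W, I*M] with H = W = 3
//                            DataType::QSymmS8,
//                            {0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f}, // one scale per output channel
//                            3);                                   // quantization dimension = I*M axis
//
// Reshaping such a tensor to [H, W, I, M] = [3, 3, 2, 3] also requires re-associating those scales
// with the split axes; without per-channel quantization the reshape is just a reinterpretation.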

struct DetectionPostProcessQueueDescriptor : QueueDescriptorWithParameters<DetectionPostProcessDescriptor>
{
    DetectionPostProcessQueueDescriptor()
        : m_Anchors(nullptr)
    {
    }

    const ConstTensorHandle* m_Anchors;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Normalization layer workload data.
struct NormalizationQueueDescriptor : QueueDescriptorWithParameters<NormalizationDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Add layer workload data.
struct AdditionQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Multiplication layer workload data.
struct MultiplicationQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Division layer workload data.
struct DivisionQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Subtraction layer workload data.
struct SubtractionQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Maximum layer workload data.
struct MaximumQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Mean layer workload data.
struct MeanQueueDescriptor : QueueDescriptorWithParameters<MeanDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Pad layer workload data.
struct PadQueueDescriptor : QueueDescriptorWithParameters<PadDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct QuantizeQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Deprecated. Use ComparisonQueueDescriptor instead.
struct EqualQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Batch norm layer workload data.
struct BatchNormalizationQueueDescriptor : QueueDescriptorWithParameters<BatchNormalizationDescriptor>
{
    BatchNormalizationQueueDescriptor()
        : m_Mean(nullptr)
        , m_Variance(nullptr)
        , m_Beta(nullptr)
        , m_Gamma(nullptr)
    {
    }

    const ConstTensorHandle* m_Mean;
    const ConstTensorHandle* m_Variance;
    const ConstTensorHandle* m_Beta;
    const ConstTensorHandle* m_Gamma;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct RankQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

ARMNN_NO_DEPRECATE_WARN_BEGIN
struct
ARMNN_DEPRECATED_MSG_REMOVAL_DATE("ResizeBilinearQueueDescriptor is deprecated use ResizeQueueDescriptor instead",
                                  "22.08")
ResizeBilinearQueueDescriptor : QueueDescriptorWithParameters<ResizeBilinearDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};
ARMNN_NO_DEPRECATE_WARN_END

struct ResizeQueueDescriptor : QueueDescriptorWithParameters<ResizeDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct FakeQuantizationQueueDescriptor : QueueDescriptorWithParameters<FakeQuantizationDescriptor>
{
    FakeQuantizationQueueDescriptor()
        : m_Min(nullptr)
        , m_Max(nullptr)
    {
    }

    const ConstTensorHandle* m_Min;
    const ConstTensorHandle* m_Max;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct InstanceNormalizationQueueDescriptor : QueueDescriptorWithParameters<InstanceNormalizationDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct L2NormalizationQueueDescriptor : QueueDescriptorWithParameters<L2NormalizationDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct LogSoftmaxQueueDescriptor : QueueDescriptorWithParameters<LogSoftmaxDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ConstantQueueDescriptor : QueueDescriptor
{
    ConstantQueueDescriptor()
        : m_LayerOutput(nullptr)
    {
    }

    const ConstTensorHandle* m_LayerOutput;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ReshapeQueueDescriptor : QueueDescriptorWithParameters<ReshapeDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct SpaceToBatchNdQueueDescriptor : QueueDescriptorWithParameters<SpaceToBatchNdDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct SpaceToDepthQueueDescriptor : QueueDescriptorWithParameters<SpaceToDepthDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct FloorQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct LstmQueueDescriptor : QueueDescriptorWithParameters<LstmDescriptor>
{
    LstmQueueDescriptor()
        : m_InputToInputWeights(nullptr)
        , m_InputToForgetWeights(nullptr)
        , m_InputToCellWeights(nullptr)
        , m_InputToOutputWeights(nullptr)
        , m_RecurrentToInputWeights(nullptr)
        , m_RecurrentToForgetWeights(nullptr)
        , m_RecurrentToCellWeights(nullptr)
        , m_RecurrentToOutputWeights(nullptr)
        , m_CellToInputWeights(nullptr)
        , m_CellToForgetWeights(nullptr)
        , m_CellToOutputWeights(nullptr)
        , m_InputGateBias(nullptr)
        , m_ForgetGateBias(nullptr)
        , m_CellBias(nullptr)
        , m_OutputGateBias(nullptr)
        , m_ProjectionWeights(nullptr)
        , m_ProjectionBias(nullptr)
        , m_InputLayerNormWeights(nullptr)
        , m_ForgetLayerNormWeights(nullptr)
        , m_CellLayerNormWeights(nullptr)
        , m_OutputLayerNormWeights(nullptr)
    {
    }

    const ConstTensorHandle* m_InputToInputWeights;
    const ConstTensorHandle* m_InputToForgetWeights;
    const ConstTensorHandle* m_InputToCellWeights;
    const ConstTensorHandle* m_InputToOutputWeights;
    const ConstTensorHandle* m_RecurrentToInputWeights;
    const ConstTensorHandle* m_RecurrentToForgetWeights;
    const ConstTensorHandle* m_RecurrentToCellWeights;
    const ConstTensorHandle* m_RecurrentToOutputWeights;
    const ConstTensorHandle* m_CellToInputWeights;
    const ConstTensorHandle* m_CellToForgetWeights;
    const ConstTensorHandle* m_CellToOutputWeights;
    const ConstTensorHandle* m_InputGateBias;
    const ConstTensorHandle* m_ForgetGateBias;
    const ConstTensorHandle* m_CellBias;
    const ConstTensorHandle* m_OutputGateBias;
    const ConstTensorHandle* m_ProjectionWeights;
    const ConstTensorHandle* m_ProjectionBias;
    const ConstTensorHandle* m_InputLayerNormWeights;
    const ConstTensorHandle* m_ForgetLayerNormWeights;
    const ConstTensorHandle* m_CellLayerNormWeights;
    const ConstTensorHandle* m_OutputLayerNormWeights;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ConvertBf16ToFp32QueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ConvertFp32ToBf16QueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ConvertFp16ToFp32QueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ConvertFp32ToFp16QueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct BatchToSpaceNdQueueDescriptor : QueueDescriptorWithParameters<BatchToSpaceNdDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct StridedSliceQueueDescriptor : QueueDescriptorWithParameters<StridedSliceDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Minimum layer workload data.
struct MinimumQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Deprecated. Use ComparisonQueueDescriptor instead.
struct GreaterQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct DebugQueueDescriptor : QueueDescriptor
{
    DebugQueueDescriptor() : m_Guid(0) {}

    void Validate(const WorkloadInfo& workloadInfo) const;

    LayerGuid m_Guid;
    std::string m_LayerName;
    unsigned int m_SlotIndex;
};

struct RsqrtQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct GatherQueueDescriptor : QueueDescriptorWithParameters<GatherDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct PreCompiledQueueDescriptor : QueueDescriptorWithParameters<PreCompiledDescriptor>
{
    PreCompiledQueueDescriptor()
        : m_PreCompiledObject(nullptr)
    {
    }

    void* m_PreCompiledObject;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct DequantizeQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct MergeQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct SwitchQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct PreluQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct TransposeConvolution2dQueueDescriptor : QueueDescriptorWithParameters<TransposeConvolution2dDescriptor>
{
    TransposeConvolution2dQueueDescriptor() :
        m_Weight(nullptr),
        m_Bias(nullptr)
    {}

    const ConstTensorHandle* m_Weight;
    const ConstTensorHandle* m_Bias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct TransposeQueueDescriptor : QueueDescriptorWithParameters<TransposeDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct QLstmQueueDescriptor : QueueDescriptorWithParameters<QLstmDescriptor>
{
    QLstmQueueDescriptor()
        : m_InputToInputWeights(nullptr)
        , m_InputToForgetWeights(nullptr)
        , m_InputToCellWeights(nullptr)
        , m_InputToOutputWeights(nullptr)
        , m_RecurrentToInputWeights(nullptr)
        , m_RecurrentToForgetWeights(nullptr)
        , m_RecurrentToCellWeights(nullptr)
        , m_RecurrentToOutputWeights(nullptr)
        , m_CellToInputWeights(nullptr)
        , m_CellToForgetWeights(nullptr)
        , m_CellToOutputWeights(nullptr)
        , m_InputGateBias(nullptr)
        , m_ForgetGateBias(nullptr)
        , m_CellBias(nullptr)
        , m_OutputGateBias(nullptr)
        , m_ProjectionWeights(nullptr)
        , m_ProjectionBias(nullptr)
        , m_InputLayerNormWeights(nullptr)
        , m_ForgetLayerNormWeights(nullptr)
        , m_CellLayerNormWeights(nullptr)
        , m_OutputLayerNormWeights(nullptr)
    {
    }

    const ConstTensorHandle* m_InputToInputWeights;
    const ConstTensorHandle* m_InputToForgetWeights;
    const ConstTensorHandle* m_InputToCellWeights;
    const ConstTensorHandle* m_InputToOutputWeights;
    const ConstTensorHandle* m_RecurrentToInputWeights;
    const ConstTensorHandle* m_RecurrentToForgetWeights;
    const ConstTensorHandle* m_RecurrentToCellWeights;
    const ConstTensorHandle* m_RecurrentToOutputWeights;
    const ConstTensorHandle* m_CellToInputWeights;
    const ConstTensorHandle* m_CellToForgetWeights;
    const ConstTensorHandle* m_CellToOutputWeights;
    const ConstTensorHandle* m_InputGateBias;
    const ConstTensorHandle* m_ForgetGateBias;
    const ConstTensorHandle* m_CellBias;
    const ConstTensorHandle* m_OutputGateBias;
    const ConstTensorHandle* m_ProjectionWeights;
    const ConstTensorHandle* m_ProjectionBias;
    const ConstTensorHandle* m_InputLayerNormWeights;
    const ConstTensorHandle* m_ForgetLayerNormWeights;
    const ConstTensorHandle* m_CellLayerNormWeights;
    const ConstTensorHandle* m_OutputLayerNormWeights;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct QuantizedLstmQueueDescriptor : QueueDescriptor
{
    QuantizedLstmQueueDescriptor()
        : m_InputToInputWeights(nullptr)
        , m_InputToForgetWeights(nullptr)
        , m_InputToCellWeights(nullptr)
        , m_InputToOutputWeights(nullptr)

        , m_RecurrentToInputWeights(nullptr)
        , m_RecurrentToForgetWeights(nullptr)
        , m_RecurrentToCellWeights(nullptr)
        , m_RecurrentToOutputWeights(nullptr)

        , m_InputGateBias(nullptr)
        , m_ForgetGateBias(nullptr)
        , m_CellBias(nullptr)
        , m_OutputGateBias(nullptr)
    {}

    const ConstTensorHandle* m_InputToInputWeights;
    const ConstTensorHandle* m_InputToForgetWeights;
    const ConstTensorHandle* m_InputToCellWeights;
    const ConstTensorHandle* m_InputToOutputWeights;

    const ConstTensorHandle* m_RecurrentToInputWeights;
    const ConstTensorHandle* m_RecurrentToForgetWeights;
    const ConstTensorHandle* m_RecurrentToCellWeights;
    const ConstTensorHandle* m_RecurrentToOutputWeights;

    const ConstTensorHandle* m_InputGateBias;
    const ConstTensorHandle* m_ForgetGateBias;
    const ConstTensorHandle* m_CellBias;
    const ConstTensorHandle* m_OutputGateBias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct AbsQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct SliceQueueDescriptor : QueueDescriptorWithParameters<SliceDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct DepthToSpaceQueueDescriptor : QueueDescriptorWithParameters<DepthToSpaceDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ComparisonQueueDescriptor : QueueDescriptorWithParameters<ComparisonDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ElementwiseUnaryQueueDescriptor : QueueDescriptorWithParameters<ElementwiseUnaryDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct LogicalBinaryQueueDescriptor : QueueDescriptorWithParameters<LogicalBinaryDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ReduceQueueDescriptor : QueueDescriptorWithParameters<ReduceDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ShapeQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct UnidirectionalSequenceLstmQueueDescriptor : QueueDescriptorWithParameters<LstmDescriptor>
{
    UnidirectionalSequenceLstmQueueDescriptor()
        : m_InputToInputWeights(nullptr)
        , m_InputToForgetWeights(nullptr)
        , m_InputToCellWeights(nullptr)
        , m_InputToOutputWeights(nullptr)
        , m_RecurrentToInputWeights(nullptr)
        , m_RecurrentToForgetWeights(nullptr)
        , m_RecurrentToCellWeights(nullptr)
        , m_RecurrentToOutputWeights(nullptr)
        , m_CellToInputWeights(nullptr)
        , m_CellToForgetWeights(nullptr)
        , m_CellToOutputWeights(nullptr)
        , m_InputGateBias(nullptr)
        , m_ForgetGateBias(nullptr)
        , m_CellBias(nullptr)
        , m_OutputGateBias(nullptr)
        , m_ProjectionWeights(nullptr)
        , m_ProjectionBias(nullptr)
        , m_InputLayerNormWeights(nullptr)
        , m_ForgetLayerNormWeights(nullptr)
        , m_CellLayerNormWeights(nullptr)
        , m_OutputLayerNormWeights(nullptr)
    {
    }

    const ConstTensorHandle* m_InputToInputWeights;
    const ConstTensorHandle* m_InputToForgetWeights;
    const ConstTensorHandle* m_InputToCellWeights;
    const ConstTensorHandle* m_InputToOutputWeights;
    const ConstTensorHandle* m_RecurrentToInputWeights;
    const ConstTensorHandle* m_RecurrentToForgetWeights;
    const ConstTensorHandle* m_RecurrentToCellWeights;
    const ConstTensorHandle* m_RecurrentToOutputWeights;
    const ConstTensorHandle* m_CellToInputWeights;
    const ConstTensorHandle* m_CellToForgetWeights;
    const ConstTensorHandle* m_CellToOutputWeights;
    const ConstTensorHandle* m_InputGateBias;
    const ConstTensorHandle* m_ForgetGateBias;
    const ConstTensorHandle* m_CellBias;
    const ConstTensorHandle* m_OutputGateBias;
    const ConstTensorHandle* m_ProjectionWeights;
    const ConstTensorHandle* m_ProjectionBias;
    const ConstTensorHandle* m_InputLayerNormWeights;
    const ConstTensorHandle* m_ForgetLayerNormWeights;
    const ConstTensorHandle* m_CellLayerNormWeights;
    const ConstTensorHandle* m_OutputLayerNormWeights;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ChannelShuffleQueueDescriptor : QueueDescriptorWithParameters<ChannelShuffleDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

} // namespace armnn