//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <armnn/backends/TensorHandleFwd.hpp>
#include <armnn/backends/ITensorHandle.hpp>

#include <InternalTypes.hpp>

#include <armnn/Deprecated.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/Types.hpp>
#include <armnn/Tensor.hpp>

#include <backendsCommon/WorkloadInfo.hpp>

namespace armnn
{

// A helper function that returns the bias data type required for a given input data type.
DataType GetBiasDataType(DataType inputDataType);
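
// Illustrative sketch only (not part of the API surface): callers typically derive the expected
// bias type from the input type before validating a bias tensor. The exact mapping is defined in
// the corresponding .cpp, so treat the snippet below as an assumed usage pattern rather than a
// guaranteed contract.
//
//     const DataType expectedBiasType = GetBiasDataType(inputTensorInfo.GetDataType());
//     if (biasTensorInfo.GetDataType() != expectedBiasType)
//     {
//         throw InvalidArgumentException("Bias tensor has an unexpected data type.");
//     }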

struct WorkloadInfo;

struct QueueDescriptor
{
    std::vector<ITensorHandle*> m_Inputs;
    std::vector<ITensorHandle*> m_Outputs;
    void* m_AdditionalInfoObject;

    void ValidateInputsOutputs(const std::string& descName,
                               unsigned int numExpectedIn,
                               unsigned int numExpectedOut) const;

    template<typename T>
    const T* GetAdditionalInformation() const
    {
        return static_cast<T*>(m_AdditionalInfoObject);
    }

protected:
    ~QueueDescriptor() = default;
    QueueDescriptor()
        : m_AdditionalInfoObject(nullptr)
    {}
    QueueDescriptor(QueueDescriptor const&) = default;
    QueueDescriptor& operator=(QueueDescriptor const&) = default;
};
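
// Illustrative sketch: GetAdditionalInformation() simply casts m_AdditionalInfoObject back to
// whatever type the caller expects, so the caller is responsible for knowing what was attached.
// The struct name below is hypothetical and only demonstrates the assumed pattern.
//
//     struct FusedActivationInfo { float m_Alpha; float m_Beta; };
//     if (const FusedActivationInfo* info = descriptor.GetAdditionalInformation<FusedActivationInfo>())
//     {
//         // use info->m_Alpha and info->m_Beta
//     }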

// Base class for queue descriptors which contain parameters.
template <typename LayerDescriptor>
struct QueueDescriptorWithParameters : public QueueDescriptor
{
    LayerDescriptor m_Parameters;

protected:
    ~QueueDescriptorWithParameters() = default;
    QueueDescriptorWithParameters() = default;
    QueueDescriptorWithParameters(QueueDescriptorWithParameters const&) = default;
    QueueDescriptorWithParameters& operator=(QueueDescriptorWithParameters const&) = default;
};
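
// Illustrative note (assumed access pattern, not prescribed by this header): the concrete
// descriptors below derive from QueueDescriptorWithParameters<SomeLayerDescriptor>, so a workload
// reads the layer's parameters through m_Parameters while the tensor handles stay on the
// QueueDescriptor base.
//
//     // e.g. inside a hypothetical softmax workload:
//     // float beta = descriptor.m_Parameters.m_Beta;    // from SoftmaxDescriptor
//     // ITensorHandle* input = descriptor.m_Inputs[0];  // from QueueDescriptor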

struct MapQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct UnmapQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct MemCopyQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

using InputQueueDescriptor = MemCopyQueueDescriptor;
using OutputQueueDescriptor = MemCopyQueueDescriptor;

struct MemImportQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct MemSyncQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Softmax layer workload data.
struct SoftmaxQueueDescriptor : QueueDescriptorWithParameters<SoftmaxDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Splitter layer workload data.
struct SplitterQueueDescriptor : QueueDescriptorWithParameters<ViewsDescriptor>
{
    struct ViewOrigin
    {
        ViewOrigin() {}
        ViewOrigin(std::vector<unsigned int> const& origin) : m_Origin(origin) {}

        // View origin (the size of the vector is the same as the number of dimensions of the view).
        std::vector<unsigned int> m_Origin;
    };

    // A view defines a tensor that will be carved from the input tensor.
    // View origins are stored here; the extents are defined by the sizes of the output tensors.
    std::vector<ViewOrigin> m_ViewOrigins;

    void Validate(const WorkloadInfo& workloadInfo) const;
};
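
// Illustrative sketch (shapes assumed): splitting a [4, 6] input into two [4, 3] outputs uses one
// ViewOrigin per output, giving each output's starting coordinate within the input; the extents
// come from the output tensor shapes themselves.
//
//     SplitterQueueDescriptor descriptor;
//     descriptor.m_ViewOrigins.emplace_back(std::vector<unsigned int>{0, 0}); // first output view
//     descriptor.m_ViewOrigins.emplace_back(std::vector<unsigned int>{0, 3}); // second output view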

// Concat layer workload data.
struct ConcatQueueDescriptor : QueueDescriptorWithParameters<OriginsDescriptor>
{
    struct ViewOrigin
    {
        ViewOrigin() {}
        ViewOrigin(const std::vector<unsigned int>& origin) : m_Origin(origin) {}

        // View origin (the size of the vector is the same as the number of dimensions of the view).
        std::vector<unsigned int> m_Origin;
    };

    // A view defines a sub-area of the output tensor that will be filled with the corresponding input tensor.
    // View origins are stored here; the extents are defined by the sizes of the input tensors.
    std::vector<ViewOrigin> m_ViewOrigins;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Deprecated. Use ConcatQueueDescriptor instead.
using MergerQueueDescriptor = ConcatQueueDescriptor;

// Stack layer workload data.
struct StackQueueDescriptor : QueueDescriptorWithParameters<StackDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Activation layer workload data.
struct ActivationQueueDescriptor : QueueDescriptorWithParameters<ActivationDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ArgMinMaxQueueDescriptor : QueueDescriptorWithParameters<ArgMinMaxDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct CastQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Fill layer workload data.
struct FillQueueDescriptor : QueueDescriptorWithParameters<FillDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Fully connected layer workload data.
struct FullyConnectedQueueDescriptor : QueueDescriptorWithParameters<FullyConnectedDescriptor>
{
    FullyConnectedQueueDescriptor()
        : m_Weight(nullptr)
        , m_Bias(nullptr)
    {
    }

    const ConstTensorHandle* m_Weight;
    const ConstTensorHandle* m_Bias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Permute layer workload data.
struct PermuteQueueDescriptor : QueueDescriptorWithParameters<PermuteDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Pooling 2D layer workload data.
struct Pooling2dQueueDescriptor : QueueDescriptorWithParameters<Pooling2dDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Convolution 2D layer workload data.
struct Convolution2dQueueDescriptor : QueueDescriptorWithParameters<Convolution2dDescriptor>
{
    Convolution2dQueueDescriptor()
        : m_Weight(nullptr)
        , m_Bias(nullptr)
    {
    }

    const ConstTensorHandle* m_Weight;
    const ConstTensorHandle* m_Bias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Convolution 3D layer workload data.
struct Convolution3dQueueDescriptor : QueueDescriptorWithParameters<Convolution3dDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

/// Depthwise Convolution 2D layer workload data.
///
/// @note
/// The weights are in the format [1, H, W, I*M], where I is the input channel size, M is the depthwise
/// multiplier, and H, W are the height and width of the filter kernel. If per-channel quantization is
/// applied, the weights are quantized along the last dimension/axis (I*M), which corresponds to the
/// output channel size. In that case the weights tensor has I*M scales, one for each position along the
/// quantization axis, and you have to be aware of this when reshaping the weights tensor.
/// Splitting the I*M axis, e.g. [1, H, W, I*M] --> [H, W, I, M], won't work without taking care of the
/// corresponding quantization scales.
/// If no per-channel quantization is applied, reshaping the weights tensor won't cause any issues.
/// Preconfigured permutation functions are available @link WorkloadUtils.hpp here.
///
struct DepthwiseConvolution2dQueueDescriptor : QueueDescriptorWithParameters<DepthwiseConvolution2dDescriptor>
{
    DepthwiseConvolution2dQueueDescriptor()
        : m_Weight(nullptr)
        , m_Bias(nullptr)
    {
    }

    const ConstTensorHandle* m_Weight;
    const ConstTensorHandle* m_Bias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};
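
// Worked example of the layout note above (values assumed for illustration): a 3x3 depthwise kernel
// over 8 input channels with depth multiplier 2 gives a weights tensor of shape [1, 3, 3, 16]. With
// per-channel quantization there are 16 scales attached to that last axis, so a reshape such as
// [1, 3, 3, 16] --> [3, 3, 8, 2] is only valid if the scale-to-channel mapping is carried along;
// without per-channel quantization the reshape is harmless.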
243
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000244struct DetectionPostProcessQueueDescriptor : QueueDescriptorWithParameters<DetectionPostProcessDescriptor>
245{
Narumol Prangnawaratbc67cef2019-01-31 15:31:54 +0000246 DetectionPostProcessQueueDescriptor()
247 : m_Anchors(nullptr)
248 {
249 }
250
James Conroy1f58f032021-04-27 17:13:27 +0100251 const ConstTensorHandle* m_Anchors;
Narumol Prangnawaratbc67cef2019-01-31 15:31:54 +0000252
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000253 void Validate(const WorkloadInfo& workloadInfo) const;
254};
255
telsoa01c577f2c2018-08-31 09:22:23 +0100256// Normalization layer workload data.
telsoa014fcda012018-03-09 14:13:49 +0000257struct NormalizationQueueDescriptor : QueueDescriptorWithParameters<NormalizationDescriptor>
258{
259 void Validate(const WorkloadInfo& workloadInfo) const;
260};
261
telsoa01c577f2c2018-08-31 09:22:23 +0100262// Add layer workload data.
telsoa014fcda012018-03-09 14:13:49 +0000263struct AdditionQueueDescriptor : QueueDescriptor
264{
265 void Validate(const WorkloadInfo& workloadInfo) const;
266};
267
telsoa01c577f2c2018-08-31 09:22:23 +0100268// Multiplication layer workload data.
telsoa014fcda012018-03-09 14:13:49 +0000269struct MultiplicationQueueDescriptor : QueueDescriptor
270{
271 void Validate(const WorkloadInfo& workloadInfo) const;
272};
273
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100274// Division layer workload data.
275struct DivisionQueueDescriptor : QueueDescriptor
276{
277 void Validate(const WorkloadInfo& workloadInfo) const;
278};
279
David Beckc2044fe2018-09-05 15:00:38 +0100280// Subtraction layer workload data.
281struct SubtractionQueueDescriptor : QueueDescriptor
282{
283 void Validate(const WorkloadInfo& workloadInfo) const;
284};
285
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000286// Maximum layer workload data.
287struct MaximumQueueDescriptor : QueueDescriptor
288{
289 void Validate(const WorkloadInfo& workloadInfo) const;
290};
291
narpra01a6bf9122018-09-10 09:50:09 +0100292// Mean layer workload data.
narpra0132b90462018-09-13 11:07:48 +0100293struct MeanQueueDescriptor : QueueDescriptorWithParameters<MeanDescriptor>
narpra01a6bf9122018-09-10 09:50:09 +0100294{
295 void Validate(const WorkloadInfo& workloadInfo) const;
296};
297
jimfly012c9322a2018-09-19 10:59:49 +0100298// Pad layer workload data
299struct PadQueueDescriptor : QueueDescriptorWithParameters<PadDescriptor>
300{
301 void Validate(const WorkloadInfo& workloadInfo) const;
302};
303
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000304struct QuantizeQueueDescriptor : QueueDescriptor
305{
306 void Validate(const WorkloadInfo& workloadInfo) const;
307};

// Deprecated. Use ComparisonQueueDescriptor instead.
struct EqualQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Batch norm layer workload data.
struct BatchNormalizationQueueDescriptor : QueueDescriptorWithParameters<BatchNormalizationDescriptor>
{
    BatchNormalizationQueueDescriptor()
        : m_Mean(nullptr)
        , m_Variance(nullptr)
        , m_Beta(nullptr)
        , m_Gamma(nullptr)
    {
    }

    const ConstTensorHandle* m_Mean;
    const ConstTensorHandle* m_Variance;
    const ConstTensorHandle* m_Beta;
    const ConstTensorHandle* m_Gamma;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct RankQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

ARMNN_NO_DEPRECATE_WARN_BEGIN
struct
ARMNN_DEPRECATED_MSG_REMOVAL_DATE("ResizeBilinearQueueDescriptor is deprecated use ResizeQueueDescriptor instead",
                                  "22.08")
ResizeBilinearQueueDescriptor : QueueDescriptorWithParameters<ResizeBilinearDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};
ARMNN_NO_DEPRECATE_WARN_END

struct ResizeQueueDescriptor : QueueDescriptorWithParameters<ResizeDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct FakeQuantizationQueueDescriptor : QueueDescriptorWithParameters<FakeQuantizationDescriptor>
{
    FakeQuantizationQueueDescriptor()
        : m_Min(nullptr)
        , m_Max(nullptr)
    {
    }

    const ConstTensorHandle* m_Min;
    const ConstTensorHandle* m_Max;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct InstanceNormalizationQueueDescriptor : QueueDescriptorWithParameters<InstanceNormalizationDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct L2NormalizationQueueDescriptor : QueueDescriptorWithParameters<L2NormalizationDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct LogSoftmaxQueueDescriptor : QueueDescriptorWithParameters<LogSoftmaxDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ConstantQueueDescriptor : QueueDescriptor
{
    ConstantQueueDescriptor()
        : m_LayerOutput(nullptr)
    {
    }

    const ConstTensorHandle* m_LayerOutput;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ReshapeQueueDescriptor : QueueDescriptorWithParameters<ReshapeDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct SpaceToBatchNdQueueDescriptor : QueueDescriptorWithParameters<SpaceToBatchNdDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct SpaceToDepthQueueDescriptor : QueueDescriptorWithParameters<SpaceToDepthDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct FloorQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct LstmQueueDescriptor : QueueDescriptorWithParameters<LstmDescriptor>
{
    LstmQueueDescriptor()
        : m_InputToInputWeights(nullptr)
        , m_InputToForgetWeights(nullptr)
        , m_InputToCellWeights(nullptr)
        , m_InputToOutputWeights(nullptr)
        , m_RecurrentToInputWeights(nullptr)
        , m_RecurrentToForgetWeights(nullptr)
        , m_RecurrentToCellWeights(nullptr)
        , m_RecurrentToOutputWeights(nullptr)
        , m_CellToInputWeights(nullptr)
        , m_CellToForgetWeights(nullptr)
        , m_CellToOutputWeights(nullptr)
        , m_InputGateBias(nullptr)
        , m_ForgetGateBias(nullptr)
        , m_CellBias(nullptr)
        , m_OutputGateBias(nullptr)
        , m_ProjectionWeights(nullptr)
        , m_ProjectionBias(nullptr)
        , m_InputLayerNormWeights(nullptr)
        , m_ForgetLayerNormWeights(nullptr)
        , m_CellLayerNormWeights(nullptr)
        , m_OutputLayerNormWeights(nullptr)
    {
    }

    const ConstTensorHandle* m_InputToInputWeights;
    const ConstTensorHandle* m_InputToForgetWeights;
    const ConstTensorHandle* m_InputToCellWeights;
    const ConstTensorHandle* m_InputToOutputWeights;
    const ConstTensorHandle* m_RecurrentToInputWeights;
    const ConstTensorHandle* m_RecurrentToForgetWeights;
    const ConstTensorHandle* m_RecurrentToCellWeights;
    const ConstTensorHandle* m_RecurrentToOutputWeights;
    const ConstTensorHandle* m_CellToInputWeights;
    const ConstTensorHandle* m_CellToForgetWeights;
    const ConstTensorHandle* m_CellToOutputWeights;
    const ConstTensorHandle* m_InputGateBias;
    const ConstTensorHandle* m_ForgetGateBias;
    const ConstTensorHandle* m_CellBias;
    const ConstTensorHandle* m_OutputGateBias;
    const ConstTensorHandle* m_ProjectionWeights;
    const ConstTensorHandle* m_ProjectionBias;
    const ConstTensorHandle* m_InputLayerNormWeights;
    const ConstTensorHandle* m_ForgetLayerNormWeights;
    const ConstTensorHandle* m_CellLayerNormWeights;
    const ConstTensorHandle* m_OutputLayerNormWeights;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ConvertBf16ToFp32QueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ConvertFp32ToBf16QueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ConvertFp16ToFp32QueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ConvertFp32ToFp16QueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct BatchToSpaceNdQueueDescriptor : QueueDescriptorWithParameters<BatchToSpaceNdDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct StridedSliceQueueDescriptor : QueueDescriptorWithParameters<StridedSliceDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Minimum layer workload data.
struct MinimumQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

// Deprecated. Use ComparisonQueueDescriptor instead.
struct GreaterQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct DebugQueueDescriptor : QueueDescriptor
{
    DebugQueueDescriptor() : m_Guid(0) {}

    void Validate(const WorkloadInfo& workloadInfo) const;

    LayerGuid m_Guid;
    std::string m_LayerName;
    unsigned int m_SlotIndex;
};

struct RsqrtQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct GatherQueueDescriptor : QueueDescriptorWithParameters<GatherDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct PreCompiledQueueDescriptor : QueueDescriptorWithParameters<PreCompiledDescriptor>
{
    PreCompiledQueueDescriptor()
        : m_PreCompiledObject(nullptr)
    {
    }

    void* m_PreCompiledObject;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct DequantizeQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct MergeQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct SwitchQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct PreluQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct TransposeConvolution2dQueueDescriptor : QueueDescriptorWithParameters<TransposeConvolution2dDescriptor>
{
    TransposeConvolution2dQueueDescriptor() :
        m_Weight(nullptr),
        m_Bias(nullptr)
    {}

    const ConstTensorHandle* m_Weight;
    const ConstTensorHandle* m_Bias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct TransposeQueueDescriptor : QueueDescriptorWithParameters<TransposeDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct QLstmQueueDescriptor : QueueDescriptorWithParameters<QLstmDescriptor>
{
    QLstmQueueDescriptor()
        : m_InputToInputWeights(nullptr)
        , m_InputToForgetWeights(nullptr)
        , m_InputToCellWeights(nullptr)
        , m_InputToOutputWeights(nullptr)
        , m_RecurrentToInputWeights(nullptr)
        , m_RecurrentToForgetWeights(nullptr)
        , m_RecurrentToCellWeights(nullptr)
        , m_RecurrentToOutputWeights(nullptr)
        , m_CellToInputWeights(nullptr)
        , m_CellToForgetWeights(nullptr)
        , m_CellToOutputWeights(nullptr)
        , m_InputGateBias(nullptr)
        , m_ForgetGateBias(nullptr)
        , m_CellBias(nullptr)
        , m_OutputGateBias(nullptr)
        , m_ProjectionWeights(nullptr)
        , m_ProjectionBias(nullptr)
        , m_InputLayerNormWeights(nullptr)
        , m_ForgetLayerNormWeights(nullptr)
        , m_CellLayerNormWeights(nullptr)
        , m_OutputLayerNormWeights(nullptr)
    {
    }

    const ConstTensorHandle* m_InputToInputWeights;
    const ConstTensorHandle* m_InputToForgetWeights;
    const ConstTensorHandle* m_InputToCellWeights;
    const ConstTensorHandle* m_InputToOutputWeights;
    const ConstTensorHandle* m_RecurrentToInputWeights;
    const ConstTensorHandle* m_RecurrentToForgetWeights;
    const ConstTensorHandle* m_RecurrentToCellWeights;
    const ConstTensorHandle* m_RecurrentToOutputWeights;
    const ConstTensorHandle* m_CellToInputWeights;
    const ConstTensorHandle* m_CellToForgetWeights;
    const ConstTensorHandle* m_CellToOutputWeights;
    const ConstTensorHandle* m_InputGateBias;
    const ConstTensorHandle* m_ForgetGateBias;
    const ConstTensorHandle* m_CellBias;
    const ConstTensorHandle* m_OutputGateBias;
    const ConstTensorHandle* m_ProjectionWeights;
    const ConstTensorHandle* m_ProjectionBias;
    const ConstTensorHandle* m_InputLayerNormWeights;
    const ConstTensorHandle* m_ForgetLayerNormWeights;
    const ConstTensorHandle* m_CellLayerNormWeights;
    const ConstTensorHandle* m_OutputLayerNormWeights;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct QuantizedLstmQueueDescriptor : QueueDescriptor
{
    QuantizedLstmQueueDescriptor()
        : m_InputToInputWeights(nullptr)
        , m_InputToForgetWeights(nullptr)
        , m_InputToCellWeights(nullptr)
        , m_InputToOutputWeights(nullptr)

        , m_RecurrentToInputWeights(nullptr)
        , m_RecurrentToForgetWeights(nullptr)
        , m_RecurrentToCellWeights(nullptr)
        , m_RecurrentToOutputWeights(nullptr)

        , m_InputGateBias(nullptr)
        , m_ForgetGateBias(nullptr)
        , m_CellBias(nullptr)
        , m_OutputGateBias(nullptr)
    {}

    const ConstTensorHandle* m_InputToInputWeights;
    const ConstTensorHandle* m_InputToForgetWeights;
    const ConstTensorHandle* m_InputToCellWeights;
    const ConstTensorHandle* m_InputToOutputWeights;

    const ConstTensorHandle* m_RecurrentToInputWeights;
    const ConstTensorHandle* m_RecurrentToForgetWeights;
    const ConstTensorHandle* m_RecurrentToCellWeights;
    const ConstTensorHandle* m_RecurrentToOutputWeights;

    const ConstTensorHandle* m_InputGateBias;
    const ConstTensorHandle* m_ForgetGateBias;
    const ConstTensorHandle* m_CellBias;
    const ConstTensorHandle* m_OutputGateBias;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct AbsQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct SliceQueueDescriptor : QueueDescriptorWithParameters<SliceDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct DepthToSpaceQueueDescriptor : QueueDescriptorWithParameters<DepthToSpaceDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ComparisonQueueDescriptor : QueueDescriptorWithParameters<ComparisonDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ElementwiseUnaryQueueDescriptor : QueueDescriptorWithParameters<ElementwiseUnaryDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct LogicalBinaryQueueDescriptor : QueueDescriptorWithParameters<LogicalBinaryDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ReduceQueueDescriptor : QueueDescriptorWithParameters<ReduceDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ShapeQueueDescriptor : QueueDescriptor
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct UnidirectionalSequenceLstmQueueDescriptor : QueueDescriptorWithParameters<LstmDescriptor>
{
    UnidirectionalSequenceLstmQueueDescriptor()
        : m_InputToInputWeights(nullptr)
        , m_InputToForgetWeights(nullptr)
        , m_InputToCellWeights(nullptr)
        , m_InputToOutputWeights(nullptr)
        , m_RecurrentToInputWeights(nullptr)
        , m_RecurrentToForgetWeights(nullptr)
        , m_RecurrentToCellWeights(nullptr)
        , m_RecurrentToOutputWeights(nullptr)
        , m_CellToInputWeights(nullptr)
        , m_CellToForgetWeights(nullptr)
        , m_CellToOutputWeights(nullptr)
        , m_InputGateBias(nullptr)
        , m_ForgetGateBias(nullptr)
        , m_CellBias(nullptr)
        , m_OutputGateBias(nullptr)
        , m_ProjectionWeights(nullptr)
        , m_ProjectionBias(nullptr)
        , m_InputLayerNormWeights(nullptr)
        , m_ForgetLayerNormWeights(nullptr)
        , m_CellLayerNormWeights(nullptr)
        , m_OutputLayerNormWeights(nullptr)
    {
    }

    const ConstTensorHandle* m_InputToInputWeights;
    const ConstTensorHandle* m_InputToForgetWeights;
    const ConstTensorHandle* m_InputToCellWeights;
    const ConstTensorHandle* m_InputToOutputWeights;
    const ConstTensorHandle* m_RecurrentToInputWeights;
    const ConstTensorHandle* m_RecurrentToForgetWeights;
    const ConstTensorHandle* m_RecurrentToCellWeights;
    const ConstTensorHandle* m_RecurrentToOutputWeights;
    const ConstTensorHandle* m_CellToInputWeights;
    const ConstTensorHandle* m_CellToForgetWeights;
    const ConstTensorHandle* m_CellToOutputWeights;
    const ConstTensorHandle* m_InputGateBias;
    const ConstTensorHandle* m_ForgetGateBias;
    const ConstTensorHandle* m_CellBias;
    const ConstTensorHandle* m_OutputGateBias;
    const ConstTensorHandle* m_ProjectionWeights;
    const ConstTensorHandle* m_ProjectionBias;
    const ConstTensorHandle* m_InputLayerNormWeights;
    const ConstTensorHandle* m_ForgetLayerNormWeights;
    const ConstTensorHandle* m_CellLayerNormWeights;
    const ConstTensorHandle* m_OutputLayerNormWeights;

    void Validate(const WorkloadInfo& workloadInfo) const;
};

struct ChannelShuffleQueueDescriptor : QueueDescriptorWithParameters<ChannelShuffleDescriptor>
{
    void Validate(const WorkloadInfo& workloadInfo) const;
};

} // namespace armnn