//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn/ILayerSupport.hpp>

#include <cstdio>

namespace armnn
{

namespace
{

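// Shared fallback for the default layer-support queries below: if the caller
// supplied a reason buffer, format a "<function> is not supported" message
// (with source location) into it, then report the layer as unsupported.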
bool DefaultLayerSupport(const char* func,
                         const char* file,
                         unsigned int line,
                         char* reasonIfUnsupported,
                         size_t reasonIfUnsupportedMaxLength)
{
    if (reasonIfUnsupported != nullptr && reasonIfUnsupportedMaxLength > 0)
    {
        snprintf(reasonIfUnsupported,
                 reasonIfUnsupportedMaxLength,
                 "%s is not supported [%s:%d]",
                 func,
                 file,
                 line);
    }
    return false;
}

} // anonymous namespace

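// Default implementations of the ILayerSupport queries. Each one simply
// reports the layer as unsupported via DefaultLayerSupport; derived
// layer-support classes are expected to override the queries they can answer.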
bool ILayerSupport::IsActivationSupported(const TensorInfo& input,
                                          const TensorInfo& output,
                                          const ActivationDescriptor& descriptor,
                                          char* reasonIfUnsupported,
                                          size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsAdditionSupported(const TensorInfo& input0,
                                        const TensorInfo& input1,
                                        const TensorInfo& output,
                                        char* reasonIfUnsupported,
                                        size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsBatchNormalizationSupported(const TensorInfo& input,
                                                  const TensorInfo& output,
                                                  const TensorInfo& mean,
                                                  const TensorInfo& var,
                                                  const TensorInfo& beta,
                                                  const TensorInfo& gamma,
                                                  const BatchNormalizationDescriptor& descriptor,
                                                  char* reasonIfUnsupported,
                                                  size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsConstantSupported(const TensorInfo& output,
                                        char* reasonIfUnsupported,
                                        size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsConvertFp16ToFp32Supported(const TensorInfo& input,
                                                 const TensorInfo& output,
                                                 char* reasonIfUnsupported,
                                                 size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsConvertFp32ToFp16Supported(const TensorInfo& input,
                                                 const TensorInfo& output,
                                                 char* reasonIfUnsupported,
                                                 size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

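// The convolution queries take their bias as an Optional<TensorInfo>, since a
// bias tensor need not be present.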
bool ILayerSupport::IsConvolution2dSupported(const TensorInfo& input,
                                             const TensorInfo& output,
                                             const Convolution2dDescriptor& descriptor,
                                             const TensorInfo& weights,
                                             const Optional<TensorInfo>& biases,
                                             char* reasonIfUnsupported,
                                             size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsDepthwiseConvolutionSupported(const TensorInfo& input,
                                                    const TensorInfo& output,
                                                    const DepthwiseConvolution2dDescriptor& descriptor,
                                                    const TensorInfo& weights,
                                                    const Optional<TensorInfo>& biases,
                                                    char* reasonIfUnsupported,
                                                    size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsDivisionSupported(const TensorInfo& input0,
                                        const TensorInfo& input1,
                                        const TensorInfo& output,
                                        char* reasonIfUnsupported,
                                        size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsSubtractionSupported(const TensorInfo& input0,
                                           const TensorInfo& input1,
                                           const TensorInfo& output,
                                           char* reasonIfUnsupported,
                                           size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsInputSupported(const TensorInfo& input,
                                     char* reasonIfUnsupported,
                                     size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const TensorInfo& weights,
                                              const TensorInfo& biases,
                                              const FullyConnectedDescriptor& descriptor,
                                              char* reasonIfUnsupported,
                                              size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsL2NormalizationSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const L2NormalizationDescriptor& descriptor,
                                               char* reasonIfUnsupported,
                                               size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

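// The TensorInfo pointer parameters describe optional LSTM tensors (input-gate,
// peephole and projection weights/biases); a null pointer indicates the
// corresponding tensor is not used.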
bool ILayerSupport::IsLstmSupported(const TensorInfo& input,
                                    const TensorInfo& outputStateIn,
                                    const TensorInfo& cellStateIn,
                                    const TensorInfo& scratchBuffer,
                                    const TensorInfo& outputStateOut,
                                    const TensorInfo& cellStateOut,
                                    const TensorInfo& output,
                                    const LstmDescriptor& descriptor,
                                    const TensorInfo& inputToForgetWeights,
                                    const TensorInfo& inputToCellWeights,
                                    const TensorInfo& inputToOutputWeights,
                                    const TensorInfo& recurrentToForgetWeights,
                                    const TensorInfo& recurrentToCellWeights,
                                    const TensorInfo& recurrentToOutputWeights,
                                    const TensorInfo& forgetGateBias,
                                    const TensorInfo& cellBias,
                                    const TensorInfo& outputGateBias,
                                    const TensorInfo* inputToInputWeights,
                                    const TensorInfo* recurrentToInputWeights,
                                    const TensorInfo* cellToInputWeights,
                                    const TensorInfo* inputGateBias,
                                    const TensorInfo* projectionWeights,
                                    const TensorInfo* projectionBias,
                                    const TensorInfo* cellToForgetWeights,
                                    const TensorInfo* cellToOutputWeights,
                                    char* reasonIfUnsupported,
                                    size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsMergerSupported(const std::vector<const TensorInfo*> inputs,
                                      const OriginsDescriptor& descriptor,
                                      char* reasonIfUnsupported,
                                      size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsMultiplicationSupported(const TensorInfo& input0,
                                              const TensorInfo& input1,
                                              const TensorInfo& output,
                                              char* reasonIfUnsupported,
                                              size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsNormalizationSupported(const TensorInfo& input,
                                             const TensorInfo& output,
                                             const NormalizationDescriptor& descriptor,
                                             char* reasonIfUnsupported,
                                             size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsOutputSupported(const TensorInfo& output,
                                      char* reasonIfUnsupported,
                                      size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsPermuteSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       const PermuteDescriptor& descriptor,
                                       char* reasonIfUnsupported,
                                       size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsPooling2dSupported(const TensorInfo& input,
                                         const TensorInfo& output,
                                         const Pooling2dDescriptor& descriptor,
                                         char* reasonIfUnsupported,
                                         size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsResizeBilinearSupported(const TensorInfo& input,
                                              char* reasonIfUnsupported,
                                              size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsSoftmaxSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       const SoftmaxDescriptor& descriptor,
                                       char* reasonIfUnsupported,
                                       size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsSplitterSupported(const TensorInfo& input,
                                        const ViewsDescriptor& descriptor,
                                        char* reasonIfUnsupported,
                                        size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsFakeQuantizationSupported(const TensorInfo& input,
                                                const FakeQuantizationDescriptor& descriptor,
                                                char* reasonIfUnsupported,
                                                size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsReshapeSupported(const TensorInfo& input,
                                       char* reasonIfUnsupported,
                                       size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsFloorSupported(const TensorInfo& input,
                                     const TensorInfo& output,
                                     char* reasonIfUnsupported,
                                     size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsMeanSupported(const TensorInfo& input,
                                    const TensorInfo& output,
                                    const MeanDescriptor& descriptor,
                                    char* reasonIfUnsupported,
                                    size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

bool ILayerSupport::IsPadSupported(const TensorInfo& input,
                                   const TensorInfo& output,
                                   const PadDescriptor& descriptor,
                                   char* reasonIfUnsupported,
                                   size_t reasonIfUnsupportedMaxLength) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported, reasonIfUnsupportedMaxLength);
}

} // namespace armnn