//
// Copyright © 2022-2024 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "GpuFsaLayerSupport.hpp"

#include <armnn/Types.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#if defined(ARMCOMPUTEGPUFSA_ENABLED)
#include "layers/GpuFsaCast.hpp"
#include "layers/GpuFsaConvolution2d.hpp"
#include "layers/GpuFsaDepthwiseConvolution2d.hpp"
#include "layers/GpuFsaElementwiseBinaryAdd.hpp"
#include "layers/GpuFsaElementwiseBinarySub.hpp"
#include "layers/GpuFsaPooling2d.hpp"
#include "layers/GpuFsaResize.hpp"
#endif

#include <vector>

namespace armnn
{

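// Reports whether the GpuFsa backend is available at all: true when the library was built
// with ARMCOMPUTEGPUFSA_ENABLED, otherwise false with a reason written to reasonIfUnsupported.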
template<typename ... Args>
bool IsGpuFsaBackendSupported(Optional<std::string&> reasonIfUnsupported, Args... args)
{
    IgnoreUnused(reasonIfUnsupported, (args)...);
#if defined(ARMCOMPUTEGPUFSA_ENABLED)
    return true;
#else
    if (reasonIfUnsupported)
    {
        reasonIfUnsupported.value() = "The armnn library has been built without CL support";
    }
    return false;
#endif
}

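// Evaluates the given expression when the GpuFsa backend is compiled in; otherwise falls back
// to IsGpuFsaBackendSupported(), which reports the backend as unavailable.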
#if defined(ARMCOMPUTEGPUFSA_ENABLED)
#define FORWARD_GPUFSA_LAYER_SUPPORT_FUNC(expr) (expr)
#else
#define FORWARD_GPUFSA_LAYER_SUPPORT_FUNC(expr) IsGpuFsaBackendSupported(reasonIfUnsupported)
#endif

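// With the backend compiled in, FORWARD_LAYER_VALIDATE_FUNC forwards to the given GpuFsa*Validate
// function and converts its arm_compute::Status into a bool, copying the error description into
// reasonIfUnsupported on failure. Without the backend it simply reports it as unavailable.
// Note that the macro expands to a return statement in the enclosing function.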
#if defined(ARMCOMPUTEGPUFSA_ENABLED)
template<class FuncType, class... Args>
inline bool CheckIsLayerSupported(FuncType&& func, Optional<std::string&> reasonIfUnsupported, Args&&... args)
{
    arm_compute::Status aclStatus = func(std::forward<Args>(args)...);
    const bool supported = (aclStatus.error_code() == arm_compute::ErrorCode::OK);
    if (!supported && reasonIfUnsupported)
    {
        reasonIfUnsupported.value() = aclStatus.error_description();
    }
    return supported;
}

#define FORWARD_LAYER_VALIDATE_FUNC(func, reasonIfUnsupported, ...) \
    return CheckIsLayerSupported(func, reasonIfUnsupported, __VA_ARGS__);
#else
#define FORWARD_LAYER_VALIDATE_FUNC(func, reasonIfUnsupported, ...) \
    return IsGpuFsaBackendSupported(reasonIfUnsupported, __VA_ARGS__);
#endif

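// Answers whether the GpuFsa backend can execute a layer of the given type with the given
// tensor infos and descriptor. The LSTM parameter infos are accepted for interface
// compatibility but are not used by this backend.
//
// Example call (illustrative sketch only; the tensor shapes, data types and Optional/descriptor
// construction below are assumptions, not taken from this file):
//
//     GpuFsaLayerSupport layerSupport;
//     std::string reason;
//     TensorInfo input({ 1, 8, 8, 3 }, DataType::Float32);
//     TensorInfo output({ 1, 8, 8, 3 }, DataType::Float16);
//     bool supported = layerSupport.IsLayerSupported(LayerType::Cast,
//                                                    { input, output },
//                                                    BaseDescriptor(),
//                                                    EmptyOptional(),
//                                                    EmptyOptional(),
//                                                    Optional<std::string&>(reason));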
bool GpuFsaLayerSupport::IsLayerSupported(const LayerType& type,
                                          const std::vector<TensorInfo>& infos,
                                          const BaseDescriptor& descriptor,
                                          const Optional<LstmInputParamsInfo>& lstmParamsInfo,
                                          const Optional<QuantizedLstmInputParamsInfo>& quantizedLstmInputParamsInfo,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(lstmParamsInfo);
    IgnoreUnused(quantizedLstmInputParamsInfo);

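    // Each case checks that 'infos' has the expected layout, downcasts the descriptor to the
    // layer-specific type where one is needed, and returns via FORWARD_LAYER_VALIDATE_FUNC.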
    switch (type)
    {
        case LayerType::Cast:
        {
            if (infos.size() != 2)
            {
                throw InvalidArgumentException("Invalid number of Cast TensorInfos. "
                                               "TensorInfos should be of format: {input, output}.");
            }
            FORWARD_LAYER_VALIDATE_FUNC(GpuFsaCastValidate,
                                        reasonIfUnsupported,
                                        infos[0],
                                        infos[1]);
        }
        case LayerType::Convolution2d:
        {
            if (infos.size() != 4)
            {
                throw InvalidArgumentException("Invalid number of Convolution2d TensorInfos. "
                                               "TensorInfos should be of format: {input, output, weights, biases}.");
            }

            auto desc = PolymorphicDowncast<const Convolution2dDescriptor*>(&descriptor);
            if (infos[3] == TensorInfo())
            {
                FORWARD_LAYER_VALIDATE_FUNC(GpuFsaConvolution2dValidate,
                                            reasonIfUnsupported,
                                            infos[0],
                                            *desc,
                                            infos[2],
                                            EmptyOptional());
            }
            else
            {
                FORWARD_LAYER_VALIDATE_FUNC(GpuFsaConvolution2dValidate,
                                            reasonIfUnsupported,
                                            infos[0],
                                            *desc,
                                            infos[2],
                                            infos[3]);
            }
        }
        case LayerType::DepthwiseConvolution2d:
        {
            if (infos.size() != 4)
            {
                throw InvalidArgumentException("Invalid number of DepthwiseConvolution2d TensorInfos. "
                                               "TensorInfos should be of format: {input, output, weights, biases}.");
            }

            auto desc = PolymorphicDowncast<const DepthwiseConvolution2dDescriptor*>(&descriptor);
            if (infos[3] == TensorInfo())
            {
                FORWARD_LAYER_VALIDATE_FUNC(GpuFsaDepthwiseConvolution2dValidate,
                                            reasonIfUnsupported,
                                            infos[0],
                                            *desc,
                                            infos[2],
                                            EmptyOptional());
            }
            else
            {
                FORWARD_LAYER_VALIDATE_FUNC(GpuFsaDepthwiseConvolution2dValidate,
                                            reasonIfUnsupported,
                                            infos[0],
                                            *desc,
                                            infos[2],
                                            infos[3]);
            }
        }
        case LayerType::ElementwiseBinary:
        {
            if (infos.size() != 3)
            {
                throw InvalidArgumentException("Invalid number of ElementwiseBinary TensorInfos. "
                                               "TensorInfos should be of format: {input0, input1, output0}.");
            }

            auto desc = PolymorphicDowncast<const ElementwiseBinaryDescriptor*>(&descriptor);
            if (desc->m_Operation == BinaryOperation::Add)
            {
                FORWARD_LAYER_VALIDATE_FUNC(GpuFsaElementwiseBinaryAddValidate,
                                            reasonIfUnsupported,
                                            infos[0],
                                            infos[1]);
            }
            else if (desc->m_Operation == BinaryOperation::Sub)
            {
                FORWARD_LAYER_VALIDATE_FUNC(GpuFsaElementwiseBinarySubValidate,
                                            reasonIfUnsupported,
                                            infos[0],
                                            infos[1]);
            }
            else
            {
                throw InvalidArgumentException("Invalid ElementwiseBinary BinaryOperation operation.");
            }
        }
        case LayerType::Pooling2d:
        {
            if (infos.size() != 2)
            {
                throw InvalidArgumentException("Invalid number of Pooling2d TensorInfos. "
                                               "TensorInfos should be of format: {input, output}.");
            }

            auto desc = PolymorphicDowncast<const Pooling2dDescriptor*>(&descriptor);

            FORWARD_LAYER_VALIDATE_FUNC(GpuFsaPooling2dValidate,
                                        reasonIfUnsupported,
                                        infos[0],
                                        *desc);
        }
        case LayerType::Resize:
        {
            if (infos.size() != 2)
            {
                throw InvalidArgumentException("Invalid number of Resize TensorInfos. "
                                               "TensorInfos should be of format: {input, output}.");
            }

            auto desc = PolymorphicDowncast<const ResizeDescriptor*>(&descriptor);

            FORWARD_LAYER_VALIDATE_FUNC(GpuFsaResizeValidate,
                                        reasonIfUnsupported,
                                        infos[0],
                                        *desc);
        }
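        // Constant, Input and Output have no GpuFsa workload of their own; they are supported
        // whenever the backend itself is available.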
        case LayerType::Constant:
        case LayerType::Input:
        case LayerType::Output:
            return IsGpuFsaBackendSupported(reasonIfUnsupported, infos[0]);
        default:
            // Layers not supported in the GpuFsa backend.
            return false;
    }
}

} // namespace armnn