
// Copyright (c) 2020-2024, ARM Limited.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "data_layout.h"
#include "quant_util.h"

using namespace TosaReference;
using namespace Eigen;
using namespace tosa;

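// Reference implementations of the TOSA data layout operators: CONCAT, PAD,
// DIM, RESHAPE, REVERSE, SLICE, TILE and TRANSPOSE.  Each operator validates
// its operands and attributes in checkTensorAttributes() and computes its
// result on Eigen tensors in eval().
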
template <int Rank, TOSA_REF_TYPE Dtype>
OpConcat<Rank, Dtype>::OpConcat(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_CONCAT, id_)
{
    setRequiredOperands(-1, 1);
    setRequiredRank(1);

    INIT_ATTRIBUTE(Axis);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpConcat<Rank, Dtype>::~OpConcat()
{
    if (attribute)
        delete attribute;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpConcat<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (inputs.empty())
    {
        printNodeValidationError("Concat operator must have at least one input tensor");
        return 1;
    }

    int32_t num_inputs = inputs.size();

    // output and input must be the same types and rank
    for (int32_t i = 0; i < num_inputs; i++)
    {
        if (inputs[i]->matchRankType(*outputs[0]))
        {
            printNodeValidationError("OpConcat: input ranks and types must match");
            return 1;
        }
        ins.push_back(dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[i]));
    }

    if (attribute->axis() < 0 || (size_t)attribute->axis() >= Rank)
    {
        printNodeValidationError("OpConcat: axis is beyond output tensor rank");
        return 1;
    }

    int32_t output_dim_on_axis = 0;
    for (int32_t j = 0; j < num_inputs; j++)
    {
        for (int32_t i = 0; i < Rank; i++)
        {
            int32_t input_dim = inputs[j]->getShape()[i];
            if (i == attribute->axis())
            {
                output_dim_on_axis += input_dim;
            }
            else if (input_dim != outputs[0]->getShape()[i])
            {
                printNodeValidationError("OpConcat: input dimension not matching output dimension");
                return 1;
            }
        }
    }

    ERROR_IF(output_dim_on_axis != outputs[0]->getShape()[attribute->axis()],
             "OpConcat: sum of input dimension on axis not equal to output dimension on axis");

    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpConcat<Rank, Dtype>::eval()
{

    int32_t reversed_axis = Rank - 1 - attribute->axis();

    for (int32_t d = 0; d < Rank; d++)
    {
        reverser[d] = Rank - 1 - d;
    }

    TIn result = ins[0]->getTensor().shuffle(reverser);

    for (size_t i = 1; i < ins.size(); i++)
    {
        TIn in_reversed = ins[i]->getTensor().shuffle(reverser);
        TIn temp        = result.concatenate(in_reversed, reversed_axis);
        result          = temp;
    }
    out->getTensor() = result.shuffle(reverser);

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpPad<Rank, Dtype>::OpPad(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_PAD, id_)
{
    setRequiredOperands(2, 1);
    setRequiredRank(1);

    INIT_ATTRIBUTE(Pad);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpPad<Rank, Dtype>::~OpPad()
{}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpPad<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
    {
        return 1;
    }

    // output and input must be the same types
    if (inputs[0]->matchRankType(*outputs[0]))
    {
        printNodeValidationError("Failure to match input and output type and rank");
        return 1;
    }

    in      = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    padding = dynamic_cast<TosaReference::TensorTemplate<TPadding>*>(inputs[1]);
    out     = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);
    ASSERT_MEM(in && padding && out);

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpPad<Rank, Dtype>::eval()
{
    InEigenType pad_value = 0;

    switch (Dtype)
    {
        case TOSA_REF_TYPE_BOOL:
        case TOSA_REF_TYPE_INT8:
        case TOSA_REF_TYPE_INT16:
        case TOSA_REF_TYPE_INT32:
            pad_value = (InEigenType)attribute->pad_const_int();
            break;
        case TOSA_REF_TYPE_FP16:
        case TOSA_REF_TYPE_BF16:
        case TOSA_REF_TYPE_FP32:
        case TOSA_REF_TYPE_FP64:
            pad_value = (InEigenType)attribute->pad_const_fp();
            break;
        default:
            printNodeValidationError("Unsupported data type");
            break;
    }

    // padding is a 1D array of [Rank * 2], with ordering:
    // [Rank0_front, Rank0_back, Rank1_front, Rank1_back, ..., Rank(N-1)_front, Rank(N-1)_back]
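    // e.g. a rank-2 input of shape [2, 3] with padding [1, 1, 2, 2] adds one row
    // on each side and two columns on each side, so the expected output shape is
    // [2 + 1 + 1, 3 + 2 + 2] = [4, 7].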
    TPadding padding_val = this->padding->getTensor();
    ERROR_IF(padding_val.size() != (Rank * 2), "OpPad: padding length needs to be (rank(input1) * 2)");
    for (int i = 0; i < Rank; i++)
    {
        auto pad_front = padding_val(2 * i);
        auto pad_back  = padding_val(2 * i + 1);
        ERROR_IF((pad_front < 0) || (pad_back < 0), "OpPad: padding can't be smaller than 0");
        ERROR_IF(out->getShape()[i] != pad_front + in->getShape()[i] + pad_back,
                 "OpPad: output shape not equal to input plus padding");
        paddings_array[i] = std::make_pair(pad_front, pad_back);
    }

    this->out->getTensor() = this->in->getTensor().pad(this->paddings_array, pad_value);

    return GraphNode::eval();
}

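// DIM writes the size of the selected axis of its input tensor into the output
// as a single shape value.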
template <int Rank, TOSA_REF_TYPE Dtype>
OpDim<Rank, Dtype>::OpDim(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_DIM, id_)
{
    setRequiredOperands(1, 1);

    INIT_ATTRIBUTE(Axis);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpDim<Rank, Dtype>::~OpDim()
{
    if (attribute)
        delete attribute;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpDim<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]))
        return 1;

    if (attribute->axis() < 0 || (size_t)attribute->axis() >= Rank)
    {
        printNodeValidationError("OpDim: axis must be between [0, input_rank - 1]");
        return 1;
    }

    in  = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    ASSERT_MEM(in && out);

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpDim<Rank, Dtype>::eval()
{
    int32_t axis    = attribute->axis();
    int64_t out_val = in->getShape()[axis];

    this->out->getTensor().setValues({ out_val });

    return GraphNode::eval();
}

template <int InRank, int OutRank, TOSA_REF_TYPE Dtype>
OpReshape<InRank, OutRank, Dtype>::OpReshape(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_RESHAPE, id_)
{
    setRequiredOperands(2, 1);
}

template <int InRank, int OutRank, TOSA_REF_TYPE Dtype>
OpReshape<InRank, OutRank, Dtype>::~OpReshape()
{}

template <int InRank, int OutRank, TOSA_REF_TYPE Dtype>
int OpReshape<InRank, OutRank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(InRank <= tosa_level.MAX_RANK, "InRank should be smaller than or equal to MAX_RANK");
    LEVEL_CHECK(OutRank <= tosa_level.MAX_RANK, "OutRank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    // output and input must be the same types
    if (inputs[0]->matchType(*outputs[0]))
    {
        printNodeValidationError("OpReshape: Input and output types must match");
        return 1;
    }

    ERROR_IF(inputs[0]->getElementCount() != outputs[0]->getElementCount(),
             "Input tensor size does not match output tensor size");

    in  = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    // note: do not assert mem on shape input, because it may be {} for reshape to scalar
    // and also, because the shape input is not actually used in eval()

    ASSERT_MEM(in && out);

    return 0;
}

template <int InRank, int OutRank, TOSA_REF_TYPE Dtype>
int OpReshape<InRank, OutRank, Dtype>::eval()
{
    for (int32_t d = 0; d < OutRank; d++)
    {
        array_shape[d]  = getOutputs()[0]->getShape()[OutRank - 1 - d];
        out_reverser[d] = OutRank - 1 - d;
    }

    for (int32_t d = 0; d < InRank; d++)
    {
        in_reverser[d] = InRank - 1 - d;
    }

    // Eigen Tensor is col-major, and we're referencing row-major result
    // need to reverse it to row-major before reshape, and perform another reverse afterward
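    // e.g. reshaping a [2, 3] input to a [3, 2] output: the input dimensions are
    // shuffled into reverse order, the data is reshaped using array_shape (the
    // output shape with its dimensions reversed, here [2, 3]), and a final
    // shuffle lays the result out as the row-major [3, 2] output.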

    // input tensor rank 0 can't do .shuffle(), need to be handled otherwise
    TIn in_reversed;
    if (InRank > 1)
    {
        in_reversed = in->getTensor().shuffle(in_reverser);
    }
    else
    {
        in_reversed = in->getTensor();
    }

    TOut in_reshaped = in_reversed.reshape(array_shape);

    // output tensor can be rank 0, .reshape() and .shuffle() don't work, need to be handled otherwise
    if (OutRank > 1)
    {
        out->getTensor() = in_reshaped.shuffle(out_reverser);
    }
    else
    {
        out->getTensor() = in_reshaped;
    }

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpReverse<Rank, Dtype>::OpReverse(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_REVERSE, id_)
{
    setRequiredOperands(1, 1);
    setRequiredRank(1);

    INIT_ATTRIBUTE(Axis);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpReverse<Rank, Dtype>::~OpReverse()
{
    if (attribute)
        delete attribute;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpReverse<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
    {
        return 1;
    }

    // output and input must be the same types
    if (inputs[0]->matchRankTypeShape(*outputs[0]))
    {
        printNodeValidationError("Failure to match input and output rank/type/shape");
        return 1;
    }

    in  = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    ASSERT_MEM(in && out);

    if (attribute->axis() < 0 || attribute->axis() >= inputs[0]->getRank())
    {
        printNodeValidationError("Reverse axis must be between [0, input_rank - 1]");
        return 1;
    }

    // transform the axis into a true/false mask over all dimensions
    // e.g. rank=4, axis=2, reverse array would be [false, false, true, false]
    for (int i = 0; i < Rank; i++)
    {
        reverse_array[i] = false;
    }
    reverse_array[attribute->axis()] = true;

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpReverse<Rank, Dtype>::eval()
{
    out->getTensor() = in->getTensor().reverse(reverse_array);

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpSlice<Rank, Dtype>::OpSlice(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_SLICE, id_)
{
    setRequiredOperands(3, 1);
    setRequiredRank(1);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpSlice<Rank, Dtype>::~OpSlice()
{}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpSlice<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
    {
        return 1;
    }

    // output and input must be the same types
    if (inputs[0]->matchRankType(*outputs[0]))
    {
        printNodeValidationError("Failure to match input and output rank or type");
        return 1;
    }

    in    = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    start = dynamic_cast<TosaReference::TensorTemplate<TInShape>*>(inputs[1]);
    size  = dynamic_cast<TosaReference::TensorTemplate<TInShape>*>(inputs[2]);
    out   = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    ASSERT_MEM(in && start && size && out);

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpSlice<Rank, Dtype>::eval()
{
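    // For each dimension i, start(i) is the first input element copied and
    // size(i) the number of elements copied, so start(i) + size(i) must stay
    // within the input and size(i) must match the output dimension.
    // e.g. input shape [4, 6], start [1, 2], size [2, 3] -> output shape [2, 3].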
    ERROR_IF(start->getElementCount() != in->getRank(), "OpSlice: start array length needs to be rank(input)");
    ERROR_IF(size->getElementCount() != in->getRank(), "OpSlice: size array length needs to be rank(input)");

    for (int32_t i = 0; i < in->getRank(); i++)
    {
        int32_t b = start->getTensor()(i);
        int32_t s = size->getTensor()(i);
        ERROR_IF(b < 0 || b >= in->getShape()[i], "OpSlice: start out of boundary");
        ERROR_IF((b + s) < 0 || (b + s) > in->getShape()[i], "OpSlice: (start+size) out of boundary");
        ERROR_IF(s <= 0, "OpSlice: output must be positive");
        ERROR_IF(s != out->getShape()[i], "OpSlice: size doesn't match output tensor dimension");
        begin_array[i] = b;
        size_array[i]  = s;
    }

    out->getTensor() = in->getTensor().slice(begin_array, size_array);

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpTileBase<Rank, Dtype>::OpTileBase(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_TILE, id_)
{
    setRequiredOperands(2, 1);
    setRequiredRank(1);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpTileBase<Rank, Dtype>::~OpTileBase()
{}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpTileBase<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
    {
        return 1;
    }

    // output and input must be the same ranks and types
    if (inputs[0]->matchRankType(*outputs[0]))
    {
        printNodeValidationError("Failure to match input and output rank or type");
        return 1;
    }

    in        = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    multiples = dynamic_cast<TosaReference::TensorTemplate<TInMultiples>*>(inputs[1]);
    out       = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    ASSERT_MEM(in && multiples && out);

    if (multiples->getElementCount() != Rank)
    {
        printNodeValidationError("1D list 'multiples' must have size equal to input rank");
        return 1;
    }

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpTile<Rank, Dtype>::eval()
{
    // primary template shouldn't be called
    FATAL_ERROR("OpTile rank=%i, dtype=%s: not implemented yet", Rank, EnumNameTOSAREFTYPE(Dtype));
}

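// Each rank-specific specialization below maps output index odN to input index
// idN = odN % input_shape[N], so the input pattern repeats along every dimension
// as many times as requested by 'multiples'.
// e.g. a rank-1 input [a, b] tiled with multiples = [3] gives [a, b, a, b, a, b].
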
template <TOSA_REF_TYPE Dtype>
int OpTile<1, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        this->out->getTensor()(od0) = this->in->getTensor()(id0);
    }

    return GraphNode::eval();
}

template <TOSA_REF_TYPE Dtype>
int OpTile<2, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
        {
            int32_t id1 = od1 % this->in->getShape()[1];
            this->out->getTensor()(od0, od1) = this->in->getTensor()(id0, id1);
        }
    }

    return GraphNode::eval();
}

template <TOSA_REF_TYPE Dtype>
int OpTile<3, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
        {
            int32_t id1 = od1 % this->in->getShape()[1];
            for (int32_t od2 = 0; od2 < this->out->getShape()[2]; od2++)
            {
                int32_t id2 = od2 % this->in->getShape()[2];
                this->out->getTensor()(od0, od1, od2) = this->in->getTensor()(id0, id1, id2);
            }
        }
    }

    return GraphNode::eval();
}

template <TOSA_REF_TYPE Dtype>
int OpTile<4, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
        {
            int32_t id1 = od1 % this->in->getShape()[1];
            for (int32_t od2 = 0; od2 < this->out->getShape()[2]; od2++)
            {
                int32_t id2 = od2 % this->in->getShape()[2];
                for (int32_t od3 = 0; od3 < this->out->getShape()[3]; od3++)
                {
                    int32_t id3 = od3 % this->in->getShape()[3];
                    this->out->getTensor()(od0, od1, od2, od3) = this->in->getTensor()(id0, id1, id2, id3);
                }
            }
        }
    }

    return GraphNode::eval();
}

template <TOSA_REF_TYPE Dtype>
int OpTile<5, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
        {
            int32_t id1 = od1 % this->in->getShape()[1];
            for (int32_t od2 = 0; od2 < this->out->getShape()[2]; od2++)
            {
                int32_t id2 = od2 % this->in->getShape()[2];
                for (int32_t od3 = 0; od3 < this->out->getShape()[3]; od3++)
                {
                    int32_t id3 = od3 % this->in->getShape()[3];
                    for (int32_t od4 = 0; od4 < this->out->getShape()[4]; od4++)
                    {
                        int32_t id4 = od4 % this->in->getShape()[4];
                        this->out->getTensor()(od0, od1, od2, od3, od4) =
                            this->in->getTensor()(id0, id1, id2, id3, id4);
                    }
                }
            }
        }
    }

    return GraphNode::eval();
}

template <TOSA_REF_TYPE Dtype>
int OpTile<6, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
        {
            int32_t id1 = od1 % this->in->getShape()[1];
            for (int32_t od2 = 0; od2 < this->out->getShape()[2]; od2++)
            {
                int32_t id2 = od2 % this->in->getShape()[2];
                for (int32_t od3 = 0; od3 < this->out->getShape()[3]; od3++)
                {
                    int32_t id3 = od3 % this->in->getShape()[3];
                    for (int32_t od4 = 0; od4 < this->out->getShape()[4]; od4++)
                    {
                        int32_t id4 = od4 % this->in->getShape()[4];
                        for (int32_t od5 = 0; od5 < this->out->getShape()[5]; od5++)
                        {
                            int32_t id5 = od5 % this->in->getShape()[5];
                            this->out->getTensor()(od0, od1, od2, od3, od4, od5) =
                                this->in->getTensor()(id0, id1, id2, id3, id4, id5);
                        }
                    }
                }
            }
        }
    }

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpTranspose<Rank, Dtype>::OpTranspose(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_TRANSPOSE, id_)
{
    setRequiredOperands(1, 1);
    setRequiredRank(1);

    INIT_ATTRIBUTE(Transpose);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpTranspose<Rank, Dtype>::~OpTranspose()
{
    if (attribute)
        delete attribute;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpTranspose<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
    {
        return 1;
    }

    // output and input must be the same types
    if (inputs[0]->matchRankType(*outputs[0]))
    {
        printNodeValidationError("Failure to match input and output rank and type");
        return 1;
    }

    if (inputs[0]->getElementCount() != outputs[0]->getElementCount())
    {
        printNodeValidationError("Failure to match input and output total element count");
        return 1;
    }

    in  = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    ASSERT_MEM(in && out);

    ERROR_IF(attribute->perms().size() != Rank, "OpTranspose: perms array size needs to match rank(input)");

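    // perms must be a permutation of [0, Rank): every index appears exactly once,
    // and output dimension d must match input dimension perms[d].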
    std::array<bool, Rank> index_used;
    index_used.fill(false);
    for (int32_t d = 0; d < Rank; d++)
    {
        int32_t index = attribute->perms()[d];
        ERROR_IF(index < 0 or index >= Rank, "OpTranspose: index out of boundary");
        ERROR_IF(index_used[index], "OpTranspose: index duplicated in perm attribute");
        index_used[index] = true;
        ERROR_IF(in->getShape()[index] != out->getShape()[d], "OpTranspose: input output shape mismatch");
        perm_array[d] = index;
    }

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpTranspose<Rank, Dtype>::eval()
{
    out->getTensor() = in->getTensor().shuffle(perm_array);

    return GraphNode::eval();
}

// template explicit instantiation
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, FP16)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, BF16)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, FP32)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, INT8)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, INT16)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, INT32)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, BOOL)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, FP64)

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, FP64);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, BOOL);

DEF_INSTANTIATE_RESHAPE(OpReshape, FP16);
DEF_INSTANTIATE_RESHAPE(OpReshape, BF16);
DEF_INSTANTIATE_RESHAPE(OpReshape, FP32);
DEF_INSTANTIATE_RESHAPE(OpReshape, INT8);
DEF_INSTANTIATE_RESHAPE(OpReshape, INT16);
DEF_INSTANTIATE_RESHAPE(OpReshape, INT32);
DEF_INSTANTIATE_RESHAPE(OpReshape, BOOL);
DEF_INSTANTIATE_RESHAPE(OpReshape, FP64);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, FP64);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, FP64);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, FP64);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, FP64);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, FP64);