// Copyright (c) 2020-2024, ARM Limited.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "data_layout.h"
#include "quant_util.h"

using namespace TosaReference;
using namespace Eigen;
using namespace tosa;

template <int Rank, TOSA_REF_TYPE Dtype>
OpConcat<Rank, Dtype>::OpConcat(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_CONCAT, id_)
{
    setRequiredOperands(-1, 1);
    setRequiredRank(1);

    INIT_ATTRIBUTE(Axis);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpConcat<Rank, Dtype>::~OpConcat()
{
    if (attribute)
        delete attribute;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpConcat<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (inputs.empty())
    {
        printNodeValidationError("Concat operator must have at least one input tensor");
        return 1;
    }

    int32_t num_inputs = inputs.size();

    // output and input must be the same types and rank
    for (int32_t i = 0; i < num_inputs; i++)
    {
        if (inputs[i]->matchRankType(*outputs[0]))
        {
            printNodeValidationError("OpConcat: input ranks and types must match");
            return 1;
        }
        ins.push_back(dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[i]));
    }

    if (attribute->axis() < 0 || (size_t)attribute->axis() >= Rank)
    {
        printNodeValidationError("OpConcat: axis is beyond output tensor rank");
        return 1;
    }

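    // Sum the axis dimension across all inputs; every other dimension must
    // match the corresponding output dimension exactly.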
    int32_t output_dim_on_axis = 0;
    for (int32_t j = 0; j < num_inputs; j++)
    {
        for (int32_t i = 0; i < Rank; i++)
        {
            int32_t input_dim = inputs[j]->getShape()[i];
            if (i == attribute->axis())
            {
                output_dim_on_axis += input_dim;
            }
            else if (input_dim != outputs[0]->getShape()[i])
            {
                printNodeValidationError("OpConcat: input dimension not matching output dimension");
                return 1;
            }
        }
    }

    ERROR_IF(output_dim_on_axis != outputs[0]->getShape()[attribute->axis()],
             "OpConcat: sum of input dimension on axis not equal to output dimension on axis");

    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpConcat<Rank, Dtype>::eval()
{

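    // Eigen tensors are column-major, so reverse the dimension order (and the
    // concatenation axis) before concatenating, then reverse the result back.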
    int32_t reversed_axis = Rank - 1 - attribute->axis();

    for (int32_t d = 0; d < Rank; d++)
    {
        reverser[d] = Rank - 1 - d;
    }

    TIn result = ins[0]->getTensor().shuffle(reverser);

    for (size_t i = 1; i < ins.size(); i++)
    {
        TIn in_reversed = ins[i]->getTensor().shuffle(reverser);
        TIn temp = result.concatenate(in_reversed, reversed_axis);
        result = temp;
    }
    out->getTensor() = result.shuffle(reverser);

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpPad<Rank, Dtype>::OpPad(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_PAD, id_)
{
    setRequiredOperands(2, 1);
    setRequiredRank(1);

    INIT_ATTRIBUTE(Pad);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpPad<Rank, Dtype>::~OpPad()
{}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpPad<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
    {
        return 1;
    }

    // output and input must be the same types
    if (inputs[0]->matchRankType(*outputs[0]))
    {
        printNodeValidationError("Failure to match input and output type and rank");
        return 1;
    }

    in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    padding = dynamic_cast<TosaReference::TensorTemplate<TPadding>*>(inputs[1]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);
    ASSERT_MEM(in && out);

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpPad<Rank, Dtype>::eval()
{
    InEigenType pad_value = 0;

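    // Select the pad constant from the attribute: integer and boolean element
    // types use pad_const_int, floating-point types use pad_const_fp.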
    switch (Dtype)
    {
        case TOSA_REF_TYPE_BOOL:
        case TOSA_REF_TYPE_INT8:
        case TOSA_REF_TYPE_INT16:
        case TOSA_REF_TYPE_INT32:
            pad_value = (InEigenType)attribute->pad_const_int();
            break;
        case TOSA_REF_TYPE_FP16:
        case TOSA_REF_TYPE_BF16:
        case TOSA_REF_TYPE_FP32:
        case TOSA_REF_TYPE_FP64:
        case TOSA_REF_TYPE_FP8E4M3:
        case TOSA_REF_TYPE_FP8E5M2:
            pad_value = (InEigenType)attribute->pad_const_fp();
            break;
        default:
            ASSERT_MSG(false, "TOSA_REF_TYPE %s is not supported.", EnumNameTOSAREFTYPE(Dtype));
            break;
    }

    // padding is a 1D array of size [Rank * 2], with ordering:
    // [Rank0_front, Rank0_back, Rank1_front, Rank1_back, ..., Rank(N-1)_front, Rank(N-1)_back]
    TPadding padding_val = this->padding->getTensor();
    ERROR_IF(padding_val.size() != (Rank * 2), "OpPad: padding length needs to be (rank(input1) * 2)");
    for (int i = 0; i < Rank; i++)
    {
        auto pad_front = padding_val(2 * i);
        auto pad_back = padding_val(2 * i + 1);
        ERROR_IF((pad_front < 0) || (pad_back < 0), "OpPad: padding can't be smaller than 0");
        ERROR_IF(out->getShape()[i] != pad_front + in->getShape()[i] + pad_back,
                 "OpPad: output shape not equal to input plus padding");
        paddings_array[i] = std::make_pair(pad_front, pad_back);
    }

    this->out->getTensor() = this->in->getTensor().pad(this->paddings_array, pad_value);

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpDim<Rank, Dtype>::OpDim(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_DIM, id_)
{
    setRequiredOperands(1, 1);

    INIT_ATTRIBUTE(Axis);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpDim<Rank, Dtype>::~OpDim()
{
    if (attribute)
        delete attribute;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpDim<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]))
        return 1;

    if (attribute->axis() < 0 || (size_t)attribute->axis() >= Rank)
    {
        printNodeValidationError("OpDim: axis must between [0, input_rank - 1]");
        return 1;
    }

    in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    ASSERT_MEM(in && out);

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpDim<Rank, Dtype>::eval()
{
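    // Write the size of the selected axis into the single-element shape output.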
    int32_t axis = attribute->axis();
    int64_t out_val = in->getShape()[axis];

    this->out->getTensor().setValues({ out_val });

    return GraphNode::eval();
}

template <int InRank, int OutRank, TOSA_REF_TYPE Dtype>
OpReshape<InRank, OutRank, Dtype>::OpReshape(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_RESHAPE, id_)
{
    setRequiredOperands(2, 1);
}

template <int InRank, int OutRank, TOSA_REF_TYPE Dtype>
OpReshape<InRank, OutRank, Dtype>::~OpReshape()
{}

template <int InRank, int OutRank, TOSA_REF_TYPE Dtype>
int OpReshape<InRank, OutRank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(InRank <= tosa_level.MAX_RANK, "InRank should be smaller than or equal to MAX_RANK");
    LEVEL_CHECK(OutRank <= tosa_level.MAX_RANK, "OutRank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    // output and input must be the same types
    if (inputs[0]->matchType(*outputs[0]))
    {
        printNodeValidationError("OpReshape: Input and output types must match");
        return 1;
    }

    ERROR_IF(inputs[0]->getElementCount() != outputs[0]->getElementCount(),
             "Input tensor size does not match output tensor size");

    in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    // note: do not assert mem on the shape input, because it may be {} for a reshape
    // to scalar, and because the shape input is not actually used in eval()

    ASSERT_MEM(in && out)

    return 0;
}

template <int InRank, int OutRank, TOSA_REF_TYPE Dtype>
int OpReshape<InRank, OutRank, Dtype>::eval()
{
    for (int32_t d = 0; d < OutRank; d++)
    {
        array_shape[d] = getOutputs()[0]->getShape()[OutRank - 1 - d];
        out_reverser[d] = OutRank - 1 - d;
    }

    for (int32_t d = 0; d < InRank; d++)
    {
        in_reverser[d] = InRank - 1 - d;
    }

    // Eigen Tensor is col-major while we reference a row-major result, so reverse
    // to row-major before the reshape and perform another reverse afterwards.

    // a rank-0 input tensor can't use .shuffle(), so it is handled separately
    TIn in_reversed;
    if (InRank > 1)
    {
        in_reversed = in->getTensor().shuffle(in_reverser);
    }
    else
    {
        in_reversed = in->getTensor();
    }

    TOut in_reshaped = in_reversed.reshape(array_shape);

    // the output tensor can be rank 0, where .reshape() and .shuffle() don't work, so it is handled separately
    if (OutRank > 1)
    {
        out->getTensor() = in_reshaped.shuffle(out_reverser);
    }
    else
    {
        out->getTensor() = in_reshaped;
    }

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpReverse<Rank, Dtype>::OpReverse(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_REVERSE, id_)
{
    setRequiredOperands(1, 1);
    setRequiredRank(1);

    INIT_ATTRIBUTE(Axis);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpReverse<Rank, Dtype>::~OpReverse()
{
    if (attribute)
        delete attribute;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpReverse<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
    {
        return 1;
    }

    // output and input must be the same types
    if (inputs[0]->matchRankTypeShape(*outputs[0]))
    {
        printNodeValidationError("Failure to match input and output rank/type/shape");
        return 1;
    }

    in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    ASSERT_MEM(in && out);

    if (attribute->axis() < 0 || attribute->axis() >= inputs[0]->getRank())
    {
        printNodeValidationError("Reverse axis must between [0, input_rank - 1]");
        return 1;
    }

    // transform the reverse axis into a boolean mask,
    // e.g. rank=4, axis=2, reverse array would be [false, false, true, false]
    for (int i = 0; i < Rank; i++)
    {
        reverse_array[i] = false;
    }
    reverse_array[attribute->axis()] = true;

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpReverse<Rank, Dtype>::eval()
{
    out->getTensor() = in->getTensor().reverse(reverse_array);

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpSlice<Rank, Dtype>::OpSlice(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_SLICE, id_)
{
    setRequiredOperands(3, 1);
    setRequiredRank(1);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpSlice<Rank, Dtype>::~OpSlice()
{}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpSlice<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
    {
        return 1;
    }

    // output and input must be the same types
    if (inputs[0]->matchRankType(*outputs[0]))
    {
        printNodeValidationError("Failure to match input and output rank or type");
        return 1;
    }

    in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    start = dynamic_cast<TosaReference::TensorTemplate<TSlicing>*>(inputs[1]);
    size = dynamic_cast<TosaReference::TensorTemplate<TSlicing>*>(inputs[2]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    ASSERT_MEM(in && out && start && size);

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpSlice<Rank, Dtype>::eval()
{
    TSlicing start_tensor = start->getTensor();
    TSlicing size_tensor = size->getTensor();

    // According to https://eigen.tuxfamily.org/dox/unsupported/eigen_tensors.html,
    // the type of size() is <Tensor-Type>::Index, which can be used like an int;
    // however, an explicit cast to int32_t is preferred here.
    ERROR_IF(static_cast<int32_t>(start_tensor.size()) != in->getRank(),
             "OpSlice: start array length needs to be rank(input)");
    ERROR_IF(static_cast<int32_t>(size_tensor.size()) != in->getRank(),
             "OpSlice: size array length needs to be rank(input)");

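    // Validate start/size for every dimension against the input and output
    // shapes, then build the Eigen slice arguments.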
    for (int32_t i = 0; i < in->getRank(); i++)
    {
        int32_t b = start_tensor(i);
        int32_t s = size_tensor(i);
        ERROR_IF(b < 0 || b >= in->getShape()[i], "OpSlice: start out of boundary");
        ERROR_IF((b + s) < 0 || (b + s) > in->getShape()[i], "OpSlice: (start+size) out of boundary");
        ERROR_IF(s <= 0, "OpSlice: output must be positive");
        ERROR_IF(s != out->getShape()[i], "OpSlice: size doesn't match output tensor dimension");
        begin_array[i] = b;
        size_array[i] = s;
    }

    out->getTensor() = in->getTensor().slice(begin_array, size_array);

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpTileBase<Rank, Dtype>::OpTileBase(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_TILE, id_)
{
    setRequiredOperands(2, 1);
    setRequiredRank(1);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpTileBase<Rank, Dtype>::~OpTileBase()
{}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpTileBase<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
    {
        return 1;
    }

    // output and input must be the same ranks and types
    if (inputs[0]->matchRankType(*outputs[0]))
    {
        printNodeValidationError("Failure to match input and output rank or type");
        return 1;
    }

    in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    multiples = dynamic_cast<TosaReference::TensorTemplate<TInMultiples>*>(inputs[1]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    ASSERT_MEM(in && multiples && out);

    if (multiples->getElementCount() != Rank)
    {
        printNodeValidationError("1D list 'multiples' must have size equal to input rank");
        return 1;
    }

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpTile<Rank, Dtype>::eval()
{
    // primary template shouldn't be called
    FATAL_ERROR("OpTile rank=%i, dtype=%s: not implemented yet", Rank, EnumNameTOSAREFTYPE(Dtype));
}

template <TOSA_REF_TYPE Dtype>
int OpTile<1, Dtype>::eval()
{
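    // Tile by wrapping each output coordinate back into the input with a modulo;
    // the rank-2 through rank-6 specializations below follow the same pattern.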
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        this->out->getTensor()(od0) = this->in->getTensor()(id0);
    }

    return GraphNode::eval();
}

template <TOSA_REF_TYPE Dtype>
int OpTile<2, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
        {
            int32_t id1 = od1 % this->in->getShape()[1];
            this->out->getTensor()(od0, od1) = this->in->getTensor()(id0, id1);
        }
    }

    return GraphNode::eval();
}

template <TOSA_REF_TYPE Dtype>
int OpTile<3, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
        {
            int32_t id1 = od1 % this->in->getShape()[1];
            for (int32_t od2 = 0; od2 < this->out->getShape()[2]; od2++)
            {
                int32_t id2 = od2 % this->in->getShape()[2];
                this->out->getTensor()(od0, od1, od2) = this->in->getTensor()(id0, id1, id2);
            }
        }
    }

    return GraphNode::eval();
}

template <TOSA_REF_TYPE Dtype>
int OpTile<4, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
        {
            int32_t id1 = od1 % this->in->getShape()[1];
            for (int32_t od2 = 0; od2 < this->out->getShape()[2]; od2++)
            {
                int32_t id2 = od2 % this->in->getShape()[2];
                for (int32_t od3 = 0; od3 < this->out->getShape()[3]; od3++)
                {
                    int32_t id3 = od3 % this->in->getShape()[3];
                    this->out->getTensor()(od0, od1, od2, od3) = this->in->getTensor()(id0, id1, id2, id3);
                }
            }
        }
    }

    return GraphNode::eval();
}

template <TOSA_REF_TYPE Dtype>
int OpTile<5, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
        {
            int32_t id1 = od1 % this->in->getShape()[1];
            for (int32_t od2 = 0; od2 < this->out->getShape()[2]; od2++)
            {
                int32_t id2 = od2 % this->in->getShape()[2];
                for (int32_t od3 = 0; od3 < this->out->getShape()[3]; od3++)
                {
                    int32_t id3 = od3 % this->in->getShape()[3];
                    for (int32_t od4 = 0; od4 < this->out->getShape()[4]; od4++)
                    {
                        int32_t id4 = od4 % this->in->getShape()[4];
                        this->out->getTensor()(od0, od1, od2, od3, od4) =
                            this->in->getTensor()(id0, id1, id2, id3, id4);
                    }
                }
            }
        }
    }

    return GraphNode::eval();
}

template <TOSA_REF_TYPE Dtype>
int OpTile<6, Dtype>::eval()
{
    for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
    {
        int32_t id0 = od0 % this->in->getShape()[0];
        for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
        {
            int32_t id1 = od1 % this->in->getShape()[1];
            for (int32_t od2 = 0; od2 < this->out->getShape()[2]; od2++)
            {
                int32_t id2 = od2 % this->in->getShape()[2];
                for (int32_t od3 = 0; od3 < this->out->getShape()[3]; od3++)
                {
                    int32_t id3 = od3 % this->in->getShape()[3];
                    for (int32_t od4 = 0; od4 < this->out->getShape()[4]; od4++)
                    {
                        int32_t id4 = od4 % this->in->getShape()[4];
                        for (int32_t od5 = 0; od5 < this->out->getShape()[5]; od5++)
                        {
                            int32_t id5 = od5 % this->in->getShape()[5];
                            this->out->getTensor()(od0, od1, od2, od3, od4, od5) =
                                this->in->getTensor()(id0, id1, id2, id3, id4, id5);
                        }
                    }
                }
            }
        }
    }

    return GraphNode::eval();
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpTranspose<Rank, Dtype>::OpTranspose(SubgraphTraverser* sgt_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, Op_TRANSPOSE, id_)
{
    setRequiredOperands(1, 1);
    setRequiredRank(1);

    INIT_ATTRIBUTE(Transpose);
}

template <int Rank, TOSA_REF_TYPE Dtype>
OpTranspose<Rank, Dtype>::~OpTranspose()
{
    if (attribute)
        delete attribute;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpTranspose<Rank, Dtype>::checkTensorAttributes()
{
    // Check Tosa Level
    auto tosa_level = g_func_config.tosa_level;
    LEVEL_CHECK(Rank <= tosa_level.MAX_RANK, "Rank should be smaller than or equal to MAX_RANK");

    if (validateRequiredOperands())
        return 1;

    if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
    {
        return 1;
    }

    // output and input must be the same types
    if (inputs[0]->matchRankType(*outputs[0]))
    {
        printNodeValidationError("Failure to match input and output rank and type");
        return 1;
    }

    if (inputs[0]->getElementCount() != outputs[0]->getElementCount())
    {
        printNodeValidationError("Failure to match input and output total element count");
        return 1;
    }

    in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
    out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);

    ASSERT_MEM(in && out);

    ERROR_IF(attribute->perms().size() != Rank, "OpTranspose: perms array size needs to match rank(input)");

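    // Check that perms is a valid permutation of [0, Rank): every index is in
    // range and used exactly once, and the permuted input shape must match the
    // output shape.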
    std::array<bool, Rank> index_used;
    index_used.fill(false);
    for (int32_t d = 0; d < Rank; d++)
    {
        int32_t index = attribute->perms()[d];
        ERROR_IF(index < 0 or index >= Rank, "OpTranspose: index out of boundary");
        ERROR_IF(index_used[index], "OpTranspose: index duplicated in perm attribute");
        index_used[index] = true;
        ERROR_IF(in->getShape()[index] != out->getShape()[d], "OpTranspose: input output shape mismatch");
        perm_array[d] = index;
    }

    return 0;
}

template <int Rank, TOSA_REF_TYPE Dtype>
int OpTranspose<Rank, Dtype>::eval()
{
    out->getTensor() = in->getTensor().shuffle(perm_array);

    return GraphNode::eval();
}

// template explicit instantiation
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, FP16)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, BF16)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, FP32)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, INT8)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, INT16)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, INT32)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, BOOL)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, FP64)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, FP8E4M3);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, FP8E5M2);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, FP64);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, FP8E4M3);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, FP8E5M2);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, FP8E4M3);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpDim, FP8E5M2);

DEF_INSTANTIATE_RESHAPE(OpReshape, FP16);
DEF_INSTANTIATE_RESHAPE(OpReshape, BF16);
DEF_INSTANTIATE_RESHAPE(OpReshape, FP32);
DEF_INSTANTIATE_RESHAPE(OpReshape, INT8);
DEF_INSTANTIATE_RESHAPE(OpReshape, INT16);
DEF_INSTANTIATE_RESHAPE(OpReshape, INT32);
DEF_INSTANTIATE_RESHAPE(OpReshape, BOOL);
DEF_INSTANTIATE_RESHAPE(OpReshape, FP64);
DEF_INSTANTIATE_RESHAPE(OpReshape, FP8E4M3);
DEF_INSTANTIATE_RESHAPE(OpReshape, FP8E5M2);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, FP64);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, FP8E4M3);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, FP8E5M2);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, FP64);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, FP8E4M3);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpSlice, FP8E5M2);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, FP64);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, FP8E4M3);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTileBase, FP8E5M2);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, FP64);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, FP8E4M3);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTile, FP8E5M2);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, BOOL);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, FP64);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, FP8E4M3);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpTranspose, FP8E5M2);