blob: 86326f579c2059ff98c4552c18e52286fde202dd [file] [log] [blame]
Eric Kunzee5e26762020-10-13 16:11:07 -07001
Kevin Cheng3a478572021-01-22 17:21:02 -08002// Copyright (c) 2020-2021, ARM Limited.
Eric Kunzee5e26762020-10-13 16:11:07 -07003//
4// Licensed under the Apache License, Version 2.0 (the "License");
5// you may not use this file except in compliance with the License.
6// You may obtain a copy of the License at
7//
8// http://www.apache.org/licenses/LICENSE-2.0
9//
10// Unless required by applicable law or agreed to in writing, software
11// distributed under the License is distributed on an "AS IS" BASIS,
12// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13// See the License for the specific language governing permissions and
14// limitations under the License.
15
16#include "data_layout.h"
17#include "quant_util.h"
18
19using namespace TosaReference;
20using namespace Eigen;
21using namespace tosa;
22
23template <int Rank, DType Dtype>
Kevin Chengacb550f2021-06-29 15:32:19 -070024OpConcat<Rank, Dtype>::OpConcat(SubgraphTraverser* sgt_,
25 TosaAttributeBase* attribute_,
26 TosaQuantInfoBase* qinfo_,
27 uint64_t id_)
28 : GraphNode(sgt_, Op_CONCAT, id_)
Eric Kunzee5e26762020-10-13 16:11:07 -070029{
Kevin Chengad15dfa2021-03-04 15:15:03 -080030 setRequiredOperands(-1, 1);
Eric Kunzee5e26762020-10-13 16:11:07 -070031 setRequiredRank(1, 6);
32
33 INIT_ATTRIBUTE(Axis);
34}
35
36template <int Rank, DType Dtype>
37OpConcat<Rank, Dtype>::~OpConcat()
38{
39 if (attribute)
40 delete attribute;
41}
42
43template <int Rank, DType Dtype>
44int OpConcat<Rank, Dtype>::checkTensorAttributes()
45{
46 if (validateRequiredOperands())
47 return 1;
48
Kevin Chengad15dfa2021-03-04 15:15:03 -080049 if (inputs.empty())
Eric Kunzee5e26762020-10-13 16:11:07 -070050 {
Kevin Chengad15dfa2021-03-04 15:15:03 -080051 printNodeValidationError("Concat operator must have at least one input tensor");
Eric Kunzee5e26762020-10-13 16:11:07 -070052 return 1;
53 }
Eric Kunzee5e26762020-10-13 16:11:07 -070054 // output and input must be the same types and rank
Kevin Chengad15dfa2021-03-04 15:15:03 -080055 for (size_t i = 0; i < inputs.size(); i++)
Eric Kunzee5e26762020-10-13 16:11:07 -070056 {
Kevin Chengad15dfa2021-03-04 15:15:03 -080057 if (inputs[i]->matchRankType(*outputs[0]))
58 {
59 printNodeValidationError("Concat operator input ranks and types must match");
60 return 1;
61 }
62 ins.push_back(dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[i]));
Eric Kunzee5e26762020-10-13 16:11:07 -070063 }
64
Eric Kunzee5e26762020-10-13 16:11:07 -070065 out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);
66
Kevin Chengad15dfa2021-03-04 15:15:03 -080067 if (attribute->axis() < 0 || (size_t)attribute->axis() >= inputs[0]->getShape().size())
Eric Kunzee5e26762020-10-13 16:11:07 -070068 {
69 printNodeValidationError("Axis is beyond input tensor rank");
70 return 1;
71 }
72
73 return 0;
74}
75
template <int Rank, DType Dtype>
int OpConcat<Rank, Dtype>::eval()
{

    // Eigen tensors are column-major while the reference model addresses
    // row-major data (see the note in OpReshape::eval), so the axis must be
    // mapped into the reversed-dimension space before concatenating.
    int32_t reversed_axis = Rank - 1 - attribute->axis();

    // reverser swaps dimension order: [0..Rank-1] -> [Rank-1..0]
    for (int32_t d = 0; d < Rank; d++)
    {
        reverser[d] = Rank - 1 - d;
    }

    // Start with the first input, reversed into column-major dimension order.
    TIn result = ins[0]->getTensor().shuffle(reverser);

    // Append each remaining input along the reversed axis.
    for (size_t i = 1; i < ins.size(); i++)
    {
        TIn in_reversed = ins[i]->getTensor().shuffle(reverser);
        // Materialize into a temporary first: result appears on both sides,
        // so assigning the concatenate expression directly would alias.
        TIn temp = result.concatenate(in_reversed, reversed_axis);
        result = temp;
    }
    // Undo the dimension reversal to get the row-major result.
    out->getTensor() = result.shuffle(reverser);

    return GraphNode::eval();
}
99
100template <int Rank, DType Dtype>
Kevin Chengacb550f2021-06-29 15:32:19 -0700101OpPad<Rank, Dtype>::OpPad(SubgraphTraverser* sgt_,
102 TosaAttributeBase* attribute_,
103 TosaQuantInfoBase* qinfo_,
104 uint64_t id_)
105 : GraphNode(sgt_, Op_PAD, id_)
Eric Kunzee5e26762020-10-13 16:11:07 -0700106{
107 setRequiredOperands(2, 1);
108 setRequiredRank(0, 6);
109
110 INIT_QINFO(Pad);
111}
112
113template <int Rank, DType Dtype>
114OpPad<Rank, Dtype>::~OpPad()
115{
116 if (qinfo)
117 delete qinfo;
118}
119
120template <int Rank, DType Dtype>
121int OpPad<Rank, Dtype>::checkTensorAttributes()
122{
123 if (validateRequiredOperands())
124 return 1;
125
126 if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
127 {
128 return 1;
129 }
130
131 // output and input must be the same types
132 if (inputs[0]->matchRankType(*outputs[0]))
133 {
134 printNodeValidationError("Failure to match input and output type and rank");
135 return 1;
136 }
137
138 in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
139 out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);
140 TosaReference::TensorTemplate<ETensor2<int32_t>>* paddings =
141 dynamic_cast<TosaReference::TensorTemplate<ETensor2<int32_t>>*>(inputs[1]);
142
143 for (int i = 0; i < Rank; i++)
144 {
145 paddings_array[i] = std::make_pair(paddings->getTensor()(i, 0), paddings->getTensor()(i, 1));
146 }
147
148 return 0;
149}
150
151template <int Rank, DType Dtype>
152int OpPad<Rank, Dtype>::eval()
153{
154 InEigenType pad_value = 0;
155 if (this->qinfo)
156 {
157 pad_value = (InEigenType)this->qinfo->input_zp();
158 }
159
160 this->out->getTensor() = this->in->getTensor().pad(this->paddings_array, pad_value);
161
162 return GraphNode::eval();
163}
164
165template <int InRank, int OutRank, DType Dtype>
Kevin Chengacb550f2021-06-29 15:32:19 -0700166OpReshape<InRank, OutRank, Dtype>::OpReshape(SubgraphTraverser* sgt_,
167 TosaAttributeBase* attribute_,
168 TosaQuantInfoBase* qinfo_,
169 uint64_t id_)
170 : GraphNode(sgt_, Op_RESHAPE, id_)
Eric Kunzee5e26762020-10-13 16:11:07 -0700171{
172 setRequiredOperands(1, 1);
173 setRequiredRank(0, 6);
174
175 INIT_ATTRIBUTE(Reshape);
176}
177
178template <int InRank, int OutRank, DType Dtype>
179OpReshape<InRank, OutRank, Dtype>::~OpReshape()
180{
181 if (attribute)
182 delete attribute;
183}
184
185template <int InRank, int OutRank, DType Dtype>
186int OpReshape<InRank, OutRank, Dtype>::checkTensorAttributes()
187{
188 uint32_t minusOneCount = 0;
189
190 if (validateRequiredOperands())
191 return 1;
192
193 if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
194 {
195 return 1;
196 }
197
198 // output and input must be the same types
199 if (inputs[0]->matchType(*outputs[0]))
200 {
201 printNodeValidationError("OpReshape: Input and output types must match");
202 return 1;
203 }
204
205 for (uint32_t d = 0; d < OutRank; d++)
206 {
207 if (attribute->shape()[d] == -1)
208 {
209 minusOneCount++;
210 }
211 }
212
213 if (minusOneCount > 1)
214 {
215 printNodeValidationError("OpReshape: new shape has more than one -1 dimension");
216 return 1;
217 }
218
219 in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
220 out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);
221
222 return 0;
223}
224
template <int InRank, int OutRank, DType Dtype>
int OpReshape<InRank, OutRank, Dtype>::eval()
{
    // Total number of input elements; divided down below to resolve a -1 dim.
    uint32_t remainingSize = in->getElementCount();

    // If there is a -1 dimension, find the remainder in one pass over the output shape
    for (int32_t d = 0; d < OutRank; d++)
    {
        if (attribute->shape()[d] != -1)
        {
            remainingSize = remainingSize / attribute->shape()[d];
        }
    }

    // Build the (dimension-reversed) target shape and the output reverser.
    for (int32_t d = 0; d < OutRank; d++)
    {
        array_shape[d] = attribute->shape()[OutRank - 1 - d];
        out_reverser[d] = OutRank - 1 - d;

        // Jam in the remainder here
        if (array_shape[d] == -1)
        {
            array_shape[d] = remainingSize;
        }
    }

    // in_reverser maps input dimension order [0..InRank-1] -> [InRank-1..0].
    for (int32_t d = 0; d < InRank; d++)
    {
        in_reverser[d] = InRank - 1 - d;
    }

    // Eigen Tensor is col-major, and we're referencing row-major result
    // need to reverse it to row-major before reshape, and perform another reverse afterward

    // input tensor rank 0 can't do .shuffle(), need to be handled otherwise
    TIn in_reversed;
    if (InRank > 1)
    {
        in_reversed = in->getTensor().shuffle(in_reverser);
    }
    else
    {
        // rank 0/1: dimension reversal is the identity, copy straight through
        in_reversed = in->getTensor();
    }

    // Reshape in the reversed (col-major) dimension space.
    TOut in_reshaped = in_reversed.reshape(array_shape);

    // output tensor can be rank 0, .reshape() and .shuffle() don't work, need to be handled otherwise
    if (OutRank > 1)
    {
        out->getTensor() = in_reshaped.shuffle(out_reverser);
    }
    else
    {
        out->getTensor() = in_reshaped;
    }

    return GraphNode::eval();
}
284
285template <int Rank, DType Dtype>
Kevin Chengacb550f2021-06-29 15:32:19 -0700286OpReverse<Rank, Dtype>::OpReverse(SubgraphTraverser* sgt_,
287 TosaAttributeBase* attribute_,
288 TosaQuantInfoBase* qinfo_,
289 uint64_t id_)
290 : GraphNode(sgt_, Op_REVERSE, id_)
Eric Kunzee5e26762020-10-13 16:11:07 -0700291{
292 setRequiredOperands(1, 1);
293 setRequiredRank(1, 6);
294
295 INIT_ATTRIBUTE(Axis);
296}
297
298template <int Rank, DType Dtype>
299OpReverse<Rank, Dtype>::~OpReverse()
300{
301 if (attribute)
302 delete attribute;
303}
304
305template <int Rank, DType Dtype>
306int OpReverse<Rank, Dtype>::checkTensorAttributes()
307{
308 if (validateRequiredOperands())
309 return 1;
310
311 if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
312 {
313 return 1;
314 }
315
316 // output and input must be the same types
317 if (inputs[0]->matchRankTypeShape(*outputs[0]))
318 {
319 printNodeValidationError("Failure to match input and output rank/type/shape");
320 return 1;
321 }
322
323 in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
324 out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);
325
326 ASSERT_MEM(in && out);
327
328 if (attribute->axis() < 0 || attribute->axis() >= inputs[0]->getRank())
329 {
330 printNodeValidationError("Reverse axis must between [0, input_rank - 1]");
331 return 1;
332 }
333
334 // transform list of axis into true or false list
335 // e.g. rank=4, axis=[1,2], reverse array would be [false, true, true, false]
336 for (int i = 0; i < Rank; i++)
337 {
338 reverse_array[i] = false;
339 }
340 reverse_array[attribute->axis()] = true;
341
342 return 0;
343}
344
345template <int Rank, DType Dtype>
346int OpReverse<Rank, Dtype>::eval()
347{
348 out->getTensor() = in->getTensor().reverse(reverse_array);
349
350 return GraphNode::eval();
351}
352
353template <int Rank, DType Dtype>
Kevin Chengacb550f2021-06-29 15:32:19 -0700354OpSlice<Rank, Dtype>::OpSlice(SubgraphTraverser* sgt_,
355 TosaAttributeBase* attribute_,
356 TosaQuantInfoBase* qinfo_,
357 uint64_t id_)
358 : GraphNode(sgt_, Op_SLICE, id_)
Eric Kunzee5e26762020-10-13 16:11:07 -0700359{
360 setRequiredOperands(1, 1);
361 setRequiredRank(0, 6);
362
363 INIT_ATTRIBUTE(Slice);
364}
365
366template <int Rank, DType Dtype>
367OpSlice<Rank, Dtype>::~OpSlice()
368{
369 if (attribute)
370 delete attribute;
371}
372
373template <int Rank, DType Dtype>
374int OpSlice<Rank, Dtype>::checkTensorAttributes()
375{
376 if (validateRequiredOperands())
377 return 1;
378
379 if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
380 {
381 return 1;
382 }
383
384 // output and input must be the same types
385 if (inputs[0]->matchType(*outputs[0]))
386 {
387 printNodeValidationError("Failure to match input and output type");
388 return 1;
389 }
390
391 in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
392 out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);
393
394 for (size_t i = 0; i < attribute->begin().size(); i++)
395 {
396 begin_array[i] = attribute->begin()[i];
397 }
398
399 for (size_t i = 0; i < attribute->size().size(); i++)
400 {
401 if (attribute->size()[i] != 0)
402 {
403 size_array[i] = attribute->size()[i];
404 }
405 else
406 {
407 // Tensorflow assigns a zero size to dimensions that are kept
408 // Eigen expects size to be the full size of the dimension
409 size_array[i] = in->getTensor().dimension(0);
410 }
411 }
412
413 return 0;
414}
415
416template <int Rank, DType Dtype>
417int OpSlice<Rank, Dtype>::eval()
418{
419 out->getTensor() = in->getTensor().slice(begin_array, size_array);
420
421 return GraphNode::eval();
422}
423
424template <int Rank, DType Dtype>
Kevin Chengacb550f2021-06-29 15:32:19 -0700425OpTileBase<Rank, Dtype>::OpTileBase(SubgraphTraverser* sgt_,
426 TosaAttributeBase* attribute_,
427 TosaQuantInfoBase* qinfo_,
428 uint64_t id_)
429 : GraphNode(sgt_, Op_TILE, id_)
Eric Kunzee5e26762020-10-13 16:11:07 -0700430{
431 setRequiredOperands(1, 1);
432 setRequiredRank(0, 6);
433
434 INIT_ATTRIBUTE(Tile);
435}
436
437template <int Rank, DType Dtype>
438OpTileBase<Rank, Dtype>::~OpTileBase()
439{
440 if (attribute)
441 delete attribute;
442}
443
444template <int Rank, DType Dtype>
445int OpTileBase<Rank, Dtype>::checkTensorAttributes()
446{
447 if (validateRequiredOperands())
448 return 1;
449
450 if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
451 {
452 return 1;
453 }
454
455 // output and input must be the same ranks and types
456 if (inputs[0]->matchRankType(*outputs[0]))
457 {
458 printNodeValidationError("Failure to match input and output rank or type");
459 return 1;
460 }
461
462 in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
463 out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);
464
465 if (attribute->multiples().size() != Rank)
466 {
467 printNodeValidationError("1D list 'multiples' must have size equal to input rank");
468 return 1;
469 }
470
471 for (int32_t d = 0; d < Rank; d++)
472 {
473 if (in->getShape()[d] * attribute->multiples()[d] != out->getShape()[d])
474 {
475 printNodeValidationError("unexpected output shape");
476 return 1;
477 }
478 }
479
480 return 0;
481}
482
template <int Rank, DType Dtype>
int OpTile<Rank, Dtype>::eval()
{
    // primary template shouldn't be called
    // Rank-specific partial specializations (ranks 1-4 below) provide the
    // real implementations; reaching this overload means the requested rank
    // has no specialization yet.
    FATAL_ERROR("OpTile rank=%i, dtype=%s: not implemented yet", Rank, EnumNamesDType()[Dtype]);
}
489
490template <DType Dtype>
491int OpTile<1, Dtype>::eval()
492{
493 for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
494 {
495 int32_t id0 = od0 % this->in->getShape()[0];
496 this->out->getTensor()(od0) = this->in->getTensor()(id0);
497 }
498
499 return GraphNode::eval();
500}
501
502template <DType Dtype>
503int OpTile<2, Dtype>::eval()
504{
505 for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
506 {
507 int32_t id0 = od0 % this->in->getShape()[0];
508 for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
509 {
510 int32_t id1 = od1 % this->in->getShape()[1];
511 this->out->getTensor()(od0, od1) = this->in->getTensor()(id0, id1);
512 }
513 }
514
515 return GraphNode::eval();
516}
517
518template <DType Dtype>
519int OpTile<3, Dtype>::eval()
520{
521 for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
522 {
523 int32_t id0 = od0 % this->in->getShape()[0];
524 for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
525 {
526 int32_t id1 = od1 % this->in->getShape()[1];
527 for (int32_t od2 = 0; od2 < this->out->getShape()[2]; od2++)
528 {
529 int32_t id2 = od2 % this->in->getShape()[2];
530 this->out->getTensor()(od0, od1, od2) = this->in->getTensor()(id0, id1, id2);
531 }
532 }
533 }
534
535 return GraphNode::eval();
536}
537
538template <DType Dtype>
539int OpTile<4, Dtype>::eval()
540{
541 for (int32_t od0 = 0; od0 < this->out->getShape()[0]; od0++)
542 {
543 int32_t id0 = od0 % this->in->getShape()[0];
544 for (int32_t od1 = 0; od1 < this->out->getShape()[1]; od1++)
545 {
546 int32_t id1 = od1 % this->in->getShape()[1];
547 for (int32_t od2 = 0; od2 < this->out->getShape()[2]; od2++)
548 {
549 int32_t id2 = od2 % this->in->getShape()[2];
550 for (int32_t od3 = 0; od3 < this->out->getShape()[3]; od3++)
551 {
552 int32_t id3 = od3 % this->in->getShape()[3];
553 this->out->getTensor()(od0, od1, od2, od3) = this->in->getTensor()(id0, id1, id2, id3);
554 }
555 }
556 }
557 }
558
559 return GraphNode::eval();
560}
561
562template <int Rank, DType Dtype>
Kevin Chengacb550f2021-06-29 15:32:19 -0700563OpTranspose<Rank, Dtype>::OpTranspose(SubgraphTraverser* sgt_,
564 TosaAttributeBase* attribute_,
565 TosaQuantInfoBase* qinfo_,
566 uint64_t id_)
567 : GraphNode(sgt_, Op_TRANSPOSE, id_)
Eric Kunzee5e26762020-10-13 16:11:07 -0700568{
569 setRequiredOperands(2, 1);
570 setRequiredRank(0, 6);
571}
572
573template <int Rank, DType Dtype>
574OpTranspose<Rank, Dtype>::~OpTranspose()
575{}
576
577template <int Rank, DType Dtype>
578int OpTranspose<Rank, Dtype>::checkTensorAttributes()
579{
580 if (validateRequiredOperands())
581 return 1;
582
583 if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0]))
584 {
585 return 1;
586 }
587
588 // output and input must be the same types
589 if (inputs[0]->matchRankType(*outputs[0]))
590 {
591 printNodeValidationError("Failure to match input and output rank and type");
592 return 1;
593 }
594
595 if (inputs[0]->getElementCount() != outputs[0]->getElementCount())
596 {
597 printNodeValidationError("Failure to match input and output total element count");
598 return 1;
599 }
600
601 in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]);
602 out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]);
603 perm_tensor = dynamic_cast<TosaReference::TensorTemplate<ETensor1<int32_t>>*>(inputs[1]);
604
605 return 0;
606}
607
608template <int Rank, DType Dtype>
609int OpTranspose<Rank, Dtype>::eval()
610{
611 for (int32_t d = 0; d < Rank; d++)
612 {
613 perm_array[d] = this->perm_tensor->getTensor().data()[d];
614 }
615
616 out->getTensor() = in->getTensor().shuffle(perm_array);
617
618 return GraphNode::eval();
619}
620
// template explicit instantiation
// Each operator is instantiated for every dtype it supports per this file's
// instantiation macros: FLOAT, INT8, INT16, INT32 and BOOL.

// CONCAT: ranks 1-6 (matches setRequiredRank(1, 6) in the constructor)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, FLOAT)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, INT8)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, INT16)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, INT32)
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpConcat, BOOL)

// PAD: ranks 1-6
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, FLOAT);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpPad, BOOL);

// RESHAPE: instantiated over (input rank, output rank) pairs
DEF_INSTANTIATE_RESHAPE(OpReshape, FLOAT);
DEF_INSTANTIATE_RESHAPE(OpReshape, INT8);
DEF_INSTANTIATE_RESHAPE(OpReshape, INT16);
DEF_INSTANTIATE_RESHAPE(OpReshape, INT32);
DEF_INSTANTIATE_RESHAPE(OpReshape, BOOL);

// REVERSE: ranks 1-6
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, FLOAT);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, INT32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReverse, BOOL);

// SLICE: ranks 0-6
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpSlice, FLOAT);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpSlice, INT8);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpSlice, INT16);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpSlice, INT32);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpSlice, BOOL);

// TILE: ranks 0-6 (only ranks 1-4 have eval() specializations above;
// other ranks hit the FATAL_ERROR primary template)
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpTile, FLOAT);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpTile, INT8);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpTile, INT16);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpTile, INT32);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpTile, BOOL);

// TRANSPOSE: ranks 0-6
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpTranspose, FLOAT);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpTranspose, INT8);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpTranspose, INT16);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpTranspose, INT32);
DEF_INSTANTIATE_RANK0_6_ONE_RANK_ONE_TYPE(OpTranspose, BOOL);