Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 1 | |
James Ward | 8b39043 | 2022-08-12 20:48:56 +0100 | [diff] [blame] | 2 | // Copyright (c) 2020-2022, ARM Limited. |
Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 3 | // |
| 4 | // Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | // you may not use this file except in compliance with the License. |
| 6 | // You may obtain a copy of the License at |
| 7 | // |
| 8 | // http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | // |
| 10 | // Unless required by applicable law or agreed to in writing, software |
| 11 | // distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | // See the License for the specific language governing permissions and |
| 14 | // limitations under the License. |
| 15 | |
| 16 | #include "reduction.h" |
| 17 | #include "quant_util.h" |
| 18 | |
| 19 | using namespace TosaReference; |
| 20 | using namespace Eigen; |
| 21 | using namespace tosa; |
| 22 | |
// Common base node for all TOSA reduction operators. Stores the Axis
// attribute and enforces the operand/rank constraints shared by every
// reduce op; per-op validation continues in checkTensorAttributes().
template <int Rank, DType Dtype>
ReduceNode<Rank, Dtype>::ReduceNode(SubgraphTraverser* sgt_, const Op& op_, TosaAttributeBase* attribute_, uint64_t id_)
    : GraphNode(sgt_, op_, id_)
{
    setRequiredOperands(1, 1); // exactly one input tensor and one output tensor
    setRequiredRank(0, 4);     // reductions here accept inputs of rank 0..4

    // Copies/parses the Axis attribute into `attribute` (owned by this node;
    // released in the destructor).
    INIT_ATTRIBUTE(Axis);
}
| 32 | |
| 33 | template <int Rank, DType Dtype> |
| 34 | ReduceNode<Rank, Dtype>::~ReduceNode() |
| 35 | { |
| 36 | if (attribute) |
| 37 | delete attribute; |
| 38 | } |
| 39 | |
| 40 | template <int Rank, DType Dtype> |
| 41 | int ReduceNode<Rank, Dtype>::checkTensorAttributes() |
| 42 | { |
| 43 | if (validateRequiredOperands()) |
| 44 | return 1; |
| 45 | |
| 46 | if (validateRequiredRank(inputs[0]) || validateRequiredRank(outputs[0])) |
| 47 | { |
| 48 | return 1; |
| 49 | } |
| 50 | |
| 51 | if (attribute->axis() < 0 || attribute->axis() >= inputs[0]->getRank()) |
| 52 | { |
Kevin Cheng | ec5586c | 2021-10-06 14:37:37 -0700 | [diff] [blame] | 53 | printNodeValidationError("ReduceOp: axis must between [0, input_rank - 1]"); |
Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 54 | return 1; |
| 55 | } |
| 56 | |
Kevin Cheng | ec5586c | 2021-10-06 14:37:37 -0700 | [diff] [blame] | 57 | if (inputs[0]->matchRankType(*outputs[0])) |
Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 58 | { |
Kevin Cheng | ec5586c | 2021-10-06 14:37:37 -0700 | [diff] [blame] | 59 | printNodeValidationError("ReduceOp: Input and output tensor ranks must match"); |
| 60 | return 1; |
| 61 | } |
| 62 | |
| 63 | if (outputs[0]->getShape()[attribute->axis()] != 1) |
| 64 | { |
| 65 | printNodeValidationError("ReduceOp: Output tensor shape[axis] needs to be 1."); |
Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 66 | return 1; |
| 67 | } |
| 68 | |
| 69 | in = dynamic_cast<TosaReference::TensorTemplate<TIn>*>(inputs[0]); |
| 70 | out = dynamic_cast<TosaReference::TensorTemplate<TOut>*>(outputs[0]); |
| 71 | |
Kevin Cheng | ec5586c | 2021-10-06 14:37:37 -0700 | [diff] [blame] | 72 | if ((!in) || (!out)) |
| 73 | { |
| 74 | printNodeValidationError("ReduceOp: Input or output fail to cast to Eigen tensor since rank/type not expected"); |
| 75 | return 1; |
| 76 | } |
Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 77 | |
| 78 | dims[0] = this->attribute->axis(); |
| 79 | |
| 80 | return 0; |
| 81 | } |
| 82 | |
// These two custom reducers work around a bug introduced in Eigen between
// 3.3.7 and 3.4.0: the built-in .all() / .any() operations now fail on an
// assert in TensorMorphing.h:150, apparently caused by incorrect data being
// passed internally as m_impl.
//
// Logical-AND reduction: the accumulator stays true only while every reduced
// element is true.
struct AllReducer {
    static const bool PacketAccess = false;
    void reduce(const bool val, bool* accum) {
        // Equivalent to *accum = *accum && val: one false element latches
        // the accumulator to false.
        if (!val)
        {
            *accum = false;
        }
    }
    // Identity element for AND.
    bool initialize() const { return true; }
    // The accumulated value is already the final answer.
    bool finalize(const bool accum) const { return accum; }
};
// Logical-OR reduction (companion to AllReducer, same Eigen 3.4 workaround):
// the accumulator becomes true as soon as any reduced element is true.
struct AnyReducer {
    static const bool PacketAccess = false;
    void reduce(const bool val, bool* accum) {
        // Equivalent to *accum = *accum || val: one true element latches
        // the accumulator to true.
        if (val)
        {
            *accum = true;
        }
    }
    // Identity element for OR.
    bool initialize() const { return false; }
    // The accumulated value is already the final answer.
    bool finalize(const bool accum) const { return accum; }
};
| 102 | |
Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 103 | template <int Rank, DType Dtype> |
| 104 | int OpReduceAll<Rank, Dtype>::eval() |
| 105 | { |
James Ward | 24dbc42 | 2022-10-19 12:20:31 +0100 | [diff] [blame] | 106 | this->out->getTensor() = this->in->getTensor().reduce(this->dims, AllReducer()).reshape(this->out->getTensor().dimensions()); |
Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 107 | |
| 108 | return GraphNode::eval(); |
| 109 | } |
| 110 | |
| 111 | template <int Rank, DType Dtype> |
| 112 | int OpReduceAny<Rank, Dtype>::eval() |
| 113 | { |
James Ward | 24dbc42 | 2022-10-19 12:20:31 +0100 | [diff] [blame] | 114 | this->out->getTensor() = this->in->getTensor().reduce(this->dims, AnyReducer()).reshape(this->out->getTensor().dimensions()); |
Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 115 | |
| 116 | return GraphNode::eval(); |
| 117 | } |
| 118 | |
| 119 | template <int Rank, DType Dtype> |
| 120 | int OpReduceMax<Rank, Dtype>::eval() |
| 121 | { |
| 122 | this->out->getTensor() = this->in->getTensor().maximum(this->dims).reshape(this->out->getTensor().dimensions()); |
| 123 | |
| 124 | return GraphNode::eval(); |
| 125 | } |
| 126 | |
| 127 | template <int Rank, DType Dtype> |
| 128 | int OpReduceMin<Rank, Dtype>::eval() |
| 129 | { |
| 130 | this->out->getTensor() = this->in->getTensor().minimum(this->dims).reshape(this->out->getTensor().dimensions()); |
| 131 | |
| 132 | return GraphNode::eval(); |
| 133 | } |
| 134 | |
| 135 | template <int Rank, DType Dtype> |
| 136 | int OpReduceProduct<Rank, Dtype>::eval() |
| 137 | { |
James Ward | 24dbc42 | 2022-10-19 12:20:31 +0100 | [diff] [blame] | 138 | switch(Dtype) |
| 139 | { |
| 140 | case DType_FP16: |
| 141 | case DType_BF16: |
| 142 | this->out->getTensor() = this->in->getTensor().prod(this->dims).reshape(this->out->getTensor().dimensions()).unaryExpr([](float f){return fpTrunc<Dtype>(f);}); |
| 143 | break; |
| 144 | default: |
| 145 | this->out->getTensor() = this->in->getTensor().prod(this->dims).reshape(this->out->getTensor().dimensions()); |
| 146 | break; |
| 147 | } |
Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 148 | |
| 149 | return GraphNode::eval(); |
| 150 | } |
| 151 | |
| 152 | template <int Rank, DType Dtype> |
| 153 | int OpReduceSum<Rank, Dtype>::eval() |
| 154 | { |
James Ward | 24dbc42 | 2022-10-19 12:20:31 +0100 | [diff] [blame] | 155 | switch(Dtype) |
| 156 | { |
| 157 | case DType_FP16: |
| 158 | case DType_BF16: |
| 159 | this->out->getTensor() = this->in->getTensor().sum(this->dims).reshape(this->out->getTensor().dimensions()).unaryExpr([](float f){return fpTrunc<Dtype>(f);}); |
| 160 | break; |
| 161 | default: |
| 162 | this->out->getTensor() = this->in->getTensor().sum(this->dims).reshape(this->out->getTensor().dimensions()); |
| 163 | break; |
| 164 | } |
Eric Kunze | e5e2676 | 2020-10-13 16:11:07 -0700 | [diff] [blame] | 165 | |
| 166 | return GraphNode::eval(); |
| 167 | } |
| 168 | |
// Eigen reducer for the integer REDUCE_SUM: accumulates in 64 bits and flags
// any intermediate result outside the signed 32-bit range via the REQUIRE
// macro, instead of silently wrapping. The stored SubgraphTraverser is
// presumably consulted by REQUIRE to record the error — it is otherwise
// unused here.
struct SumRequiresReducer {
    static const bool PacketAccess = false;
    SumRequiresReducer(SubgraphTraverser* parent_sgt) : parent_sgt(parent_sgt) {}
    // Fold one element into the accumulator with an overflow check.
    void reduce(const int32_t val, int32_t* accum) {
        // Widen to 64 bits first so the addition itself cannot overflow
        // (signed 32-bit overflow would be undefined behavior).
        int64_t res_in_64 = static_cast<int64_t>(*accum) + val;
        int64_t i32_max_in_64 = static_cast<int64_t>(std::numeric_limits<int32_t>::max());
        int64_t i32_min_in_64 = static_cast<int64_t>(std::numeric_limits<int32_t>::min());
        // Range-check before narrowing back to i32.
        REQUIRE(res_in_64 <= i32_max_in_64 && res_in_64 >= i32_min_in_64, "OpReduceSum: result not in i32 range");
        *accum = static_cast<int32_t>(res_in_64);
    }
    // Identity element for addition.
    int32_t initialize() const { return 0; }
    // The accumulated value is already the final answer.
    int32_t finalize(const int32_t accum) const { return accum; }

private:
    SubgraphTraverser* parent_sgt;
};
| 185 | |
| 186 | template <int Rank, DType Dtype> |
| 187 | int OpReduceSumInt<Rank, Dtype>::eval() |
| 188 | { |
| 189 | this->out->getTensor() = this->in->getTensor().reduce(this->dims, SumRequiresReducer(this->parent_sgt)).reshape(this->out->getTensor().dimensions()); |
| 190 | |
| 191 | return GraphNode::eval(); |
| 192 | } |
| 193 | |
// template explicit instantiation
// Boolean reductions.
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceAll, BOOL);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceAny, BOOL);

// Min/max support both floating-point and integer element types.
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMax, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMax, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMax, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMax, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMax, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMax, INT32);

DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMin, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMin, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMin, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMin, INT8);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMin, INT16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceMin, INT32);

// Product is floating-point only.
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceProduct, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceProduct, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceProduct, FP32);

// Floating-point sum; the INT32 sum uses the range-checked OpReduceSumInt.
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceSum, FP16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceSum, BF16);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceSum, FP32);
DEF_INSTANTIATE_RANK1_6_ONE_RANK_ONE_TYPE(OpReduceSumInt, INT32);