/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/runtime/NEON/functions/NELSTMLayer.h"

#include "arm_compute/core/Utils.h"
#include "arm_compute/core/Validate.h"
#include "arm_compute/core/utils/misc/InfoHelpers.h"
#include "arm_compute/core/utils/misc/ShapeCalculator.h"
#include "arm_compute/core/utils/quantization/AsymmHelpers.h"
#include "arm_compute/runtime/common/LSTMParams.h"

namespace arm_compute
{
using namespace arm_compute::misc::shape_calculator;
using namespace arm_compute::utils::info_helpers;

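// Minimal usage sketch (illustrative only; the tensor names below are hypothetical and the call
// simply mirrors the configure() signature defined in this file):
//
//   LSTMParams<ITensor> lstm_params; // peephole/projection/layer-norm tensors set as needed
//   NELSTMLayer         lstm;
//   lstm.configure(&input, &input_to_forget_w, &input_to_cell_w, &input_to_output_w,
//                  &recurrent_to_forget_w, &recurrent_to_cell_w, &recurrent_to_output_w,
//                  &forget_gate_bias, &cell_bias, &output_gate_bias,
//                  &output_state_in, &cell_state_in,
//                  &scratch_buffer, &output_state_out, &cell_state_out, &output,
//                  lstm_params, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::TANH), 0.f, 0.f);
//   lstm.run();
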
NELSTMLayer::~NELSTMLayer() = default;

NELSTMLayer::NELSTMLayer(std::shared_ptr<IMemoryManager> memory_manager)
    : _memory_group(std::move(memory_manager)), _fully_connected_input_gate(), _accum_input_gate1(), _subtract_input_gate(), _pixelwise_mul_input_gate(), _activation_input_gate(),
      _fully_connected_forget_gate(), _accum_forget_gate1(), _pixelwise_mul_forget_gate(), _activation_forget_gate(), _fully_connected_cell_state(), _gemm_cell_state1(), _transpose_cell_state(),
      _accum_cell_state1(), _accum_cell_state2(), _pixelwise_mul_cell_state1(), _activation_cell_state(), _cell_clip(), _pixelwise_mul_cell_state2(), _fully_connected_output(),
      _pixelwise_mul_output_state1(), _accum_output1(), _activation_output(), _activation_output_state(), _pixelwise_mul_output_state2(), _fully_connected_output_state(), _projection_clip(),
      _copy_cell_state(), _copy_output(), _concat_scratch_buffer(), _concat_inputs_forget_gate(), _concat_weights_forget_gate(), _concat_weights_input_gate(), _concat_weights_output(),
      _mean_std_norm_input_gate(), _pixelwise_mul_input_gate_coeff(), _accum_input_gate_bias(), _mean_std_norm_forget_gate(), _pixelwise_mul_forget_gate_coeff(), _accum_forget_gate_bias(),
      _mean_std_norm_cell_gate(), _pixelwise_mul_cell_gate_coeff(), _accum_cell_gate_bias(), _mean_std_norm_output_gate(), _pixelwise_mul_output_gate_coeff(), _accum_output_gate_bias(), _input_gate_out1(),
      _input_gate_out2(), _input_gate_out3(), _input_gate_out4(), _forget_gate_out1(), _forget_gate_out2(), _forget_gate_out3(), _forget_gate_out4(), _forget_gate_out5(), _forget_gate_out6(),
      _cell_state_out1(), _cell_state_out2(), _cell_state_out3(), _cell_state_out4(), _cell_state_out5(), _output1(), _output2(), _output3(), _output4(), _cell_state_activation(), _output_state1(), _ones(),
      _input_layer_norm_out1(), _input_layer_norm_out2(), _forget_layer_norm_out1(), _forget_layer_norm_out2(), _cell_layer_norm_out1(), _cell_layer_norm_out2(), _output_layer_norm_out1(),
      _output_layer_norm_out2(), _run_peephole_opt(false), _run_cifg_opt(false), _perform_cell_clipping(false), _has_projection_weights(false), _perform_projection_clipping(false), _is_prepared(false),
      _is_layer_norm_lstm(false)
{
}

void NELSTMLayer::configure(const ITensor *input,
                            const ITensor *input_to_forget_weights, const ITensor *input_to_cell_weights, const ITensor *input_to_output_weights,
                            const ITensor *recurrent_to_forget_weights, const ITensor *recurrent_to_cell_weights, const ITensor *recurrent_to_output_weights,
                            const ITensor *forget_gate_bias, const ITensor *cell_bias, const ITensor *output_gate_bias,
                            const ITensor *output_state_in, const ITensor *cell_state_in,
                            ITensor *scratch_buffer, ITensor *output_state_out, ITensor *cell_state_out, ITensor *output,
                            const LSTMParams<ITensor> &lstm_params, const ActivationLayerInfo &activation_info, float cell_threshold, float projection_threshold)
{
    ARM_COMPUTE_ERROR_ON_NULLPTR(input,
                                 input_to_forget_weights, input_to_cell_weights, input_to_output_weights,
                                 recurrent_to_forget_weights, recurrent_to_cell_weights, recurrent_to_output_weights,
                                 forget_gate_bias, cell_bias, output_gate_bias,
                                 output_state_in, cell_state_in,
                                 scratch_buffer, output_state_out, cell_state_out, output);

    _is_layer_norm_lstm = lstm_params.use_layer_norm();

    // Set lstm parameters
    LSTMParams<ITensorInfo> lstm_params_info{};
    build_lstm_params_tensor_info(lstm_params, &lstm_params_info);

    // Validate
    ARM_COMPUTE_ERROR_THROW_ON(NELSTMLayer::validate(input->info(), input_to_forget_weights->info(),
                                                     input_to_cell_weights->info(), input_to_output_weights->info(),
                                                     recurrent_to_forget_weights->info(), recurrent_to_cell_weights->info(), recurrent_to_output_weights->info(),
                                                     forget_gate_bias->info(), cell_bias->info(), output_gate_bias->info(),
                                                     output_state_in->info(), cell_state_in->info(),
                                                     scratch_buffer->info(), output_state_out->info(), cell_state_out->info(), output->info(),
                                                     lstm_params_info, activation_info, cell_threshold, projection_threshold));

    const TensorShape cell_state_shape = cell_state_in->info()->tensor_shape();

    // Configure block that calculates the forget gate
    // forget_gate = Activation(input * input_to_forget_weights + output_state_in * recurrent_to_forget_weights + PixelWiseMul(cell_state, cell_to_forget_weights) + forget_gate_bias)
    // We optimize this as follows:
    // forget_gate = Activation( (input,output_state_in) * (input_to_forget_weights,recurrent_to_forget_weights) + PixelWiseMul(cell_state, cell_to_forget_weights) + forget_gate_bias)
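    // Concatenating (input, output_state_in) and (input_to_forget_weights, recurrent_to_forget_weights)
    // along the x-axis lets a single fully connected layer compute both matrix products in one call.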
    _forget_gate_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _forget_gate_out3.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _forget_gate_out5.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

    std::vector<const ITensor *> inputs_vector;
    inputs_vector.emplace_back(input);
    inputs_vector.emplace_back(output_state_in);

    _memory_group.manage(&_forget_gate_out2);
    _concat_inputs_forget_gate.configure(inputs_vector, &_forget_gate_out2, Window::DimX);

    std::vector<const ITensor *> weights_vector;

    weights_vector.emplace_back(input_to_forget_weights);
    weights_vector.emplace_back(recurrent_to_forget_weights);

    _concat_weights_forget_gate.configure(weights_vector, &_forget_gate_out6, Window::DimX);

    _memory_group.manage(&_forget_gate_out5);
    _fully_connected_forget_gate.configure(&_forget_gate_out2, &_forget_gate_out6, (_is_layer_norm_lstm) ? nullptr : forget_gate_bias, &_forget_gate_out5);
    _memory_group.manage(&_forget_gate_out1);
    _memory_group.manage(&_forget_gate_out3);
    _forget_gate_out6.allocator()->allocate();

    Tensor *forget_gate_out = &_forget_gate_out5;
    if(lstm_params.has_peephole_opt())
    {
        _forget_gate_out4.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

        _run_peephole_opt = true;
        _memory_group.manage(&_forget_gate_out4);
        _pixelwise_mul_forget_gate.configure(cell_state_in, lstm_params.cell_to_forget_weights(), &_forget_gate_out4, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
        _accum_forget_gate1.configure(&_forget_gate_out5, &_forget_gate_out4, &_forget_gate_out3, ConvertPolicy::SATURATE);
        _forget_gate_out4.allocator()->allocate();
        _forget_gate_out5.allocator()->allocate();
        forget_gate_out = &_forget_gate_out3;
    }
    else
    {
        _forget_gate_out3.allocator()->allocate();
    }
    if(_is_layer_norm_lstm)
    {
        _forget_layer_norm_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _forget_layer_norm_out2.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _memory_group.manage(&_forget_layer_norm_out1);
        _memory_group.manage(&_forget_layer_norm_out2);
        _mean_std_norm_forget_gate.configure(forget_gate_out);
        _pixelwise_mul_forget_gate_coeff.configure(forget_gate_out, lstm_params.forget_layer_norm_weights(), &_forget_layer_norm_out1, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
        // forget_gate_out is going to be reassigned, so allocate the tensor that it was assigned to before
        forget_gate_out->allocator()->allocate();
        _accum_forget_gate_bias.configure(&_forget_layer_norm_out1, forget_gate_bias, &_forget_layer_norm_out2, ConvertPolicy::SATURATE);
        _forget_layer_norm_out1.allocator()->allocate();
        forget_gate_out = &_forget_layer_norm_out2;
    }
    _activation_forget_gate.configure(forget_gate_out, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC));

    // Configure block that calculates the input gate
    // input_gate = Activation(input * input_to_input_weights + output_state * recurrent_to_input_weights + PixelWiseMul(cell_state, cell_to_input_weights) + input_gate_bias), without CIFG
    // input_gate = 1 - forget_gate, with CIFG
    // We optimize this as follows:
    // input_gate = Activation((input,output_state) * (input_to_input_weights,recurrent_to_input_weights) + PixelWiseMul(cell_state, cell_to_input_weights) + input_gate_bias), without CIFG
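    // With CIFG (coupled input-forget gate) the input gate is not computed from its own weights;
    // it is derived from the forget gate as 1 - forget_gate, using the _ones tensor filled at run time.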
    _input_gate_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    Tensor *input_gate_out = &_input_gate_out1;
    if(lstm_params.has_cifg_opt())
    {
        _memory_group.manage(&_input_gate_out1);
        _ones.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _subtract_input_gate.configure(&_ones, forget_gate_out, &_input_gate_out1, ConvertPolicy::SATURATE);
        _ones.allocator()->allocate();
        _run_cifg_opt = true;
    }
    else
    {
        _input_gate_out3.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _input_gate_out4.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

        std::vector<const ITensor *> lstm_weights;
        lstm_weights.emplace_back(lstm_params.input_to_input_weights());
        lstm_weights.emplace_back(lstm_params.recurrent_to_input_weights());

        _concat_weights_input_gate.configure(lstm_weights, &_input_gate_out2, Window::DimX);

        _memory_group.manage(&_input_gate_out1);
        _memory_group.manage(&_input_gate_out4);

        _fully_connected_input_gate.configure(&_forget_gate_out2, &_input_gate_out2, (_is_layer_norm_lstm) ? nullptr : lstm_params.input_gate_bias(), &_input_gate_out3);
        _input_gate_out2.allocator()->allocate();
        input_gate_out = &_input_gate_out3;

        if(_run_peephole_opt)
        {
            _memory_group.manage(&_input_gate_out4);
            _pixelwise_mul_input_gate.configure(cell_state_in, lstm_params.cell_to_input_weights(), &_input_gate_out4, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
            _accum_input_gate1.configure(&_input_gate_out3, &_input_gate_out4, &_input_gate_out1, ConvertPolicy::SATURATE);
            _input_gate_out3.allocator()->allocate();
            _input_gate_out4.allocator()->allocate();
            input_gate_out = &_input_gate_out1;
        }
        else
        {
            _input_gate_out1.allocator()->allocate();
        }

        if(_is_layer_norm_lstm)
        {
            _input_layer_norm_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
            _input_layer_norm_out2.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
            _memory_group.manage(&_input_layer_norm_out1);
            _memory_group.manage(&_input_layer_norm_out2);
            _mean_std_norm_input_gate.configure(input_gate_out);
            _pixelwise_mul_input_gate_coeff.configure(input_gate_out, lstm_params.input_layer_norm_weights(), &_input_layer_norm_out1, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
            // input_gate_out is going to be reassigned, so allocate the tensor that it was assigned to before
            input_gate_out->allocator()->allocate();
            _accum_input_gate_bias.configure(&_input_layer_norm_out1, lstm_params.input_gate_bias(), &_input_layer_norm_out2, ConvertPolicy::SATURATE);
            _input_layer_norm_out1.allocator()->allocate();
            input_gate_out = &_input_layer_norm_out2;
        }
        _activation_input_gate.configure(input_gate_out, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC));
    }

    // Configure block that calculates the cell state
    // cell_state = Clip((PixelwiseMul(input_gate, Activation(input * input_to_cell_weights + output_state_in * recurrent_to_cell_weights + cell_bias)) + PixelwiseMul(forget_gate, cell_state)), cell_threshold)
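    // The recurrent term is computed as a GEMM against the transposed recurrent_to_cell_weights and
    // accumulated with the input term, before optional layer normalization, the cell activation, and
    // the gating by input_gate and forget_gate.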
    TensorShape cell_state1_shape = compute_transposed_shape(*recurrent_to_output_weights->info());
    _cell_state_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _cell_state_out2.allocator()->init(TensorInfo(cell_state1_shape, 1, input->info()->data_type()));
    _cell_state_out3.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _cell_state_out4.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _cell_state_out5.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

    _memory_group.manage(&_cell_state_out1);
    _fully_connected_cell_state.configure(input, input_to_cell_weights, (_is_layer_norm_lstm) ? nullptr : cell_bias, &_cell_state_out1);
    _memory_group.manage(&_cell_state_out2);
    _transpose_cell_state.configure(recurrent_to_cell_weights, &_cell_state_out2);
    _memory_group.manage(&_cell_state_out3);
    _gemm_cell_state1.configure(output_state_in, &_cell_state_out2, nullptr, &_cell_state_out3, 1.f, 0.f);
    _cell_state_out2.allocator()->allocate();
    _memory_group.manage(&_cell_state_out4);
    _accum_cell_state1.configure(&_cell_state_out1, &_cell_state_out3, &_cell_state_out4, ConvertPolicy::SATURATE);
    Tensor *cell_state_out_ptr = &_cell_state_out4;
    if(_is_layer_norm_lstm)
    {
        _cell_layer_norm_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _cell_layer_norm_out2.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _memory_group.manage(&_cell_layer_norm_out1);
        _memory_group.manage(&_cell_layer_norm_out2);
        _mean_std_norm_cell_gate.configure(cell_state_out_ptr);
        _pixelwise_mul_cell_gate_coeff.configure(cell_state_out_ptr, lstm_params.cell_layer_norm_weights(), &_cell_layer_norm_out1, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
        // cell_state_out_ptr is going to be reassigned, so allocate the tensor that it was assigned to before
        cell_state_out_ptr->allocator()->allocate();
        _accum_cell_gate_bias.configure(&_cell_layer_norm_out1, cell_bias, &_cell_layer_norm_out2, ConvertPolicy::SATURATE);
        _cell_layer_norm_out1.allocator()->allocate();
        cell_state_out_ptr = &_cell_layer_norm_out2;
    }
    _activation_cell_state.configure(cell_state_out_ptr, nullptr, activation_info);
    _memory_group.manage(&_cell_state_out5);
    _pixelwise_mul_cell_state1.configure(cell_state_out_ptr, input_gate_out, &_cell_state_out5, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
    cell_state_out_ptr->allocator()->allocate();
    _pixelwise_mul_cell_state2.configure(forget_gate_out, cell_state_in, &_cell_state_out3, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
    _accum_cell_state2.configure(&_cell_state_out5, &_cell_state_out3, &_cell_state_out1, ConvertPolicy::SATURATE);
    _cell_state_out3.allocator()->allocate();
    _cell_state_out5.allocator()->allocate();
    // Perform clipping
    if(cell_threshold != 0.f)
    {
        _perform_cell_clipping = true;
        _cell_clip.configure(&_cell_state_out1, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, -cell_threshold, cell_threshold));
    }

    // Configure block that calculates the output
    // output_state_out = Activation(input * input_to_output_weights + output_state_in * recurrent_to_output_weights + PixelWiseMul(cell_state, cell_to_output_weights) + output_gate_bias)
    // We optimize this as follows:
    // output_state_out = Activation( (input,output_state_in) * (input_to_output_weights, recurrent_to_output_weights) + PixelWiseMul(cell_state, cell_to_output_weights) + output_gate_bias)
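    // As for the forget gate, the concatenated pair (input, output_state_in) computed earlier into
    // _forget_gate_out2 is reused here against the concatenated output-gate weights.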
    _output1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _output4.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

    std::vector<const ITensor *> in_out_weights;
    in_out_weights.emplace_back(input_to_output_weights);
    in_out_weights.emplace_back(recurrent_to_output_weights);

    _concat_weights_output.configure(in_out_weights, &_output2, Window::DimX);
    _memory_group.manage(&_output1);
    _memory_group.manage(&_output4);

    _fully_connected_output.configure(&_forget_gate_out2, &_output2, (_is_layer_norm_lstm) ? nullptr : output_gate_bias, &_output4);

    _output2.allocator()->allocate();
    _forget_gate_out2.allocator()->allocate();

    Tensor *output_gate_out = &_output4;
    if(lstm_params.has_peephole_opt())
    {
        _output3.allocator()->init(TensorInfo(_cell_state_out1.info()->tensor_shape(), 1, input->info()->data_type()));

        _memory_group.manage(&_output3);
        _pixelwise_mul_output_state1.configure(&_cell_state_out1, lstm_params.cell_to_output_weights(), &_output3, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
        _accum_output1.configure(&_output4, &_output3, &_output1, ConvertPolicy::SATURATE);
        _output4.allocator()->allocate();
        output_gate_out = &_output1;

        // Allocate intermediate buffers
        _output3.allocator()->allocate();
    }
    else
    {
        _output1.allocator()->allocate();
    }
    if(_is_layer_norm_lstm)
    {
        _output_layer_norm_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _output_layer_norm_out2.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _memory_group.manage(&_output_layer_norm_out1);
        _memory_group.manage(&_output_layer_norm_out2);
        _mean_std_norm_output_gate.configure(output_gate_out);
        _pixelwise_mul_output_gate_coeff.configure(output_gate_out, lstm_params.output_layer_norm_weights(), &_output_layer_norm_out1, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
        // output_gate_out is going to be reassigned, so allocate the tensor that it was assigned to before
        output_gate_out->allocator()->allocate();
        _accum_output_gate_bias.configure(&_output_layer_norm_out1, output_gate_bias, &_output_layer_norm_out2, ConvertPolicy::SATURATE);
        _output_layer_norm_out1.allocator()->allocate();
        output_gate_out = &_output_layer_norm_out2;
    }
    _activation_output.configure(output_gate_out, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC));

    // Configure block that calculates the output state
    /** lstm_res = PixelwiseMul(output, Activation(cell_state))
     *
     *                 -- Clip(lstm_res * projection_weights + projection_bias, projection_threshold) , if there is a projection
     *                /
     * output_state = --
     *                \
     *                 -- lstm_res , otherwise
     */
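    // When a projection is present, the pixel-wise product is written to the temporary _output_state1
    // and then projected (and optionally clipped) into output_state_out; otherwise it is written to
    // output_state_out directly.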
    ITensor *output_state_out_tmp = lstm_params.has_projection() ? &_output_state1 : output_state_out;
    _cell_state_activation.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _output_state1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

    _memory_group.manage(&_cell_state_activation);
    _activation_output_state.configure(&_cell_state_out1, &_cell_state_activation, activation_info);
    _pixelwise_mul_output_state2.configure(&_cell_state_activation, output_gate_out, output_state_out_tmp, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
    _cell_state_activation.allocator()->allocate();
    output_gate_out->allocator()->allocate();

    if(lstm_params.has_projection())
    {
        _has_projection_weights = true;
        _fully_connected_output_state.configure(output_state_out_tmp, lstm_params.projection_weights(), lstm_params.projection_bias(), output_state_out);
        _output_state1.allocator()->allocate();
        // Perform clipping
        if(projection_threshold != 0.f)
        {
            _perform_projection_clipping = true;
            _projection_clip.configure(output_state_out, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, -projection_threshold, projection_threshold));
        }
    }

    // Copy cell state and output
    _copy_cell_state.configure(&_cell_state_out1, cell_state_out);
    _copy_output.configure(output_state_out, output);

    // Vector for holding the tensors to store in scratch buffer
    std::vector<const ITensor *> scratch_inputs;
    if(!lstm_params.has_cifg_opt())
    {
        scratch_inputs.emplace_back(input_gate_out);
    }
    scratch_inputs.emplace_back(&_cell_state_out1);
    scratch_inputs.emplace_back(forget_gate_out);
    scratch_inputs.emplace_back(output_gate_out);
    _concat_scratch_buffer.configure(scratch_inputs, scratch_buffer, Window::DimX);
    input_gate_out->allocator()->allocate();
    _cell_state_out1.allocator()->allocate();
    forget_gate_out->allocator()->allocate();
    output_gate_out->allocator()->allocate();
}

Status NELSTMLayer::validate(const ITensorInfo *input,
                             const ITensorInfo *input_to_forget_weights, const ITensorInfo *input_to_cell_weights, const ITensorInfo *input_to_output_weights,
                             const ITensorInfo *recurrent_to_forget_weights, const ITensorInfo *recurrent_to_cell_weights, const ITensorInfo *recurrent_to_output_weights,
                             const ITensorInfo *forget_gate_bias, const ITensorInfo *cell_bias, const ITensorInfo *output_gate_bias,
                             const ITensorInfo *output_state_in, const ITensorInfo *cell_state_in,
                             const ITensorInfo *scratch_buffer, const ITensorInfo *output_state_out, const ITensorInfo *cell_state_out, const ITensorInfo *output,
                             const LSTMParams<ITensorInfo> &lstm_params, const ActivationLayerInfo &activation_info, float cell_threshold, float projection_threshold)
{
    ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input,
                                        input_to_forget_weights, input_to_cell_weights, input_to_output_weights,
                                        recurrent_to_forget_weights, recurrent_to_cell_weights, recurrent_to_output_weights,
                                        forget_gate_bias, cell_bias, output_gate_bias,
                                        output_state_in, cell_state_in,
                                        scratch_buffer, output_state_out, cell_state_out, output);

    // Check data types
    ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::F16, DataType::F32);
    ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input,
                                                       input_to_forget_weights, input_to_cell_weights, input_to_output_weights,
                                                       recurrent_to_forget_weights, recurrent_to_cell_weights, recurrent_to_output_weights,
                                                       forget_gate_bias, cell_bias, output_gate_bias,
                                                       output_state_in, cell_state_in,
                                                       scratch_buffer, output_state_out, cell_state_out, output);

    // Check dimensions
    ARM_COMPUTE_RETURN_ERROR_ON(input->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(input_to_forget_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(input_to_cell_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(input_to_output_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(recurrent_to_forget_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(recurrent_to_cell_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(recurrent_to_output_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(forget_gate_bias->num_dimensions() > 1);
    ARM_COMPUTE_RETURN_ERROR_ON(cell_bias->num_dimensions() > 1);
    ARM_COMPUTE_RETURN_ERROR_ON(output_gate_bias->num_dimensions() > 1);
    ARM_COMPUTE_RETURN_ERROR_ON(output_state_in->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(cell_state_in->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(scratch_buffer->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(output_state_out->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(cell_state_out->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(output->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(cell_bias->dimension(0) * 4 != scratch_buffer->dimension(0)
                                && cell_bias->dimension(0) * 3 != scratch_buffer->dimension(0));
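    // The scratch buffer holds the x-axis concatenation of the gate outputs: 4 * num_cells without
    // CIFG (input, cell, forget and output gates) or 3 * num_cells with CIFG, where the input gate
    // block is omitted.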

    const unsigned int num_batches = input->dimension(1);
    const unsigned int num_cells   = input_to_output_weights->dimension(1);

    if(lstm_params.use_layer_norm())
    {
        // If CIFG is used, input layer normalization weights tensor is omitted
        if(lstm_params.has_cifg_opt())
        {
            ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.input_layer_norm_weights() != nullptr);
        }
        else
        {
            ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lstm_params.input_layer_norm_weights());
            ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.input_layer_norm_weights()->num_dimensions() > 1);
            ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.input_layer_norm_weights()->dimension(0) != num_cells);
            ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, lstm_params.input_layer_norm_weights());
        }

        ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lstm_params.forget_layer_norm_weights(), lstm_params.cell_layer_norm_weights(), lstm_params.output_layer_norm_weights());
        ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, lstm_params.forget_layer_norm_weights(), lstm_params.cell_layer_norm_weights(), lstm_params.output_layer_norm_weights());
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.forget_layer_norm_weights()->num_dimensions() > 1);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.cell_layer_norm_weights()->num_dimensions() > 1);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.output_layer_norm_weights()->num_dimensions() > 1);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.forget_layer_norm_weights()->dimension(0) != num_cells);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.cell_layer_norm_weights()->dimension(0) != num_cells);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.output_layer_norm_weights()->dimension(0) != num_cells);
    }

    // Check peephole optimization
    if(lstm_params.has_peephole_opt())
    {
        ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lstm_params.cell_to_output_weights(), lstm_params.cell_to_forget_weights());
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.cell_to_forget_weights()->num_dimensions() > 1);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.cell_to_output_weights()->num_dimensions() > 1);
    }

    TensorShape      units_out_transposed_shape = compute_transposed_shape(*recurrent_to_output_weights);
    TensorShape      num_units_transposed_shape = compute_transposed_shape(*forget_gate_bias);
    const TensorInfo units_out_transposed_info  = TensorInfo(units_out_transposed_shape, 1, input->data_type());
    const TensorInfo num_units_transposed_info  = TensorInfo(num_units_transposed_shape, 1, input->data_type());

    TensorInfo input_gate      = TensorInfo(TensorShape(num_cells, num_batches), 1, input->data_type());
    TensorInfo forget_gate     = TensorInfo(TensorShape(num_cells, num_batches), 1, input->data_type());
    TensorInfo output_gate_tmp = TensorInfo(TensorShape(num_cells, num_batches), 1, input->data_type());
    TensorInfo cell_state_tmp  = TensorInfo(TensorShape(num_cells, num_batches), 1, input->data_type());

    std::vector<const ITensorInfo *> inputs_vector;
    inputs_vector.emplace_back(input);
    inputs_vector.emplace_back(output_state_in);
    const TensorShape concat_shape       = arm_compute::misc::shape_calculator::calculate_concatenate_shape(inputs_vector, 0);
    TensorInfo        forget_gate_concat = TensorInfo(concat_shape, 1, input->data_type());
    ARM_COMPUTE_RETURN_ON_ERROR(NEConcatenateLayer::validate(inputs_vector, &forget_gate_concat, Window::DimX));

    // Validate forget gate
    ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(input, input_to_forget_weights, (lstm_params.use_layer_norm()) ? nullptr : forget_gate_bias, &forget_gate));

    if(lstm_params.has_peephole_opt())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(cell_state_in, lstm_params.cell_to_forget_weights(), &forget_gate, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&forget_gate, &forget_gate, &forget_gate, ConvertPolicy::SATURATE));
    }
    if(lstm_params.use_layer_norm())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEMeanStdDevNormalizationLayer::validate(&forget_gate));
        ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&forget_gate, lstm_params.forget_layer_norm_weights(), &forget_gate, 1, ConvertPolicy::SATURATE,
                                                                        RoundingPolicy::TO_ZERO));
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&forget_gate, forget_gate_bias, &forget_gate, ConvertPolicy::SATURATE));
    }
    ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&forget_gate, &forget_gate, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC)));

    // Validate input gate
    if(!lstm_params.has_cifg_opt())
    {
        ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lstm_params.input_to_input_weights(),
                                            lstm_params.recurrent_to_input_weights(),
                                            lstm_params.input_gate_bias());
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.input_to_input_weights()->num_dimensions() > 2);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.recurrent_to_input_weights()->num_dimensions() > 2);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.input_gate_bias()->num_dimensions() > 1);

        std::vector<const ITensorInfo *> lstm_weights;
        lstm_weights.emplace_back(lstm_params.input_to_input_weights());
        lstm_weights.emplace_back(lstm_params.recurrent_to_input_weights());
        TensorShape lstm_weights_concat_shape = arm_compute::misc::shape_calculator::calculate_concatenate_shape(lstm_weights, 0);
        TensorInfo  lstm_gate_concat          = TensorInfo(lstm_weights_concat_shape, 1, input->data_type());
        ARM_COMPUTE_RETURN_ON_ERROR(NEConcatenateLayer::validate(lstm_weights, &lstm_gate_concat, Window::DimX));
        ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(input, lstm_params.input_to_input_weights(), (lstm_params.use_layer_norm()) ? nullptr : lstm_params.input_gate_bias(), &input_gate));

        if(lstm_params.has_peephole_opt())
        {
            ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lstm_params.cell_to_input_weights());
            ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.cell_to_input_weights()->num_dimensions() > 1);
            ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(cell_state_in, lstm_params.cell_to_input_weights(), &input_gate, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
            ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&input_gate, &input_gate, &input_gate, ConvertPolicy::SATURATE));
        }

        if(lstm_params.use_layer_norm())
        {
            ARM_COMPUTE_RETURN_ON_ERROR(NEMeanStdDevNormalizationLayer::validate(&input_gate));
            ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&input_gate, lstm_params.input_layer_norm_weights(), &input_gate, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
            ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&input_gate, lstm_params.input_gate_bias(), &input_gate, ConvertPolicy::SATURATE));
        }
        ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&input_gate, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC)));
    }
    else
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticSubtraction::validate(&forget_gate, &forget_gate, &forget_gate, ConvertPolicy::SATURATE));
    }

    // Validate cell state
    ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(input, input_to_cell_weights, (lstm_params.use_layer_norm()) ? nullptr : cell_bias, &cell_state_tmp));
    ARM_COMPUTE_RETURN_ON_ERROR(NEGEMM::validate(output_state_in, &units_out_transposed_info, nullptr, &cell_state_tmp, 1.f, 0.f, GEMMInfo()));
    ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&cell_state_tmp, &cell_state_tmp, &cell_state_tmp, ConvertPolicy::SATURATE));
    if(lstm_params.use_layer_norm())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEMeanStdDevNormalizationLayer::validate(&cell_state_tmp));
        ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&cell_state_tmp, lstm_params.cell_layer_norm_weights(), &cell_state_tmp, 1, ConvertPolicy::SATURATE,
                                                                        RoundingPolicy::TO_ZERO));
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&cell_state_tmp, cell_bias, &cell_state_tmp, ConvertPolicy::SATURATE));
    }
    ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&cell_state_tmp, nullptr, activation_info));
    ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&cell_state_tmp, &input_gate, &cell_state_tmp, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
    ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&cell_state_tmp, &forget_gate, &cell_state_tmp, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
    ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&cell_state_tmp, &cell_state_tmp, &cell_state_tmp, ConvertPolicy::SATURATE));
    if(cell_threshold != 0.f)
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&cell_state_tmp, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, -cell_threshold,
                                                                                                              cell_threshold)));
    }

    // Validate output gate tmp
    std::vector<const ITensorInfo *> in_out_weights;
    in_out_weights.emplace_back(input_to_output_weights);
    in_out_weights.emplace_back(recurrent_to_output_weights);
    TensorShape in_out_weights_concat_shape = arm_compute::misc::shape_calculator::calculate_concatenate_shape(in_out_weights, 0);
    TensorInfo  in_out_gate_concat          = TensorInfo(in_out_weights_concat_shape, 1, input->data_type());
    ARM_COMPUTE_RETURN_ON_ERROR(NEConcatenateLayer::validate(in_out_weights, &in_out_gate_concat, Window::DimX));

    ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(input, input_to_output_weights, (lstm_params.use_layer_norm()) ? nullptr : output_gate_bias, &output_gate_tmp));

    if(lstm_params.has_peephole_opt())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&cell_state_tmp, lstm_params.cell_to_output_weights(), &output_gate_tmp, 1, ConvertPolicy::SATURATE,
                                                                        RoundingPolicy::TO_ZERO));
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&output_gate_tmp, &output_gate_tmp, &output_gate_tmp, ConvertPolicy::SATURATE));
    }
    if(lstm_params.use_layer_norm())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEMeanStdDevNormalizationLayer::validate(&output_gate_tmp));
        ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&output_gate_tmp, lstm_params.output_layer_norm_weights(), &output_gate_tmp, 1, ConvertPolicy::SATURATE,
                                                                        RoundingPolicy::TO_ZERO));
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&output_gate_tmp, output_gate_bias, &output_gate_tmp, ConvertPolicy::SATURATE));
    }
    ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&output_gate_tmp, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC)));

    // Validate output state
    ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&cell_state_tmp, &cell_state_tmp, activation_info));
    ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&cell_state_tmp, &output_gate_tmp, &output_gate_tmp, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
    if(lstm_params.has_projection())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(&output_gate_tmp, lstm_params.projection_weights(), lstm_params.projection_bias(), output_state_out));
        if(projection_threshold != 0.f)
        {
            ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(output_state_out, output_state_out,
                                                                    ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, -projection_threshold, projection_threshold)));
        }
    }

    // Validate copy kernel
    ARM_COMPUTE_RETURN_ON_ERROR(NECopy::validate(&cell_state_tmp, cell_state_out));
    ARM_COMPUTE_RETURN_ON_ERROR(NECopy::validate(output_state_out, output));

    // Validate scratch concatenation
    std::vector<const ITensorInfo *> inputs_vector_info_raw;
    if(!lstm_params.has_cifg_opt())
    {
        inputs_vector_info_raw.push_back(&input_gate);
    }
    inputs_vector_info_raw.push_back(&cell_state_tmp);
    inputs_vector_info_raw.push_back(&forget_gate);
    inputs_vector_info_raw.push_back(&output_gate_tmp);

    ARM_COMPUTE_RETURN_ON_ERROR(NEConcatenateLayer::validate(inputs_vector_info_raw, scratch_buffer, Window::DimX));
    return Status{};
}

void NELSTMLayer::run()
{
    prepare();

    MemoryGroupResourceScope scope_mg(_memory_group);

    _concat_inputs_forget_gate.run();
    _fully_connected_forget_gate.run();

    if(_run_peephole_opt)
    {
        _pixelwise_mul_forget_gate.run();
        _accum_forget_gate1.run();
    }
    if(_is_layer_norm_lstm)
    {
        _mean_std_norm_forget_gate.run();
        _pixelwise_mul_forget_gate_coeff.run();
        _accum_forget_gate_bias.run();
    }
    _activation_forget_gate.run();

    if(_run_cifg_opt)
    {
        if(_ones.info()->data_type() == DataType::F16)
        {
            std::fill_n(reinterpret_cast<half *>(_ones.buffer()), _ones.info()->total_size() / _ones.info()->element_size(), 1);
        }
        else
        {
            std::fill_n(reinterpret_cast<float *>(_ones.buffer()), _ones.info()->total_size() / _ones.info()->element_size(), 1);
        }
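        // CIFG: input_gate = 1 - forget_gate, computed by subtracting the forget gate from the
        // freshly filled ones tensor.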
        _subtract_input_gate.run();
    }
    else
    {
        _fully_connected_input_gate.run();

        if(_run_peephole_opt)
        {
            _pixelwise_mul_input_gate.run();
            _accum_input_gate1.run();
        }

        if(_is_layer_norm_lstm)
        {
            _mean_std_norm_input_gate.run();
            _pixelwise_mul_input_gate_coeff.run();
            _accum_input_gate_bias.run();
        }
        _activation_input_gate.run();
    }

    _fully_connected_cell_state.run();
    _transpose_cell_state.run();
    _gemm_cell_state1.run();
    _accum_cell_state1.run();
    if(_is_layer_norm_lstm)
    {
        _mean_std_norm_cell_gate.run();
        _pixelwise_mul_cell_gate_coeff.run();
        _accum_cell_gate_bias.run();
    }
    _activation_cell_state.run();
    _pixelwise_mul_cell_state1.run();
    _pixelwise_mul_cell_state2.run();
    _accum_cell_state2.run();

    if(_perform_cell_clipping)
    {
        _cell_clip.run();
    }

    _fully_connected_output.run();
    if(_run_peephole_opt)
    {
        _pixelwise_mul_output_state1.run();
        _accum_output1.run();
    }
    if(_is_layer_norm_lstm)
    {
        _mean_std_norm_output_gate.run();
        _pixelwise_mul_output_gate_coeff.run();
        _accum_output_gate_bias.run();
    }
    _activation_output.run();

    _activation_output_state.run();
    _pixelwise_mul_output_state2.run();

    if(_has_projection_weights)
    {
        _fully_connected_output_state.run();
        if(_perform_projection_clipping)
        {
            _projection_clip.run();
        }
    }

    _copy_cell_state.run();
    _copy_output.run();

    _concat_scratch_buffer.run();
}

void NELSTMLayer::prepare()
{
    if(!_is_prepared)
    {
        _concat_weights_forget_gate.run();
        if(!_run_cifg_opt)
        {
            _concat_weights_input_gate.run();
        }
        _concat_weights_output.run();
        _is_prepared = true;
    }
}
} // namespace arm_compute