/*
 * Copyright (c) 2018-2020 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/runtime/NEON/functions/NELSTMLayer.h"

#include "arm_compute/core/Utils.h"
#include "arm_compute/core/Validate.h"
#include "arm_compute/core/utils/misc/InfoHelpers.h"
#include "arm_compute/core/utils/misc/ShapeCalculator.h"
#include "arm_compute/core/utils/quantization/AsymmHelpers.h"
#include "arm_compute/runtime/common/LSTMParams.h"

namespace arm_compute
{
using namespace arm_compute::misc::shape_calculator;
using namespace arm_compute::utils::info_helpers;

NELSTMLayer::NELSTMLayer(std::shared_ptr<IMemoryManager> memory_manager)
    : _memory_group(std::move(memory_manager)), _fully_connected_input_gate(), _accum_input_gate1(), _subtract_input_gate(), _pixelwise_mul_input_gate(), _activation_input_gate(),
      _fully_connected_forget_gate(), _accum_forget_gate1(), _pixelwise_mul_forget_gate(), _activation_forget_gate(), _fully_connected_cell_state(), _gemm_cell_state1(), _transpose_cell_state(),
      _accum_cell_state1(), _accum_cell_state2(), _pixelwise_mul_cell_state1(), _activation_cell_state(), _cell_clip(), _pixelwise_mul_cell_state2(), _fully_connected_output(),
      _pixelwise_mul_output_state1(), _accum_output1(), _activation_output(), _activation_output_state(), _pixelwise_mul_output_state2(), _fully_connected_output_state(), _projection_clip(),
      _copy_cell_state(), _copy_output(), _concat_scratch_buffer(), _concat_inputs_forget_gate(), _concat_weights_forget_gate(), _concat_weights_input_gate(), _concat_weights_output(),
      _mean_std_norm_input_gate(), _pixelwise_mul_input_gate_coeff(), _accum_input_gate_bias(), _mean_std_norm_forget_gate(), _pixelwise_mul_forget_gate_coeff(), _accum_forget_gate_bias(),
      _mean_std_norm_cell_gate(), _pixelwise_mul_cell_gate_coeff(), _accum_cell_gate_bias(), _mean_std_norm_output_gate(), _pixelwise_mul_output_gate_coeff(), _accum_output_gate_bias(), _input_gate_out1(),
      _input_gate_out2(), _input_gate_out3(), _input_gate_out4(), _forget_gate_out1(), _forget_gate_out2(), _forget_gate_out3(), _forget_gate_out4(), _forget_gate_out5(), _forget_gate_out6(),
      _cell_state_out1(), _cell_state_out2(), _cell_state_out3(), _cell_state_out4(), _cell_state_out5(), _output1(), _output2(), _output3(), _output4(), _cell_state_activation(), _output_state1(), _ones(),
      _input_layer_norm_out1(), _input_layer_norm_out2(), _forget_layer_norm_out1(), _forget_layer_norm_out2(), _cell_layer_norm_out1(), _cell_layer_norm_out2(), _output_layer_norm_out1(),
      _output_layer_norm_out2(), _run_peephole_opt(false), _run_cifg_opt(false), _perform_cell_clipping(false), _has_projection_weights(false), _perform_projection_clipping(false), _is_prepared(false),
      _is_layer_norm_lstm(false)
{
}

void NELSTMLayer::configure(const ITensor *input,
                            const ITensor *input_to_forget_weights, const ITensor *input_to_cell_weights, const ITensor *input_to_output_weights,
                            const ITensor *recurrent_to_forget_weights, const ITensor *recurrent_to_cell_weights, const ITensor *recurrent_to_output_weights,
                            const ITensor *forget_gate_bias, const ITensor *cell_bias, const ITensor *output_gate_bias,
                            const ITensor *output_state_in, const ITensor *cell_state_in,
                            ITensor *scratch_buffer, ITensor *output_state_out, ITensor *cell_state_out, ITensor *output,
                            const LSTMParams<ITensor> &lstm_params, const ActivationLayerInfo &activation_info, float cell_threshold, float projection_threshold)
{
    ARM_COMPUTE_ERROR_ON_NULLPTR(input,
                                 input_to_forget_weights, input_to_cell_weights, input_to_output_weights,
                                 recurrent_to_forget_weights, recurrent_to_cell_weights, recurrent_to_output_weights,
                                 forget_gate_bias, cell_bias, output_gate_bias,
                                 output_state_in, cell_state_in,
                                 scratch_buffer, output_state_out, cell_state_out, output);

    _is_layer_norm_lstm = lstm_params.use_layer_norm();

    // Set lstm parameters
    LSTMParams<ITensorInfo> lstm_params_info{};
    build_lstm_params_tensor_info(lstm_params, &lstm_params_info);

    // Validate
    ARM_COMPUTE_ERROR_THROW_ON(NELSTMLayer::validate(input->info(), input_to_forget_weights->info(),
                                                     input_to_cell_weights->info(), input_to_output_weights->info(),
                                                     recurrent_to_forget_weights->info(), recurrent_to_cell_weights->info(), recurrent_to_output_weights->info(),
                                                     forget_gate_bias->info(), cell_bias->info(), output_gate_bias->info(),
                                                     output_state_in->info(), cell_state_in->info(),
                                                     scratch_buffer->info(), output_state_out->info(), cell_state_out->info(), output->info(),
                                                     lstm_params_info, activation_info, cell_threshold, projection_threshold));

    const TensorShape cell_state_shape = cell_state_in->info()->tensor_shape();
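    // All gate intermediate tensors are initialised with the cell state shape, i.e. (num_cells, num_batches)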

    // Configure block that calculates the forget gate
    // forget_gate = Activation(input * input_to_forget_weights + output_state_in * recurrent_to_forget_weights + PixelWiseMul(cell_state, cell_to_forget_weights) + forget_gate_bias)
    // We optimize this as follows:
    // forget_gate = Activation( (input,output_state_in) * (input_to_forget_weights,recurrent_to_forget_weights) + PixelWiseMul(cell_state, cell_to_forget_weights) + forget_gate_bias)
    _forget_gate_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _forget_gate_out3.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _forget_gate_out5.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

    std::vector<const ITensor *> inputs_vector;
    inputs_vector.emplace_back(input);
    inputs_vector.emplace_back(output_state_in);

    _memory_group.manage(&_forget_gate_out2);
    _concat_inputs_forget_gate.configure(inputs_vector, &_forget_gate_out2, Window::DimX);
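    // The concatenated (input, output_state_in) tensor held in _forget_gate_out2 is reused by the input and output gate fully connected layers further down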

    std::vector<const ITensor *> weights_vector;

    weights_vector.emplace_back(input_to_forget_weights);
    weights_vector.emplace_back(recurrent_to_forget_weights);

    _concat_weights_forget_gate.configure(weights_vector, &_forget_gate_out6, Window::DimX);

    _memory_group.manage(&_forget_gate_out5);
    _fully_connected_forget_gate.configure(&_forget_gate_out2, &_forget_gate_out6, (_is_layer_norm_lstm) ? nullptr : forget_gate_bias, &_forget_gate_out5);
    _memory_group.manage(&_forget_gate_out1);
    _memory_group.manage(&_forget_gate_out3);
    _forget_gate_out6.allocator()->allocate();

    Tensor *forget_gate_out = &_forget_gate_out5;
    if(lstm_params.has_peephole_opt())
    {
        _forget_gate_out4.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

        _run_peephole_opt = true;
        _memory_group.manage(&_forget_gate_out4);
        _pixelwise_mul_forget_gate.configure(cell_state_in, lstm_params.cell_to_forget_weights(), &_forget_gate_out4, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
        _accum_forget_gate1.configure(&_forget_gate_out5, &_forget_gate_out4, &_forget_gate_out3, ConvertPolicy::SATURATE);
        _forget_gate_out4.allocator()->allocate();
        _forget_gate_out5.allocator()->allocate();
        forget_gate_out = &_forget_gate_out3;
    }
    else
    {
        _forget_gate_out3.allocator()->allocate();
    }
    if(_is_layer_norm_lstm)
    {
        _forget_layer_norm_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _forget_layer_norm_out2.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _memory_group.manage(&_forget_layer_norm_out1);
        _memory_group.manage(&_forget_layer_norm_out2);
        _mean_std_norm_forget_gate.configure(forget_gate_out);
        _pixelwise_mul_forget_gate_coeff.configure(forget_gate_out, lstm_params.forget_layer_norm_weights(), &_forget_layer_norm_out1, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
        // forget_gate_out is going to be reassigned, so allocate the tensor that it was assigned to before
        forget_gate_out->allocator()->allocate();
        _accum_forget_gate_bias.configure(&_forget_layer_norm_out1, forget_gate_bias, &_forget_layer_norm_out2, ConvertPolicy::SATURATE);
        _forget_layer_norm_out1.allocator()->allocate();
        forget_gate_out = &_forget_layer_norm_out2;
    }
    _activation_forget_gate.configure(forget_gate_out, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC));

    // Configure block that calculates the input gate
    // input_gate = Activation(input * input_to_input_weights + output_state * recurrent_to_input_weights + PixelWiseMul(cell_state, cell_to_input_weights) + input_gate_bias), without CIFG
    // input_gate = 1 - forget_gate, with CIFG
    // We optimize this as follows:
    // input_gate = Activation((input,output_state) * (input_to_input_weights,recurrent_to_input_weights) + PixelWiseMul(cell_state, cell_to_input_weights) + input_gate_bias), without CIFG
    _input_gate_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    Tensor *input_gate_out = &_input_gate_out1;
    if(lstm_params.has_cifg_opt())
    {
        _memory_group.manage(&_input_gate_out1);
        _ones.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
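        // _ones is filled with 1 at run time, so the subtraction below yields input_gate = 1 - forget_gate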
        _subtract_input_gate.configure(&_ones, forget_gate_out, &_input_gate_out1, ConvertPolicy::SATURATE);
        _ones.allocator()->allocate();
        _run_cifg_opt = true;
    }
    else
    {
        _input_gate_out3.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _input_gate_out4.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

        std::vector<const ITensor *> lstm_weights;
        lstm_weights.emplace_back(lstm_params.input_to_input_weights());
        lstm_weights.emplace_back(lstm_params.recurrent_to_input_weights());

        _concat_weights_input_gate.configure(lstm_weights, &_input_gate_out2, Window::DimX);

        _memory_group.manage(&_input_gate_out1);
        _memory_group.manage(&_input_gate_out4);

        _fully_connected_input_gate.configure(&_forget_gate_out2, &_input_gate_out2, (_is_layer_norm_lstm) ? nullptr : lstm_params.input_gate_bias(), &_input_gate_out3);
        _input_gate_out2.allocator()->allocate();
        input_gate_out = &_input_gate_out3;

        if(_run_peephole_opt)
        {
            _memory_group.manage(&_input_gate_out4);
            _pixelwise_mul_input_gate.configure(cell_state_in, lstm_params.cell_to_input_weights(), &_input_gate_out4, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
            _accum_input_gate1.configure(&_input_gate_out3, &_input_gate_out4, &_input_gate_out1, ConvertPolicy::SATURATE);
            _input_gate_out3.allocator()->allocate();
            _input_gate_out4.allocator()->allocate();
            input_gate_out = &_input_gate_out1;
        }
        else
        {
            _input_gate_out1.allocator()->allocate();
        }

        if(_is_layer_norm_lstm)
        {
            _input_layer_norm_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
            _input_layer_norm_out2.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
            _memory_group.manage(&_input_layer_norm_out1);
            _memory_group.manage(&_input_layer_norm_out2);
            _mean_std_norm_input_gate.configure(input_gate_out);
            _pixelwise_mul_input_gate_coeff.configure(input_gate_out, lstm_params.input_layer_norm_weights(), &_input_layer_norm_out1, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
            // input_gate_out is going to be reassigned, so allocate the tensor that it was assigned to before
            input_gate_out->allocator()->allocate();
            _accum_input_gate_bias.configure(&_input_layer_norm_out1, lstm_params.input_gate_bias(), &_input_layer_norm_out2, ConvertPolicy::SATURATE);
            _input_layer_norm_out1.allocator()->allocate();
            input_gate_out = &_input_layer_norm_out2;
        }
        _activation_input_gate.configure(input_gate_out, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC));
    }

    // Configure block that calculates the cell state
    // cell_state = Clip((PixelwiseMul(input_gate, Activation(input * input_to_cell_weights + output_state_in * recurrent_to_cell_weights + cell_bias)) + PixelwiseMul(forget_gate, cell_state)), cell_threshold)
    TensorShape cell_state1_shape = compute_transposed_shape(*recurrent_to_output_weights->info());
    _cell_state_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _cell_state_out2.allocator()->init(TensorInfo(cell_state1_shape, 1, input->info()->data_type()));
    _cell_state_out3.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _cell_state_out4.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _cell_state_out5.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

    _memory_group.manage(&_cell_state_out1);
    _fully_connected_cell_state.configure(input, input_to_cell_weights, (_is_layer_norm_lstm) ? nullptr : cell_bias, &_cell_state_out1);
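    // output_state_in * recurrent_to_cell_weights is evaluated as a GEMM against the transposed recurrent weights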
    _memory_group.manage(&_cell_state_out2);
    _transpose_cell_state.configure(recurrent_to_cell_weights, &_cell_state_out2);
    _memory_group.manage(&_cell_state_out3);
    _gemm_cell_state1.configure(output_state_in, &_cell_state_out2, nullptr, &_cell_state_out3, 1.f, 0.f);
    _cell_state_out2.allocator()->allocate();
    _memory_group.manage(&_cell_state_out4);
    _accum_cell_state1.configure(&_cell_state_out1, &_cell_state_out3, &_cell_state_out4, ConvertPolicy::SATURATE);
    Tensor *cell_state_out_ptr = &_cell_state_out4;
    if(_is_layer_norm_lstm)
    {
        _cell_layer_norm_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _cell_layer_norm_out2.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _memory_group.manage(&_cell_layer_norm_out1);
        _memory_group.manage(&_cell_layer_norm_out2);
        _mean_std_norm_cell_gate.configure(cell_state_out_ptr);
        _pixelwise_mul_cell_gate_coeff.configure(cell_state_out_ptr, lstm_params.cell_layer_norm_weights(), &_cell_layer_norm_out1, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
        // cell_state_out_ptr is going to be reassigned, so allocate the tensor that it was assigned to before
        cell_state_out_ptr->allocator()->allocate();
        _accum_cell_gate_bias.configure(&_cell_layer_norm_out1, cell_bias, &_cell_layer_norm_out2, ConvertPolicy::SATURATE);
        _cell_layer_norm_out1.allocator()->allocate();
        cell_state_out_ptr = &_cell_layer_norm_out2;
    }
    _activation_cell_state.configure(cell_state_out_ptr, nullptr, activation_info);
    _memory_group.manage(&_cell_state_out5);
    _pixelwise_mul_cell_state1.configure(cell_state_out_ptr, input_gate_out, &_cell_state_out5, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
    cell_state_out_ptr->allocator()->allocate();
    _pixelwise_mul_cell_state2.configure(forget_gate_out, cell_state_in, &_cell_state_out3, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
    _accum_cell_state2.configure(&_cell_state_out5, &_cell_state_out3, &_cell_state_out1, ConvertPolicy::SATURATE);
    _cell_state_out3.allocator()->allocate();
    _cell_state_out5.allocator()->allocate();
    // Perform clipping
    if(cell_threshold != 0.f)
    {
        _perform_cell_clipping = true;
        _cell_clip.configure(&_cell_state_out1, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, -cell_threshold, cell_threshold));
    }

    // Configure block that calculates the output
    // output_state_out = Activation(input * input_to_output_weights + output_state_in * recurrent_to_output_weights + PixelWiseMul(cell_state, cell_to_output_weights) + output_gate_bias)
    // We optimize this as follows:
    // output_state_out = Activation( (input,output_state_in) * (input_to_output_weights, recurrent_to_output_weights) + PixelWiseMul(cell_state, cell_to_output_weights) + output_gate_bias)
    _output1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _output4.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

    std::vector<const ITensor *> in_out_weights;
    in_out_weights.emplace_back(input_to_output_weights);
    in_out_weights.emplace_back(recurrent_to_output_weights);

    _concat_weights_output.configure(in_out_weights, &_output2, Window::DimX);
    _memory_group.manage(&_output1);
    _memory_group.manage(&_output4);

    _fully_connected_output.configure(&_forget_gate_out2, &_output2, (_is_layer_norm_lstm) ? nullptr : output_gate_bias, &_output4);

    _output2.allocator()->allocate();
    _forget_gate_out2.allocator()->allocate();

    Tensor *output_gate_out = &_output4;
    if(lstm_params.has_peephole_opt())
    {
        _output3.allocator()->init(TensorInfo(_cell_state_out1.info()->tensor_shape(), 1, input->info()->data_type()));

        _memory_group.manage(&_output3);
        _pixelwise_mul_output_state1.configure(&_cell_state_out1, lstm_params.cell_to_output_weights(), &_output3, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
        _accum_output1.configure(&_output4, &_output3, &_output1, ConvertPolicy::SATURATE);
        _output4.allocator()->allocate();
        output_gate_out = &_output1;

        // Allocate intermediate buffers
        _output3.allocator()->allocate();
    }
    else
    {
        _output1.allocator()->allocate();
    }
    if(_is_layer_norm_lstm)
    {
        _output_layer_norm_out1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _output_layer_norm_out2.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
        _memory_group.manage(&_output_layer_norm_out1);
        _memory_group.manage(&_output_layer_norm_out2);
        _mean_std_norm_output_gate.configure(output_gate_out);
        _pixelwise_mul_output_gate_coeff.configure(output_gate_out, lstm_params.output_layer_norm_weights(), &_output_layer_norm_out1, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
        // output_gate_out is going to be reassigned, so allocate the tensor that it was assigned to before
        output_gate_out->allocator()->allocate();
        _accum_output_gate_bias.configure(&_output_layer_norm_out1, output_gate_bias, &_output_layer_norm_out2, ConvertPolicy::SATURATE);
        _output_layer_norm_out1.allocator()->allocate();
        output_gate_out = &_output_layer_norm_out2;
    }
    _activation_output.configure(output_gate_out, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC));

    // Configure block that calculates the output state
    /** lstm_res = PixelwiseMul(output, Activation(cell_state))
     *
     *                      -- Clip(lstm_res * projection_weights + projection_bias, projection_threshold) , if there is a projection
     *                     /
     *  output_state =  --
     *                     \
     *                      -- lstm_res , otherwise
     */
    ITensor *output_state_out_tmp = lstm_params.has_projection() ? &_output_state1 : output_state_out;
    _cell_state_activation.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));
    _output_state1.allocator()->init(TensorInfo(cell_state_shape, 1, input->info()->data_type()));

    _memory_group.manage(&_cell_state_activation);
    _activation_output_state.configure(&_cell_state_out1, &_cell_state_activation, activation_info);
    _pixelwise_mul_output_state2.configure(&_cell_state_activation, output_gate_out, output_state_out_tmp, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO);
    _cell_state_activation.allocator()->allocate();
    output_gate_out->allocator()->allocate();

    if(lstm_params.has_projection())
    {
        _has_projection_weights = true;
        _fully_connected_output_state.configure(output_state_out_tmp, lstm_params.projection_weights(), lstm_params.projection_bias(), output_state_out);
        _output_state1.allocator()->allocate();
        // Perform clipping
        if(projection_threshold != 0.f)
        {
            _perform_projection_clipping = true;
            _projection_clip.configure(output_state_out, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, -projection_threshold, projection_threshold));
        }
    }

    // Copy cell state and output
    _copy_cell_state.configure(&_cell_state_out1, cell_state_out);
    _copy_output.configure(output_state_out, output);

    // Vector for holding the tensors to store in scratch buffer
    std::vector<ITensor *> scratch_inputs;
    if(!lstm_params.has_cifg_opt())
    {
        scratch_inputs.emplace_back(input_gate_out);
    }
    scratch_inputs.emplace_back(&_cell_state_out1);
    scratch_inputs.emplace_back(forget_gate_out);
    scratch_inputs.emplace_back(output_gate_out);
    _concat_scratch_buffer.configure(scratch_inputs, scratch_buffer, Window::DimX);
    input_gate_out->allocator()->allocate();
    _cell_state_out1.allocator()->allocate();
    forget_gate_out->allocator()->allocate();
    output_gate_out->allocator()->allocate();
}

Status NELSTMLayer::validate(const ITensorInfo *input,
                             const ITensorInfo *input_to_forget_weights, const ITensorInfo *input_to_cell_weights, const ITensorInfo *input_to_output_weights,
                             const ITensorInfo *recurrent_to_forget_weights, const ITensorInfo *recurrent_to_cell_weights, const ITensorInfo *recurrent_to_output_weights,
                             const ITensorInfo *forget_gate_bias, const ITensorInfo *cell_bias, const ITensorInfo *output_gate_bias,
                             const ITensorInfo *output_state_in, const ITensorInfo *cell_state_in,
                             const ITensorInfo *scratch_buffer, const ITensorInfo *output_state_out, const ITensorInfo *cell_state_out, const ITensorInfo *output,
                             const LSTMParams<ITensorInfo> &lstm_params, const ActivationLayerInfo &activation_info, float cell_threshold, float projection_threshold)
{
    ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input,
                                        input_to_forget_weights, input_to_cell_weights, input_to_output_weights,
                                        recurrent_to_forget_weights, recurrent_to_cell_weights, recurrent_to_output_weights,
                                        forget_gate_bias, cell_bias, output_gate_bias,
                                        output_state_in, cell_state_in,
                                        scratch_buffer, output_state_out, cell_state_out, output);

    // Check data types
    ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::F16, DataType::F32);
    ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input,
                                                       input_to_forget_weights, input_to_cell_weights, input_to_output_weights,
                                                       recurrent_to_forget_weights, recurrent_to_cell_weights, recurrent_to_output_weights,
                                                       forget_gate_bias, cell_bias, output_gate_bias,
                                                       output_state_in, cell_state_in,
                                                       scratch_buffer, output_state_out, cell_state_out, output);

    // Check dimensions
    ARM_COMPUTE_RETURN_ERROR_ON(input->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(input_to_forget_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(input_to_cell_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(input_to_output_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(recurrent_to_forget_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(recurrent_to_cell_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(recurrent_to_output_weights->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(forget_gate_bias->num_dimensions() > 1);
    ARM_COMPUTE_RETURN_ERROR_ON(cell_bias->num_dimensions() > 1);
    ARM_COMPUTE_RETURN_ERROR_ON(output_gate_bias->num_dimensions() > 1);
    ARM_COMPUTE_RETURN_ERROR_ON(output_state_in->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(cell_state_in->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(scratch_buffer->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(output_state_out->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(cell_state_out->num_dimensions() > 2);
    ARM_COMPUTE_RETURN_ERROR_ON(output->num_dimensions() > 2);
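    // The scratch buffer holds 4 gate-sized tensors of num_cells elements each, or 3 when CIFG is enabled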
    ARM_COMPUTE_RETURN_ERROR_ON(cell_bias->dimension(0) * 4 != scratch_buffer->dimension(0)
                                && cell_bias->dimension(0) * 3 != scratch_buffer->dimension(0));

    const unsigned int num_batches = input->dimension(1);
    const unsigned int num_cells   = input_to_output_weights->dimension(1);

    if(lstm_params.use_layer_norm())
    {
        // If CIFG is used, input layer normalization weights tensor is omitted
        if(lstm_params.has_cifg_opt())
        {
            ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.input_layer_norm_weights() != nullptr);
        }
        else
        {
            ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lstm_params.input_layer_norm_weights());
            ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.input_layer_norm_weights()->num_dimensions() > 1);
            ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.input_layer_norm_weights()->dimension(0) != num_cells);
            ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, lstm_params.input_layer_norm_weights());
        }

        ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lstm_params.forget_layer_norm_weights(), lstm_params.cell_layer_norm_weights(), lstm_params.output_layer_norm_weights());
        ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, lstm_params.forget_layer_norm_weights(), lstm_params.cell_layer_norm_weights(), lstm_params.output_layer_norm_weights());
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.forget_layer_norm_weights()->num_dimensions() > 1);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.cell_layer_norm_weights()->num_dimensions() > 1);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.output_layer_norm_weights()->num_dimensions() > 1);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.forget_layer_norm_weights()->dimension(0) != num_cells);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.cell_layer_norm_weights()->dimension(0) != num_cells);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.output_layer_norm_weights()->dimension(0) != num_cells);
    }

    // Check peephole optimization
    if(lstm_params.has_peephole_opt())
    {
        ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lstm_params.cell_to_output_weights(), lstm_params.cell_to_forget_weights());
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.cell_to_forget_weights()->num_dimensions() > 1);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.cell_to_output_weights()->num_dimensions() > 1);
    }

    TensorShape units_out_transposed_shape = compute_transposed_shape(*recurrent_to_output_weights);
    TensorShape num_units_transposed_shape = compute_transposed_shape(*forget_gate_bias);
    const TensorInfo units_out_transposed_info = TensorInfo(units_out_transposed_shape, 1, input->data_type());
    const TensorInfo num_units_transposed_info = TensorInfo(num_units_transposed_shape, 1, input->data_type());

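    // Temporary tensor infos standing in for the intermediate gate results during validation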
    TensorInfo input_gate      = TensorInfo(TensorShape(num_cells, num_batches), 1, input->data_type());
    TensorInfo forget_gate     = TensorInfo(TensorShape(num_cells, num_batches), 1, input->data_type());
    TensorInfo output_gate_tmp = TensorInfo(TensorShape(num_cells, num_batches), 1, input->data_type());
    TensorInfo cell_state_tmp  = TensorInfo(TensorShape(num_cells, num_batches), 1, input->data_type());

    std::vector<const ITensorInfo *> inputs_vector;
    inputs_vector.emplace_back(input);
    inputs_vector.emplace_back(output_state_in);
    const TensorShape concat_shape = arm_compute::misc::shape_calculator::calculate_concatenate_shape(inputs_vector, 0);
    TensorInfo forget_gate_concat  = TensorInfo(concat_shape, 1, input->data_type());
    ARM_COMPUTE_RETURN_ON_ERROR(NEConcatenateLayer::validate(inputs_vector, &forget_gate_concat, Window::DimX));

    // Validate forget gate
    ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(input, input_to_forget_weights, (lstm_params.use_layer_norm()) ? nullptr : forget_gate_bias, &forget_gate));

    if(lstm_params.has_peephole_opt())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(cell_state_in, lstm_params.cell_to_forget_weights(), &forget_gate, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&forget_gate, &forget_gate, &forget_gate, ConvertPolicy::SATURATE));
    }
    if(lstm_params.use_layer_norm())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEMeanStdDevNormalizationLayer::validate(&forget_gate));
        ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&forget_gate, lstm_params.forget_layer_norm_weights(), &forget_gate, 1, ConvertPolicy::SATURATE,
                                                                        RoundingPolicy::TO_ZERO));
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&forget_gate, forget_gate_bias, &forget_gate, ConvertPolicy::SATURATE));
    }
    ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&forget_gate, &forget_gate, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC)));

    // Validate input gate
    if(!lstm_params.has_cifg_opt())
    {
        ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lstm_params.input_to_input_weights(),
                                            lstm_params.recurrent_to_input_weights(),
                                            lstm_params.input_gate_bias());
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.input_to_input_weights()->num_dimensions() > 2);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.recurrent_to_input_weights()->num_dimensions() > 2);
        ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.input_gate_bias()->num_dimensions() > 1);

        std::vector<const ITensorInfo *> lstm_weights;
        lstm_weights.emplace_back(lstm_params.input_to_input_weights());
        lstm_weights.emplace_back(lstm_params.recurrent_to_input_weights());
        TensorShape lstm_weights_concat_shape = arm_compute::misc::shape_calculator::calculate_concatenate_shape(lstm_weights, 0);
        TensorInfo lstm_gate_concat           = TensorInfo(lstm_weights_concat_shape, 1, input->data_type());
        ARM_COMPUTE_RETURN_ON_ERROR(NEConcatenateLayer::validate(lstm_weights, &lstm_gate_concat, Window::DimX));
        ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(input, lstm_params.input_to_input_weights(), (lstm_params.use_layer_norm()) ? nullptr : lstm_params.input_gate_bias(), &input_gate));

        if(lstm_params.has_peephole_opt())
        {
            ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lstm_params.cell_to_input_weights());
            ARM_COMPUTE_RETURN_ERROR_ON(lstm_params.cell_to_input_weights()->num_dimensions() > 1);
            ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(cell_state_in, lstm_params.cell_to_input_weights(), &input_gate, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
            ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&input_gate, &input_gate, &input_gate, ConvertPolicy::SATURATE));
        }

        if(lstm_params.use_layer_norm())
        {
            ARM_COMPUTE_RETURN_ON_ERROR(NEMeanStdDevNormalizationLayer::validate(&input_gate));
            ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&input_gate, lstm_params.input_layer_norm_weights(), &input_gate, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
            ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&input_gate, lstm_params.input_gate_bias(), &input_gate, ConvertPolicy::SATURATE));
        }
        ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&input_gate, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC)));
    }
    else
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticSubtraction::validate(&forget_gate, &forget_gate, &forget_gate, ConvertPolicy::SATURATE));
    }

    // Validate cell state
    ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(input, input_to_cell_weights, (lstm_params.use_layer_norm()) ? nullptr : cell_bias, &cell_state_tmp));
    ARM_COMPUTE_RETURN_ON_ERROR(NEGEMM::validate(output_state_in, &units_out_transposed_info, nullptr, &cell_state_tmp, 1.f, 0.f, GEMMInfo()));
    ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&cell_state_tmp, &cell_state_tmp, &cell_state_tmp, ConvertPolicy::SATURATE));
    if(lstm_params.use_layer_norm())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEMeanStdDevNormalizationLayer::validate(&cell_state_tmp));
        ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&cell_state_tmp, lstm_params.cell_layer_norm_weights(), &cell_state_tmp, 1, ConvertPolicy::SATURATE,
                                                                        RoundingPolicy::TO_ZERO));
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&cell_state_tmp, cell_bias, &cell_state_tmp, ConvertPolicy::SATURATE));
    }
    ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&cell_state_tmp, nullptr, activation_info));
    ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&cell_state_tmp, &input_gate, &cell_state_tmp, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
    ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&cell_state_tmp, &forget_gate, &cell_state_tmp, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
    ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&cell_state_tmp, &cell_state_tmp, &cell_state_tmp, ConvertPolicy::SATURATE));
    if(cell_threshold != 0.f)
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&cell_state_tmp, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, -cell_threshold,
                                                                cell_threshold)));
    }

    // Validate output gate tmp
    std::vector<const ITensorInfo *> in_out_weights;
    in_out_weights.emplace_back(input_to_output_weights);
    in_out_weights.emplace_back(recurrent_to_output_weights);
    TensorShape in_out_weights_concat_shape = arm_compute::misc::shape_calculator::calculate_concatenate_shape(in_out_weights, 0);
    TensorInfo in_out_gate_concat           = TensorInfo(in_out_weights_concat_shape, 1, input->data_type());
    ARM_COMPUTE_RETURN_ON_ERROR(NEConcatenateLayer::validate(in_out_weights, &in_out_gate_concat, Window::DimX));

    ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(input, input_to_output_weights, (lstm_params.use_layer_norm()) ? nullptr : output_gate_bias, &output_gate_tmp));

    if(lstm_params.has_peephole_opt())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&cell_state_tmp, lstm_params.cell_to_output_weights(), &output_gate_tmp, 1, ConvertPolicy::SATURATE,
                                                                        RoundingPolicy::TO_ZERO));
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&output_gate_tmp, &output_gate_tmp, &output_gate_tmp, ConvertPolicy::SATURATE));
    }
    if(lstm_params.use_layer_norm())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEMeanStdDevNormalizationLayer::validate(&output_gate_tmp));
        ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&output_gate_tmp, lstm_params.output_layer_norm_weights(), &output_gate_tmp, 1, ConvertPolicy::SATURATE,
                                                                        RoundingPolicy::TO_ZERO));
        ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAddition::validate(&output_gate_tmp, output_gate_bias, &output_gate_tmp, ConvertPolicy::SATURATE));
    }
    ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&output_gate_tmp, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC)));

    // Validate output state
    ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(&cell_state_tmp, &cell_state_tmp, activation_info));
    ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplication::validate(&cell_state_tmp, &output_gate_tmp, &output_gate_tmp, 1, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
    if(lstm_params.has_projection())
    {
        ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(&output_gate_tmp, lstm_params.projection_weights(), lstm_params.projection_bias(), output_state_out));
        if(projection_threshold != 0.f)
        {
            ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayer::validate(output_state_out, output_state_out,
                                                                    ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, -projection_threshold, projection_threshold)));
        }
    }

    // Validate copy kernel
    ARM_COMPUTE_RETURN_ON_ERROR(NECopyKernel::validate(&cell_state_tmp, cell_state_out));
    ARM_COMPUTE_RETURN_ON_ERROR(NECopyKernel::validate(output_state_out, output));

    // Validate scratch concatenation
    std::vector<ITensorInfo *> inputs_vector_info_raw;
    if(!lstm_params.has_cifg_opt())
    {
        inputs_vector_info_raw.push_back(&input_gate);
    }
    inputs_vector_info_raw.push_back(&cell_state_tmp);
    inputs_vector_info_raw.push_back(&forget_gate);
    inputs_vector_info_raw.push_back(&output_gate_tmp);

    ARM_COMPUTE_RETURN_ON_ERROR(NEConcatenateLayer::validate(inputs_vector_info_raw, scratch_buffer, Window::DimX));
    return Status{};
}

void NELSTMLayer::run()
{
    prepare();

    MemoryGroupResourceScope scope_mg(_memory_group);

    _concat_inputs_forget_gate.run();
    _fully_connected_forget_gate.run();

    if(_run_peephole_opt)
    {
        _pixelwise_mul_forget_gate.run();
        _accum_forget_gate1.run();
    }
    if(_is_layer_norm_lstm)
    {
        _mean_std_norm_forget_gate.run();
        _pixelwise_mul_forget_gate_coeff.run();
        _accum_forget_gate_bias.run();
    }
    _activation_forget_gate.run();

    if(_run_cifg_opt)
    {
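        // Fill _ones with 1 so that the subtraction below computes input_gate = 1 - forget_gate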
        if(_ones.info()->data_type() == DataType::F16)
        {
            std::fill_n(reinterpret_cast<half *>(_ones.buffer()), _ones.info()->total_size() / _ones.info()->element_size(), 1);
        }
        else
        {
            std::fill_n(reinterpret_cast<float *>(_ones.buffer()), _ones.info()->total_size() / _ones.info()->element_size(), 1);
        }
        _subtract_input_gate.run();
    }
    else
    {
        _fully_connected_input_gate.run();

        if(_run_peephole_opt)
        {
            _pixelwise_mul_input_gate.run();
            _accum_input_gate1.run();
        }

        if(_is_layer_norm_lstm)
        {
            _mean_std_norm_input_gate.run();
            _pixelwise_mul_input_gate_coeff.run();
            _accum_input_gate_bias.run();
        }
        _activation_input_gate.run();
    }

    _fully_connected_cell_state.run();
    NEScheduler::get().schedule(&_transpose_cell_state, Window::DimY);
    _gemm_cell_state1.run();
    _accum_cell_state1.run();
    if(_is_layer_norm_lstm)
    {
        _mean_std_norm_cell_gate.run();
        _pixelwise_mul_cell_gate_coeff.run();
        _accum_cell_gate_bias.run();
    }
    _activation_cell_state.run();
    _pixelwise_mul_cell_state1.run();
    _pixelwise_mul_cell_state2.run();
    _accum_cell_state2.run();

    if(_perform_cell_clipping)
    {
        _cell_clip.run();
    }

    _fully_connected_output.run();
    if(_run_peephole_opt)
    {
        _pixelwise_mul_output_state1.run();
        _accum_output1.run();
    }
    if(_is_layer_norm_lstm)
    {
        _mean_std_norm_output_gate.run();
        _pixelwise_mul_output_gate_coeff.run();
        _accum_output_gate_bias.run();
    }
    _activation_output.run();

    _activation_output_state.run();
    _pixelwise_mul_output_state2.run();

    if(_has_projection_weights)
    {
        _fully_connected_output_state.run();
        if(_perform_projection_clipping)
        {
            _projection_clip.run();
        }
    }

    NEScheduler::get().schedule(&_copy_cell_state, Window::DimY);
    NEScheduler::get().schedule(&_copy_output, Window::DimY);

    _concat_scratch_buffer.run();
}

void NELSTMLayer::prepare()
{
    if(!_is_prepared)
    {
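        // The gate weights are constant, so their concatenation only needs to run once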
        _concat_weights_forget_gate.run();
        if(!_run_cifg_opt)
        {
            _concat_weights_input_gate.run();
        }
        _concat_weights_output.run();
        _is_prepared = true;
    }
}
} // namespace arm_compute