//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "NeonLstmFloatWorkload.hpp"
#include "NeonWorkloadUtils.hpp"

#include "aclCommon/ArmComputeTensorUtils.hpp"

#include "neon/NeonTensorHandle.hpp"

#include <boost/numeric/conversion/cast.hpp>

namespace armnn
{
using namespace armcomputetensorutils;

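// Builds Arm Compute tensors for every LSTM weight and bias supplied by the descriptor,
// configures the NELSTMLayer, copies the constant data across, and finally releases any
// staging tensors the Compute Library no longer needs once the layer has been prepared.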
NeonLstmFloatWorkload::NeonLstmFloatWorkload(const LstmQueueDescriptor &descriptor, const WorkloadInfo &info)
    : FloatWorkload<LstmQueueDescriptor>(descriptor, info)
{
    arm_compute::LSTMParams<arm_compute::ITensor> lstm_param;

    // Basic parameters
    m_InputToForgetWeightsTensor = std::make_unique<arm_compute::Tensor>();
    BuildArmComputeTensor(*m_InputToForgetWeightsTensor, m_Data.m_InputToForgetWeights->GetTensorInfo());

    m_InputToCellWeightsTensor = std::make_unique<arm_compute::Tensor>();
    BuildArmComputeTensor(*m_InputToCellWeightsTensor, m_Data.m_InputToCellWeights->GetTensorInfo());

    m_InputToOutputWeightsTensor = std::make_unique<arm_compute::Tensor>();
    BuildArmComputeTensor(*m_InputToOutputWeightsTensor, m_Data.m_InputToOutputWeights->GetTensorInfo());

    m_RecurrentToForgetWeightsTensor = std::make_unique<arm_compute::Tensor>();
    BuildArmComputeTensor(*m_RecurrentToForgetWeightsTensor, m_Data.m_RecurrentToForgetWeights->GetTensorInfo());

    m_RecurrentToCellWeightsTensor = std::make_unique<arm_compute::Tensor>();
    BuildArmComputeTensor(*m_RecurrentToCellWeightsTensor, m_Data.m_RecurrentToCellWeights->GetTensorInfo());

    m_RecurrentToOutputWeightsTensor = std::make_unique<arm_compute::Tensor>();
    BuildArmComputeTensor(*m_RecurrentToOutputWeightsTensor, m_Data.m_RecurrentToOutputWeights->GetTensorInfo());

    m_ForgetGateBiasTensor = std::make_unique<arm_compute::Tensor>();
    BuildArmComputeTensor(*m_ForgetGateBiasTensor, m_Data.m_ForgetGateBias->GetTensorInfo());

    m_CellBiasTensor = std::make_unique<arm_compute::Tensor>();
    BuildArmComputeTensor(*m_CellBiasTensor, m_Data.m_CellBias->GetTensorInfo());

    m_OutputGateBiasTensor = std::make_unique<arm_compute::Tensor>();
    BuildArmComputeTensor(*m_OutputGateBiasTensor, m_Data.m_OutputGateBias->GetTensorInfo());

    // Optional parameters: the CIFG, projection, peephole and layer-normalisation tensors below
    // follow the LSTM operand layout defined by the Android NN API.
    if (!m_Data.m_Parameters.m_CifgEnabled)
    {
        m_InputToInputWeightsTensor = std::make_unique<arm_compute::Tensor>();
        BuildArmComputeTensor(*m_InputToInputWeightsTensor, m_Data.m_InputToInputWeights->GetTensorInfo());

        m_RecurrentToInputWeightsTensor = std::make_unique<arm_compute::Tensor>();
        BuildArmComputeTensor(*m_RecurrentToInputWeightsTensor, m_Data.m_RecurrentToInputWeights->GetTensorInfo());

        m_CellToInputWeightsTensor = std::make_unique<arm_compute::Tensor>();
        if (m_Data.m_CellToInputWeights != nullptr)
        {
            BuildArmComputeTensor(*m_CellToInputWeightsTensor, m_Data.m_CellToInputWeights->GetTensorInfo());
        }

        m_InputGateBiasTensor = std::make_unique<arm_compute::Tensor>();
        BuildArmComputeTensor(*m_InputGateBiasTensor, m_Data.m_InputGateBias->GetTensorInfo());

        lstm_param.set_cifg_params(m_InputToInputWeightsTensor.get(),
                                   m_RecurrentToInputWeightsTensor.get(),
                                   m_Data.m_CellToInputWeights != nullptr ? m_CellToInputWeightsTensor.get() : nullptr,
                                   m_InputGateBiasTensor.get());
    }

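    // Projection layer (optional): maps the cell output down to the final output size; the
    // projection bias itself is optional.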
    if (m_Data.m_Parameters.m_ProjectionEnabled)
    {
        m_ProjectionWeightsTensor = std::make_unique<arm_compute::Tensor>();
        BuildArmComputeTensor(*m_ProjectionWeightsTensor, m_Data.m_ProjectionWeights->GetTensorInfo());

        m_ProjectionBiasTensor = std::make_unique<arm_compute::Tensor>();
        if (m_Data.m_ProjectionBias != nullptr)
        {
            BuildArmComputeTensor(*m_ProjectionBiasTensor, m_Data.m_ProjectionBias->GetTensorInfo());
        }

        lstm_param.set_projection_params(m_ProjectionWeightsTensor.get(),
                                         m_Data.m_ProjectionBias != nullptr ? m_ProjectionBiasTensor.get() : nullptr);
    }

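    // Peephole connections (optional): give the gates direct access to the cell state through
    // the cell-to-gate weights.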
    if (m_Data.m_Parameters.m_PeepholeEnabled)
    {
        m_CellToForgetWeightsTensor = std::make_unique<arm_compute::Tensor>();
        BuildArmComputeTensor(*m_CellToForgetWeightsTensor, m_Data.m_CellToForgetWeights->GetTensorInfo());

        m_CellToOutputWeightsTensor = std::make_unique<arm_compute::Tensor>();
        BuildArmComputeTensor(*m_CellToOutputWeightsTensor, m_Data.m_CellToOutputWeights->GetTensorInfo());

        lstm_param.set_peephole_params(m_CellToForgetWeightsTensor.get(), m_CellToOutputWeightsTensor.get());
    }

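    // Layer normalisation (optional): one normalisation weight vector per gate; the input-gate
    // vector is only used when CIFG is disabled.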
    if (m_Data.m_Parameters.m_LayerNormEnabled)
    {
        m_InputLayerNormWeightsTensor = std::make_unique<arm_compute::Tensor>();
        if (!m_Data.m_Parameters.m_CifgEnabled)
        {
            BuildArmComputeTensor(*m_InputLayerNormWeightsTensor, m_Data.m_InputLayerNormWeights->GetTensorInfo());
        }

        m_ForgetLayerNormWeightsTensor = std::make_unique<arm_compute::Tensor>();
        BuildArmComputeTensor(*m_ForgetLayerNormWeightsTensor, m_Data.m_ForgetLayerNormWeights->GetTensorInfo());

        m_CellLayerNormWeightsTensor = std::make_unique<arm_compute::Tensor>();
        BuildArmComputeTensor(*m_CellLayerNormWeightsTensor, m_Data.m_CellLayerNormWeights->GetTensorInfo());

        m_OutputLayerNormWeightsTensor = std::make_unique<arm_compute::Tensor>();
        BuildArmComputeTensor(*m_OutputLayerNormWeightsTensor, m_Data.m_OutputLayerNormWeights->GetTensorInfo());

        lstm_param.set_layer_normalization_params(m_Data.m_Parameters.m_CifgEnabled ?
                                                      nullptr : m_InputLayerNormWeightsTensor.get(),
                                                  m_ForgetLayerNormWeightsTensor.get(),
                                                  m_CellLayerNormWeightsTensor.get(),
                                                  m_OutputLayerNormWeightsTensor.get());
    }

    const arm_compute::ITensor& input = static_cast<IAclTensorHandle*>(m_Data.m_Inputs[0])->GetTensor();
    const arm_compute::ITensor& output_state_in = static_cast<IAclTensorHandle*>(m_Data.m_Inputs[1])->GetTensor();
    const arm_compute::ITensor& cell_state_in = static_cast<IAclTensorHandle*>(m_Data.m_Inputs[2])->GetTensor();

    arm_compute::ITensor& output_state_out = static_cast<IAclTensorHandle*>(m_Data.m_Outputs[1])->GetTensor();
    arm_compute::ITensor& cell_state_out = static_cast<IAclTensorHandle*>(m_Data.m_Outputs[2])->GetTensor();
    arm_compute::ITensor& output = static_cast<IAclTensorHandle*>(m_Data.m_Outputs[3])->GetTensor();

    // Get the batch_size and the num_units from the cellStateIn dimensions
    const TensorInfo& inputTensorInfo = info.m_InputTensorInfos[2];
    const unsigned int batch_size = boost::numeric_cast<unsigned int>(inputTensorInfo.GetShape()[0]);
    const unsigned int num_units = boost::numeric_cast<unsigned int>(inputTensorInfo.GetShape()[1]);

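    // Scratch buffer: working memory required by the Compute Library LSTM. Its width covers one
    // num_units-wide slice per computed gate: three gates with CIFG (the input gate is coupled to
    // the forget gate), four without.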
    m_ScratchBuffer = std::make_unique<arm_compute::Tensor>();
    if (m_Data.m_Parameters.m_CifgEnabled)
    {
        // 2D tensor of shape [batch_size, num_units * 3] with CIFG
        armnn::TensorInfo scratchBuffer1({ batch_size, num_units * 3 }, DataType::Float32);
        BuildArmComputeTensor(*m_ScratchBuffer, scratchBuffer1);
    }
    else
    {
        // 2D tensor of shape [batch_size, num_units * 4] without CIFG
        armnn::TensorInfo scratchBuffer2({ batch_size, num_units * 4 }, DataType::Float32);
        BuildArmComputeTensor(*m_ScratchBuffer, scratchBuffer2);
    }

    float cell_threshold = m_Data.m_Parameters.m_ClippingThresCell;
    float projection_threshold = m_Data.m_Parameters.m_ClippingThresProj;

    // Map the ArmNN activation function code onto an Arm Compute ActivationLayerInfo.
    // The supported codes are: 0 = none, 1 = ReLU, 3 = ReLU6, 4 = tanh, 6 = sigmoid.
    arm_compute::ActivationLayerInfo activationLayerInfo;
    if (m_Data.m_Parameters.m_ActivationFunc == 0)
    {
        // no activation, do nothing
    }
    else if (m_Data.m_Parameters.m_ActivationFunc == 1)
    {
        activationLayerInfo = arm_compute::ActivationLayerInfo(
            arm_compute::ActivationLayerInfo::ActivationFunction::RELU);
    }
    else if (m_Data.m_Parameters.m_ActivationFunc == 3)
    {
        activationLayerInfo = arm_compute::ActivationLayerInfo(
            arm_compute::ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.0);
    }
    else if (m_Data.m_Parameters.m_ActivationFunc == 4)
    {
        activationLayerInfo = arm_compute::ActivationLayerInfo(
            arm_compute::ActivationLayerInfo::ActivationFunction::TANH, 1.0, 1.0);
    }
    else if (m_Data.m_Parameters.m_ActivationFunc == 6)
    {
        activationLayerInfo = arm_compute::ActivationLayerInfo(
            arm_compute::ActivationLayerInfo::ActivationFunction::LOGISTIC);
    }
    else
    {
        throw armnn::Exception("Wrong Type of Activation Function!");
    }

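    // Configure the Compute Library LSTM function with the mandatory tensors; the optional CIFG,
    // projection, peephole and layer-normalisation tensors are carried inside lstm_param.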
    m_LstmLayer.configure(&input, m_InputToForgetWeightsTensor.get(), m_InputToCellWeightsTensor.get(),
                          m_InputToOutputWeightsTensor.get(), m_RecurrentToForgetWeightsTensor.get(),
                          m_RecurrentToCellWeightsTensor.get(), m_RecurrentToOutputWeightsTensor.get(),
                          m_ForgetGateBiasTensor.get(), m_CellBiasTensor.get(), m_OutputGateBiasTensor.get(),
                          &output_state_in, &cell_state_in, m_ScratchBuffer.get(), &output_state_out,
                          &cell_state_out, &output, lstm_param, activationLayerInfo,
                          cell_threshold, projection_threshold);

    armcomputetensorutils::InitialiseArmComputeTensorEmpty(*m_ScratchBuffer);

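    // Copy the constant weight and bias data from the ArmNN tensor handles into the
    // Arm Compute tensors configured above.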
    InitializeArmComputeTensorData(*m_InputToForgetWeightsTensor, m_Data.m_InputToForgetWeights);
    InitializeArmComputeTensorData(*m_InputToCellWeightsTensor, m_Data.m_InputToCellWeights);
    InitializeArmComputeTensorData(*m_InputToOutputWeightsTensor, m_Data.m_InputToOutputWeights);
    InitializeArmComputeTensorData(*m_RecurrentToForgetWeightsTensor, m_Data.m_RecurrentToForgetWeights);
    InitializeArmComputeTensorData(*m_RecurrentToCellWeightsTensor, m_Data.m_RecurrentToCellWeights);
    InitializeArmComputeTensorData(*m_RecurrentToOutputWeightsTensor, m_Data.m_RecurrentToOutputWeights);
    InitializeArmComputeTensorData(*m_ForgetGateBiasTensor, m_Data.m_ForgetGateBias);
    InitializeArmComputeTensorData(*m_CellBiasTensor, m_Data.m_CellBias);
    InitializeArmComputeTensorData(*m_OutputGateBiasTensor, m_Data.m_OutputGateBias);

    if (!m_Data.m_Parameters.m_CifgEnabled)
    {
        InitializeArmComputeTensorData(*m_InputToInputWeightsTensor, m_Data.m_InputToInputWeights);
        InitializeArmComputeTensorData(*m_RecurrentToInputWeightsTensor, m_Data.m_RecurrentToInputWeights);
        if (m_Data.m_CellToInputWeights != nullptr)
        {
            InitializeArmComputeTensorData(*m_CellToInputWeightsTensor, m_Data.m_CellToInputWeights);
        }
        InitializeArmComputeTensorData(*m_InputGateBiasTensor, m_Data.m_InputGateBias);
    }

    if (m_Data.m_Parameters.m_ProjectionEnabled)
    {
        InitializeArmComputeTensorData(*m_ProjectionWeightsTensor, m_Data.m_ProjectionWeights);
        if (m_Data.m_ProjectionBias != nullptr)
        {
            InitializeArmComputeTensorData(*m_ProjectionBiasTensor, m_Data.m_ProjectionBias);
        }
    }

    if (m_Data.m_Parameters.m_PeepholeEnabled)
    {
        InitializeArmComputeTensorData(*m_CellToForgetWeightsTensor, m_Data.m_CellToForgetWeights);
        InitializeArmComputeTensorData(*m_CellToOutputWeightsTensor, m_Data.m_CellToOutputWeights);
    }

    if (m_Data.m_Parameters.m_LayerNormEnabled)
    {
        if (!m_Data.m_Parameters.m_CifgEnabled)
        {
            InitializeArmComputeTensorData(*m_InputLayerNormWeightsTensor, m_Data.m_InputLayerNormWeights);
        }
        InitializeArmComputeTensorData(*m_ForgetLayerNormWeightsTensor, m_Data.m_ForgetLayerNormWeights);
        InitializeArmComputeTensorData(*m_CellLayerNormWeightsTensor, m_Data.m_CellLayerNormWeights);
        InitializeArmComputeTensorData(*m_OutputLayerNormWeightsTensor, m_Data.m_OutputLayerNormWeights);
    }

    // Force Compute Library to perform the necessary copying and reshaping, after which
    // delete all the input tensors that will no longer be needed
    m_LstmLayer.prepare();
    FreeUnusedTensors();
}

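// Runs the Compute Library LSTM function configured in the constructor.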
void NeonLstmFloatWorkload::Execute() const
{
    m_LstmLayer.run();
}

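// Builds Arm Compute tensor descriptions mirroring the workload above and asks
// NELSTMLayer::validate whether the configuration is supported, without allocating anything.
// This is typically used by the Neon backend's layer-support checks before a workload is created.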
arm_compute::Status NeonLstmFloatWorkloadValidate(const TensorInfo& input,
                                                  const TensorInfo& outputStateIn,
                                                  const TensorInfo& cellStateIn,
                                                  const TensorInfo& scratchBuffer,
                                                  const TensorInfo& outputStateOut,
                                                  const TensorInfo& cellStateOut,
                                                  const TensorInfo& output,
                                                  const LstmDescriptor& descriptor,
                                                  const LstmInputParamsInfo& paramsInfo)
{
    arm_compute::LSTMParams<arm_compute::ITensorInfo> lstm_params_info;

    // The inputs and outputs
    const arm_compute::TensorInfo aclInputInfo = BuildArmComputeTensorInfo(input);
    const arm_compute::TensorInfo aclOutputStateInInfo = BuildArmComputeTensorInfo(outputStateIn);
    const arm_compute::TensorInfo aclCellStateInInfo = BuildArmComputeTensorInfo(cellStateIn);
    const arm_compute::TensorInfo aclScratchBufferInfo = BuildArmComputeTensorInfo(scratchBuffer);
    const arm_compute::TensorInfo aclOutputStateOutInfo = BuildArmComputeTensorInfo(outputStateOut);
    const arm_compute::TensorInfo aclCellStateOutInfo = BuildArmComputeTensorInfo(cellStateOut);
    const arm_compute::TensorInfo aclOutputInfo = BuildArmComputeTensorInfo(output);

    // Basic parameters
    const arm_compute::TensorInfo aclInputToForgetWeightsInfo
        = BuildArmComputeTensorInfo(paramsInfo.GetInputToForgetWeights());
    const arm_compute::TensorInfo aclInputToCellWeightsInfo
        = BuildArmComputeTensorInfo(paramsInfo.GetInputToCellWeights());
    const arm_compute::TensorInfo aclInputToOutputWeightsInfo
        = BuildArmComputeTensorInfo(paramsInfo.GetInputToOutputWeights());
    const arm_compute::TensorInfo aclRecurrentToForgetWeightsInfo
        = BuildArmComputeTensorInfo(paramsInfo.GetRecurrentToForgetWeights());
    const arm_compute::TensorInfo aclRecurrentToCellWeightsInfo
        = BuildArmComputeTensorInfo(paramsInfo.GetRecurrentToCellWeights());
    const arm_compute::TensorInfo aclRecurrentToOutputWeightsInfo
        = BuildArmComputeTensorInfo(paramsInfo.GetRecurrentToOutputWeights());
    const arm_compute::TensorInfo aclForgetGateBiasInfo
        = BuildArmComputeTensorInfo(paramsInfo.GetForgetGateBias());
    const arm_compute::TensorInfo aclCellBiasInfo
        = BuildArmComputeTensorInfo(paramsInfo.GetCellBias());
    const arm_compute::TensorInfo aclOutputGateBiasInfo
        = BuildArmComputeTensorInfo(paramsInfo.GetOutputGateBias());

    arm_compute::TensorInfo aclInputToInputWeightsInfo;
    arm_compute::TensorInfo aclRecurrentToInputWeightsInfo;
    arm_compute::TensorInfo aclCellToInputWeightsInfo;
    arm_compute::TensorInfo aclInputGateBiasInfo;
    arm_compute::TensorInfo aclProjectionWeightsInfo;
    arm_compute::TensorInfo aclProjectionBiasInfo;
    arm_compute::TensorInfo aclCellToForgetWeightsInfo;
    arm_compute::TensorInfo aclCellToOutputWeightsInfo;

    arm_compute::TensorInfo aclInputLayerNormWeightsInfo;
    arm_compute::TensorInfo aclForgetLayerNormWeightsInfo;
    arm_compute::TensorInfo aclCellLayerNormWeightsInfo;
    arm_compute::TensorInfo aclOutputLayerNormWeightsInfo;

    if (!descriptor.m_CifgEnabled)
    {
        if (descriptor.m_PeepholeEnabled)
        {
            aclCellToInputWeightsInfo = BuildArmComputeTensorInfo(paramsInfo.GetCellToInputWeights());
        }
        aclInputToInputWeightsInfo = BuildArmComputeTensorInfo(paramsInfo.GetInputToInputWeights());
        aclRecurrentToInputWeightsInfo = BuildArmComputeTensorInfo(paramsInfo.GetRecurrentToInputWeights());
        aclInputGateBiasInfo = BuildArmComputeTensorInfo(paramsInfo.GetInputGateBias());

        lstm_params_info.set_cifg_params(&aclInputToInputWeightsInfo, &aclRecurrentToInputWeightsInfo,
                                         descriptor.m_PeepholeEnabled ? &aclCellToInputWeightsInfo : nullptr,
                                         &aclInputGateBiasInfo);
    }

    if (descriptor.m_ProjectionEnabled)
    {
        if (paramsInfo.m_ProjectionBias != nullptr)
        {
            aclProjectionBiasInfo = BuildArmComputeTensorInfo(paramsInfo.GetProjectionBias());
        }
        aclProjectionWeightsInfo = BuildArmComputeTensorInfo(paramsInfo.GetProjectionWeights());

        lstm_params_info.set_projection_params(&aclProjectionWeightsInfo,
                                               paramsInfo.m_ProjectionBias != nullptr ?
                                                   &aclProjectionBiasInfo : nullptr);
    }

    if (descriptor.m_PeepholeEnabled)
    {
        aclCellToForgetWeightsInfo = BuildArmComputeTensorInfo(paramsInfo.GetCellToForgetWeights());
        aclCellToOutputWeightsInfo = BuildArmComputeTensorInfo(paramsInfo.GetCellToOutputWeights());

        lstm_params_info.set_peephole_params(&aclCellToForgetWeightsInfo, &aclCellToOutputWeightsInfo);
    }

    if (descriptor.m_LayerNormEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            aclInputLayerNormWeightsInfo = BuildArmComputeTensorInfo(paramsInfo.GetInputLayerNormWeights());
        }
        aclForgetLayerNormWeightsInfo = BuildArmComputeTensorInfo(paramsInfo.GetForgetLayerNormWeights());
        aclCellLayerNormWeightsInfo = BuildArmComputeTensorInfo(paramsInfo.GetCellLayerNormWeights());
        aclOutputLayerNormWeightsInfo = BuildArmComputeTensorInfo(paramsInfo.GetOutputLayerNormWeights());

        lstm_params_info.set_layer_normalization_params(descriptor.m_CifgEnabled ?
                                                            nullptr : &aclInputLayerNormWeightsInfo,
                                                        &aclForgetLayerNormWeightsInfo,
                                                        &aclCellLayerNormWeightsInfo,
                                                        &aclOutputLayerNormWeightsInfo);
    }

    float cell_threshold = descriptor.m_ClippingThresCell;
    float projection_threshold = descriptor.m_ClippingThresProj;

    // Map the ArmNN activation function code onto an Arm Compute ActivationLayerInfo.
    // The supported codes are: 0 = none, 1 = ReLU, 3 = ReLU6, 4 = tanh, 6 = sigmoid.
    arm_compute::ActivationLayerInfo activationLayerInfo;
    switch (descriptor.m_ActivationFunc)
    {
        case 0:
            // no activation, do nothing
            break;
        case 1:
            activationLayerInfo = arm_compute::ActivationLayerInfo(
                arm_compute::ActivationLayerInfo::ActivationFunction::RELU);
            break;
        case 3:
            activationLayerInfo = arm_compute::ActivationLayerInfo(
                arm_compute::ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.0);
            break;
        case 4:
            activationLayerInfo = arm_compute::ActivationLayerInfo(
                arm_compute::ActivationLayerInfo::ActivationFunction::TANH, 1.0, 1.0);
            break;
        case 6:
            activationLayerInfo = arm_compute::ActivationLayerInfo(
                arm_compute::ActivationLayerInfo::ActivationFunction::LOGISTIC);
            break;
        default:
            throw armnn::Exception("Wrong Type of Activation Function!");
    }

    return arm_compute::NELSTMLayer::validate(&aclInputInfo,
                                              &aclInputToForgetWeightsInfo,
                                              &aclInputToCellWeightsInfo,
                                              &aclInputToOutputWeightsInfo,
                                              &aclRecurrentToForgetWeightsInfo,
                                              &aclRecurrentToCellWeightsInfo,
                                              &aclRecurrentToOutputWeightsInfo,
                                              &aclForgetGateBiasInfo,
                                              &aclCellBiasInfo,
                                              &aclOutputGateBiasInfo,
                                              &aclOutputStateInInfo,
                                              &aclCellStateInInfo,
                                              &aclScratchBufferInfo,
                                              &aclOutputStateOutInfo,
                                              &aclCellStateOutInfo,
                                              &aclOutputInfo,
                                              lstm_params_info,
                                              activationLayerInfo,
                                              cell_threshold,
                                              projection_threshold);
}

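// Releases the staging weight and bias tensors that the Compute Library no longer needs
// once m_LstmLayer.prepare() has copied or reshaped their contents.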
void NeonLstmFloatWorkload::FreeUnusedTensors()
{
    FreeTensorIfUnused(m_InputToInputWeightsTensor);
    FreeTensorIfUnused(m_InputToForgetWeightsTensor);
    FreeTensorIfUnused(m_InputToCellWeightsTensor);
    FreeTensorIfUnused(m_InputToOutputWeightsTensor);
    FreeTensorIfUnused(m_RecurrentToInputWeightsTensor);
    FreeTensorIfUnused(m_RecurrentToForgetWeightsTensor);
    FreeTensorIfUnused(m_RecurrentToCellWeightsTensor);
    FreeTensorIfUnused(m_RecurrentToOutputWeightsTensor);
    FreeTensorIfUnused(m_CellToInputWeightsTensor);
    FreeTensorIfUnused(m_CellToForgetWeightsTensor);
    FreeTensorIfUnused(m_CellToOutputWeightsTensor);
    FreeTensorIfUnused(m_InputGateBiasTensor);
    FreeTensorIfUnused(m_ForgetGateBiasTensor);
    FreeTensorIfUnused(m_CellBiasTensor);
    FreeTensorIfUnused(m_OutputGateBiasTensor);
    FreeTensorIfUnused(m_ProjectionWeightsTensor);
    FreeTensorIfUnused(m_ProjectionBiasTensor);
    FreeTensorIfUnused(m_ScratchBuffer);
    FreeTensorIfUnused(m_InputLayerNormWeightsTensor);
    FreeTensorIfUnused(m_ForgetLayerNormWeightsTensor);
    FreeTensorIfUnused(m_CellLayerNormWeightsTensor);
    FreeTensorIfUnused(m_OutputLayerNormWeightsTensor);
}

} //namespace armnn