/*
 * Copyright (c) 2018-2019 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef __ARM_COMPUTE_LSTMPARAMS_H__
#define __ARM_COMPUTE_LSTMPARAMS_H__

#include "arm_compute/core/IPyramid.h"
#include "arm_compute/core/PyramidInfo.h"
#include "arm_compute/core/Types.h"
#include "arm_compute/runtime/Tensor.h"

#include <cstddef>
#include <memory>

namespace arm_compute
{
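/** Holds the optional tensor parameters of an LSTM layer (CIFG, peephole, projection and layer normalization).
 *
 * Each set_*_params() call stores the given tensor pointers and updates the corresponding option flag;
 * the calls can be chained, since every setter returns a reference to this object.
 *
 * A minimal usage sketch (the tensor objects below are illustrative placeholders owned by the caller,
 * not part of this header):
 *
 * @code
 * Tensor cell_to_forget_w, cell_to_output_w, projection_w, projection_b;
 * LSTMParams<ITensor> lstm_params;
 * lstm_params.set_peephole_params(&cell_to_forget_w, &cell_to_output_w)
 *            .set_projection_params(&projection_w, &projection_b);
 * @endcode
 */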
template <typename T>
class LSTMParams
{
public:
    /** Constructor */
    LSTMParams()
        : _input_to_input_weights(nullptr), _recurrent_to_input_weights(nullptr), _cell_to_input_weights(nullptr), _input_gate_bias(nullptr), _cell_to_forget_weights(nullptr),
          _cell_to_output_weights(nullptr), _projection_weights(nullptr), _projection_bias(nullptr), _input_layer_norm_weights(nullptr), _forget_layer_norm_weights(nullptr), _cell_layer_norm_weights(nullptr),
          _output_layer_norm_weights(nullptr), _has_peephole_opt(false), _has_projection(false), _has_cifg_opt(true), _use_layer_norm(false)
    {
    }
    /** Prevent instances of this class from being copied (As this class contains pointers) */
    LSTMParams(const LSTMParams &) = delete;
    /** Prevent instances of this class from being copied (As this class contains pointers) */
    LSTMParams &operator=(const LSTMParams &) = delete;
    /** Default destructor */
    ~LSTMParams() = default;
    /** Set CIFG tensor parameters.
     *
     * @param[in] input_to_input_weights     2D weights tensor with dimensions [input_size, num_units]. Data types supported: F16/F32.
     * @param[in] recurrent_to_input_weights 2D weights tensor with dimensions [output_size, num_units]. Data type supported: Same as @p input_to_input_weights.
     * @param[in] cell_to_input_weights      1D weights tensor with dimensions [num_units]. Can be nullptr. Data type supported: Same as @p input_to_input_weights.
     * @param[in] input_gate_bias            1D weights tensor with dimensions [num_units]. Data type supported: Same as @p input_to_input_weights.
     *
     * @return Reference to this LSTMParams object
     */
    LSTMParams &set_cifg_params(const T *input_to_input_weights, const T *recurrent_to_input_weights, const T *cell_to_input_weights, const T *input_gate_bias)
    {
        _input_to_input_weights     = input_to_input_weights;
        _recurrent_to_input_weights = recurrent_to_input_weights;
        _cell_to_input_weights      = cell_to_input_weights;
        _input_gate_bias            = input_gate_bias;
        _has_cifg_opt               = false;
        return *this;
    }
    /** Set projection tensor parameters.
     *
     * @param[in] projection_weights 2D weights tensor with dimensions [output_size, num_units]. Data types supported: F16/F32.
     * @param[in] projection_bias    1D weights tensor with dimensions [output_size]. Data type supported: Same as @p projection_weights.
     *
     * @return Reference to this LSTMParams object
     */
    LSTMParams &set_projection_params(const T *projection_weights, const T *projection_bias)
    {
        _projection_weights = projection_weights;
        _projection_bias    = projection_bias;
        _has_projection     = true;
        return *this;
    }
    /** Set peephole tensor parameters.
     *
     * @param[in] cell_to_forget_weights 1D weights tensor with dimensions [num_units]. Data types supported: F16/F32.
     * @param[in] cell_to_output_weights 1D weights tensor with dimensions [num_units]. Data type supported: Same as @p cell_to_forget_weights.
     *
     * @return Reference to this LSTMParams object
     */
    LSTMParams &set_peephole_params(const T *cell_to_forget_weights, const T *cell_to_output_weights)
    {
        _cell_to_forget_weights = cell_to_forget_weights;
        _cell_to_output_weights = cell_to_output_weights;
        _has_peephole_opt       = true;
        return *this;
    }
    /** Set layer normalization tensor parameters.
     *
     * @param[in] input_layer_norm_weights  1D weights tensor with dimensions [num_units]. Data types supported: F16/F32.
     * @param[in] forget_layer_norm_weights 1D weights tensor with dimensions [num_units]. Data type supported: Same as @p input_layer_norm_weights.
     * @param[in] cell_layer_norm_weights   1D weights tensor with dimensions [num_units]. Data type supported: Same as @p input_layer_norm_weights.
     * @param[in] output_layer_norm_weights 1D weights tensor with dimensions [num_units]. Data type supported: Same as @p input_layer_norm_weights.
     *
     * @return Reference to this LSTMParams object
     */
    LSTMParams &set_layer_normalization_params(const T *input_layer_norm_weights, const T *forget_layer_norm_weights,
                                               const T *cell_layer_norm_weights, const T *output_layer_norm_weights)
    {
        _input_layer_norm_weights  = input_layer_norm_weights;
        _forget_layer_norm_weights = forget_layer_norm_weights;
        _cell_layer_norm_weights   = cell_layer_norm_weights;
        _output_layer_norm_weights = output_layer_norm_weights;
        _use_layer_norm            = true;
        return *this;
    }

    const T *input_to_input_weights() const
    {
        return _input_to_input_weights;
    }

    const T *recurrent_to_input_weights() const
    {
        return _recurrent_to_input_weights;
    }

    const T *cell_to_input_weights() const
    {
        return _cell_to_input_weights;
    }

    const T *input_gate_bias() const
    {
        return _input_gate_bias;
    }

    const T *cell_to_forget_weights() const
    {
        return _cell_to_forget_weights;
    }

    const T *cell_to_output_weights() const
    {
        return _cell_to_output_weights;
    }

    const T *projection_weights() const
    {
        return _projection_weights;
    }

    const T *projection_bias() const
    {
        return _projection_bias;
    }

    const T *input_layer_norm_weights() const
    {
        return _input_layer_norm_weights;
    }

    const T *forget_layer_norm_weights() const
    {
        return _forget_layer_norm_weights;
    }

    const T *cell_layer_norm_weights() const
    {
        return _cell_layer_norm_weights;
    }

    const T *output_layer_norm_weights() const
    {
        return _output_layer_norm_weights;
    }

    bool has_peephole_opt() const
    {
        return _has_peephole_opt;
    }

    bool has_projection() const
    {
        return _has_projection;
    }

    bool has_cifg_opt() const
    {
        return _has_cifg_opt;
    }

    bool use_layer_norm() const
    {
        return _use_layer_norm;
    }

private:
    const T *_input_to_input_weights;
    const T *_recurrent_to_input_weights;
    const T *_cell_to_input_weights;
    const T *_input_gate_bias;
    const T *_cell_to_forget_weights;
    const T *_cell_to_output_weights;
    const T *_projection_weights;
    const T *_projection_bias;
    const T *_input_layer_norm_weights;
    const T *_forget_layer_norm_weights;
    const T *_cell_layer_norm_weights;
    const T *_output_layer_norm_weights;
    bool     _has_peephole_opt;
    bool     _has_projection;
    bool     _has_cifg_opt;
    bool     _use_layer_norm;
};
} // namespace arm_compute
#endif /*__ARM_COMPUTE_LSTMPARAMS_H__ */