blob: cde0087d6f09fb01ed40fa61972f537bc47547f7 [file] [log] [blame]
Nattapat Chaimanowong969eea32019-01-30 13:33:11 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
// Generated (de)serialization code is placed in the armnnSerializer namespace.
namespace armnnSerializer;

// 4-byte magic embedded in every serialized buffer to identify the format.
file_identifier "ARMN";

// Default filename extension for buffers of this schema.
file_extension "armnn";
11
// Activation function selector for ActivationDescriptor.
// NOTE(review): values are part of the wire format — append new entries,
// never renumber existing ones.
enum ActivationFunction : byte {
    Sigmoid = 0,
    TanH = 1,
    Linear = 2,
    ReLu = 3,
    BoundedReLu = 4,    // upper (and lower) bound taken from the descriptor's a/b fields — TODO confirm
    SoftReLu = 5,
    LeakyReLu = 6,
    Abs = 7,
    Sqrt = 8,
    Square = 9
}
24
// Element type of a tensor's data.
enum DataType : byte {
    Float16 = 0,
    Float32 = 1,
    QuantisedAsymm8 = 2,    // 8-bit asymmetric quantized; see TensorInfo scale/offset
    Signed32 = 3,
    Boolean = 4
}
32
// Memory layout of 4-D tensors: channels-last (NHWC) or channels-first (NCHW).
enum DataLayout : byte {
    NHWC = 0,
    NCHW = 1
}
37
// Shape and element-type metadata for a tensor.
table TensorInfo {
    dimensions:[uint];              // shape, one extent per dimension
    dataType:DataType;
    // Quantization parameters; only meaningful for quantized dataTypes
    // (e.g. QuantisedAsymm8). Defaults describe an unquantized identity mapping.
    quantizationScale:float = 1.0;
    quantizationOffset:int = 0;
}
44
// Identifies the producer of a tensor: which layer, and which of that
// layer's output slots. A struct (not a table) so it is stored inline.
struct Connection {
    sourceLayerIndex:uint;
    outputSlotIndex:uint;
}
49
// Raw constant-tensor payloads, one wrapper table per element width.
// Wrapper tables are needed because FlatBuffers unions may only contain
// tables, not bare vectors.
table ByteData {
    data:[byte];
}

table ShortData {
    data:[short];
}

table IntData {
    data:[int];
}

table LongData {
    data:[long];
}

// Payload alternatives for constant tensor data (8/16/32/64-bit elements).
union ConstTensorData { ByteData, ShortData, IntData, LongData }

// A constant tensor: its metadata plus the raw element data.
table ConstTensor {
    info:TensorInfo;
    data:ConstTensorData;
}
72
// An input connection point of a layer. 'connection' records which
// layer/output-slot feeds this input.
table InputSlot {
    index:uint;             // position of this slot on the owning layer
    connection:Connection;
}

// An output connection point of a layer and the info of the tensor it produces.
table OutputSlot {
    index:uint;             // position of this slot on the owning layer
    tensorInfo:TensorInfo;
}
82
// Discriminator stored in LayerBase identifying the concrete layer kind.
// NOTE(review): values are wire-format; new kinds are appended in the order
// they were added to the serializer (hence the non-alphabetical tail) —
// never renumber or reorder.
enum LayerType : uint {
    Addition = 0,
    Input = 1,
    Multiplication = 2,
    Output = 3,
    Pooling2d = 4,
    Reshape = 5,
    Softmax = 6,
    Convolution2d = 7,
    DepthwiseConvolution2d = 8,
    Activation = 9,
    Permute = 10,
    FullyConnected = 11,
    Constant = 12,
    SpaceToBatchNd = 13,
    BatchToSpaceNd = 14,
    Division = 15,
    Minimum = 16,
    Equal = 17,
    Maximum = 18,
    Normalization = 19
}
105
// Base layer table to be used as part of other layers.
// Carries the identity and wiring common to every layer kind.
table LayerBase {
    index:uint;                 // position of the layer within the graph
    layerName:string;
    layerType:LayerType;        // must match the concrete table wrapping this base
    inputSlots:[InputSlot];
    outputSlots:[OutputSlot];
}

// Base for layers bound to the outside world (Input/Output), adding the
// id callers use to bind data to the network at runtime.
table BindableLayerBase {
    base:LayerBase;
    layerBindingId:int;
}
119
// Table for each layer defined below

// Element-wise activation layer.
table ActivationLayer {
    base:LayerBase;
    descriptor:ActivationDescriptor;
}

table ActivationDescriptor {
    function:ActivationFunction = Sigmoid;
    a:float;    // function-dependent parameter (e.g. bound/slope) — TODO confirm per function
    b:float;    // function-dependent parameter — TODO confirm per function
}
131
// Element-wise addition of two inputs; no parameters beyond the base.
table AdditionLayer {
    base:LayerBase;
}
135
// Layer producing a fixed tensor baked into the serialized graph.
table ConstantLayer {
    base:LayerBase;
    input:ConstTensor;  // the constant value this layer outputs
}
140
// 2-D convolution with embedded weights and (optional) biases.
table Convolution2dLayer {
    base:LayerBase;
    descriptor:Convolution2dDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;     // only meaningful when descriptor.biasEnabled is true
}

table Convolution2dDescriptor {
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    strideX:uint;
    strideY:uint;
    biasEnabled:bool = false;
    dataLayout:DataLayout = NCHW;
}
158
// Element-wise division of two inputs; no parameters beyond the base.
table DivisionLayer {
    base:LayerBase;
}

// Element-wise equality comparison of two inputs.
table EqualLayer {
    base:LayerBase;
}
166
// Fully connected (dense) layer with embedded weights and optional biases.
table FullyConnectedLayer {
    base:LayerBase;
    descriptor:FullyConnectedDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;     // only meaningful when descriptor.biasEnabled is true
}

table FullyConnectedDescriptor {
    biasEnabled:bool = false;
    transposeWeightsMatrix:bool = false;    // whether the stored weights are transposed
}
178
// Network input; bindable so callers can feed data in via layerBindingId.
table InputLayer {
    base:BindableLayerBase;
}
182
// Element-wise minimum of two inputs; no parameters beyond the base.
table MinimumLayer {
    base:LayerBase;
}

// Element-wise maximum of two inputs; no parameters beyond the base.
table MaximumLayer {
    base:LayerBase;
}

// Element-wise multiplication of two inputs; no parameters beyond the base.
table MultiplicationLayer {
    base:LayerBase;
}
194
// 2-D pooling layer (max / average / L2).
table Pooling2dLayer {
    base:LayerBase;
    descriptor:Pooling2dDescriptor;
}

enum PoolingAlgorithm : byte {
    Max = 0,
    Average = 1,
    L2 = 2
}

// How a fractional output extent is rounded to an integer size.
enum OutputShapeRounding : byte {
    Floor = 0,
    Ceiling = 1
}

// How padding elements participate in the pooling computation:
// included as a fixed value, or excluded from the pool entirely.
enum PaddingMethod : byte {
    IgnoreValue = 0,
    Exclude = 1
}

table Pooling2dDescriptor {
    poolType:PoolingAlgorithm;
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    poolWidth:uint;
    poolHeight:uint;
    strideX:uint;
    strideY:uint;
    outputShapeRounding:OutputShapeRounding;
    paddingMethod:PaddingMethod;
    dataLayout:DataLayout;
}
230
// Softmax layer.
table SoftmaxLayer {
    base:LayerBase;
    descriptor:SoftmaxDescriptor;
}

table SoftmaxDescriptor {
    beta:float;     // exponent scaling factor applied before normalization
}
239
// Depthwise 2-D convolution with embedded weights and optional biases.
table DepthwiseConvolution2dLayer {
    base:LayerBase;
    descriptor:DepthwiseConvolution2dDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;     // only meaningful when descriptor.biasEnabled is true
}

// Mirrors Convolution2dDescriptor; kept separate so the two can evolve independently.
table DepthwiseConvolution2dDescriptor {
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    strideX:uint;
    strideY:uint;
    biasEnabled:bool = false;
    dataLayout:DataLayout = NCHW;
}
257
// Network output; bindable so callers can read results via layerBindingId.
table OutputLayer {
    base:BindableLayerBase;
}
261
// Reshape layer: reinterprets the input tensor with a new shape.
table ReshapeLayer {
    base:LayerBase;
    descriptor:ReshapeDescriptor;
}

table ReshapeDescriptor {
    targetShape:[uint];     // output shape, one extent per dimension
}
270
// Permute layer: reorders the dimensions of the input tensor.
table PermuteLayer {
    base:LayerBase;
    descriptor:PermuteDescriptor;
}

table PermuteDescriptor {
    // Mapping of input dimension -> output dimension; direction of the
    // mapping (src-to-dst vs dst-to-src) per the serializer — TODO confirm.
    dimMappings:[uint];
}
279
// SpaceToBatchNd layer: moves spatial blocks into the batch dimension.
table SpaceToBatchNdLayer {
    base:LayerBase;
    descriptor:SpaceToBatchNdDescriptor;
}

table SpaceToBatchNdDescriptor {
    blockShape:[uint];      // block size per spatial dimension
    padList:[uint];         // padding per spatial dimension; flattened pairs — TODO confirm layout
    dataLayout:DataLayout;
}
290
// BatchToSpaceNd layer: inverse of SpaceToBatchNd, moving batch back to space.
table BatchToSpaceNdLayer {
    base:LayerBase;
    descriptor:BatchToSpaceNdDescriptor;
}

table BatchToSpaceNdDescriptor {
    blockShape:[uint];      // block size per spatial dimension
    crops:[uint];           // cropping per spatial dimension; flattened pairs — TODO confirm layout
    dataLayout:DataLayout;
}
301
// Whether normalization is computed across channels or within a channel.
enum NormalizationAlgorithmChannel : byte {
    Across = 0,
    Within = 1
}

enum NormalizationAlgorithmMethod : byte {
    LocalBrightness = 0,
    LocalContrast = 1
}

// Local response/contrast normalization layer.
table NormalizationLayer {
    base:LayerBase;
    descriptor:NormalizationDescriptor;
}

table NormalizationDescriptor {
    normChannelType:NormalizationAlgorithmChannel = Across;
    normMethodType:NormalizationAlgorithmMethod = LocalBrightness;
    normSize:uint;      // extent of the normalization window
    alpha:float;        // LRN scaling parameter
    beta:float;         // LRN exponent parameter
    k:float;            // LRN additive constant
    dataLayout:DataLayout = NCHW;
}
326
// Union over all concrete layer tables. FlatBuffers assigns the on-wire
// union type value from declaration order, so new layers MUST be appended
// at the end (which is why the tail is not alphabetical) — never inserted
// or reordered.
union Layer {
    ActivationLayer,
    AdditionLayer,
    BatchToSpaceNdLayer,
    ConstantLayer,
    Convolution2dLayer,
    DepthwiseConvolution2dLayer,
    FullyConnectedLayer,
    InputLayer,
    MultiplicationLayer,
    OutputLayer,
    PermuteLayer,
    Pooling2dLayer,
    ReshapeLayer,
    SoftmaxLayer,
    SpaceToBatchNdLayer,
    DivisionLayer,
    MinimumLayer,
    EqualLayer,
    MaximumLayer,
    NormalizationLayer
}
349
// Wrapper table so the graph can hold a vector of layers — FlatBuffers
// does not support vectors of unions directly in all bindings.
table AnyLayer {
    layer:Layer;
}

// Root type for serialized data is the graph of the network
table SerializedGraph {
    layers:[AnyLayer];
    inputIds:[uint];    // ids of the network's inputs — presumably layerBindingId values; verify against serializer
    outputIds:[uint];   // ids of the network's outputs — presumably layerBindingId values; verify against serializer
}

root_type SerializedGraph;