//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

namespace armnnSerializer;

file_identifier "ARMN";

file_extension "armnn";

enum ActivationFunction : byte {
    Sigmoid = 0,
    TanH = 1,
    Linear = 2,
    ReLu = 3,
    BoundedReLu = 4,
    SoftReLu = 5,
    LeakyReLu = 6,
    Abs = 7,
    Sqrt = 8,
    Square = 9
}

enum DataType : byte {
    Float16 = 0,
    Float32 = 1,
    QuantisedAsymm8 = 2,
    Signed32 = 3,
    Boolean = 4
}

enum DataLayout : byte {
    NHWC = 0,
    NCHW = 1
}

table TensorInfo {
    dimensions:[uint];
    dataType:DataType;
    quantizationScale:float = 1.0;
    quantizationOffset:int = 0;
}
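
// For QuantisedAsymm8 tensors the stored values map to real values as
//     real ≈ quantizationScale * (quantizedValue - quantizationOffset);
// the defaults above (scale 1.0, offset 0) are the identity mapping.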

struct Connection {
    sourceLayerIndex:uint;
    outputSlotIndex:uint;
}
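
// A Connection records where an InputSlot's data comes from: output slot
// outputSlotIndex of the layer identified by sourceLayerIndex.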

table ByteData {
    data:[byte];
}

table ShortData {
    data:[short];
}

table IntData {
    data:[int];
}

table LongData {
    data:[long];
}

union ConstTensorData { ByteData, ShortData, IntData, LongData }

table ConstTensor {
    info:TensorInfo;
    data:ConstTensorData;
}
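
// Note (assumption): the ConstTensorData member is chosen to match the element width of
// info.dataType, e.g. ByteData for 8-bit quantised weights; the schema itself does not
// enforce this pairing.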

table InputSlot {
    index:uint;
    connection:Connection;
}

table OutputSlot {
    index:uint;
    tensorInfo:TensorInfo;
}

enum LayerType : uint {
    Addition = 0,
    Input = 1,
    Multiplication = 2,
    Output = 3,
    Pooling2d = 4,
    Reshape = 5,
    Softmax = 6,
    Convolution2d = 7,
    DepthwiseConvolution2d = 8,
    Activation = 9,
    Permute = 10,
    FullyConnected = 11,
    Constant = 12,
    SpaceToBatchNd = 13,
    BatchToSpaceNd = 14,
    Division = 15,
    Minimum = 16,
    Equal = 17,
    Maximum = 18,
    Normalization = 19,
    Pad = 20,
    Rsqrt = 21,
    Floor = 22,
    BatchNormalization = 23,
    Greater = 24
}
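
// LayerType mirrors the Layer union defined further down; LayerBase stores it so a reader
// can identify a layer's kind directly from its base table.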

// Base layer table to be used as part of other layers
table LayerBase {
    index:uint;
    layerName:string;
    layerType:LayerType;
    inputSlots:[InputSlot];
    outputSlots:[OutputSlot];
}

table BindableLayerBase {
    base:LayerBase;
    layerBindingId:int;
}
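
// BindableLayerBase is used by the Input and Output layers; layerBindingId is the id a
// caller uses to bind tensors to those layers at runtime.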

// Tables for each layer type are defined below
table ActivationLayer {
    base:LayerBase;
    descriptor:ActivationDescriptor;
}

table ActivationDescriptor {
    function:ActivationFunction = Sigmoid;
    a:float;
    b:float;
}
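
// Note (assumption): a and b are function-dependent parameters, e.g. BoundedReLu treats a as
// the upper bound and b as the lower bound, while LeakyReLu uses a as the slope for
// negative inputs.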

table AdditionLayer {
    base:LayerBase;
}

table ConstantLayer {
    base:LayerBase;
    input:ConstTensor;
}

table Convolution2dLayer {
    base:LayerBase;
    descriptor:Convolution2dDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

table Convolution2dDescriptor {
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    strideX:uint;
    strideY:uint;
    biasEnabled:bool = false;
    dataLayout:DataLayout = NCHW;
}
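
// For reference, the usual output size along each spatial axis is
//     outputSize = floor((inputSize + padBefore + padAfter - kernelSize) / stride) + 1,
// and biases are only meaningful when biasEnabled is true.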

table DivisionLayer {
    base:LayerBase;
}

table EqualLayer {
    base:LayerBase;
}

table FloorLayer {
    base:LayerBase;
}

table FullyConnectedLayer {
    base:LayerBase;
    descriptor:FullyConnectedDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

table FullyConnectedDescriptor {
    biasEnabled:bool = false;
    transposeWeightsMatrix:bool = false;
}

table GreaterLayer {
    base:LayerBase;
}

table InputLayer {
    base:BindableLayerBase;
}

table MinimumLayer {
    base:LayerBase;
}

table MaximumLayer {
    base:LayerBase;
}

table MultiplicationLayer {
    base:LayerBase;
}

table Pooling2dLayer {
    base:LayerBase;
    descriptor:Pooling2dDescriptor;
}

enum PoolingAlgorithm : byte {
    Max = 0,
    Average = 1,
    L2 = 2
}

enum OutputShapeRounding : byte {
    Floor = 0,
    Ceiling = 1
}

enum PaddingMethod : byte {
    IgnoreValue = 0,
    Exclude = 1
}

table Pooling2dDescriptor {
    poolType:PoolingAlgorithm;
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    poolWidth:uint;
    poolHeight:uint;
    strideX:uint;
    strideY:uint;
    outputShapeRounding:OutputShapeRounding;
    paddingMethod:PaddingMethod;
    dataLayout:DataLayout;
}
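
// Note (assumption): outputShapeRounding selects Floor or Ceiling when the pooling window
// does not divide the padded input evenly, and paddingMethod controls whether padded
// elements are counted (IgnoreValue) or left out (Exclude) of average pooling.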

table SoftmaxLayer {
    base:LayerBase;
    descriptor:SoftmaxDescriptor;
}

table SoftmaxDescriptor {
    beta:float;
}

table DepthwiseConvolution2dLayer {
    base:LayerBase;
    descriptor:DepthwiseConvolution2dDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

table DepthwiseConvolution2dDescriptor {
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    strideX:uint;
    strideY:uint;
    biasEnabled:bool = false;
    dataLayout:DataLayout = NCHW;
}

table OutputLayer {
    base:BindableLayerBase;
}

table ReshapeLayer {
    base:LayerBase;
    descriptor:ReshapeDescriptor;
}

table ReshapeDescriptor {
    targetShape:[uint];
}

table PermuteLayer {
    base:LayerBase;
    descriptor:PermuteDescriptor;
}

table PermuteDescriptor {
    dimMappings:[uint];
}
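
// Note (assumption): dimMappings[i] gives the position that source dimension i takes in the
// permuted output, as in armnn::PermutationVector.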

table SpaceToBatchNdLayer {
    base:LayerBase;
    descriptor:SpaceToBatchNdDescriptor;
}

table SpaceToBatchNdDescriptor {
    blockShape:[uint];
    padList:[uint];
    dataLayout:DataLayout;
}

table BatchToSpaceNdLayer {
    base:LayerBase;
    descriptor:BatchToSpaceNdDescriptor;
}

table BatchToSpaceNdDescriptor {
    blockShape:[uint];
    crops:[uint];
    dataLayout:DataLayout;
}
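
// Note (assumption): padList and crops hold flattened per-spatial-dimension pairs,
// i.e. [before0, after0, before1, after1, ...], with one blockShape entry per spatial dimension.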

enum NormalizationAlgorithmChannel : byte {
    Across = 0,
    Within = 1
}

enum NormalizationAlgorithmMethod : byte {
    LocalBrightness = 0,
    LocalContrast = 1
}

table NormalizationLayer {
    base:LayerBase;
    descriptor:NormalizationDescriptor;
}

table NormalizationDescriptor {
    normChannelType:NormalizationAlgorithmChannel = Across;
    normMethodType:NormalizationAlgorithmMethod = LocalBrightness;
    normSize:uint;
    alpha:float;
    beta:float;
    k:float;
    dataLayout:DataLayout = NCHW;
}
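
// For LocalBrightness (local response normalisation) the usual form is
//     output = input / (k + alpha * sum(input^2 over a normSize window))^beta,
// with the sum taken across or within channels according to normChannelType.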

table PadLayer {
    base:LayerBase;
    descriptor:PadDescriptor;
}

table PadDescriptor {
    padList:[uint];
}

table RsqrtLayer {
    base:LayerBase;
}

table BatchNormalizationLayer {
    base:LayerBase;
    descriptor:BatchNormalizationDescriptor;
    mean:ConstTensor;
    variance:ConstTensor;
    beta:ConstTensor;
    gamma:ConstTensor;
}

table BatchNormalizationDescriptor {
    eps:float;
    dataLayout:DataLayout;
}
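
// Batch normalization applies the standard per-channel transform
//     output = gamma * (input - mean) / sqrt(variance + eps) + beta
// using the four constant tensors stored above.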

union Layer {
    ActivationLayer,
    AdditionLayer,
    BatchToSpaceNdLayer,
    BatchNormalizationLayer,
    ConstantLayer,
    Convolution2dLayer,
    DepthwiseConvolution2dLayer,
    FullyConnectedLayer,
    InputLayer,
    MultiplicationLayer,
    OutputLayer,
    PermuteLayer,
    Pooling2dLayer,
    ReshapeLayer,
    SoftmaxLayer,
    SpaceToBatchNdLayer,
    DivisionLayer,
    MinimumLayer,
    EqualLayer,
    MaximumLayer,
    NormalizationLayer,
    PadLayer,
    RsqrtLayer,
    FloorLayer,
    GreaterLayer
}

table AnyLayer {
    layer:Layer;
}
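
// AnyLayer exists so SerializedGraph can hold a vector of layers: wrapping the union in a
// table is the common FlatBuffers pattern used where vectors of unions are not supported.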

// Root type for serialized data is the graph of the network
table SerializedGraph {
    layers:[AnyLayer];
    inputIds:[uint];
    outputIds:[uint];
}

root_type SerializedGraph;
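
// Illustrative usage (assuming this file is saved as ArmnnSchema.fbs and flatc is available):
//     flatc --cpp ArmnnSchema.fbs
// generates ArmnnSchema_generated.h with accessors for reading and writing SerializedGraph buffers.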