//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

namespace armnnSerializer;

file_identifier "ARMN";

file_extension "armnn";

enum ActivationFunction : byte {
    Sigmoid = 0,
    TanH = 1,
    Linear = 2,
    ReLu = 3,
    BoundedReLu = 4,
    SoftReLu = 5,
    LeakyReLu = 6,
    Abs = 7,
    Sqrt = 8,
    Square = 9
}

enum DataType : byte {
    Float16 = 0,
    Float32 = 1,
    QuantisedAsymm8 = 2,
    Signed32 = 3,
    Boolean = 4
}

enum DataLayout : byte {
    NHWC = 0,
    NCHW = 1
}

table TensorInfo {
    dimensions:[uint];
    dataType:DataType;
    quantizationScale:float = 1.0;
    quantizationOffset:int = 0;
}
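
// Note: quantizationScale and quantizationOffset describe the usual asymmetric quantisation
// scheme; for a QuantisedAsymm8 tensor the real value is assumed to be recovered as
// quantizationScale * (quantizedValue - quantizationOffset).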

struct Connection {
    sourceLayerIndex:uint;
    outputSlotIndex:uint;
}

table ByteData {
    data:[byte];
}

table ShortData {
    data:[short];
}

table IntData {
    data:[int];
}

table LongData {
    data:[long];
}

union ConstTensorData { ByteData, ShortData, IntData, LongData }

table ConstTensor {
    info:TensorInfo;
    data:ConstTensorData;
}
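
// Note: the width of the ConstTensorData variant chosen for a ConstTensor is expected to
// match the dataType recorded in its TensorInfo (for example ByteData for 8-bit quantised
// weights), with the raw elements reinterpreted accordingly when the tensor is read back.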

table InputSlot {
    index:uint;
    connection:Connection;
}

table OutputSlot {
    index:uint;
    tensorInfo:TensorInfo;
}
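
// Note: graph connectivity is expressed through InputSlot.connection - each Connection
// identifies the producing layer (sourceLayerIndex) and which of that layer's output slots
// (outputSlotIndex) supplies the tensor, while OutputSlot records the TensorInfo it produces.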

enum LayerType : uint {
    Addition = 0,
    Input = 1,
    Multiplication = 2,
    Output = 3,
    Pooling2d = 4,
    Reshape = 5,
    Softmax = 6,
    Convolution2d = 7,
    DepthwiseConvolution2d = 8,
    Activation = 9,
    Permute = 10,
    FullyConnected = 11,
    Constant = 12,
    SpaceToBatchNd = 13,
    BatchToSpaceNd = 14,
    Division = 15,
    Minimum = 16,
    Equal = 17,
    Maximum = 18,
    Normalization = 19,
    Pad = 20,
    Rsqrt = 21,
    Floor = 22,
    BatchNormalization = 23
}

// Base layer table to be used as part of other layers
table LayerBase {
    index:uint;
    layerName:string;
    layerType:LayerType;
    inputSlots:[InputSlot];
    outputSlots:[OutputSlot];
}

table BindableLayerBase {
    base:LayerBase;
    layerBindingId:int;
}

// Tables for each layer are defined below
table ActivationLayer {
    base:LayerBase;
    descriptor:ActivationDescriptor;
}

table ActivationDescriptor {
    function:ActivationFunction = Sigmoid;
    a:float;
    b:float;
}
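
// Note: a and b carry the function-specific parameters of the selected ActivationFunction
// (for example the upper bound for BoundedReLu or the slope for LeakyReLu in Arm NN's
// ActivationDescriptor); functions without parameters ignore them.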

table AdditionLayer {
    base:LayerBase;
}

table ConstantLayer {
    base:LayerBase;
    input:ConstTensor;
}

table Convolution2dLayer {
    base:LayerBase;
    descriptor:Convolution2dDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

table Convolution2dDescriptor {
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    strideX:uint;
    strideY:uint;
    biasEnabled:bool = false;
    dataLayout:DataLayout = NCHW;
}

table DivisionLayer {
    base:LayerBase;
}

table EqualLayer {
    base:LayerBase;
}

table FloorLayer {
    base:LayerBase;
}

table FullyConnectedLayer {
    base:LayerBase;
    descriptor:FullyConnectedDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

table FullyConnectedDescriptor {
    biasEnabled:bool = false;
    transposeWeightsMatrix:bool = false;
}

table InputLayer {
    base:BindableLayerBase;
}

table MinimumLayer {
    base:LayerBase;
}

table MaximumLayer {
    base:LayerBase;
}

table MultiplicationLayer {
    base:LayerBase;
}

table Pooling2dLayer {
    base:LayerBase;
    descriptor:Pooling2dDescriptor;
}

enum PoolingAlgorithm : byte {
    Max = 0,
    Average = 1,
    L2 = 2
}

enum OutputShapeRounding : byte {
    Floor = 0,
    Ceiling = 1
}

enum PaddingMethod : byte {
    IgnoreValue = 0,
    Exclude = 1
}

table Pooling2dDescriptor {
    poolType:PoolingAlgorithm;
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    poolWidth:uint;
    poolHeight:uint;
    strideX:uint;
    strideY:uint;
    outputShapeRounding:OutputShapeRounding;
    paddingMethod:PaddingMethod;
    dataLayout:DataLayout;
}

table SoftmaxLayer {
    base:LayerBase;
    descriptor:SoftmaxDescriptor;
}

table SoftmaxDescriptor {
    beta:float;
}

table DepthwiseConvolution2dLayer {
    base:LayerBase;
    descriptor:DepthwiseConvolution2dDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

table DepthwiseConvolution2dDescriptor {
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    strideX:uint;
    strideY:uint;
    biasEnabled:bool = false;
    dataLayout:DataLayout = NCHW;
}

table OutputLayer {
    base:BindableLayerBase;
}

table ReshapeLayer {
    base:LayerBase;
    descriptor:ReshapeDescriptor;
}

table ReshapeDescriptor {
    targetShape:[uint];
}

table PermuteLayer {
    base:LayerBase;
    descriptor:PermuteDescriptor;
}

table PermuteDescriptor {
    dimMappings:[uint];
}

table SpaceToBatchNdLayer {
    base:LayerBase;
    descriptor:SpaceToBatchNdDescriptor;
}

table SpaceToBatchNdDescriptor {
    blockShape:[uint];
    padList:[uint];
    dataLayout:DataLayout;
}

table BatchToSpaceNdLayer {
    base:LayerBase;
    descriptor:BatchToSpaceNdDescriptor;
}

table BatchToSpaceNdDescriptor {
    blockShape:[uint];
    crops:[uint];
    dataLayout:DataLayout;
}

enum NormalizationAlgorithmChannel : byte {
    Across = 0,
    Within = 1
}

enum NormalizationAlgorithmMethod : byte {
    LocalBrightness = 0,
    LocalContrast = 1
}

table NormalizationLayer {
    base:LayerBase;
    descriptor:NormalizationDescriptor;
}

table NormalizationDescriptor {
    normChannelType:NormalizationAlgorithmChannel = Across;
    normMethodType:NormalizationAlgorithmMethod = LocalBrightness;
    normSize:uint;
    alpha:float;
    beta:float;
    k:float;
    dataLayout:DataLayout = NCHW;
}

table PadLayer {
    base:LayerBase;
    descriptor:PadDescriptor;
}

table PadDescriptor {
    padList:[uint];
}

table RsqrtLayer {
    base:LayerBase;
}

table BatchNormalizationLayer {
    base:LayerBase;
    descriptor:BatchNormalizationDescriptor;
    mean:ConstTensor;
    variance:ConstTensor;
    beta:ConstTensor;
    gamma:ConstTensor;
}

table BatchNormalizationDescriptor {
    eps:float;
    dataLayout:DataLayout;
}

union Layer {
    ActivationLayer,
    AdditionLayer,
    BatchToSpaceNdLayer,
    BatchNormalizationLayer,
    ConstantLayer,
    Convolution2dLayer,
    DepthwiseConvolution2dLayer,
    FullyConnectedLayer,
    InputLayer,
    MultiplicationLayer,
    OutputLayer,
    PermuteLayer,
    Pooling2dLayer,
    ReshapeLayer,
    SoftmaxLayer,
    SpaceToBatchNdLayer,
    DivisionLayer,
    MinimumLayer,
    EqualLayer,
    MaximumLayer,
    NormalizationLayer,
    PadLayer,
    RsqrtLayer,
    FloorLayer
}

table AnyLayer {
    layer:Layer;
}
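
// Note: wrapping the Layer union in the AnyLayer table lets SerializedGraph hold its layers
// as a plain vector of tables, which keeps the schema usable from FlatBuffers language
// bindings that do not support vectors of unions directly.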

// Root type for serialized data is the graph of the network
table SerializedGraph {
    layers:[AnyLayer];
    inputIds:[uint];
    outputIds:[uint];
}

root_type SerializedGraph;
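
// Example (illustrative sketch only, not part of the schema): reading a serialized graph with
// the C++ code that flatc generates from this file. The header name below assumes the schema
// is saved as ArmnnSchema.fbs; the accessors used (GetSerializedGraph,
// SerializedGraphBufferHasIdentifier, layers, inputIds, outputIds, layer_as_InputLayer, base,
// layerBindingId) follow standard FlatBuffers C++ code generation for the tables above.
//
//   #include <fstream>
//   #include <iostream>
//   #include <iterator>
//   #include <vector>
//   #include "ArmnnSchema_generated.h"   // produced by: flatc --cpp ArmnnSchema.fbs
//
//   int main(int argc, char** argv)
//   {
//       if (argc != 2) { std::cerr << "usage: dump <file.armnn>\n"; return 1; }
//
//       std::ifstream file(argv[1], std::ios::binary);
//       std::vector<char> buffer((std::istreambuf_iterator<char>(file)),
//                                std::istreambuf_iterator<char>());
//
//       // The buffer embeds the file_identifier "ARMN" declared above.
//       if (!armnnSerializer::SerializedGraphBufferHasIdentifier(buffer.data()))
//       {
//           std::cerr << "not an armnn serialized graph\n"; return 1;
//       }
//
//       const auto* graph = armnnSerializer::GetSerializedGraph(buffer.data());
//       std::cout << "layers: "  << graph->layers()->size()
//                 << " inputs: " << graph->inputIds()->size()
//                 << " outputs: " << graph->outputIds()->size() << "\n";
//
//       // Union members are reached through the generated layer_as_<Type>() accessors.
//       for (const auto* wrapper : *graph->layers())
//       {
//           if (const auto* input = wrapper->layer_as_InputLayer())
//           {
//               std::cout << "input binding id: " << input->base()->layerBindingId() << "\n";
//           }
//       }
//       return 0;
//   }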