//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

namespace armnnSerializer;

file_identifier "ARMN";

file_extension "armnn";

enum ActivationFunction : byte {
    Sigmoid = 0,
    TanH = 1,
    Linear = 2,
    ReLu = 3,
    BoundedReLu = 4,
    SoftReLu = 5,
    LeakyReLu = 6,
    Abs = 7,
    Sqrt = 8,
    Square = 9
}

enum DataType : byte {
    Float16 = 0,
    Float32 = 1,
    QuantisedAsymm8 = 2,
    Signed32 = 3,
    Boolean = 4
}

enum DataLayout : byte {
    NHWC = 0,
    NCHW = 1
}

table TensorInfo {
    dimensions:[uint];
    dataType:DataType;
    quantizationScale:float = 1.0;
    quantizationOffset:int = 0;
}

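// Identifies the source of an input connection: the producing layer and the
// index of the output slot on that layer.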
struct Connection {
    sourceLayerIndex:uint;
    outputSlotIndex:uint;
}

table ByteData {
    data:[byte];
}

table ShortData {
    data:[short];
}

table IntData {
    data:[int];
}

table LongData {
    data:[long];
}

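// Raw storage for constant tensor data; the variant used is expected to match the
// element width of the tensor's DataType.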
union ConstTensorData { ByteData, ShortData, IntData, LongData }

table ConstTensor {
    info:TensorInfo;
    data:ConstTensorData;
}

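// An input slot of a layer and the connection feeding it.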
table InputSlot {
    index:uint;
    connection:Connection;
}

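// An output slot of a layer and the TensorInfo of the tensor it produces.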
table OutputSlot {
    index:uint;
    tensorInfo:TensorInfo;
}

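// Identifies which concrete layer type a LayerBase belongs to.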
enum LayerType : uint {
    Addition = 0,
    Input = 1,
    Multiplication = 2,
    Output = 3,
    Pooling2d = 4,
    Reshape = 5,
    Softmax = 6,
    Convolution2d = 7,
    DepthwiseConvolution2d = 8,
    Activation = 9,
    Permute = 10,
    FullyConnected = 11
}

// Base layer table to be used as part of other layers
table LayerBase {
    index:uint;
    layerName:string;
    layerType:LayerType;
    inputSlots:[InputSlot];
    outputSlots:[OutputSlot];
}

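// Base for layers bound to the network's inputs and outputs; layerBindingId is the
// identifier used to bind input/output tensors to the graph at runtime.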
table BindableLayerBase {
    base:LayerBase;
    layerBindingId:int;
}

// Tables for the individual layer types are defined below
table ActivationLayer {
    base:LayerBase;
    descriptor:ActivationDescriptor;
}

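// a and b are parameters of the chosen function (e.g. the bounds for BoundedReLu
// or the slope for LeakyReLu); they are unused by functions that take no parameters.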
table ActivationDescriptor {
    function:ActivationFunction = Sigmoid;
    a:float;
    b:float;
}

table AdditionLayer {
    base:LayerBase;
}

table Convolution2dLayer {
    base:LayerBase;
    descriptor:Convolution2dDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

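// Padding, stride, bias and data layout settings for Convolution2dLayer.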
table Convolution2dDescriptor {
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    strideX:uint;
    strideY:uint;
    biasEnabled:bool = false;
    dataLayout:DataLayout = NCHW;
}

table FullyConnectedLayer {
    base:LayerBase;
    descriptor:FullyConnectedDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

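// Options for FullyConnectedLayer: whether a bias is supplied and whether the
// stored weights matrix is transposed.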
table FullyConnectedDescriptor {
    biasEnabled:bool = false;
    transposeWeightsMatrix:bool = false;
}

table InputLayer {
    base:BindableLayerBase;
}

table MultiplicationLayer {
    base:LayerBase;
}

table Pooling2dLayer {
    base:LayerBase;
    descriptor:Pooling2dDescriptor;
}

enum PoolingAlgorithm : byte {
    Max = 0,
    Average = 1,
    L2 = 2
}

enum OutputShapeRounding : byte {
    Floor = 0,
    Ceiling = 1
}

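// Controls how values in the padded region are treated when pooling over them.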
enum PaddingMethod : byte {
    IgnoreValue = 0,
    Exclude = 1
}

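// Pool window size, padding, strides, output rounding and padding method for Pooling2dLayer.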
table Pooling2dDescriptor {
    poolType:PoolingAlgorithm;
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    poolWidth:uint;
    poolHeight:uint;
    strideX:uint;
    strideY:uint;
    outputShapeRounding:OutputShapeRounding;
    paddingMethod:PaddingMethod;
    dataLayout:DataLayout;
}

table SoftmaxLayer {
    base:LayerBase;
    descriptor:SoftmaxDescriptor;
}

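// beta scales the input values before the exponentiation step of softmax.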
table SoftmaxDescriptor {
    beta:float;
}

table DepthwiseConvolution2dLayer {
    base:LayerBase;
    descriptor:DepthwiseConvolution2dDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

table DepthwiseConvolution2dDescriptor {
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    strideX:uint;
    strideY:uint;
    biasEnabled:bool = false;
    dataLayout:DataLayout = NCHW;
}

table OutputLayer {
    base:BindableLayerBase;
}

table ReshapeLayer {
    base:LayerBase;
    descriptor:ReshapeDescriptor;
}

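// targetShape holds the dimensions of the reshaped output tensor.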
table ReshapeDescriptor {
    targetShape:[uint];
}

table PermuteLayer {
    base:LayerBase;
    descriptor:PermuteDescriptor;
}

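// dimMappings describes how the input dimensions are re-ordered to form the output.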
table PermuteDescriptor {
    dimMappings:[uint];
}

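// Union of every layer type that can appear in the serialized graph.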
union Layer {
    ActivationLayer,
    AdditionLayer,
    Convolution2dLayer,
    DepthwiseConvolution2dLayer,
    FullyConnectedLayer,
    InputLayer,
    MultiplicationLayer,
    OutputLayer,
    PermuteLayer,
    Pooling2dLayer,
    ReshapeLayer,
    SoftmaxLayer
}

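// Wrapper table: the graph stores a vector of AnyLayer entries, each holding one
// member of the Layer union, since union-valued vectors are not used here directly.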
table AnyLayer {
    layer:Layer;
}

// The root type of the serialized data is the graph of the network
table SerializedGraph {
    layers:[AnyLayer];
    inputIds:[uint];
    outputIds:[uint];
}

root_type SerializedGraph;
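
// A minimal sketch of how a buffer matching this schema could be inspected, assuming
// it is compiled with "flatc --cpp" and that the generated header name and accessors
// follow flatc's usual conventions (the header name and GetSerializedGraph helper are
// assumptions, not taken from this file):
//
//     #include <cstdio>
//     #include "ArmnnSchema_generated.h"   // name assumed from flatc defaults
//
//     void PrintGraphSummary(const void* buffer)
//     {
//         // flatc generates a Get<RootType>() accessor for the root_type declared above
//         auto graph = armnnSerializer::GetSerializedGraph(buffer);
//         std::printf("layers: %u, inputs: %u, outputs: %u\n",
//                     graph->layers()->size(),
//                     graph->inputIds()->size(),
//                     graph->outputIds()->size());
//     }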