//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

namespace armnnSerializer;

file_identifier "ARMN";

file_extension "armnn";
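
// Note: the C++ reader/writer code is generated from this schema with the FlatBuffers
// compiler, e.g. `flatc --cpp -o <output_dir> ArmnnSchema.fbs` (the file name here is
// an assumption); the generated types and Create*/Get* helpers live in the
// armnnSerializer namespace declared above.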

enum ActivationFunction : byte {
    Sigmoid = 0,
    TanH = 1,
    Linear = 2,
    ReLu = 3,
    BoundedReLu = 4,
    SoftReLu = 5,
    LeakyReLu = 6,
    Abs = 7,
    Sqrt = 8,
    Square = 9
}

enum DataType : byte {
    Float16 = 0,
    Float32 = 1,
    QuantisedAsymm8 = 2,
    Signed32 = 3,
    Boolean = 4
}

enum DataLayout : byte {
    NHWC = 0,
    NCHW = 1
}

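// Shape, element type and quantisation parameters of a tensor; quantizationScale and
// quantizationOffset are only meaningful for quantised data types such as QuantisedAsymm8.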
table TensorInfo {
    dimensions:[uint];
    dataType:DataType;
    quantizationScale:float = 1.0;
    quantizationOffset:int = 0;
}

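// Identifies the producer of an input value: the index of the source layer in the
// graph and which of that layer's output slots the connection comes from.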
struct Connection {
    sourceLayerIndex:uint;
    outputSlotIndex:uint;
}

table ByteData {
    data:[byte];
}

table ShortData {
    data:[short];
}

table IntData {
    data:[int];
}

table LongData {
    data:[long];
}

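// Raw storage for constant tensor values; the variant used is expected to depend on
// the element size of the tensor's DataType.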
union ConstTensorData { ByteData, ShortData, IntData, LongData }

table ConstTensor {
    info:TensorInfo;
    data:ConstTensorData;
}

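// A layer's input slot and the connection feeding it; output slots carry the
// TensorInfo describing the data the layer produces on that slot.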
table InputSlot {
    index:uint;
    connection:Connection;
}

table OutputSlot {
    index:uint;
    tensorInfo:TensorInfo;
}

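// Identifies the concrete layer table; the names correspond to the layer tables in
// the Layer union defined at the end of this file.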
enum LayerType : uint {
    Addition = 0,
    Input = 1,
    Multiplication = 2,
    Output = 3,
    Pooling2d = 4,
    Reshape = 5,
    Softmax = 6,
    Convolution2d = 7,
    DepthwiseConvolution2d = 8,
    Activation = 9,
    Permute = 10,
    FullyConnected = 11,
    Constant = 12
}

// Base layer table to be used as part of other layers
table LayerBase {
    index:uint;
    layerName:string;
    layerType:LayerType;
    inputSlots:[InputSlot];
    outputSlots:[OutputSlot];
}

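// Base for layers bound to the network's inputs and outputs; layerBindingId is the
// identifier used to bind tensors to the layer at runtime (assumed to correspond to
// armnn::LayerBindingId).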
table BindableLayerBase {
    base:LayerBase;
    layerBindingId:int;
}

// Tables for each layer type are defined below
table ActivationLayer {
    base:LayerBase;
    descriptor:ActivationDescriptor;
}

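// a and b are function-dependent parameters (for example, the upper and lower bounds
// for BoundedReLu, or the slope for LeakyReLu) and are ignored by functions that do
// not use them.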
table ActivationDescriptor {
    function:ActivationFunction = Sigmoid;
    a:float;
    b:float;
}

table AdditionLayer {
    base:LayerBase;
}

table ConstantLayer {
    base:LayerBase;
    input:ConstTensor;
}

table Convolution2dLayer {
    base:LayerBase;
    descriptor:Convolution2dDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

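// Padding and stride values are given in elements of the spatial (height/width)
// dimensions; biases are only expected to be present when biasEnabled is true.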
table Convolution2dDescriptor {
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    strideX:uint;
    strideY:uint;
    biasEnabled:bool = false;
    dataLayout:DataLayout = NCHW;
}

table FullyConnectedLayer {
    base:LayerBase;
    descriptor:FullyConnectedDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

table FullyConnectedDescriptor {
    biasEnabled:bool = false;
    transposeWeightsMatrix:bool = false;
}

table InputLayer {
    base:BindableLayerBase;
}

table MultiplicationLayer {
    base:LayerBase;
}

table Pooling2dLayer {
    base:LayerBase;
    descriptor:Pooling2dDescriptor;
}

enum PoolingAlgorithm : byte {
    Max = 0,
    Average = 1,
    L2 = 2
}

enum OutputShapeRounding : byte {
    Floor = 0,
    Ceiling = 1
}

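// Controls how padded elements are treated by Average/L2 pooling: IgnoreValue is
// understood to count the padded area in the calculation (treating it as zeros),
// while Exclude leaves padding out entirely.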
enum PaddingMethod : byte {
    IgnoreValue = 0,
    Exclude = 1
}

table Pooling2dDescriptor {
    poolType:PoolingAlgorithm;
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    poolWidth:uint;
    poolHeight:uint;
    strideX:uint;
    strideY:uint;
    outputShapeRounding:OutputShapeRounding;
    paddingMethod:PaddingMethod;
    dataLayout:DataLayout;
}

table SoftmaxLayer {
    base:LayerBase;
    descriptor:SoftmaxDescriptor;
}

table SoftmaxDescriptor {
    beta:float;
}

table DepthwiseConvolution2dLayer {
    base:LayerBase;
    descriptor:DepthwiseConvolution2dDescriptor;
    weights:ConstTensor;
    biases:ConstTensor;
}

table DepthwiseConvolution2dDescriptor {
    padLeft:uint;
    padRight:uint;
    padTop:uint;
    padBottom:uint;
    strideX:uint;
    strideY:uint;
    biasEnabled:bool = false;
    dataLayout:DataLayout = NCHW;
}

table OutputLayer {
    base:BindableLayerBase;
}

table ReshapeLayer {
    base:LayerBase;
    descriptor:ReshapeDescriptor;
}

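// targetShape holds the dimensions of the reshaped output tensor.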
table ReshapeDescriptor {
    targetShape:[uint];
}

table PermuteLayer {
    base:LayerBase;
    descriptor:PermuteDescriptor;
}

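// dimMappings is assumed to follow armnn::PermutationVector semantics:
// dimMappings[i] = j means input dimension i is moved to output dimension j.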
table PermuteDescriptor {
    dimMappings:[uint];
}

union Layer {
    ActivationLayer,
    AdditionLayer,
    ConstantLayer,
    Convolution2dLayer,
    DepthwiseConvolution2dLayer,
    FullyConnectedLayer,
    InputLayer,
    MultiplicationLayer,
    OutputLayer,
    PermuteLayer,
    Pooling2dLayer,
    ReshapeLayer,
    SoftmaxLayer
}

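// Wrapper around the Layer union so that SerializedGraph can hold a vector of
// heterogeneous layers (vectors of unions are not supported across all FlatBuffers
// language bindings).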
table AnyLayer {
    layer:Layer;
}

// Root type for serialized data is the graph of the network
table SerializedGraph {
    layers:[AnyLayer];
    inputIds:[uint];
    outputIds:[uint];
}

root_type SerializedGraph;
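
// The sketch below is illustrative only: it shows how a buffer produced from this
// schema might be read with flatc-generated C++ code. The header name
// ArmnnSchema_generated.h and the Get*/Verify* helper names follow the usual
// flatc --cpp naming conventions and should be checked against the generated code.
//
//     #include <cstddef>
//     #include <cstdint>
//     #include <flatbuffers/flatbuffers.h>
//     #include "ArmnnSchema_generated.h"
//
//     // Returns the root SerializedGraph, or nullptr if the buffer fails verification.
//     const armnnSerializer::SerializedGraph* ReadGraph(const uint8_t* data, size_t size)
//     {
//         flatbuffers::Verifier verifier(data, size);
//         if (!armnnSerializer::VerifySerializedGraphBuffer(verifier))
//         {
//             return nullptr; // not a valid "ARMN" buffer
//         }
//         return armnnSerializer::GetSerializedGraph(data);
//     }
//
//     // Each element of graph->layers() is an AnyLayer; layer_type() identifies which
//     // member of the Layer union it holds, and layer_as_InputLayer() etc. downcast it.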