//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Network.hpp"
#include "Graph.hpp"
#include "Layer.hpp"
#include "DeviceSpec.hpp"
#include "Optimizer.hpp"
#include "SubgraphViewSelector.hpp"
#include "BackendSettings.hpp"
#include "optimizations/All.hpp"

#include <backendsCommon/TensorHandle.hpp>
#include <backendsCommon/WorkloadFactory.hpp>
#include <armnn/backends/IBackendInternal.hpp>
#include <backendsCommon/TensorHandleFactoryRegistry.hpp>

#include <armnn/Exceptions.hpp>
#include <armnn/Utils.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/BackendRegistry.hpp>
#include <armnn/Logging.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <ProfilingService.hpp>

#include <common/include/ProfilingGuid.hpp>

#include <fcntl.h>
#include <algorithm>
#include <fstream>
#include <memory>
#include <vector>
#include <algorithm>

namespace armnn
{

INetwork::INetwork(NetworkOptions networkOptions) : pNetworkImpl(new NetworkImpl(networkOptions)) {}

INetwork::~INetwork() = default;

Status INetwork::PrintGraph()
{
    return pNetworkImpl->PrintGraph();
}

IConnectableLayer* INetwork::AddInputLayer(LayerBindingId id, const char* name)
{
    return pNetworkImpl->AddInputLayer(id, name);
}


IConnectableLayer* INetwork::AddArgMinMaxLayer(const ArgMinMaxDescriptor& desc,
                                               const char* name)
{
    return pNetworkImpl->AddArgMinMaxLayer(desc, name);
}

IConnectableLayer* INetwork::AddCastLayer(const char* name)
{
    return pNetworkImpl->AddCastLayer(name);
}

IConnectableLayer* INetwork::AddComparisonLayer(const ComparisonDescriptor& comparisonDescriptor,
                                                const char* name)
{
    return pNetworkImpl->AddComparisonLayer(comparisonDescriptor, name);
}


IConnectableLayer* INetwork::AddConcatLayer(const ConcatDescriptor& concatDescriptor,
                                            const char* name)
{
    return pNetworkImpl->AddConcatLayer(concatDescriptor, name);
}


IConnectableLayer* INetwork::AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor,
                                                   const ConstTensor& weights,
                                                   const Optional<ConstTensor>& biases,
                                                   const char* name)
{
    return pNetworkImpl->AddConvolution2dLayer(convolution2dDescriptor, weights, biases, name);
}


IConnectableLayer* INetwork::AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor,
                                                   const ConstTensor& weights,
                                                   const char* name)
{
    Optional<ConstTensor> biases;
    return pNetworkImpl->AddConvolution2dLayer(convolution2dDescriptor, weights, biases, name);
}


IConnectableLayer* INetwork::AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor,
                                                   const ConstTensor& weights,
                                                   const ConstTensor& biases,
                                                   const char* name)
{

    return pNetworkImpl->AddConvolution2dLayer(convolution2dDescriptor,
                                               weights,
                                               armnn::Optional<ConstTensor>(biases),
                                               name);
}


IConnectableLayer* INetwork::AddDepthToSpaceLayer(const DepthToSpaceDescriptor& depthToSpaceDescriptor,
                                                  const char* name)
{
    return pNetworkImpl->AddDepthToSpaceLayer(depthToSpaceDescriptor, name);
}


IConnectableLayer* INetwork::AddDepthwiseConvolution2dLayer(
    const DepthwiseConvolution2dDescriptor& convolution2dDescriptor,
    const ConstTensor& weights,
    const Optional<ConstTensor>& biases,
    const char* name)
{
    return pNetworkImpl->AddDepthwiseConvolution2dLayer(convolution2dDescriptor, weights, biases, name);
}


IConnectableLayer* INetwork::AddDepthwiseConvolution2dLayer(
    const DepthwiseConvolution2dDescriptor& convolution2dDescriptor,
    const ConstTensor& weights,
    const char* name)
{
    Optional<ConstTensor> biases;
    return pNetworkImpl->AddDepthwiseConvolution2dLayer(convolution2dDescriptor, weights, biases, name);
}


IConnectableLayer* INetwork::AddDepthwiseConvolution2dLayer(
    const DepthwiseConvolution2dDescriptor& convolution2dDescriptor,
    const ConstTensor& weights,
    const ConstTensor& biases,
    const char* name)
{
    return pNetworkImpl->AddDepthwiseConvolution2dLayer(convolution2dDescriptor, weights,
                                                        armnn::Optional<ConstTensor>(biases), name);
}


IConnectableLayer* INetwork::AddDequantizeLayer(const char* name)
{
    return pNetworkImpl->AddDequantizeLayer(name);
}


IConnectableLayer* INetwork::AddDetectionPostProcessLayer(
    const DetectionPostProcessDescriptor& descriptor,
    const ConstTensor& anchors,
    const char* name)
{
    return pNetworkImpl->AddDetectionPostProcessLayer(descriptor, anchors, name);
}


IConnectableLayer* INetwork::AddElementwiseUnaryLayer(const ElementwiseUnaryDescriptor& elementwiseUnaryDescriptor,
                                                      const char* name)
{
    return pNetworkImpl->AddElementwiseUnaryLayer(elementwiseUnaryDescriptor, name);
}


IConnectableLayer* INetwork::AddFillLayer(const FillDescriptor& fillDescriptor,
                                          const char* name)
{
    return pNetworkImpl->AddFillLayer(fillDescriptor, name);
}

IConnectableLayer* INetwork::AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
                                                    const ConstTensor& weights,
                                                    const Optional<ConstTensor>& biases,
                                                    const char* name)
{
    return pNetworkImpl->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                armnn::Optional<ConstTensor>(weights),
                                                biases,
                                                name);
}

IConnectableLayer* INetwork::AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
                                                    const ConstTensor& weights,
                                                    const char* name)
{
    armnn::Optional<ConstTensor> biases;
    return pNetworkImpl->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                armnn::Optional<ConstTensor>(weights),
                                                biases,
                                                name);
}

IConnectableLayer* INetwork::AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
                                                    const ConstTensor& weights,
                                                    const ConstTensor& biases,
                                                    const char* name)
{
    return pNetworkImpl->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                armnn::Optional<ConstTensor>(weights),
                                                armnn::Optional<ConstTensor>(biases),
                                                name);
}

IConnectableLayer* INetwork::AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
                                                    const Optional<ConstTensor>& weights,
                                                    const Optional<ConstTensor>& biases,
                                                    const char* name)
{
    return pNetworkImpl->AddFullyConnectedLayer(fullyConnectedDescriptor, weights, biases, name);
}

IConnectableLayer* INetwork::AddPermuteLayer(const PermuteDescriptor& permuteDescriptor,
                                             const char* name)
{
    return pNetworkImpl->AddPermuteLayer(permuteDescriptor, name);
}

IConnectableLayer* INetwork::AddBatchToSpaceNdLayer(const BatchToSpaceNdDescriptor& batchToSpaceNdDescriptor,
                                                    const char* name)
{
    return pNetworkImpl->AddBatchToSpaceNdLayer(batchToSpaceNdDescriptor, name);
}

IConnectableLayer* INetwork::AddPooling2dLayer(const Pooling2dDescriptor& pooling2dDescriptor,
                                               const char* name)
{
    return pNetworkImpl->AddPooling2dLayer(pooling2dDescriptor, name);
}

IConnectableLayer* INetwork::AddActivationLayer(const ActivationDescriptor& activationDescriptor,
                                                const char* name)
{
    return pNetworkImpl->AddActivationLayer(activationDescriptor, name);
}

IConnectableLayer* INetwork::AddNormalizationLayer(const NormalizationDescriptor& normalizationDescriptor,
                                                   const char* name)
{
    return pNetworkImpl->AddNormalizationLayer(normalizationDescriptor, name);
}

IConnectableLayer* INetwork::AddSliceLayer(const SliceDescriptor& sliceDescriptor, const char* name)
{
    return pNetworkImpl->AddSliceLayer(sliceDescriptor, name);
}

IConnectableLayer* INetwork::AddSoftmaxLayer(const SoftmaxDescriptor& softmaxDescriptor,
                                             const char* name)
{
    return pNetworkImpl->AddSoftmaxLayer(softmaxDescriptor, name);
}

IConnectableLayer* INetwork::AddSplitterLayer(const ViewsDescriptor& splitterDescriptor,
                                              const char* name)
{
    return pNetworkImpl->AddSplitterLayer(splitterDescriptor, name);
}

IConnectableLayer* INetwork::AddMergeLayer(const char* name)
{
    return pNetworkImpl->AddMergeLayer(name);
}

IConnectableLayer* INetwork::AddMergerLayer(const MergerDescriptor& mergerDescriptor,
                                            const char* name)
{
    return pNetworkImpl->AddConcatLayer(mergerDescriptor, name);
}

IConnectableLayer* INetwork::AddAbsLayer(const char* name)
{
    return pNetworkImpl->AddElementwiseUnaryLayer(ElementwiseUnaryDescriptor(UnaryOperation::Abs), name);
}

IConnectableLayer* INetwork::AddAdditionLayer(const char* name)
{
    return pNetworkImpl->AddAdditionLayer(name);
}

IConnectableLayer* INetwork::AddMultiplicationLayer(const char* name)
{
    return pNetworkImpl->AddMultiplicationLayer(name);
}

IConnectableLayer* INetwork::AddBatchNormalizationLayer(const BatchNormalizationDescriptor& desc,
                                                        const ConstTensor& mean,
                                                        const ConstTensor& variance,
                                                        const ConstTensor& beta,
                                                        const ConstTensor& gamma,
                                                        const char* name)
{
    return pNetworkImpl->AddBatchNormalizationLayer(desc, mean, variance, beta, gamma, name);
}

IConnectableLayer* INetwork::AddRankLayer(const char* name)
{
    return pNetworkImpl->AddRankLayer(name);
}

IConnectableLayer* INetwork::AddResizeBilinearLayer(const ResizeBilinearDescriptor& descriptor,
                                                    const char* name)
{
    ResizeDescriptor resizeDescriptor;
    resizeDescriptor.m_Method           = ResizeMethod::Bilinear;
    resizeDescriptor.m_DataLayout       = descriptor.m_DataLayout;
    resizeDescriptor.m_TargetWidth      = descriptor.m_TargetWidth;
    resizeDescriptor.m_TargetHeight     = descriptor.m_TargetHeight;
    resizeDescriptor.m_AlignCorners     = descriptor.m_AlignCorners;
    resizeDescriptor.m_HalfPixelCenters = descriptor.m_HalfPixelCenters;

    return pNetworkImpl->AddResizeLayer(resizeDescriptor, name);
}

IConnectableLayer* INetwork::AddResizeLayer(const ResizeDescriptor& resizeDescriptor,
                                            const char* name)
{
    return pNetworkImpl->AddResizeLayer(resizeDescriptor, name);
}

IConnectableLayer* INetwork::AddReduceLayer(const ReduceDescriptor& reduceDescriptor,
                                            const char* name)
{
    return pNetworkImpl->AddReduceLayer(reduceDescriptor, name);
}

IConnectableLayer* INetwork::AddInstanceNormalizationLayer(const InstanceNormalizationDescriptor& desc,
                                                           const char* name)
{
    return pNetworkImpl->AddInstanceNormalizationLayer(desc, name);
}

IConnectableLayer* INetwork::AddL2NormalizationLayer(const L2NormalizationDescriptor& desc,
                                                     const char* name)
{
    return pNetworkImpl->AddL2NormalizationLayer(desc, name);
}

IConnectableLayer* INetwork::AddLogSoftmaxLayer(const LogSoftmaxDescriptor& logSoftmaxDescriptor,
                                                const char* name)
{
    return pNetworkImpl->AddLogSoftmaxLayer(logSoftmaxDescriptor, name);
}

IConnectableLayer* INetwork::AddConstantLayer(const ConstTensor& input,
                                              const char* name)
{
    return pNetworkImpl->AddConstantLayer(input, name);
}

IConnectableLayer* INetwork::AddReshapeLayer(const ReshapeDescriptor& reshapeDescriptor,
                                             const char* name)
{
    return pNetworkImpl->AddReshapeLayer(reshapeDescriptor, name);
}

IConnectableLayer* INetwork::AddSpaceToBatchNdLayer(const SpaceToBatchNdDescriptor& spaceToBatchNdDescriptor,
                                                    const char* name)
{
    return pNetworkImpl->AddSpaceToBatchNdLayer(spaceToBatchNdDescriptor, name);
}

IConnectableLayer* INetwork::AddSpaceToDepthLayer(const SpaceToDepthDescriptor& spaceToDepthDescriptor,
                                                  const char* name)
{
    return pNetworkImpl->AddSpaceToDepthLayer(spaceToDepthDescriptor, name);
}

IConnectableLayer* INetwork::AddFloorLayer(const char* name)
{
    return pNetworkImpl->AddFloorLayer(name);
}

IConnectableLayer* INetwork::AddOutputLayer(LayerBindingId id, const char* name)
{
    return pNetworkImpl->AddOutputLayer(id, name);
}

IConnectableLayer* INetwork::AddLstmLayer(const LstmDescriptor& descriptor,
                                          const LstmInputParams& params,
                                          const char* name)
{
    return pNetworkImpl->AddLstmLayer(descriptor, params, name);
}

IConnectableLayer* INetwork::AddDivisionLayer(const char* name)
{
    return pNetworkImpl->AddDivisionLayer(name);
}

IConnectableLayer* INetwork::AddSubtractionLayer(const char* name)
{
    return pNetworkImpl->AddSubtractionLayer(name);
}

IConnectableLayer* INetwork::AddMaximumLayer(const char* name)
{
    return pNetworkImpl->AddMaximumLayer(name);
}

IConnectableLayer* INetwork::AddMeanLayer(const MeanDescriptor& meanDescriptor, const char* name)
{
    return pNetworkImpl->AddMeanLayer(meanDescriptor, name);
}

IConnectableLayer* INetwork::AddPadLayer(const PadDescriptor& padDescriptor,
                                         const char* name)
{
    return pNetworkImpl->AddPadLayer(padDescriptor, name);
}

IConnectableLayer* INetwork::AddQuantizeLayer(const char* name)
{
    return pNetworkImpl->AddQuantizeLayer(name);
}

IConnectableLayer* INetwork::AddStridedSliceLayer(const StridedSliceDescriptor& stridedSliceDescriptor,
                                                  const char* name)
{
    return pNetworkImpl->AddStridedSliceLayer(stridedSliceDescriptor, name);
}

IConnectableLayer* INetwork::AddMinimumLayer(const char* name)
{
    return pNetworkImpl->AddMinimumLayer(name);
}

IConnectableLayer* INetwork::AddGreaterLayer(const char* name)
{
    return pNetworkImpl->AddComparisonLayer(ComparisonDescriptor(ComparisonOperation::Greater), name);
}

IConnectableLayer* INetwork::AddEqualLayer(const char* name)
{
    return pNetworkImpl->AddComparisonLayer(ComparisonDescriptor(ComparisonOperation::Equal), name);
}

IConnectableLayer* INetwork::AddRsqrtLayer(const char* name)
{
    return pNetworkImpl->AddElementwiseUnaryLayer(ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt), name);
}

IConnectableLayer* INetwork::AddGatherLayer(const char* name)
{
    GatherDescriptor gatherDescriptor{};
    return pNetworkImpl->AddGatherLayer(gatherDescriptor, name);
}

IConnectableLayer* INetwork::AddGatherLayer(const GatherDescriptor& descriptor,
                                            const char* name)
{
    return pNetworkImpl->AddGatherLayer(descriptor, name);
}

IConnectableLayer* INetwork::AddSwitchLayer(const char* name)
{
    return pNetworkImpl->AddSwitchLayer(name);
}

IConnectableLayer* INetwork::AddPreluLayer(const char* name)
{
    return pNetworkImpl->AddPreluLayer(name);
}

IConnectableLayer* INetwork::AddTransposeConvolution2dLayer(const TransposeConvolution2dDescriptor& descriptor,
                                                            const ConstTensor& weights,
                                                            const Optional<ConstTensor>& biases,
                                                            const char* name)
{
    return pNetworkImpl->AddTransposeConvolution2dLayer(descriptor, weights, biases, name);
}

IConnectableLayer* INetwork::AddTransposeLayer(const TransposeDescriptor& transposeDescriptor,
                                               const char* name)
{
    return pNetworkImpl->AddTransposeLayer(transposeDescriptor, name);
}

IConnectableLayer* INetwork::AddStackLayer(const StackDescriptor& descriptor,
                                           const char* name)
{
    return pNetworkImpl->AddStackLayer(descriptor, name);
}

IConnectableLayer* INetwork::AddStandInLayer(const StandInDescriptor& descriptor,
                                             const char* name)
{
    return pNetworkImpl->AddStandInLayer(descriptor, name);
}

IConnectableLayer* INetwork::AddQuantizedLstmLayer(const QuantizedLstmInputParams& params,
                                                   const char* name)
{
    return pNetworkImpl->AddQuantizedLstmLayer(params, name);
}

IConnectableLayer* INetwork::AddQLstmLayer(const QLstmDescriptor& descriptor,
                                           const LstmInputParams& params,
                                           const char* name)
{
    return pNetworkImpl->AddQLstmLayer(descriptor, params, name);
}

IConnectableLayer* INetwork::AddLogicalBinaryLayer(const LogicalBinaryDescriptor& descriptor,
                                                   const char* name)
{
    return pNetworkImpl->AddLogicalBinaryLayer(descriptor, name);
}

void INetwork::Accept(ILayerVisitor& visitor) const
{
    return pNetworkImpl->Accept(visitor);
}

void INetwork::ExecuteStrategy(IStrategy& strategy) const
{
    return pNetworkImpl->ExecuteStrategy(strategy);
}

armnn::INetwork* INetwork::CreateRaw(NetworkOptions networkOptions)
{
    return new INetwork(networkOptions);
}

armnn::INetworkPtr INetwork::Create(NetworkOptions networkOptions)
{
    return INetworkPtr(CreateRaw(networkOptions), &INetwork::Destroy);
}

void INetwork::Destroy(INetwork* network)
{
    delete network;
}

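// A minimal usage sketch of the factory functions above (illustrative only; inputInfo is
// assumed to be a TensorInfo supplied by the caller):
//
//     armnn::INetworkPtr net = armnn::INetwork::Create();
//     armnn::IConnectableLayer* input  = net->AddInputLayer(0, "input");
//     armnn::IConnectableLayer* output = net->AddOutputLayer(0, "output");
//     input->GetOutputSlot(0).SetTensorInfo(inputInfo);
//     input->GetOutputSlot(0).Connect(output->GetInputSlot(0));
//
// INetworkPtr is constructed with &INetwork::Destroy as its deleter, so the network is
// released automatically; only raw pointers obtained from CreateRaw() need an explicit Destroy().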

IOptimizedNetwork::IOptimizedNetwork(std::unique_ptr<Graph> graph)
    : pOptimizedNetworkImpl(new OptimizedNetworkImpl(std::move(graph))) {}

IOptimizedNetwork::IOptimizedNetwork(std::unique_ptr<OptimizedNetworkImpl> impl)
    : pOptimizedNetworkImpl(std::move(impl)) {}

IOptimizedNetwork::IOptimizedNetwork(std::unique_ptr<Graph> graph, const ModelOptions& modelOptions)
    : pOptimizedNetworkImpl(new OptimizedNetworkImpl(std::move(graph), modelOptions)) {}

IOptimizedNetwork::~IOptimizedNetwork() = default;

void IOptimizedNetwork::Destroy(IOptimizedNetwork* network)
{
    delete network;
}

Status IOptimizedNetwork::PrintGraph()
{
    return pOptimizedNetworkImpl->PrintGraph();
}

Status IOptimizedNetwork::SerializeToDot(std::ostream& stream) const
{
    return pOptimizedNetworkImpl->SerializeToDot(stream);
}

profiling::ProfilingGuid IOptimizedNetwork::GetGuid() const
{
    return pOptimizedNetworkImpl->GetGuid();
}

Status OptimizedNetworkImpl::PrintGraph()
{
    m_Graph->Print();
    return Status::Success;
}

Status OptimizedNetworkImpl::SerializeToDot(std::ostream& stream) const
{
    return m_Graph->SerializeToDot(stream);
}

void ReportError(const std::string& errorMessage,
                 Optional<std::vector<std::string>&> errorMessages)
{
    std::stringstream fullErrorMessage;
    fullErrorMessage << "ERROR: " << errorMessage;
    ARMNN_LOG(warning) << fullErrorMessage.str();
    if (errorMessages)
    {
        errorMessages.value().push_back(fullErrorMessage.str());
    }
}

void ReportWarning(const std::string& warningMessage,
                   Optional<std::vector<std::string>&> warningMessages)
{
    std::stringstream fullWarningMessage;
    fullWarningMessage << "WARNING: " << warningMessage;
    ARMNN_LOG(warning) << fullWarningMessage.str();
    if (warningMessages)
    {
        warningMessages.value().push_back(fullWarningMessage.str());
    }
}

OptimizationResult ReturnWithError(OptimizationResult res,
                                   const Layer* layer,
                                   const BackendSettings& backendSettings,
                                   Optional<std::vector<std::string>&> errMessages)
{
    std::stringstream failureMsg;
    failureMsg << "Layer of type " << GetLayerTypeAsCString(layer->GetType())
               << " is not supported on any preferred backend " << backendSettings.m_PreferredBackends;
    ReportError(failureMsg.str(), errMessages);

    res.m_Error = true;
    return res;
}


bool CheckScaleSetOnQuantizedType(Layer* layer, Optional<std::vector<std::string>&> errMessages)
{
    bool noErrors = true;
    unsigned int numOutputs = layer->GetNumOutputSlots();
    for (unsigned int i = 0; i < numOutputs; i++) {
        OutputSlot& outputSlot = layer->GetOutputSlot(i);
        TensorInfo info = outputSlot.GetTensorInfo();
        if (DataType::QAsymmU8 == info.GetDataType()) {
            if (0.f == info.GetQuantizationScale()) {
                noErrors = false;
                std::stringstream ss;
                ss << "output " << i << " of layer " << GetLayerTypeAsCString(layer->GetType())
                   << " (" << layer->GetNameStr() << ") is of type"
                   << " Quantized 8 bit but its scale parameter has not been set";
                ReportError(ss.str(), errMessages);
            }
            // Softmax under QuantisedAsymm8 must always be scale (1.0f/256.0f) and offset 0
            if ((info.GetQuantizationScale() != (1.0f / 256.0f) ||
                 info.GetQuantizationOffset() != 0) &&
                 layer->GetType() == armnn::LayerType::Softmax)
            {
                std::stringstream ss;
                ss << "Quantization parameters for Softmax layer (Scale: " <<
                    info.GetQuantizationScale() << " and Offset: " << info.GetQuantizationOffset() <<
                    ") are incorrect and have been updated to Scale: 0.00390625 and Offset: 0";
                ARMNN_LOG(warning) << ss.str();
                info.SetQuantizationScale((1.0f / 256.0f));
                info.SetQuantizationOffset(0);
                outputSlot.SetTensorInfo(info);
            }
        }
    }
    return noErrors;
}

template <typename LayerT>
LayerT* ConvertBf16ToFp32Weight(Layer* l)
{
    LayerT* layer = PolymorphicDowncast<LayerT*>(l);
    if ((layer->GetType() == LayerType::Convolution2d || layer->GetType() == LayerType::FullyConnected)
         && layer->m_Weight)
    {
        const TensorInfo& info = layer->m_Weight->GetTensorInfo();

        if (info.GetDataType() == DataType::BFloat16)
        {
            std::vector<float> newValues(info.GetNumElements());

            armnnUtils::FloatingPointConverter::ConvertBFloat16ToFloat32(
                layer->m_Weight->template GetConstTensor<armnn::BFloat16>(), info.GetNumElements(), newValues.data());

            TensorInfo newInfo(info.GetShape(), DataType::Float32);
            ConstTensor newInput(newInfo, newValues);
            layer->m_Weight.reset(new ScopedTensorHandle(newInput));
        }
    }
    return layer;
}

OptimizationResult AttemptBackendAssignment(BackendSettings& backendSettings,
                                            Graph& graph,
                                            Layer* layer,
                                            BackendId backend,
                                            DataType dataTypeIn,
                                            DataType dataTypeOut,
                                            const std::vector<BackendId>& availablePreferredBackends,
                                            std::string& reasonIfUnsupported,
                                            Optional<std::vector<std::string>&> errMessages)
{
    OptimizationResult result;

    // Helper lambda to compose meaningful error message before returning with error
    auto ReturnError = [&](const Layer* layer)
        {
            return ReturnWithError(result, layer, backendSettings, errMessages);
        };

    // need to set the compute device on the layer
    // before we can check if it is supported
    layer->SetBackendId(backend);
    if (!IWorkloadFactory::IsLayerSupported(*layer, EmptyOptional(), reasonIfUnsupported))
    {
        if (dataTypeIn == DataType::Float16 || dataTypeOut == DataType::Float16)
        {
            if (IWorkloadFactory::IsLayerSupported(*layer, DataType::Float32, reasonIfUnsupported)
                && layer->GetType() != LayerType::ConvertFp32ToFp16
                && layer->GetType() != LayerType::ConvertFp16ToFp32)
            {
                // Insert FP16 -> FP32 conversion layer before current layer
                std::vector<ConvertFp16ToFp32Layer*> convertFp16ToFp32Layers;
                if (dataTypeIn == DataType::Float16)
                {
                    convertFp16ToFp32Layers =
                        InsertConvertFp16ToFp32LayersBefore(graph, *layer);
                }

                // Insert FP32 -> FP16 conversion layer after current layer
                std::vector<ConvertFp32ToFp16Layer*> convertFp32ToFp16Layers;
                if (dataTypeOut == DataType::Float16)
                {
                    convertFp32ToFp16Layers =
                        InsertConvertFp32ToFp16LayersAfter(graph, *layer);
                }

                // Assign a supported backend to the newly introduced conversion layers
                auto AssignFirstSupportedBackend = [&](Layer* layer, BackendId preferredBackend)
                    {
                        bool supportedBackendFound = false;
                        std::string reasonIfUnsupported;

                        // Try preferred backend first
                        layer->SetBackendId(preferredBackend);
                        if (IWorkloadFactory::IsLayerSupported(*layer,
                                                               EmptyOptional(),
                                                               reasonIfUnsupported))
                        {
                            supportedBackendFound = true;
                        }
                        else
                        {
                            for (const auto& backend : availablePreferredBackends)
                            {
                                // Skip preferred backend (we already determined that it is not supported)
                                if (backend == preferredBackend)
                                {
                                    continue;
                                }

                                layer->SetBackendId(backend);
                                if (IWorkloadFactory::IsLayerSupported(*layer,
                                                                       EmptyOptional(),
                                                                       reasonIfUnsupported))
                                {
                                    supportedBackendFound = true;
                                    break;
                                }
                            }
                        }

                        return supportedBackendFound;
                    };

                for (ConvertFp16ToFp32Layer* convertLayer : convertFp16ToFp32Layers)
                {
                    if (!AssignFirstSupportedBackend(convertLayer, backend))
                    {
                        return ReturnError(convertLayer);
                    }
                }

                for (ConvertFp32ToFp16Layer* convertLayer : convertFp32ToFp16Layers)
                {
                    if (!AssignFirstSupportedBackend(convertLayer, backend))
                    {
                        return ReturnError(convertLayer);
                    }
                }

                return result;
            }
        }
        else if (dataTypeIn == DataType::BFloat16 || dataTypeOut == DataType::BFloat16)
        {
            if (IWorkloadFactory::IsLayerSupported(*layer, DataType::Float32, reasonIfUnsupported)
                && layer->GetType() != LayerType::ConvertFp32ToBf16
                && layer->GetType() != LayerType::ConvertBf16ToFp32)
            {
                // Insert BF16 -> FP32 conversion layer before current layer
                std::vector<ConvertBf16ToFp32Layer*> convertBf16ToFp32Layers;
                if (dataTypeIn == DataType::BFloat16)
                {
                    convertBf16ToFp32Layers =
                        InsertConvertBf16ToFp32LayersBefore(graph, *layer);
                    if (layer->GetType() == LayerType::Convolution2d)
                    {
                        ConvertBf16ToFp32Weight<Convolution2dLayer>(layer);
                    }
                    else if (layer->GetType() == LayerType::FullyConnected)
                    {
                        ConvertBf16ToFp32Weight<FullyConnectedLayer>(layer);
                    }
                }

                // Insert FP32 -> BF16 conversion layer after current layer
                std::vector<ConvertFp32ToBf16Layer*> convertFp32ToBf16Layers;
                if (dataTypeOut == DataType::BFloat16)
                {
                    convertFp32ToBf16Layers =
                        InsertConvertFp32ToBf16LayersAfter(graph, *layer);
                }

                // Assign a supported backend to the newly introduced conversion layers
                auto AssignFirstSupportedBackend = [&](Layer* layer, BackendId preferredBackend)
                    {
                        bool supportedBackendFound = false;
                        std::string reasonIfUnsupported;

                        // Try preferred backend first
                        layer->SetBackendId(preferredBackend);
                        if (IWorkloadFactory::IsLayerSupported(*layer,
                                                               EmptyOptional(),
                                                               reasonIfUnsupported))
                        {
                            supportedBackendFound = true;
                        }
                        else
                        {
                            for (const auto& backend : availablePreferredBackends)
                            {
                                // Skip preferred backend (we already determined that it is not supported)
                                if (backend == preferredBackend)
                                {
                                    continue;
                                }

                                layer->SetBackendId(backend);
                                if (IWorkloadFactory::IsLayerSupported(*layer,
                                                                       EmptyOptional(),
                                                                       reasonIfUnsupported))
                                {
                                    supportedBackendFound = true;
                                    break;
                                }
                            }
                        }

                        return supportedBackendFound;
                    };

                for (ConvertBf16ToFp32Layer* convertLayer : convertBf16ToFp32Layers)
                {
                    if (!AssignFirstSupportedBackend(convertLayer, backend))
                    {
                        return ReturnError(convertLayer);
                    }
                }

                for (ConvertFp32ToBf16Layer* convertLayer : convertFp32ToBf16Layers)
                {
                    if (!AssignFirstSupportedBackend(convertLayer, backend))
                    {
                        return ReturnError(convertLayer);
                    }
                }

                return result;
            }
        }

        std::stringstream warningMsg;
        warningMsg << "Layer of type " << GetLayerTypeAsCString(layer->GetType())
                   << " is not supported on requested backend " << layer->GetBackendId().Get()
                   << " for input data type " << GetDataTypeName(dataTypeIn)
                   << " and output data type " << GetDataTypeName(dataTypeOut)
                   << " (reason: " << reasonIfUnsupported
                   << "), falling back to the next backend.";
        ReportWarning(warningMsg.str(), errMessages);

        return OptimizationResult(true, false);
    }
    else
    {
        return result;
    }
}

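// Illustrative sketch of what AttemptBackendAssignment above does for an unsupported
// Float16 layer that is supported in Float32 (a hypothetical three-layer chain, not a
// literal graph from this file):
//
//     [prev] --Fp16--> [layer] --Fp16--> [next]
//
// becomes
//
//     [prev] --Fp16--> [ConvertFp16ToFp32] --Fp32--> [layer] --Fp32--> [ConvertFp32ToFp16] --Fp16--> [next]
//
// and the inserted conversion layers are then assigned the first backend that supports them.
// The BFloat16 branch follows the same pattern with ConvertBf16ToFp32/ConvertFp32ToBf16 layers.
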
OptimizationResult AssignBackends(OptimizedNetworkImpl* optNetObjPtr,
                                  BackendSettings& backendSettings,
                                  Graph::Iterator& firstLayer,
                                  Graph::Iterator& lastLayer,
                                  Optional<std::vector<std::string>&> errMessages)
{
    OptimizationResult result;

    // Helper lambda to compose meaningful error message before returning with error
    auto ReturnError = [&](const Layer* layer)
        {
            return ReturnWithError(result, layer, backendSettings, errMessages);
        };


    auto availablePreferredBackends = backendSettings.GetAvailablePreferredBackends();
    if (availablePreferredBackends.empty())
    {
        std::stringstream failureMsg;
        failureMsg << "No preferred backends are available";
        ReportError(failureMsg.str(), errMessages);

        result.m_Error = true;
        return result;
    }

    for (auto it = firstLayer; it != lastLayer; ++it)
    {
        auto layer = *it;

        DataType dataTypeIn  = layer->GetNumInputSlots() == 0 ? DataType::Float32 :
            layer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo().GetDataType();
        DataType dataTypeOut = layer->GetNumOutputSlots() == 0 ? DataType::Float32 :
            layer->GetOutputSlot(0).GetTensorInfo().GetDataType();

        std::string reasonIfUnsupported;
        bool found = false;
        if (!CheckScaleSetOnQuantizedType(layer, errMessages))
        {
            // Don't bomb immediately, find all the quantized outputs
            // which haven't had a scale set and report them all back.
            result.m_Error = true;
        }

        // First, try to assign the layer to its hinted backend
        if (layer->GetBackendHint().has_value() &&
            backendSettings.IsBackendSupported(layer->GetBackendHint().value()) &&
            AttemptBackendAssignment(backendSettings,
                                     optNetObjPtr->GetGraph(),
                                     layer,
                                     layer->GetBackendHint().value(),
                                     dataTypeIn,
                                     dataTypeOut,
                                     availablePreferredBackends,
                                     reasonIfUnsupported,
                                     errMessages).IsOk())
        {
            found = true;
            backendSettings.m_SelectedBackends.insert(layer->GetBackendHint().value());
        }
        else
        {
            // Try to assign the layer to the preferred list of backends
            for (const auto& backend : availablePreferredBackends)
            {
                if (layer->GetBackendHint().has_value() &&
                    layer->GetBackendHint().value() == backend)
                {
                    continue; // Don't re-test the backend hint
                }

                OptimizationResult res = AttemptBackendAssignment(backendSettings,
                                                                  optNetObjPtr->GetGraph(),
                                                                  layer,
                                                                  backend,
                                                                  dataTypeIn,
                                                                  dataTypeOut,
                                                                  availablePreferredBackends,
                                                                  reasonIfUnsupported,
                                                                  errMessages);

                if (res.IsOk())
                {
                    found = true;
                    backendSettings.m_SelectedBackends.insert(backend);
                    break;
                }
                else if (res.IsError())
                {
                    return res;  // Cannot continue.
                                 // Note: we don't need to log the error as it would already
                                 // be logged in AttemptBackendAssignment().
                }
                else
                {
                    ARMNN_ASSERT_MSG(res.IsWarningOnly(), "OptimizationResult in unexpected state.");
                }
            }
        }

        // If the layer is unsupported by any devices, log and return a null network.
        if (!found)
        {
            // NOTE: if the layer is not an operation queue type AND we have not got CpuRef as a
            // fallback we should set the compute device on the layer to CpuRef (these are not
            // available as accelerated operations, or are only available under certain
            // conditions, currently they comprise MemCopy, Constant, Permute)
            armnn::LayerType layerType = layer->GetType();
            if (!backendSettings.IsCpuRefUsed() && (layerType == armnn::LayerType::MemCopy ||
                                                    layerType == armnn::LayerType::Constant ||
                                                    layerType == armnn::LayerType::Permute))
            {
                BackendId cpuBackendId(armnn::Compute::CpuRef);
                layer->SetBackendId(cpuBackendId);
                backendSettings.m_SelectedBackends.insert(cpuBackendId);
            }
            else
            {
                return ReturnError(layer);
            }
        }
    }

    return result;
}

OptimizationResult AssignBackends(OptimizedNetworkImpl* optNetObjPtr,
                                  BackendSettings& backendSettings,
                                  SubgraphView& subgraph,
                                  Optional<std::vector<std::string>&> errMessages)
{
    Graph::Iterator firstLayer = subgraph.begin();
    Graph::Iterator lastLayer  = subgraph.end();
    return AssignBackends(optNetObjPtr,
                          backendSettings,
                          firstLayer,
                          lastLayer,
                          errMessages);
}

BackendsMap CreateSupportedBackends(TensorHandleFactoryRegistry& handleFactoryRegistry,
                                    BackendSettings& backendSettings)
{
    BackendsMap backends;
    auto const& backendRegistry = BackendRegistryInstance();
    for (auto&& selectedBackend : backendSettings.m_SupportedBackends)
    {
        auto backendFactory = backendRegistry.GetFactory(selectedBackend);
        auto backendObjPtr  = backendFactory();
        ARMNN_ASSERT(backendObjPtr);

        backendObjPtr->RegisterTensorHandleFactories(handleFactoryRegistry);

        backends[backendObjPtr->GetId()] = std::move(backendObjPtr);
    }

    return backends;
}

OptimizationResult ApplyBackendOptimizations(OptimizedNetworkImpl* optNetObjPtr,
                                             BackendSettings& backendSettings,
                                             BackendsMap& backends,
                                             const ModelOptions& modelOptions,
                                             Optional<std::vector<std::string>&> errMessages)
{
    ARMNN_ASSERT(optNetObjPtr);

    OptimizationResult result;

    // Get the optimized graph
    Graph& optGraph = optNetObjPtr->GetGraph();

    // Run backend specific optimizations
    for (auto&& selectedBackend : backendSettings.m_SelectedBackends)
    {
        auto backendObjPtr = backends.find(selectedBackend)->second.get();
        ARMNN_ASSERT(backendObjPtr);

        // Select sub-graphs based on backend
        SubgraphViewSelector::Subgraphs subgraphs =
            SubgraphViewSelector::SelectSubgraphs(optGraph,
                                                  // Select layers assigned to the requested backend
                                                  [&backendObjPtr](const Layer& layer)
                                                  {
                                                      return layer.GetType() != LayerType::Input &&
                                                             layer.GetType() != LayerType::Output &&
                                                             layer.GetBackendId() == backendObjPtr->GetId();
                                                  });
        if (subgraphs.empty())
        {
            // No sub-graphs found, try with next selected backend
            continue;
        }

        // Try to optimize each sub-graph
        for (auto& subgraph : subgraphs)
        {
            // Try to optimize the current sub-graph
            OptimizationViews optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraph, modelOptions);
            ARMNN_ASSERT(optimizationViews.Validate(*subgraph));

            // Optimization attempted, check the resulting optimized sub-graph
            for (auto& substitution : optimizationViews.GetSubstitutions())
            {
                // Sub-graph optimized, substitute the sub-graph with the new optimized one in the main optimized graph
                SubgraphView& replacementSubgraph   = substitution.m_ReplacementSubgraph;
                SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
                optGraph.SubstituteSubgraph(substitutableSubgraph, replacementSubgraph);

                // Assign the current backend to the optimized sub-graph
                std::for_each(replacementSubgraph.begin(), replacementSubgraph.end(), [&selectedBackend](Layer* l)
                    {
                        ARMNN_ASSERT(l);
                        l->SetBackendId(selectedBackend);
                    });
            }

            if (!optimizationViews.GetFailedSubgraphs().empty())
            {
                std::stringstream warningMsg;
                warningMsg << "Some sub-graph(s) failed to optimize on " << backendObjPtr->GetId() << " backend.";
                ReportWarning(warningMsg.str(), errMessages);

                // Failed to optimize the given sub-graph, re-assign the sub-graph layers to other available backends
                BackendSettings settingsCopy(backendSettings);
                if (!backendObjPtr->GetId().IsCpuRef())
                {
                    // Add the current backend to the list of backends to ignore
                    settingsCopy.m_IgnoredBackends.insert(backendObjPtr->GetId());
                }

                int count = 0;
                for (auto& failedSubgraph : optimizationViews.GetFailedSubgraphs())
                {
                    // An error occurred: the optimization was attempted but not performed, try different backends
                    std::stringstream subgraphMsg;
                    subgraphMsg << "Re-assigning backends to " << failedSubgraph.GetLayers().size()
                                << " layers inside sub-graph " << count++;
                    ReportWarning(subgraphMsg.str(), errMessages);

                    OptimizationResult reassignmentResult = AssignBackends(optNetObjPtr,
                                                                           settingsCopy,
                                                                           *subgraph,
                                                                           errMessages);
                    if (reassignmentResult.m_Error)
                    {
                        // Failed to re-assign one of the remaining backends to each layer of the sub-graph
                        result.m_Error = true;
                        return result;
                    }
                }
            }
        }
    }

    return result;
}

bool RequiresCopy(ITensorHandleFactory::FactoryId src,
                  ITensorHandleFactory::FactoryId dst,
                  TensorHandleFactoryRegistry& registry)
{
    if (src != dst)
    {
        ITensorHandleFactory* srcFactory = registry.GetFactory(src);
        ITensorHandleFactory* dstFactory = registry.GetFactory(dst);

        if (srcFactory && dstFactory &&
            (srcFactory->GetExportFlags() & dstFactory->GetImportFlags()) != 0)
        {
            return false;
        }
        return true;
    }
    return false;
}

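// Worked example for RequiresCopy above (the flag values are assumed for illustration):
// if the source factory exports MemorySource::Malloc and the destination factory's import
// flags also include MemorySource::Malloc, then
//
//     (srcFactory->GetExportFlags() & dstFactory->GetImportFlags()) != 0
//
// holds and the tensor handle can be shared directly, so RequiresCopy() returns false.
// With no overlap between export and import flags (or a missing factory), a copy is required.
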
// Find the handle factory for the input layer which results in fewest required copies.
ITensorHandleFactory::FactoryId CalculateSlotOptionForInput(BackendsMap& backends,
                                                            OutputSlot& slot,
                                                            TensorHandleFactoryRegistry& registry,
                                                            bool importEnabled)
{
    Layer& layer = slot.GetOwningLayer();
    ARMNN_ASSERT(layer.GetType() == LayerType::Input);

    // Explicitly select the tensorhandle factory for InputLayer because the rules for it are slightly different. It
    // doesn't matter which backend it is assigned to because they all use the same implementation, which
    // requires Map/Unmap support. This means that, so long as the handle type supports map/unmap semantics, we can
    // select a factory with maximum compatibility with the layers connected to the InputLayer.

    // First ensure the source backend can support the TensorHandle API
    auto frmBackend = backends.find(layer.GetBackendId());
    if (frmBackend == backends.end() ||
        !frmBackend->second->SupportsTensorAllocatorAPI())
    {
        return ITensorHandleFactory::LegacyFactoryId;
    }

    // Go through all connections to the output slot and determine the TensorHandleFactory which results in the
    // fewest copies.
    std::map<ITensorHandleFactory::FactoryId, int> factoryScores;
    int topScore = 0;
    ITensorHandleFactory::FactoryId topChoice = ITensorHandleFactory::LegacyFactoryId;

    for (auto&& connection : slot.GetConnections())
    {

        const Layer& connectedLayer = connection->GetOwningLayer();

        auto toBackend = backends.find(connectedLayer.GetBackendId());
        ARMNN_ASSERT_MSG(toBackend != backends.end(), "Backend id not found for the connected layer");

        if (!toBackend->second.get()->SupportsTensorAllocatorAPI())
        {
            // The destination backend does not support the tensor allocator API, move to the next one
            continue;
        }

        auto dstPrefs = toBackend->second.get()->GetHandleFactoryPreferences();
        for (auto&& dst : dstPrefs)
        {
            // Input layers use the mem copy workload or import, so the selected factory must
            // support either the map/unmap API or Import API
            ITensorHandleFactory* factory = registry.GetFactory(dst);
            if (importEnabled && factory->GetImportFlags() == 0)
            {
                continue;
            }
            else if (!importEnabled && !factory->SupportsMapUnmap())
            {
                continue;
            }

            auto it = factoryScores.find(dst);
            if (it == factoryScores.end())
            {
                // Add new score to the table
                factoryScores[dst] = 0;
                if (topChoice == ITensorHandleFactory::LegacyFactoryId)
                {
                    topChoice = dst;
                }
            }
            else
            {
                // Increase the score
                factoryScores[dst]++;

                // Track the best option
                if (factoryScores[dst] > topScore)
                {
                    topScore = factoryScores[dst];
                    topChoice = dst;
                }
            }
        }
    }

    return topChoice;
}

// Find the handle factory for the output layer which results in fewest required copies.
ITensorHandleFactory::FactoryId CalculateSlotOptionForOutput(BackendsMap& backends,
                                                             OutputSlot& slot,
                                                             TensorHandleFactoryRegistry& registry)
{
    IgnoreUnused(backends, slot, registry);
    return ITensorHandleFactory::DeferredFactoryId;
}

// For all handle factories supported on the source backend, we wish to find the one which requires the fewest copies
// when considering all connections.
ITensorHandleFactory::FactoryId CalculateSlotOption(BackendsMap& backends,
                                                    OutputSlot& outputSlot,
                                                    TensorHandleFactoryRegistry& registry,
                                                    bool importEnabled)
{
    // First ensure the source backend can support the TensorHandle API
    Layer& layer = outputSlot.GetOwningLayer();
    auto frmBackend = backends.find(layer.GetBackendId());
    if (frmBackend == backends.end() ||
        !frmBackend->second->SupportsTensorAllocatorAPI())
    {
        return ITensorHandleFactory::LegacyFactoryId;
    }

    bool outputConnection = false;
    for (auto&& connection : outputSlot.GetConnections())
    {
        const Layer& connectedLayer = connection->GetOwningLayer();
        if (connectedLayer.GetType() == LayerType::Output)
        {
            outputConnection = true;
        }
    }

    IBackendInternal* srcBackend = frmBackend->second.get();
    auto srcPrefs = srcBackend->GetHandleFactoryPreferences();

    // Initialize the scores
    std::map<ITensorHandleFactory::FactoryId, int> factoryScores;
    for (auto&& pref : srcPrefs)
    {
        if (importEnabled)
        {
            ITensorHandleFactory* factory = registry.GetFactory(pref);
            if (outputConnection)
            {
                // Check if this is fallback case
                bool fallbackConnection = false;
                for (auto&& inputSlot : layer.GetInputSlots())
                {
                    if (inputSlot.GetConnectedOutputSlot()->GetOwningLayer().GetBackendId() != layer.GetBackendId())
                    {
                        fallbackConnection = true;
                    }
                }
                if (fallbackConnection)
                {
                    auto factoryCap = factory->GetCapabilities(&layer, &layer, CapabilityClass::FallbackImportDisabled);
                    // Cannot use factory import if fallback import is not supported.
                    if (!factoryCap.empty())
                    {
                        continue;
                    }
                }
                else if (factory->GetExportFlags() == 0)
                {
                    continue;
                }
            }
            if (!outputConnection)
            {
                auto factoryCap = factory->GetCapabilities(&layer, &layer, CapabilityClass::FallbackImportDisabled);
                // Cannot use factory import if fallback import is not supported.
                if (!factoryCap.empty())
                {
                    continue;
                }
            }

        }
        else
        {
            // Only consider factories that support map/unmap
            ITensorHandleFactory* factory = registry.GetFactory(pref);
            if (!factory->SupportsMapUnmap())
            {
                // The current tensor handle factory does not support the map/unmap strategy, move to the next one
                continue;
            }
        }


        auto it = factoryScores.find(pref);
        if (it == factoryScores.end())
        {
            // Add new score to the table
            factoryScores[pref] = 0;
        }
    }

    // Score each handle factory based on how many times it requires copies on the slot connections
    for (auto&& connection : outputSlot.GetConnections())
    {
        const Layer& connectedLayer = connection->GetOwningLayer();

        auto toBackend = backends.find(connectedLayer.GetBackendId());
        ARMNN_ASSERT_MSG(toBackend != backends.end(), "Backend id not found for the connected layer");

        auto dstPrefs = toBackend->second.get()->GetHandleFactoryPreferences();
        for (auto&& src : srcPrefs)
        {
            if (factoryScores.find(src) == factoryScores.end()) // Don't consider excluded factories
            {
                continue;
            }

            for (auto&& dst : dstPrefs)
            {
                if (RequiresCopy(src, dst, registry))
                {
                    // A copy would be required for this pairing, so penalise this source factory (lowest score wins)
                    factoryScores[src]++;
                    break;
                }
            }
        }
    }

    // Find the lowest score
    int minScore = std::numeric_limits<int>::max();
    for (auto it : factoryScores)
    {
        minScore = std::min(minScore, it.second);
    }

    // Collect factories matching the best (lowest) score
    std::vector<ITensorHandleFactory::FactoryId> optimalFactories;
    for (auto it : factoryScores)
    {
        if (it.second == minScore)
        {
            optimalFactories.push_back(it.first);
        }
    }

    // For all compatible Factories matching the best score, find the preferred one for the current layer.
    for (auto&& srcPref : srcPrefs)
    {
        for (auto&& comp : optimalFactories)
        {
            if (comp == srcPref)
            {
                return comp;
            }
        }
    }

    return ITensorHandleFactory::LegacyFactoryId;
}

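// Illustrative scoring example for CalculateSlotOption above (hypothetical factory ids A
// and B, not real ArmNN factories): with three connections from this output slot, where A
// would force a copy on two of them and B on one, the scores end up as { A: 2, B: 1 };
// B has the lowest score, so B is returned provided it also appears in the source backend's
// preference list, otherwise the legacy factory id is used.
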
Derek Lambertif674aa02019-08-01 15:56:25 +01001413EdgeStrategy CalculateEdgeStrategy(BackendsMap& backends,
1414 ITensorHandleFactory::FactoryId srcFactoryId,
1415 const Layer& layer,
1416 const Layer& connectedLayer,
Narumol Prangnawarata2493a02020-08-19 14:39:07 +01001417 TensorHandleFactoryRegistry& registry,
1418 bool importEnabled)
Derek Lamberti84da38b2019-06-13 11:40:08 +01001419{
1420 auto toBackend = backends.find(connectedLayer.GetBackendId());
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001421 ARMNN_ASSERT_MSG(toBackend != backends.end(), "Backend id not found for the connected layer");
Derek Lamberti84da38b2019-06-13 11:40:08 +01001422
1423 auto dstPrefs = toBackend->second.get()->GetHandleFactoryPreferences();
1424
1425 // Legacy API check for backward compatibility
1426 if (srcFactoryId == ITensorHandleFactory::LegacyFactoryId || dstPrefs.empty())
1427 {
1428 if (layer.GetBackendId() != connectedLayer.GetBackendId())
1429 {
Derek Lambertif674aa02019-08-01 15:56:25 +01001430 return EdgeStrategy::CopyToTarget;
Derek Lamberti84da38b2019-06-13 11:40:08 +01001431 }
1432 else
1433 {
Derek Lambertif674aa02019-08-01 15:56:25 +01001434 return EdgeStrategy::DirectCompatibility;
Derek Lamberti84da38b2019-06-13 11:40:08 +01001435 }
1436 }
1437
1438 // TensorHandleFactory API present, so perform more sophisticated strategies.
Derek Lambertif674aa02019-08-01 15:56:25 +01001439 // Dst Output layers don't require copy because they use import or map/unmap
Derek Lamberti84da38b2019-06-13 11:40:08 +01001440 if (connectedLayer.GetType() == LayerType::Output)
1441 {
Derek Lambertif674aa02019-08-01 15:56:25 +01001442 return EdgeStrategy::DirectCompatibility;
Derek Lamberti84da38b2019-06-13 11:40:08 +01001443 }
1444
1445 // Search for direct match in prefs
1446 for (auto&& pref : dstPrefs)
1447 {
1448 if (pref == srcFactoryId)
1449 {
Derek Lambertif674aa02019-08-01 15:56:25 +01001450 return EdgeStrategy::DirectCompatibility;
Derek Lamberti84da38b2019-06-13 11:40:08 +01001451 }
1452 }
1453
1454 // Search for export/import options
1455 ITensorHandleFactory* srcFactory = registry.GetFactory(srcFactoryId);
Narumol Prangnawarata2493a02020-08-19 14:39:07 +01001456 if (srcFactory->GetExportFlags() != 0 && importEnabled)
Derek Lamberti84da38b2019-06-13 11:40:08 +01001457 {
1458 for (auto&& pref : dstPrefs)
1459 {
1460 ITensorHandleFactory* dstFactory = registry.GetFactory(pref);
James Conroyffab16f2019-11-07 14:37:09 +00001461
James Conroy47e863d2019-11-18 17:07:43 +00001462            // Handles the case where a destination preference is not registered in the TensorHandleFactoryRegistry
James Conroyffab16f2019-11-07 14:37:09 +00001463            if (!dstFactory)
            {
James Conroy47e863d2019-11-18 17:07:43 +00001464 continue;
James Conroyffab16f2019-11-07 14:37:09 +00001465 }
Derek Lambertif674aa02019-08-01 15:56:25 +01001466 if ((dstFactory->GetImportFlags() & srcFactory->GetExportFlags()) != 0)
Derek Lamberti84da38b2019-06-13 11:40:08 +01001467 {
Narumol Prangnawaratb8d771a2020-08-14 11:51:12 +01001468 auto srcCapability = srcFactory->GetCapabilities(&layer, &layer, CapabilityClass::PaddingRequired);
1469 auto dstCapability = dstFactory->GetCapabilities(&connectedLayer,
1470 &connectedLayer,
1471 CapabilityClass::PaddingRequired);
Narumol Prangnawarate5f0b242021-05-07 17:52:36 +01001472 auto srcFallback = srcFactory->GetCapabilities(&layer, &layer, CapabilityClass::FallbackImportDisabled);
1473 auto dstFallback = dstFactory->GetCapabilities(&connectedLayer,
1474 &connectedLayer,
1475 CapabilityClass::FallbackImportDisabled);
Narumol Prangnawaratb8d771a2020-08-14 11:51:12 +01001476                // No memory copy is required if neither side requires padding and neither has fallback import disabled.
Narumol Prangnawarate5f0b242021-05-07 17:52:36 +01001477 if (srcCapability.empty() && dstCapability.empty() && srcFallback.empty() && dstFallback.empty())
Narumol Prangnawaratb8d771a2020-08-14 11:51:12 +01001478 {
1479 return EdgeStrategy::ExportToTarget;
1480 }
Derek Lamberti84da38b2019-06-13 11:40:08 +01001481 }
1482 }
1483 }
1484
1485 // Search for copy options via map/unmap
1486 if (srcFactory->SupportsMapUnmap())
1487 {
1488 for (auto&& pref : dstPrefs)
1489 {
1490 ITensorHandleFactory* dstFactory = registry.GetFactory(pref);
James Conroy47e863d2019-11-18 17:07:43 +00001491 if (dstFactory && dstFactory->SupportsMapUnmap())
Derek Lamberti84da38b2019-06-13 11:40:08 +01001492 {
Derek Lambertif674aa02019-08-01 15:56:25 +01001493 return EdgeStrategy::CopyToTarget;
Derek Lamberti84da38b2019-06-13 11:40:08 +01001494 }
1495 }
1496 }
1497
Derek Lambertif674aa02019-08-01 15:56:25 +01001498 return EdgeStrategy::Undefined;
Derek Lamberti84da38b2019-06-13 11:40:08 +01001499}
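
// Illustrative outcome of the checks above (the backend and factory pairings are assumptions): when
// the producing factory's export flags overlap the consuming factory's import flags and neither
// side reports PaddingRequired or FallbackImportDisabled, the edge becomes ExportToTarget.
// Failing that, if both factories support map/unmap the edge falls back to CopyToTarget;
// otherwise the strategy remains Undefined and SelectTensorHandleStrategy reports an error.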
1500
1501// Select the TensorHandleFactories and the corresponding memory strategy
1502OptimizationResult SelectTensorHandleStrategy(Graph& optGraph,
1503 BackendsMap& backends,
1504 TensorHandleFactoryRegistry& registry,
Narumol Prangnawarata2493a02020-08-19 14:39:07 +01001505 bool importEnabled,
Derek Lamberti84da38b2019-06-13 11:40:08 +01001506 Optional<std::vector<std::string>&> errMessages)
1507{
1508 OptimizationResult result;
1509
Narumol Prangnawarata2493a02020-08-19 14:39:07 +01001510 optGraph.ForEachLayer([&backends, &registry, &result, &errMessages, importEnabled](Layer* layer)
Derek Lamberti84da38b2019-06-13 11:40:08 +01001511 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001512 ARMNN_ASSERT(layer);
Derek Lamberti84da38b2019-06-13 11:40:08 +01001513
1514 // Lets make sure the backend is in our list of supported backends. Something went wrong during backend
1515 // assignment if this check fails
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001516 ARMNN_ASSERT(backends.find(layer->GetBackendId()) != backends.end());
Derek Lamberti84da38b2019-06-13 11:40:08 +01001517
1518 // Check each output separately
1519 for (unsigned int slotIdx = 0; slotIdx < layer->GetNumOutputSlots(); slotIdx++)
1520 {
1521 OutputSlot& outputSlot = layer->GetOutputSlot(slotIdx);
1522
1523 ITensorHandleFactory::FactoryId slotOption = ITensorHandleFactory::LegacyFactoryId;
1524
1525 // Calculate the factory to use which results in the fewest copies being made.
1526 switch(layer->GetType())
1527 {
1528 case LayerType::Input:
Narumol Prangnawarate5f0b242021-05-07 17:52:36 +01001529 slotOption = CalculateSlotOptionForInput(backends, outputSlot, registry, importEnabled);
Derek Lamberti84da38b2019-06-13 11:40:08 +01001530 break;
1531 case LayerType::Output:
1532 slotOption = CalculateSlotOptionForOutput(backends, outputSlot, registry);
1533 break;
1534 default:
Narumol Prangnawarate5f0b242021-05-07 17:52:36 +01001535 slotOption = CalculateSlotOption(backends, outputSlot, registry, importEnabled);
Derek Lamberti84da38b2019-06-13 11:40:08 +01001536 break;
1537 }
1538 outputSlot.SetTensorHandleFactory(slotOption);
1539
Derek Lambertif674aa02019-08-01 15:56:25 +01001540 // Now determine the "best" edge strategy for each connection given the slotOption.
Derek Lamberti84da38b2019-06-13 11:40:08 +01001541 unsigned int connectionIdx = 0;
1542 for (auto&& connection : outputSlot.GetConnections())
1543 {
1544 const Layer& connectedLayer = connection->GetOwningLayer();
1545
Narumol Prangnawarata2493a02020-08-19 14:39:07 +01001546 EdgeStrategy strategy = CalculateEdgeStrategy(backends, slotOption, *layer, connectedLayer,
1547 registry, importEnabled);
Derek Lamberti84da38b2019-06-13 11:40:08 +01001548
Derek Lambertif674aa02019-08-01 15:56:25 +01001549 if (strategy == EdgeStrategy::Undefined)
Derek Lamberti84da38b2019-06-13 11:40:08 +01001550 {
1551 result.m_Error = true;
1552 if (errMessages)
1553 {
1554 errMessages.value().emplace_back("Could not find valid strategy required for compatibility"
1555 " between backends.");
1556 }
1557 return;
1558 }
1559
Derek Lambertif674aa02019-08-01 15:56:25 +01001560 outputSlot.SetEdgeStrategy(connectionIdx, strategy);
Derek Lamberti84da38b2019-06-13 11:40:08 +01001561
1562 connectionIdx++;
1563 }
1564 }
1565 });
1566
1567 return result;
1568}
1569
Matteo Martincigh49124022019-01-11 13:25:59 +00001570IOptimizedNetworkPtr Optimize(const INetwork& inNetwork,
1571 const std::vector<BackendId>& backendPreferences,
1572 const IDeviceSpec& deviceSpec,
1573 const OptimizerOptions& options,
Rob Hughes23214432019-11-05 11:27:36 +00001574 Optional<std::vector<std::string>&> messages)
Matteo Martincigh49124022019-01-11 13:25:59 +00001575{
1576 if (backendPreferences.empty())
1577 {
Mike Kelly3a613cc2020-09-29 20:50:35 +01001578 throw InvalidArgumentException("Invoked Optimize with no backends specified");
Matteo Martincigh49124022019-01-11 13:25:59 +00001579 }
1580
Narumol Prangnawaratbc7ffb52020-03-20 15:01:01 +00001581 if (options.m_ReduceFp32ToFp16 && options.m_ReduceFp32ToBf16)
1582 {
1583 throw InvalidArgumentException("BFloat16 and Float16 optimization cannot be enabled at the same time.");
1584 }
1585
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001586 std::unique_ptr<Graph> graph = std::make_unique<Graph>(inNetwork.pNetworkImpl->GetGraph());
Matteo Martincigh49124022019-01-11 13:25:59 +00001587
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001588 auto optNet = IOptimizedNetworkPtr(new IOptimizedNetwork(std::move(graph), options.m_ModelOptions),
Sadik Armagan045f6be2020-09-10 13:37:32 +01001589 &IOptimizedNetwork::Destroy);
Matteo Martincigh49124022019-01-11 13:25:59 +00001590
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001591 IOptimizedNetwork* optNetObjPtr = optNet.get();
Matteo Martincigh49124022019-01-11 13:25:59 +00001592
Matteo Martincighadddddb2019-01-24 14:06:23 +00001593 // Get the optimized graph
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001594 Graph& optGraph = optNetObjPtr->pOptimizedNetworkImpl->GetGraph();
Matteo Martincighadddddb2019-01-24 14:06:23 +00001595
Narumol Prangnawarat16f82f92020-09-14 16:12:44 +01001596 // Perform AddBroadcastReshapeLayer optimisation
1597 using namespace optimizations;
1598 Optimizer::Pass(optGraph, MakeOptimizations(AddBroadcastReshapeLayer()));
1599
Narumol Prangnawaratbbf71a62020-09-07 14:05:22 +01001600 // Infer the tensor infos for all output slots. Throws an exception on failure
1601 optGraph.InferTensorInfos();
1602
Matteo Martincigh49124022019-01-11 13:25:59 +00001603 // Perform optimisation passes
Matteo Martincighadddddb2019-01-24 14:06:23 +00001604 Optimizer::Pass(optGraph, MakeOptimizations(SquashEqualPermuteSiblings(),
Mike Kelly490b7be2020-03-03 12:39:09 +00001605 SquashEqualTransposeSiblings(),
Matteo Martincighadddddb2019-01-24 14:06:23 +00001606 SquashEqualReshapeSiblings(),
1607 OptimizeInversePermutes(),
Mike Kelly490b7be2020-03-03 12:39:09 +00001608 OptimizeInverseTransposes(),
Matteo Martincighadddddb2019-01-24 14:06:23 +00001609 MovePermuteUp(),
Mike Kelly490b7be2020-03-03 12:39:09 +00001610 MoveTransposeUp(),
Matteo Martincighadddddb2019-01-24 14:06:23 +00001611 PermuteAsReshape(),
Mike Kelly490b7be2020-03-03 12:39:09 +00001612 TransposeAsReshape(),
Nina Drozd861985f2019-04-18 14:48:51 +01001613 OptimizeConsecutiveReshapes(),
Rob Hughes3a7d3a72019-09-24 16:59:56 +01001614 FoldPadIntoConvolution2d(),
Teresa Charlin5786eb72021-05-21 16:29:45 +01001615 FoldPadIntoDepthwiseConvolution2d(),
Diego Lopez Recasfe95d722021-03-19 12:40:16 +00001616 FoldPadIntoPooling2d(),
Mike Kelly490b7be2020-03-03 12:39:09 +00001617 PermuteAndBatchToSpaceAsDepthToSpace(),
Teresa Charlin06e03002020-10-15 13:16:07 +01001618 TransposeAndBatchToSpaceAsDepthToSpace(),
Mike Kelly90231b82020-11-05 15:44:56 +00001619 FuseBatchNormIntoConvolution2DFloat32(),
1620 FuseBatchNormIntoConvolution2DFloat16(),
1621 FuseBatchNormIntoDepthwiseConvolution2DFloat32(),
1622 FuseBatchNormIntoDepthwiseConvolution2DFloat16()));
Matteo Martincigh49124022019-01-11 13:25:59 +00001623
Matteo Martincigh49124022019-01-11 13:25:59 +00001624    // If the Fp32-to-Fp16 optimization is enabled, convert the Fp32 network to Fp16
1625 if (options.m_ReduceFp32ToFp16)
1626 {
Matteo Martincighadddddb2019-01-24 14:06:23 +00001627 Optimizer::Pass(optGraph, MakeOptimizations(Fp32NetworkToFp16Converter()));
Derek Lambertidd6804b2019-11-27 09:29:57 +00001628 Optimizer::Pass(optGraph, MakeOptimizations(ConvertConstantsFloatToHalf()));
Matteo Martincigh49124022019-01-11 13:25:59 +00001629 }
1630
Narumol Prangnawaratbc7ffb52020-03-20 15:01:01 +00001631    // If the Fp32-to-Bf16 optimization is enabled, convert the Fp32 network to Bf16.
Narumol Prangnawarat57ef0082020-03-26 09:20:43 +00001632    // The inputs of Convolution2d and FullyConnected are converted from Fp32 to Bf16, and only the
 1633    // constant weights of Convolution2d and FullyConnected are converted from Fp32 to Bf16.
Narumol Prangnawaratbc7ffb52020-03-20 15:01:01 +00001634 if (options.m_ReduceFp32ToBf16)
1635 {
1636 Optimizer::Pass(optGraph, MakeOptimizations(Fp32NetworkToBf16Converter()));
Narumol Prangnawaratbc7ffb52020-03-20 15:01:01 +00001637 }
1638
Matteo Martincigh49124022019-01-11 13:25:59 +00001639 // Initialize backend settings
1640 BackendSettings backendSettings(backendPreferences, deviceSpec);
1641 if (backendSettings.GetAvailablePreferredBackends().empty())
1642 {
1643 std::stringstream failureMsg;
1644 failureMsg << "None of the preferred backends " << backendPreferences
1645 << " are supported. Current platform provides " << backendSettings.m_SupportedBackends;
Rob Hughes23214432019-11-05 11:27:36 +00001646 ReportError(failureMsg.str(), messages);
Mike Kelly3a613cc2020-09-29 20:50:35 +01001647 throw InvalidArgumentException(failureMsg.str());
Matteo Martincigh49124022019-01-11 13:25:59 +00001648 }
1649
Derek Lamberti84da38b2019-06-13 11:40:08 +01001650 // Create a map to temporarily hold initialized backend objects
1651 TensorHandleFactoryRegistry tensorHandleFactoryRegistry;
1652 BackendsMap backends = CreateSupportedBackends(tensorHandleFactoryRegistry, backendSettings);
1653
Matteo Martincigh49124022019-01-11 13:25:59 +00001654 // Assign an available backend to each layer
Matteo Martincighadddddb2019-01-24 14:06:23 +00001655 Graph::Iterator firstLayer = optGraph.begin();
1656 Graph::Iterator lastLayer = optGraph.end();
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001657 OptimizationResult assignBackendsResult = AssignBackends(optNetObjPtr->pOptimizedNetworkImpl.get(),
Derek Lamberti84da38b2019-06-13 11:40:08 +01001658 backendSettings,
1659 firstLayer,
1660 lastLayer,
Rob Hughes23214432019-11-05 11:27:36 +00001661 messages);
Derek Lamberti84da38b2019-06-13 11:40:08 +01001662 if (assignBackendsResult.m_Error)
Matteo Martincigh49124022019-01-11 13:25:59 +00001663 {
1664 // Failed to assign a backend to each layer
Mike Kelly3a613cc2020-09-29 20:50:35 +01001665 throw InvalidArgumentException("Failed to assign a backend to each layer");
jimfly016b0b53d2018-10-08 14:43:01 +01001666 }
telsoa01c577f2c2018-08-31 09:22:23 +01001667
Matteo Martincighadddddb2019-01-24 14:06:23 +00001668 Optimizer::Pass(optGraph, MakeOptimizations(OptimizeInverseConversionsFp16(),
1669 OptimizeInverseConversionsFp32()));
telsoa01c577f2c2018-08-31 09:22:23 +01001670
Matteo Martincighadddddb2019-01-24 14:06:23 +00001671 // Apply the backend-specific optimizations
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001672 OptimizationResult backendOptimizationResult = ApplyBackendOptimizations(optNetObjPtr->pOptimizedNetworkImpl.get(),
Matteo Martincighadddddb2019-01-24 14:06:23 +00001673 backendSettings,
Derek Lamberti84da38b2019-06-13 11:40:08 +01001674 backends,
Mike Kelly07810fc2020-11-12 10:58:48 +00001675 options.m_ModelOptions,
Rob Hughes23214432019-11-05 11:27:36 +00001676 messages);
Matteo Martincighadddddb2019-01-24 14:06:23 +00001677 if (backendOptimizationResult.m_Error)
Matteo Martincigh49124022019-01-11 13:25:59 +00001678 {
Matteo Martincighadddddb2019-01-24 14:06:23 +00001679 // Failed to apply the backend-specific optimizations
Mike Kelly3a613cc2020-09-29 20:50:35 +01001680 throw InvalidArgumentException("Failed to apply the backend-specific optimizations");
Matteo Martincigh49124022019-01-11 13:25:59 +00001681 }
1682
Matteo Martincighadddddb2019-01-24 14:06:23 +00001683 // If the debug flag is set, then insert a DebugLayer after each layer
1684 // Doing this after applying the backend optimizations as they might have changed some layers
1685 if (options.m_Debug)
1686 {
1687 Optimizer::Pass(optGraph, MakeOptimizations(InsertDebugLayer()));
1688 }
1689
Derek Lamberti84da38b2019-06-13 11:40:08 +01001690 // Calculate the compatibility strategies for tensor handles
1691 OptimizationResult strategyResult = SelectTensorHandleStrategy(optGraph,
1692 backends,
1693 tensorHandleFactoryRegistry,
Narumol Prangnawarata2493a02020-08-19 14:39:07 +01001694 options.m_ImportEnabled,
Rob Hughes23214432019-11-05 11:27:36 +00001695 messages);
Derek Lamberti84da38b2019-06-13 11:40:08 +01001696 if (strategyResult.m_Error)
1697 {
 1698        // Failed to select the tensor handle factories and edge strategies
1699 return IOptimizedNetworkPtr(nullptr, &IOptimizedNetwork::Destroy);
1700 }
1701
1702 // Based on the tensor handle strategy determined above, insert copy layers where required.
Derek Lambertif674aa02019-08-01 15:56:25 +01001703 optGraph.AddCompatibilityLayers(backends, tensorHandleFactoryRegistry);
telsoa01c577f2c2018-08-31 09:22:23 +01001704
1705 // Convert constants
Matteo Martincighadddddb2019-01-24 14:06:23 +00001706 Optimizer::Pass(optGraph, MakeOptimizations(ConvertConstantsFloatToHalf()));
1707 Optimizer::Pass(optGraph, MakeOptimizations(ConvertConstantsHalfToFloat()));
telsoa01c577f2c2018-08-31 09:22:23 +01001708
Derek Lamberti84da38b2019-06-13 11:40:08 +01001709 // Run backend specific optimizations (deprecated)
Matteo Martincigh49124022019-01-11 13:25:59 +00001710 for (auto&& chosenBackend : backendSettings.m_SelectedBackends)
David Beck263e3492018-11-09 14:46:40 +00001711 {
1712 auto factoryFun = BackendRegistryInstance().GetFactory(chosenBackend);
1713 auto backendPtr = factoryFun();
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001714 ARMNN_ASSERT(backendPtr.get() != nullptr);
David Beck263e3492018-11-09 14:46:40 +00001715
Matteo Martincighed735042019-05-22 09:42:43 +01001716 ARMNN_NO_DEPRECATE_WARN_BEGIN
David Beck263e3492018-11-09 14:46:40 +00001717 auto backendSpecificOptimizations = backendPtr->GetOptimizations();
Matteo Martincighed735042019-05-22 09:42:43 +01001718 ARMNN_NO_DEPRECATE_WARN_END
1719
David Beck263e3492018-11-09 14:46:40 +00001720 if (!backendSpecificOptimizations.empty())
1721 {
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001722 Optimizer::Pass(optNetObjPtr->pOptimizedNetworkImpl->GetGraph(), backendSpecificOptimizations);
David Beck263e3492018-11-09 14:46:40 +00001723 }
1724 }
1725
telsoa01c577f2c2018-08-31 09:22:23 +01001726 return optNet;
telsoa014fcda012018-03-09 14:13:49 +00001727}
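
// A minimal usage sketch for the Optimize() pipeline above (illustrative only; the backend ids
// and the previously built 'network' are assumptions about the caller's environment):
//
//     using namespace armnn;
//     IRuntimePtr runtime = IRuntime::Create(IRuntime::CreationOptions());
//     std::vector<BackendId> backendPreferences = { Compute::CpuAcc, Compute::CpuRef };
//     OptimizerOptions optimizerOptions; // defaults: no Fp16/Bf16 reduction, import disabled
//     IOptimizedNetworkPtr optNet = Optimize(*network,
//                                            backendPreferences,
//                                            runtime->GetDeviceSpec(),
//                                            optimizerOptions);
//     NetworkId networkId = 0;
//     runtime->LoadNetwork(networkId, std::move(optNet));
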
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001728bool NetworkImpl::GetShapeInferenceMethod()
telsoa014fcda012018-03-09 14:13:49 +00001729{
Finn Williamsf24effa2020-07-03 10:12:03 +01001730 if (m_NetworkOptions.size() > 0 && m_NetworkOptions[0].GetBackendId().Get() == "ShapeInferenceMethod")
1731 {
1732 return m_NetworkOptions[0].GetOption(0).GetValue().AsBool();
1733 }
1734
1735 return false;
telsoa014fcda012018-03-09 14:13:49 +00001736}
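
// The network option consumed above is normally supplied when the INetwork is created. A hedged
// sketch (the option name "InferAndValidate" follows the public ShapeInferenceMethod convention
// and is an assumption here; only the boolean value is read by this function):
//
//     armnn::BackendOptions shapeOption("ShapeInferenceMethod", {{"InferAndValidate", true}});
//     armnn::INetworkPtr network = armnn::INetwork::Create({shapeOption});
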
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001737NetworkImpl::NetworkImpl(NetworkOptions networkOptions)
Finn Williamsf24effa2020-07-03 10:12:03 +01001738: m_NetworkOptions(networkOptions),
1739 m_Graph(std::make_unique<Graph>(GetShapeInferenceMethod()))
1740{}
telsoa014fcda012018-03-09 14:13:49 +00001741
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001742NetworkImpl::~NetworkImpl()
telsoa014fcda012018-03-09 14:13:49 +00001743{
1744}
1745
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001746Status NetworkImpl::PrintGraph()
Jan Eilers99d9d4a2019-11-06 10:02:16 +00001747{
1748 m_Graph->Print();
1749 return Status::Success;
1750}
1751
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001752IConnectableLayer* NetworkImpl::AddInputLayer(LayerBindingId id, const char* name)
telsoa014fcda012018-03-09 14:13:49 +00001753{
1754 return m_Graph->AddLayer<InputLayer>(id, name);
1755}
1756
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001757IConnectableLayer* NetworkImpl::AddBatchToSpaceNdLayer(const BatchToSpaceNdDescriptor& batchToSpaceNdDescriptor,
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +00001758 const char* name)
1759{
1760 return m_Graph->AddLayer<BatchToSpaceNdLayer>(batchToSpaceNdDescriptor, name);
1761}
1762
mathad01b392e982021-04-07 12:07:30 +01001763IConnectableLayer* NetworkImpl::AddCastLayer(const char* name)
1764{
1765 return m_Graph->AddLayer<CastLayer>(name);
1766}
1767
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001768IConnectableLayer* NetworkImpl::AddComparisonLayer(const ComparisonDescriptor& comparisonDescriptor,
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001769 const char* name)
1770{
1771 return m_Graph->AddLayer<ComparisonLayer>(comparisonDescriptor, name);
1772}
1773
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001774IConnectableLayer* NetworkImpl::AddElementwiseUnaryLayer(const ElementwiseUnaryDescriptor& elementwiseUnaryDescriptor,
josh minor4a3c6102020-01-06 16:40:46 -06001775 const char* name)
1776{
1777 return m_Graph->AddLayer<ElementwiseUnaryLayer>(elementwiseUnaryDescriptor, name);
1778}
1779
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001780IConnectableLayer* NetworkImpl::AddFillLayer(const FillDescriptor& fillDescriptor,
Ryan OSheaec6c6802020-06-05 17:17:06 +01001781 const char* name)
1782{
1783 return m_Graph->AddLayer<FillLayer>(fillDescriptor, name);
1784}
1785
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001786IConnectableLayer* NetworkImpl::AddFullyConnectedLayerImpl(const FullyConnectedDescriptor& fullyConnectedDescriptor,
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001787 const Optional<ConstTensor>& weights,
1788 const Optional<ConstTensor>& biases,
1789 const char* name)
telsoa014fcda012018-03-09 14:13:49 +00001790{
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001791    if (fullyConnectedDescriptor.m_ConstantWeights)
telsoa014fcda012018-03-09 14:13:49 +00001792    {
        if (!weights.has_value())
        {
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001793            throw InvalidArgumentException("AddFullyConnectedLayer: weights cannot be empty");
        }
 1794
 1795        if (fullyConnectedDescriptor.m_BiasEnabled && !biases.has_value())
 1796        {
 1797            throw InvalidArgumentException("AddFullyConnectedLayer: biases cannot be empty");
 1798        }
telsoa014fcda012018-03-09 14:13:49 +00001799    }
1800
1801 const auto layer = m_Graph->AddLayer<FullyConnectedLayer>(fullyConnectedDescriptor, name);
1802
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001803 if (fullyConnectedDescriptor.m_ConstantWeights)
telsoa014fcda012018-03-09 14:13:49 +00001804 {
James Conroy1f58f032021-04-27 17:13:27 +01001805 layer->m_Weight = std::make_shared<ScopedTensorHandle>(weights.value());
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001806 if (fullyConnectedDescriptor.m_BiasEnabled)
1807 {
James Conroy1f58f032021-04-27 17:13:27 +01001808 layer->m_Bias = std::make_shared<ScopedTensorHandle>(biases.value());
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001809 }
telsoa014fcda012018-03-09 14:13:49 +00001810 }
1811
1812 return layer;
1813}
1814
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001815IConnectableLayer* NetworkImpl::AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001816 const Optional<ConstTensor>& weights,
1817 const Optional<ConstTensor>& biases,
1818 const char* name)
1819{
1820 return AddFullyConnectedLayerImpl(fullyConnectedDescriptor, weights, biases, name);
1821}
1822
1823IConnectableLayer* NetworkImpl::AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
telsoa01c577f2c2018-08-31 09:22:23 +01001824 const ConstTensor& weights,
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001825 const Optional<ConstTensor>& biases,
telsoa01c577f2c2018-08-31 09:22:23 +01001826 const char* name)
telsoa014fcda012018-03-09 14:13:49 +00001827{
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001828 Optional<ConstTensor> optionalWeights(weights);
1829 return AddFullyConnectedLayerImpl(fullyConnectedDescriptor, optionalWeights, biases, name);
telsoa014fcda012018-03-09 14:13:49 +00001830}
1831
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001832IConnectableLayer* NetworkImpl::AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001833 const ConstTensor& weights,
1834 const char* name)
1835{
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001836 Optional<ConstTensor> optionalWeights(weights);
Matteo Martincighfc598e12019-05-14 10:36:13 +01001837 Optional<ConstTensor> biases;
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001838 return AddFullyConnectedLayerImpl(fullyConnectedDescriptor, optionalWeights, biases, name);
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001839}
1840
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001841IConnectableLayer* NetworkImpl::AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
telsoa01c577f2c2018-08-31 09:22:23 +01001842 const ConstTensor& weights,
1843 const ConstTensor& biases,
1844 const char* name)
telsoa014fcda012018-03-09 14:13:49 +00001845{
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001846 Optional<ConstTensor> optionalWeights(weights);
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001847 Optional<ConstTensor> optionalBiases(biases);
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001848 return AddFullyConnectedLayerImpl(fullyConnectedDescriptor, optionalWeights, optionalBiases, name);
telsoa014fcda012018-03-09 14:13:49 +00001849}
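
// Illustrative call for the constant-weights overloads above (the tensor infos and data buffers
// are assumptions supplied by the caller):
//
//     armnn::FullyConnectedDescriptor fcDesc;
//     fcDesc.m_BiasEnabled = true;
//     armnn::ConstTensor fcWeights(weightsInfo, weightsData);
//     armnn::ConstTensor fcBias(biasInfo, biasData);
//     armnn::IConnectableLayer* fc = network->AddFullyConnectedLayer(fcDesc, fcWeights, fcBias, "fc1");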
1850
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001851IConnectableLayer* NetworkImpl::AddConcatLayer(const ConcatDescriptor& concatDescriptor,
Jim Flynn906f9462019-05-10 13:55:21 +01001852 const char* name)
1853{
Jim Flynne242f2d2019-05-22 14:24:13 +01001854 return m_Graph->AddLayer<ConcatLayer>(concatDescriptor, name);
Jim Flynn906f9462019-05-10 13:55:21 +01001855}
1856
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001857IConnectableLayer* NetworkImpl::AddConvolution2dLayerImpl(const Convolution2dDescriptor& convolution2dDescriptor,
1858 const ConstTensor& weights,
1859 const Optional<ConstTensor>& biases,
1860 const char* name)
telsoa014fcda012018-03-09 14:13:49 +00001861{
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001862 if (convolution2dDescriptor.m_BiasEnabled && !biases.has_value())
telsoa014fcda012018-03-09 14:13:49 +00001863 {
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001864 throw InvalidArgumentException("AddConvolution2dLayer: biases cannot be empty");
telsoa014fcda012018-03-09 14:13:49 +00001865 }
1866
1867 const auto layer = m_Graph->AddLayer<Convolution2dLayer>(convolution2dDescriptor, name);
1868
James Conroy1f58f032021-04-27 17:13:27 +01001869 layer->m_Weight = std::make_shared<ScopedTensorHandle>(weights);
telsoa014fcda012018-03-09 14:13:49 +00001870
1871 if (convolution2dDescriptor.m_BiasEnabled)
1872 {
James Conroy1f58f032021-04-27 17:13:27 +01001873 layer->m_Bias = std::make_shared<ScopedTensorHandle>(biases.value());
telsoa014fcda012018-03-09 14:13:49 +00001874 }
1875
1876 return layer;
1877}
1878
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001879IConnectableLayer* NetworkImpl::AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor,
telsoa01c577f2c2018-08-31 09:22:23 +01001880 const ConstTensor& weights,
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001881 const Optional<ConstTensor>& biases,
telsoa01c577f2c2018-08-31 09:22:23 +01001882 const char* name)
telsoa014fcda012018-03-09 14:13:49 +00001883{
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001884 return AddConvolution2dLayerImpl(convolution2dDescriptor, weights, biases, name);
telsoa014fcda012018-03-09 14:13:49 +00001885}
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001886
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001887IConnectableLayer* NetworkImpl::AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor,
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001888 const ConstTensor& weights,
1889 const char* name)
1890{
Matteo Martincighfc598e12019-05-14 10:36:13 +01001891 Optional<ConstTensor> biases;
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001892 return AddConvolution2dLayerImpl(convolution2dDescriptor, weights, biases, name);
1893}
1894
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001895IConnectableLayer* NetworkImpl::AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor,
telsoa01c577f2c2018-08-31 09:22:23 +01001896 const ConstTensor& weights,
1897 const ConstTensor& biases,
1898 const char* name)
telsoa014fcda012018-03-09 14:13:49 +00001899{
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001900 Optional<ConstTensor> optionalBiases(biases);
1901 return AddConvolution2dLayerImpl(convolution2dDescriptor, weights, optionalBiases, name);
telsoa014fcda012018-03-09 14:13:49 +00001902}
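
// Illustrative call (the shapes, tensor infos and data buffers are assumptions supplied by the caller):
//
//     armnn::Convolution2dDescriptor convDesc;
//     convDesc.m_StrideX     = 1;
//     convDesc.m_StrideY     = 1;
//     convDesc.m_BiasEnabled = true;
//     convDesc.m_DataLayout  = armnn::DataLayout::NHWC;
//     armnn::ConstTensor convWeights(weightsInfo, weightsData);
//     armnn::ConstTensor convBias(biasInfo, biasData);
//     armnn::IConnectableLayer* conv = network->AddConvolution2dLayer(convDesc, convWeights, convBias, "conv1");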
1903
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001904IConnectableLayer* NetworkImpl::AddDepthwiseConvolution2dLayerImpl(
telsoa014fcda012018-03-09 14:13:49 +00001905 const DepthwiseConvolution2dDescriptor& convolution2dDescriptor,
1906 const ConstTensor& weights,
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001907 const Optional<ConstTensor>& biases,
telsoa014fcda012018-03-09 14:13:49 +00001908 const char* name)
1909{
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001910 if (convolution2dDescriptor.m_BiasEnabled && !biases.has_value())
telsoa014fcda012018-03-09 14:13:49 +00001911 {
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001912 throw InvalidArgumentException("AddDepthwiseConvolution2dLayer: biases cannot be empty");
telsoa014fcda012018-03-09 14:13:49 +00001913 }
1914
Matteo Martincigh3d6898c2019-01-15 16:11:44 +00001915 const auto layer = m_Graph->AddLayer<DepthwiseConvolution2dLayer>(convolution2dDescriptor, name);
telsoa014fcda012018-03-09 14:13:49 +00001916
James Conroy1f58f032021-04-27 17:13:27 +01001917 layer->m_Weight = std::make_shared<ScopedTensorHandle>(weights);
telsoa014fcda012018-03-09 14:13:49 +00001918
1919 if (convolution2dDescriptor.m_BiasEnabled)
1920 {
James Conroy1f58f032021-04-27 17:13:27 +01001921 layer->m_Bias = std::make_shared<ScopedTensorHandle>(biases.value());
telsoa014fcda012018-03-09 14:13:49 +00001922 }
1923
1924 return layer;
1925}
1926
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001927IConnectableLayer* NetworkImpl::AddDepthToSpaceLayer(const DepthToSpaceDescriptor& depthToSpaceDescriptor,
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01001928 const char* name)
1929{
1930 return m_Graph->AddLayer<DepthToSpaceLayer>(depthToSpaceDescriptor, name);
1931}
1932
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001933IConnectableLayer* NetworkImpl::AddDepthwiseConvolution2dLayer(
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001934 const DepthwiseConvolution2dDescriptor& convolution2dDescriptor,
1935 const ConstTensor& weights,
1936 const Optional<ConstTensor>& biases,
1937 const char* name)
1938{
1939 return AddDepthwiseConvolution2dLayerImpl(convolution2dDescriptor, weights, biases, name);
1940}
1941
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001942IConnectableLayer* NetworkImpl::AddDepthwiseConvolution2dLayer(
telsoa014fcda012018-03-09 14:13:49 +00001943 const DepthwiseConvolution2dDescriptor& convolution2dDescriptor,
1944 const ConstTensor& weights,
1945 const char* name)
1946{
Matteo Martincighfc598e12019-05-14 10:36:13 +01001947 Optional<ConstTensor> biases;
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001948 return AddDepthwiseConvolution2dLayerImpl(convolution2dDescriptor, weights, biases, name);
telsoa014fcda012018-03-09 14:13:49 +00001949}
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001950
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001951IConnectableLayer* NetworkImpl::AddDepthwiseConvolution2dLayer(
telsoa014fcda012018-03-09 14:13:49 +00001952 const DepthwiseConvolution2dDescriptor& convolution2dDescriptor,
1953 const ConstTensor& weights,
1954 const ConstTensor& biases,
1955 const char* name)
1956{
Aron Virginas-Tarad402702019-02-22 17:03:44 +00001957 Optional<ConstTensor> optionalBiases(biases);
1958 return AddDepthwiseConvolution2dLayerImpl(convolution2dDescriptor, weights, optionalBiases, name);
telsoa014fcda012018-03-09 14:13:49 +00001959}
1960
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001961IConnectableLayer* NetworkImpl::AddDetectionPostProcessLayer(const armnn::DetectionPostProcessDescriptor& descriptor,
Narumol Prangnawarat6d302bf2019-02-04 11:46:26 +00001962 const ConstTensor& anchors, const char* name)
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +00001963{
Narumol Prangnawarat6d302bf2019-02-04 11:46:26 +00001964 const auto layer = m_Graph->AddLayer<DetectionPostProcessLayer>(descriptor, name);
1965
James Conroy1f58f032021-04-27 17:13:27 +01001966 layer->m_Anchors = std::make_shared<ScopedTensorHandle>(anchors);
Narumol Prangnawarat6d302bf2019-02-04 11:46:26 +00001967
1968 return layer;
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +00001969}
1970
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001971IConnectableLayer* NetworkImpl::AddPermuteLayer(const PermuteDescriptor& permuteDescriptor,
telsoa014fcda012018-03-09 14:13:49 +00001972 const char* name)
1973{
1974 return m_Graph->AddLayer<PermuteLayer>(permuteDescriptor, name);
1975}
1976
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001977IConnectableLayer* NetworkImpl::AddPooling2dLayer(const Pooling2dDescriptor& pooling2dDescriptor,
telsoa014fcda012018-03-09 14:13:49 +00001978 const char* name)
1979{
1980 return m_Graph->AddLayer<Pooling2dLayer>(pooling2dDescriptor, name);
1981}
1982
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001983IConnectableLayer* NetworkImpl::AddActivationLayer(const ActivationDescriptor& activationDescriptor,
telsoa014fcda012018-03-09 14:13:49 +00001984 const char* name)
1985{
1986 return m_Graph->AddLayer<ActivationLayer>(activationDescriptor, name);
1987}
1988
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001989IConnectableLayer* NetworkImpl::AddArgMinMaxLayer(const ArgMinMaxDescriptor& argMinMaxDescriptor,
Nikhil Rajee391d52019-09-05 17:50:44 +01001990 const char* name)
1991{
1992 return m_Graph->AddLayer<ArgMinMaxLayer>(argMinMaxDescriptor, name);
1993}
1994
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00001995IConnectableLayer* NetworkImpl::AddNormalizationLayer(const NormalizationDescriptor& normalizationDescriptor,
telsoa014fcda012018-03-09 14:13:49 +00001997                                                      const char* name)
1998{
1999 return m_Graph->AddLayer<NormalizationLayer>(normalizationDescriptor, name);
2000}
2001
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002002IConnectableLayer* NetworkImpl::AddSliceLayer(const SliceDescriptor& sliceDescriptor, const char* name)
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01002003{
2004 return m_Graph->AddLayer<SliceLayer>(sliceDescriptor, name);
2005}
2006
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002007IConnectableLayer* NetworkImpl::AddSoftmaxLayer(const SoftmaxDescriptor& softmaxDescriptor,
telsoa014fcda012018-03-09 14:13:49 +00002008 const char* name)
2009{
2010 return m_Graph->AddLayer<SoftmaxLayer>(softmaxDescriptor, name);
2011}
2012
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002013IConnectableLayer* NetworkImpl::AddSplitterLayer(const ViewsDescriptor& splitterDescriptor,
telsoa014fcda012018-03-09 14:13:49 +00002014 const char* name)
2015{
2016 return m_Graph->AddLayer<SplitterLayer>(splitterDescriptor, name);
2017}
2018
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002019IConnectableLayer* NetworkImpl::AddMaximumLayer(const char* name)
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +00002020{
2021 return m_Graph->AddLayer<MaximumLayer>(name);
2022}
2023
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002024IConnectableLayer* NetworkImpl::AddMinimumLayer(const char* name)
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00002025{
2026 return m_Graph->AddLayer<MinimumLayer>(name);
2027}
2028
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002029IConnectableLayer* NetworkImpl::AddMergerLayer(const MergerDescriptor& mergerDescriptor,
Jim Flynn906f9462019-05-10 13:55:21 +01002030 const char* name)
telsoa014fcda012018-03-09 14:13:49 +00002031{
Jim Flynne242f2d2019-05-22 14:24:13 +01002032 return AddConcatLayer(mergerDescriptor, name);
telsoa014fcda012018-03-09 14:13:49 +00002033}
2034
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002035IConnectableLayer* NetworkImpl::AddAbsLayer(const char* name)
Kevin May868eb142019-09-04 17:29:31 +01002036{
josh minor4a3c6102020-01-06 16:40:46 -06002037 return AddElementwiseUnaryLayer(ElementwiseUnaryDescriptor(UnaryOperation::Abs), name);
Kevin May868eb142019-09-04 17:29:31 +01002038}
2039
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002040IConnectableLayer* NetworkImpl::AddAdditionLayer(const char* name)
telsoa014fcda012018-03-09 14:13:49 +00002041{
2042 return m_Graph->AddLayer<AdditionLayer>(name);
2043}
2044
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002045IConnectableLayer* NetworkImpl::AddMultiplicationLayer(const char* name)
telsoa014fcda012018-03-09 14:13:49 +00002046{
2047 return m_Graph->AddLayer<MultiplicationLayer>(name);
2048}
2049
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002050IConnectableLayer* NetworkImpl::AddOutputLayer(LayerBindingId id, const char* name)
telsoa014fcda012018-03-09 14:13:49 +00002051{
2052 return m_Graph->AddLayer<OutputLayer>(id, name);
2053}
2054
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002055IConnectableLayer* NetworkImpl::AddBatchNormalizationLayer(const BatchNormalizationDescriptor& desc,
telsoa014fcda012018-03-09 14:13:49 +00002056 const ConstTensor& mean,
2057 const ConstTensor& variance,
2058 const ConstTensor& beta,
2059 const ConstTensor& gamma,
2060 const char* name)
2061{
2062 const auto layer = m_Graph->AddLayer<BatchNormalizationLayer>(desc, name);
2063
James Conroy1f58f032021-04-27 17:13:27 +01002064 layer->m_Mean = std::make_shared<ScopedTensorHandle>(mean);
2065 layer->m_Variance = std::make_shared<ScopedTensorHandle>(variance);
2066 layer->m_Beta = std::make_shared<ScopedTensorHandle>(beta);
2067 layer->m_Gamma = std::make_shared<ScopedTensorHandle>(gamma);
telsoa014fcda012018-03-09 14:13:49 +00002068
2069 return layer;
2070}
2071
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002072IConnectableLayer* NetworkImpl::AddRankLayer(const char* name)
Finn Williams2605b232020-06-10 15:53:46 +01002073{
2074 return m_Graph->AddLayer<RankLayer>(name);
2075}
2076
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002077IConnectableLayer* NetworkImpl::AddReduceLayer(const ReduceDescriptor& reduceDescriptor,
2078 const char* name)
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002079{
2080 return m_Graph->AddLayer<ReduceLayer>(reduceDescriptor, name);
2081}
2082
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002083IConnectableLayer* NetworkImpl::AddResizeBilinearLayer(const ResizeBilinearDescriptor& descriptor,
2084 const char* name)
telsoa014fcda012018-03-09 14:13:49 +00002085{
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002086 ResizeDescriptor resizeDescriptor;
David Monahan4a0c9b92020-05-30 09:48:39 +01002087 resizeDescriptor.m_Method = ResizeMethod::Bilinear;
2088 resizeDescriptor.m_DataLayout = descriptor.m_DataLayout;
2089 resizeDescriptor.m_TargetWidth = descriptor.m_TargetWidth;
2090 resizeDescriptor.m_TargetHeight = descriptor.m_TargetHeight;
2091 resizeDescriptor.m_AlignCorners = descriptor.m_AlignCorners;
2092 resizeDescriptor.m_HalfPixelCenters = descriptor.m_HalfPixelCenters;
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002093
2094 return m_Graph->AddLayer<ResizeLayer>(resizeDescriptor, name);
telsoa014fcda012018-03-09 14:13:49 +00002095}
2096
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002097IConnectableLayer* NetworkImpl::AddResizeLayer(const ResizeDescriptor& resizeDescriptor, const char* name)
Teresa Charlina9075df2019-06-27 15:41:57 +01002098{
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002099 return m_Graph->AddLayer<ResizeLayer>(resizeDescriptor, name);
Teresa Charlina9075df2019-06-27 15:41:57 +01002100}
2101
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002102IConnectableLayer* NetworkImpl::AddInstanceNormalizationLayer(const InstanceNormalizationDescriptor& desc,
2103 const char* name)
Kevin Mayce5045a2019-10-02 14:07:47 +01002104{
2105 return m_Graph->AddLayer<InstanceNormalizationLayer>(desc, name);
2106}
2107
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002108IConnectableLayer* NetworkImpl::AddL2NormalizationLayer(const L2NormalizationDescriptor& desc,
2109 const char* name)
telsoa014fcda012018-03-09 14:13:49 +00002110{
Matteo Martincighbcd3c852018-09-28 14:14:12 +01002111 return m_Graph->AddLayer<L2NormalizationLayer>(desc, name);
telsoa014fcda012018-03-09 14:13:49 +00002112}
2113
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002114IConnectableLayer* NetworkImpl::AddLogSoftmaxLayer(const LogSoftmaxDescriptor& desc,
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01002115 const char* name)
2116{
2117 return m_Graph->AddLayer<LogSoftmaxLayer>(desc, name);
2118}
2119
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002120IConnectableLayer* NetworkImpl::AddConstantLayer(const ConstTensor& input, const char* name)
telsoa014fcda012018-03-09 14:13:49 +00002121{
telsoa01c577f2c2018-08-31 09:22:23 +01002122 auto layer = m_Graph->AddLayer<ConstantLayer>(name);
2123
James Conroy1f58f032021-04-27 17:13:27 +01002124 layer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(input);
telsoa01c577f2c2018-08-31 09:22:23 +01002125
2126 return layer;
telsoa014fcda012018-03-09 14:13:49 +00002127}
2128
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002129IConnectableLayer* NetworkImpl::AddReshapeLayer(const ReshapeDescriptor& reshapeDescriptor,
telsoa01c577f2c2018-08-31 09:22:23 +01002130 const char* name)
telsoa014fcda012018-03-09 14:13:49 +00002131{
2132 return m_Graph->AddLayer<ReshapeLayer>(reshapeDescriptor, name);
2133}
2134
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002135IConnectableLayer* NetworkImpl::AddSpaceToBatchNdLayer(const SpaceToBatchNdDescriptor& spaceToBatchNdDescriptor,
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00002136 const char* name)
2137{
2138 return m_Graph->AddLayer<SpaceToBatchNdLayer>(spaceToBatchNdDescriptor, name);
2139}
2140
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002141IConnectableLayer* NetworkImpl::AddSpaceToDepthLayer(const SpaceToDepthDescriptor& spaceToDepthDescriptor,
Aron Virginas-Tar972af152019-06-11 14:14:03 +01002142 const char* name)
2143{
2144 return m_Graph->AddLayer<SpaceToDepthLayer>(spaceToDepthDescriptor, name);
2145}
2146
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002147IConnectableLayer* NetworkImpl::AddFloorLayer(const char* name)
telsoa014fcda012018-03-09 14:13:49 +00002148{
2149 return m_Graph->AddLayer<FloorLayer>(name);
2150}
2151
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002152IConnectableLayer* NetworkImpl::AddLstmLayer(const LstmDescriptor& descriptor,
telsoa01c577f2c2018-08-31 09:22:23 +01002153 const LstmInputParams& params,
2154 const char* name)
2155{
2156 const auto layer = m_Graph->AddLayer<LstmLayer>(descriptor, name);
2157
2158 //Lstm Basic Parameters
2159 layer->m_BasicParameters.m_InputToForgetWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002160 std::make_shared<ScopedTensorHandle>(*(params.m_InputToForgetWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002161 layer->m_BasicParameters.m_InputToCellWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002162 std::make_shared<ScopedTensorHandle>(*(params.m_InputToCellWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002163 layer->m_BasicParameters.m_InputToOutputWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002164 std::make_shared<ScopedTensorHandle>(*(params.m_InputToOutputWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002165 layer->m_BasicParameters.m_RecurrentToForgetWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002166 std::make_shared<ScopedTensorHandle>(*(params.m_RecurrentToForgetWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002167 layer->m_BasicParameters.m_RecurrentToCellWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002168 std::make_shared<ScopedTensorHandle>(*(params.m_RecurrentToCellWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002169 layer->m_BasicParameters.m_RecurrentToOutputWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002170 std::make_shared<ScopedTensorHandle>(*(params.m_RecurrentToOutputWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002171 layer->m_BasicParameters.m_ForgetGateBias =
James Conroy1f58f032021-04-27 17:13:27 +01002172 std::make_shared<ScopedTensorHandle>(*(params.m_ForgetGateBias));
telsoa01c577f2c2018-08-31 09:22:23 +01002173 layer->m_BasicParameters.m_CellBias =
James Conroy1f58f032021-04-27 17:13:27 +01002174 std::make_shared<ScopedTensorHandle>(*(params.m_CellBias));
telsoa01c577f2c2018-08-31 09:22:23 +01002175 layer->m_BasicParameters.m_OutputGateBias =
James Conroy1f58f032021-04-27 17:13:27 +01002176 std::make_shared<ScopedTensorHandle>(*(params.m_OutputGateBias));
telsoa01c577f2c2018-08-31 09:22:23 +01002177
2178 //Lstm Cifg parameters
2179 if(!descriptor.m_CifgEnabled)
2180 {
2181 if(params.m_InputToInputWeights == nullptr)
2182 {
Jan Eilerse2062cd2020-03-30 15:07:45 +01002183 throw InvalidArgumentException("AddLstmLayer: Input To Input Weights cannot be NULL "
2184 "when CIFG is disabled.");
telsoa01c577f2c2018-08-31 09:22:23 +01002185 }
2186 if(params.m_RecurrentToInputWeights == nullptr)
2187 {
2188 throw InvalidArgumentException(
Jan Eilerse2062cd2020-03-30 15:07:45 +01002189 "AddLstmLayer: Recurrent To Input Weights cannot be NULL "
2190 "when CIFG is disabled.");
telsoa01c577f2c2018-08-31 09:22:23 +01002191 }
2192 if(params.m_InputGateBias == nullptr)
2193 {
Jan Eilerse2062cd2020-03-30 15:07:45 +01002194 throw InvalidArgumentException("AddLstmLayer: Input Gate Bias cannot be NULL "
2195 "when CIFG is disabled.");
telsoa01c577f2c2018-08-31 09:22:23 +01002196 }
2197 layer->m_CifgParameters.m_InputToInputWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002198 std::make_shared<ScopedTensorHandle>(*(params.m_InputToInputWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002199 layer->m_CifgParameters.m_RecurrentToInputWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002200 std::make_shared<ScopedTensorHandle>(*(params.m_RecurrentToInputWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002201 layer->m_CifgParameters.m_InputGateBias =
James Conroy1f58f032021-04-27 17:13:27 +01002202 std::make_shared<ScopedTensorHandle>(*(params.m_InputGateBias));
telsoa01c577f2c2018-08-31 09:22:23 +01002203 }
2204
2205 //Lstm projection parameters
2206 if(descriptor.m_ProjectionEnabled)
2207 {
2208 if(params.m_ProjectionWeights == nullptr)
2209 {
Jan Eilerse2062cd2020-03-30 15:07:45 +01002210 throw InvalidArgumentException("AddLstmLayer: Projection Weights cannot be NULL "
2211 "when projection is enabled.");
telsoa01c577f2c2018-08-31 09:22:23 +01002212 }
2213 layer->m_ProjectionParameters.m_ProjectionWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002214 std::make_shared<ScopedTensorHandle>(*(params.m_ProjectionWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002215 if(params.m_ProjectionBias != nullptr)
2216 {
2217 layer->m_ProjectionParameters.m_ProjectionBias =
James Conroy1f58f032021-04-27 17:13:27 +01002218 std::make_shared<ScopedTensorHandle>(*(params.m_ProjectionBias));
telsoa01c577f2c2018-08-31 09:22:23 +01002219 }
2220 }
2221
2222 //Lstm Peephole params
2223 if(descriptor.m_PeepholeEnabled)
2224 {
Jan Eilerse2062cd2020-03-30 15:07:45 +01002225 if(!descriptor.m_CifgEnabled)
2226 {
2227 if(params.m_CellToInputWeights == nullptr)
2228 {
2229 throw InvalidArgumentException("AddLstmLayer: Cell To Input Weights cannot be NULL "
2230 "when Peephole is enabled and CIFG disabled.");
2231 }
2232
2233 layer->m_PeepholeParameters.m_CellToInputWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002234 std::make_shared<ScopedTensorHandle>(*(params.m_CellToInputWeights));
Jan Eilerse2062cd2020-03-30 15:07:45 +01002235 }
2236
telsoa01c577f2c2018-08-31 09:22:23 +01002237 if(params.m_CellToForgetWeights == nullptr)
2238 {
Jan Eilerse2062cd2020-03-30 15:07:45 +01002239 throw InvalidArgumentException("AddLstmLayer: Cell To Forget Weights cannot be NULL "
2240 "when Peephole is enabled.");
telsoa01c577f2c2018-08-31 09:22:23 +01002241 }
2242 if(params.m_CellToOutputWeights == nullptr)
2243 {
Jan Eilerse2062cd2020-03-30 15:07:45 +01002244 throw InvalidArgumentException("AddLstmLayer: Cell To Output Weights cannot be NULL "
2245 "when Peephole is enabled.");
telsoa01c577f2c2018-08-31 09:22:23 +01002246 }
Jan Eilerse2062cd2020-03-30 15:07:45 +01002247
telsoa01c577f2c2018-08-31 09:22:23 +01002248 layer->m_PeepholeParameters.m_CellToForgetWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002249 std::make_shared<ScopedTensorHandle>(*(params.m_CellToForgetWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002250 layer->m_PeepholeParameters.m_CellToOutputWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002251 std::make_shared<ScopedTensorHandle>(*(params.m_CellToOutputWeights));
telsoa01c577f2c2018-08-31 09:22:23 +01002252 }
Jan Eilersf8c62972019-07-17 11:07:49 +01002253
2254 //Lstm Layer Normalization params
2255 if(descriptor.m_LayerNormEnabled)
2256 {
2257 if(!descriptor.m_CifgEnabled)
2258 {
2259 if(params.m_InputLayerNormWeights == nullptr)
2260 {
Jan Eilerse2062cd2020-03-30 15:07:45 +01002261 throw InvalidArgumentException("AddLstmLayer: Input layer normalization weights cannot be NULL "
2262 "when layer normalization is enabled and CIFG disabled.");
Jan Eilersf8c62972019-07-17 11:07:49 +01002263 }
2264 layer->m_LayerNormParameters.m_InputLayerNormWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002265 std::make_shared<ScopedTensorHandle>(*(params.m_InputLayerNormWeights));
Jan Eilersf8c62972019-07-17 11:07:49 +01002266 }
2267
2268 if(params.m_ForgetLayerNormWeights == nullptr)
2269 {
Jan Eilerse2062cd2020-03-30 15:07:45 +01002270 throw InvalidArgumentException("AddLstmLayer: Forget layer normalization weights cannot be NULL "
2271 "when layer normalization is enabled.");
Jan Eilersf8c62972019-07-17 11:07:49 +01002272 }
2273 if(params.m_CellLayerNormWeights == nullptr)
2274 {
Jan Eilerse2062cd2020-03-30 15:07:45 +01002275 throw InvalidArgumentException("AddLstmLayer: Cell layer normalization weights cannot be NULL "
2276 "when layer normalization is enabled.");
Jan Eilersf8c62972019-07-17 11:07:49 +01002277 }
2278 if(params.m_OutputLayerNormWeights == nullptr)
2279 {
Jan Eilerse2062cd2020-03-30 15:07:45 +01002280 throw InvalidArgumentException("AddLstmLayer: Output layer normalization weights cannot be NULL "
2281 "when layer normalization is enabled.");
Jan Eilersf8c62972019-07-17 11:07:49 +01002282 }
2283 layer->m_LayerNormParameters.m_ForgetLayerNormWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002284 std::make_shared<ScopedTensorHandle>(*(params.m_ForgetLayerNormWeights));
Jan Eilersf8c62972019-07-17 11:07:49 +01002285 layer->m_LayerNormParameters.m_CellLayerNormWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002286 std::make_shared<ScopedTensorHandle>(*(params.m_CellLayerNormWeights));
Jan Eilersf8c62972019-07-17 11:07:49 +01002287 layer->m_LayerNormParameters.m_OutputLayerNormWeights =
James Conroy1f58f032021-04-27 17:13:27 +01002288 std::make_shared<ScopedTensorHandle>(*(params.m_OutputLayerNormWeights));
Jan Eilersf8c62972019-07-17 11:07:49 +01002289 }
telsoa01c577f2c2018-08-31 09:22:23 +01002290 return layer;
2291}
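
// Summary of the validation above: the basic input/recurrent weights and gate biases are always
// expected (they are dereferenced without a null check); disabling CIFG additionally requires the
// input-gate weights and bias; enabling projection requires the projection weights (the projection
// bias is optional); enabling peephole requires the cell-to-forget and cell-to-output weights
// (plus cell-to-input weights when CIFG is disabled); enabling layer normalization requires the
// forget/cell/output normalization weights (plus input normalization weights when CIFG is disabled).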
2292
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002293IConnectableLayer* NetworkImpl::AddDivisionLayer(const char* name)
Francis Murtaghe7a86a42018-08-29 12:42:10 +01002294{
2295 return m_Graph->AddLayer<DivisionLayer>(name);
2296}
2297
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002298IConnectableLayer* NetworkImpl::AddSubtractionLayer(const char* name)
David Beck19526222018-09-12 16:00:08 +01002299{
2300 return m_Graph->AddLayer<SubtractionLayer>(name);
2301}
2302
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002303IConnectableLayer* NetworkImpl::AddMeanLayer(const MeanDescriptor& meanDescriptor, const char* name)
narpra0132b90462018-09-13 11:07:48 +01002304{
2305 return m_Graph->AddLayer<MeanLayer>(meanDescriptor,name);
2306}
2307
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002308IConnectableLayer* NetworkImpl::AddPadLayer(const PadDescriptor& padDescriptor, const char* name)
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +01002309{
2310 return m_Graph->AddLayer<PadLayer>(padDescriptor,name);
2311}
2312
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002313IConnectableLayer* NetworkImpl::AddQuantizeLayer(const char* name)
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002314{
2315 return m_Graph->AddLayer<QuantizeLayer>(name);
2316}
2317
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002318IConnectableLayer* NetworkImpl::AddDequantizeLayer(const char* name)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002319{
2320 return m_Graph->AddLayer<DequantizeLayer>(name);
2321}
2322
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002323IConnectableLayer* NetworkImpl::AddStridedSliceLayer(const StridedSliceDescriptor& stridedSliceDescriptor,
Conor Kennedy430b5d82018-11-14 15:28:28 +00002324 const char* name)
2325{
2326 return m_Graph->AddLayer<StridedSliceLayer>(stridedSliceDescriptor, name);
2327}
2328
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002329IConnectableLayer* NetworkImpl::AddGreaterLayer(const char* name)
Matteo Martincigh59a950c2018-12-13 12:48:25 +00002330{
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01002331 return AddComparisonLayer(ComparisonDescriptor(ComparisonOperation::Greater), name);
Matteo Martincigh59a950c2018-12-13 12:48:25 +00002332}
2333
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002334IConnectableLayer* NetworkImpl::AddEqualLayer(const char* name)
FrancisMurtagh20995952018-12-17 12:11:36 +00002335{
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01002336 return AddComparisonLayer(ComparisonDescriptor(ComparisonOperation::Equal), name);
FrancisMurtagh20995952018-12-17 12:11:36 +00002337}
2338
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002339IConnectableLayer* NetworkImpl::AddRsqrtLayer(const char* name)
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +00002340{
josh minor4a3c6102020-01-06 16:40:46 -06002341 return AddElementwiseUnaryLayer(ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt), name);
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +00002342}
2343
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002344IConnectableLayer* NetworkImpl::AddGatherLayer(const char* name)
narpra01b89b05f2019-01-16 09:53:09 +00002345{
Teresa Charlin52664732020-06-29 16:27:03 +01002346 GatherDescriptor gatherDescriptor{};
2347 return AddGatherLayer(gatherDescriptor, name);
2348}
2349
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002350IConnectableLayer* NetworkImpl::AddGatherLayer(const GatherDescriptor& gatherDescriptor,
Teresa Charlin52664732020-06-29 16:27:03 +01002351 const char* name)
2352{
2353 return m_Graph->AddLayer<GatherLayer>(gatherDescriptor, name);
narpra01b89b05f2019-01-16 09:53:09 +00002354}
2355
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002356IConnectableLayer* NetworkImpl::AddMergeLayer(const char* name)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002357{
2358 return m_Graph->AddLayer<MergeLayer>(name);
2359}
2360
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002361IConnectableLayer* NetworkImpl::AddSwitchLayer(const char* name)
Sadik Armaganeff363d2019-04-05 15:25:46 +01002362{
2363 return m_Graph->AddLayer<SwitchLayer>(name);
2364}
2365
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002366IConnectableLayer* NetworkImpl::AddPreluLayer(const char* name)
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01002367{
2368 return m_Graph->AddLayer<PreluLayer>(name);
2369}
2370
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002371IConnectableLayer* NetworkImpl::AddTransposeConvolution2dLayer(const TransposeConvolution2dDescriptor& descriptor,
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002372 const ConstTensor& weights,
2373 const Optional<ConstTensor>& biases,
2374 const char* name)
2375{
2376 if (descriptor.m_BiasEnabled && !biases.has_value())
2377 {
2378 throw InvalidArgumentException("AddTransposeConvolution2dLayer: Biases cannot be empty");
2379 }
2380
2381 const auto layer = m_Graph->AddLayer<TransposeConvolution2dLayer>(descriptor, name);
2382
James Conroy1f58f032021-04-27 17:13:27 +01002383 layer->m_Weight = std::make_shared<ScopedTensorHandle>(weights);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002384
2385 if (descriptor.m_BiasEnabled)
2386 {
James Conroy1f58f032021-04-27 17:13:27 +01002387 layer->m_Bias = std::make_shared<ScopedTensorHandle>(biases.value());
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002388 }
2389
2390 return layer;
2391}
2392
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002393IConnectableLayer* NetworkImpl::AddTransposeLayer(const TransposeDescriptor& transposeDescriptor,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002394 const char* name)
2395{
2396 return m_Graph->AddLayer<TransposeLayer>(transposeDescriptor, name);
2397}
2398
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002399IConnectableLayer* NetworkImpl::AddStackLayer(const StackDescriptor& stackDescriptor,
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01002400 const char* name)
2401{
2402 return m_Graph->AddLayer<StackLayer>(stackDescriptor, name);
2403}
2404
Derek Lamberti013c3902019-10-21 10:46:16 +01002405
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002406IConnectableLayer* NetworkImpl::AddStandInLayer(const StandInDescriptor& desc,
Derek Lamberti013c3902019-10-21 10:46:16 +01002407 const char* name)
2408{
2409 return m_Graph->AddLayer<StandInLayer>(desc, name);
2410}

IConnectableLayer* NetworkImpl::AddQuantizedLstmLayer(const QuantizedLstmInputParams& params,
                                                      const char* name)
{
    const auto layer = m_Graph->AddLayer<QuantizedLstmLayer>(name);

    // InputToX weights
    layer->m_QuantizedLstmParameters.m_InputToInputWeights =
            std::make_shared<ScopedTensorHandle>(params.GetInputToInputWeights());
    layer->m_QuantizedLstmParameters.m_InputToForgetWeights =
            std::make_shared<ScopedTensorHandle>(params.GetInputToForgetWeights());
    layer->m_QuantizedLstmParameters.m_InputToCellWeights =
            std::make_shared<ScopedTensorHandle>(params.GetInputToCellWeights());
    layer->m_QuantizedLstmParameters.m_InputToOutputWeights =
            std::make_shared<ScopedTensorHandle>(params.GetInputToOutputWeights());

    // RecurrentToX weights
    layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights =
            std::make_shared<ScopedTensorHandle>(params.GetRecurrentToInputWeights());
    layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights =
            std::make_shared<ScopedTensorHandle>(params.GetRecurrentToForgetWeights());
    layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights =
            std::make_shared<ScopedTensorHandle>(params.GetRecurrentToCellWeights());
    layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights =
            std::make_shared<ScopedTensorHandle>(params.GetRecurrentToOutputWeights());

    // Bias
    layer->m_QuantizedLstmParameters.m_InputGateBias =
            std::make_shared<ScopedTensorHandle>(params.GetInputGateBias());
    layer->m_QuantizedLstmParameters.m_ForgetGateBias =
            std::make_shared<ScopedTensorHandle>(params.GetForgetGateBias());
    layer->m_QuantizedLstmParameters.m_CellBias =
            std::make_shared<ScopedTensorHandle>(params.GetCellBias());
    layer->m_QuantizedLstmParameters.m_OutputGateBias =
            std::make_shared<ScopedTensorHandle>(params.GetOutputGateBias());

    return layer;
}
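
// Illustrative usage sketch (not part of the library): unlike AddQLstmLayer below, the
// QuantizedLstm layer takes no descriptor and all twelve tensors are mandatory; the
// QuantizedLstmInputParams getters used above throw if a tensor was not supplied.
// The tensor objects and name below are hypothetical.
//
//     QuantizedLstmInputParams quantLstmParams;
//     quantLstmParams.m_InputToInputWeights  = &inputToInputWeights;   // ConstTensor*
//     quantLstmParams.m_InputToForgetWeights = &inputToForgetWeights;
//     // ... the remaining weight and bias tensors are assigned the same way ...
//     quantLstmParams.m_OutputGateBias       = &outputGateBias;
//
//     IConnectableLayer* quantLstm = network->AddQuantizedLstmLayer(quantLstmParams, "quantLstm");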

IConnectableLayer* NetworkImpl::AddQLstmLayer(const QLstmDescriptor& descriptor,
                                              const LstmInputParams& params,
                                              const char* name)
{
    const auto layer = m_Graph->AddLayer<QLstmLayer>(descriptor, name);

    // QLstm Basic Parameters
    layer->m_BasicParameters.m_InputToForgetWeights =
            std::make_shared<ScopedTensorHandle>(*(params.m_InputToForgetWeights));
    layer->m_BasicParameters.m_InputToCellWeights =
            std::make_shared<ScopedTensorHandle>(*(params.m_InputToCellWeights));
    layer->m_BasicParameters.m_InputToOutputWeights =
            std::make_shared<ScopedTensorHandle>(*(params.m_InputToOutputWeights));
    layer->m_BasicParameters.m_RecurrentToForgetWeights =
            std::make_shared<ScopedTensorHandle>(*(params.m_RecurrentToForgetWeights));
    layer->m_BasicParameters.m_RecurrentToCellWeights =
            std::make_shared<ScopedTensorHandle>(*(params.m_RecurrentToCellWeights));
    layer->m_BasicParameters.m_RecurrentToOutputWeights =
            std::make_shared<ScopedTensorHandle>(*(params.m_RecurrentToOutputWeights));
    layer->m_BasicParameters.m_ForgetGateBias =
            std::make_shared<ScopedTensorHandle>(*(params.m_ForgetGateBias));
    layer->m_BasicParameters.m_CellBias =
            std::make_shared<ScopedTensorHandle>(*(params.m_CellBias));
    layer->m_BasicParameters.m_OutputGateBias =
            std::make_shared<ScopedTensorHandle>(*(params.m_OutputGateBias));

    // QLstm Cifg parameters
    if(!descriptor.m_CifgEnabled)
    {
        if(params.m_InputToInputWeights == nullptr)
        {
            throw InvalidArgumentException("AddQLstmLayer: Input To Input Weights cannot be NULL");
        }

        if(params.m_RecurrentToInputWeights == nullptr)
        {
            throw InvalidArgumentException(
                    "AddQLstmLayer: Recurrent To Input Weights cannot be NULL");
        }

        if(params.m_InputGateBias == nullptr)
        {
            throw InvalidArgumentException("AddQLstmLayer: Input Gate Bias cannot be NULL");
        }

        layer->m_CifgParameters.m_InputToInputWeights =
                std::make_shared<ScopedTensorHandle>(*(params.m_InputToInputWeights));
        layer->m_CifgParameters.m_RecurrentToInputWeights =
                std::make_shared<ScopedTensorHandle>(*(params.m_RecurrentToInputWeights));
        layer->m_CifgParameters.m_InputGateBias =
                std::make_shared<ScopedTensorHandle>(*(params.m_InputGateBias));
    }

    // QLstm Projection parameters
    if(descriptor.m_ProjectionEnabled)
    {
        if(params.m_ProjectionWeights == nullptr)
        {
            throw InvalidArgumentException("AddQLstmLayer: Projection Weights cannot be NULL");
        }

        layer->m_ProjectionParameters.m_ProjectionWeights =
                std::make_shared<ScopedTensorHandle>(*(params.m_ProjectionWeights));

        // Projection bias is optional even if projection is enabled
        if(params.m_ProjectionBias != nullptr)
        {
            layer->m_ProjectionParameters.m_ProjectionBias =
                    std::make_shared<ScopedTensorHandle>(*(params.m_ProjectionBias));
        }
    }

    // QLstm Peephole params
    if(descriptor.m_PeepholeEnabled)
    {
        if(params.m_CellToForgetWeights == nullptr)
        {
            throw InvalidArgumentException("AddQLstmLayer: Cell To Forget Weights cannot be NULL");
        }

        if(params.m_CellToOutputWeights == nullptr)
        {
            throw InvalidArgumentException("AddQLstmLayer: Cell To Output Weights cannot be NULL");
        }

        if(!descriptor.m_CifgEnabled)
        {
            if(params.m_CellToInputWeights == nullptr)
            {
                throw InvalidArgumentException("AddQLstmLayer: Cell To Input Weights cannot be NULL");
            }

            layer->m_PeepholeParameters.m_CellToInputWeights =
                    std::make_shared<ScopedTensorHandle>(*(params.m_CellToInputWeights));
        }

        layer->m_PeepholeParameters.m_CellToForgetWeights =
                std::make_shared<ScopedTensorHandle>(*(params.m_CellToForgetWeights));
        layer->m_PeepholeParameters.m_CellToOutputWeights =
                std::make_shared<ScopedTensorHandle>(*(params.m_CellToOutputWeights));
    }

    // QLstm Layer Normalization params
    if(descriptor.m_LayerNormEnabled)
    {
        if(params.m_ForgetLayerNormWeights == nullptr)
        {
            throw InvalidArgumentException("AddQLstmLayer: Forget layer normalization weights cannot be NULL");
        }

        if(params.m_CellLayerNormWeights == nullptr)
        {
            throw InvalidArgumentException("AddQLstmLayer: Cell layer normalization weights cannot be NULL");
        }

        if(params.m_OutputLayerNormWeights == nullptr)
        {
            throw InvalidArgumentException("AddQLstmLayer: Output layer normalization weights cannot be NULL");
        }

        if(!descriptor.m_CifgEnabled)
        {
            if(params.m_InputLayerNormWeights == nullptr)
            {
                throw InvalidArgumentException("AddQLstmLayer: Input layer normalization weights cannot be NULL");
            }

            layer->m_LayerNormParameters.m_InputLayerNormWeights =
                    std::make_shared<ScopedTensorHandle>(*(params.m_InputLayerNormWeights));
        }

        layer->m_LayerNormParameters.m_ForgetLayerNormWeights =
                std::make_shared<ScopedTensorHandle>(*(params.m_ForgetLayerNormWeights));
        layer->m_LayerNormParameters.m_CellLayerNormWeights =
                std::make_shared<ScopedTensorHandle>(*(params.m_CellLayerNormWeights));
        layer->m_LayerNormParameters.m_OutputLayerNormWeights =
                std::make_shared<ScopedTensorHandle>(*(params.m_OutputLayerNormWeights));
    }

    return layer;
}
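
// Illustrative usage sketch (not part of the library): with CIFG enabled only the nine basic
// weight/bias tensors are required; the optional parameter sets above are validated only when the
// corresponding descriptor flag is set. The tensor objects and name below are hypothetical.
//
//     QLstmDescriptor qDesc;
//     qDesc.m_CifgEnabled       = true;    // no separate input-gate parameters
//     qDesc.m_ProjectionEnabled = false;
//     qDesc.m_PeepholeEnabled   = false;
//     qDesc.m_LayerNormEnabled  = false;
//
//     LstmInputParams lstmParams;
//     lstmParams.m_InputToForgetWeights     = &inputToForgetWeights;   // ConstTensor*
//     lstmParams.m_InputToCellWeights       = &inputToCellWeights;
//     lstmParams.m_InputToOutputWeights     = &inputToOutputWeights;
//     lstmParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
//     lstmParams.m_RecurrentToCellWeights   = &recurrentToCellWeights;
//     lstmParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
//     lstmParams.m_ForgetGateBias           = &forgetGateBias;
//     lstmParams.m_CellBias                 = &cellBias;
//     lstmParams.m_OutputGateBias           = &outputGateBias;
//
//     IConnectableLayer* qLstm = network->AddQLstmLayer(qDesc, lstmParams, "qLstm");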
2591
Francis Murtagh3d2b4b22021-02-15 18:23:17 +00002592IConnectableLayer* NetworkImpl::AddLogicalBinaryLayer(const LogicalBinaryDescriptor& logicalBinaryDescriptor,
James Conroyaba90cd2020-11-06 16:28:18 +00002593 const char* name)
2594{
2595 return m_Graph->AddLayer<LogicalBinaryLayer>(logicalBinaryDescriptor, name);
2596}
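
// Illustrative usage sketch (not part of the library): the descriptor simply selects the
// element-wise operation, e.g.
//
//     LogicalBinaryDescriptor andDesc(LogicalBinaryOperation::LogicalAnd);
//     IConnectableLayer* logicalAnd = network->AddLogicalBinaryLayer(andDesc, "logical-and");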

void NetworkImpl::Accept(ILayerVisitor& visitor) const
{
    for (auto layer : GetGraph())
    {
        layer->Accept(visitor);
    }
}

void NetworkImpl::ExecuteStrategy(IStrategy& strategy) const
{
    for (auto layer : GetGraph())
    {
        layer->ExecuteStrategy(strategy);
    }
}
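
// Both Accept (ILayerVisitor) and ExecuteStrategy (IStrategy) above simply walk every layer held
// by the underlying Graph, in the order the graph stores them, and delegate all per-layer
// behaviour to the supplied visitor or strategy implementation.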

OptimizedNetworkImpl::OptimizedNetworkImpl(std::unique_ptr<Graph> graph)
    : m_Graph(std::move(graph)), m_Guid(profiling::ProfilingService::GetNextGuid())
{
}

OptimizedNetworkImpl::OptimizedNetworkImpl(std::unique_ptr<Graph> graph, const ModelOptions& modelOptions)
    : m_Graph(std::move(graph)), m_Guid(profiling::ProfilingService::GetNextGuid()), m_ModelOptions(modelOptions)
{
}

OptimizedNetworkImpl::~OptimizedNetworkImpl()
{
}

} // namespace armnn