//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "Layer.hpp"

#include "Graph.hpp"
#include <ProfilingService.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnn/backends/TensorHandle.hpp>
#include <armnn/backends/WorkloadData.hpp>

#include <fmt/format.h>

#include <numeric>

namespace armnn
{

// Instantiate the static member variable.
NullDescriptor Layer::m_NullDescriptor;

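// Splices 'layer' between this input slot and the output slot it is currently
// connected to: the old producer is rewired to feed 'layer', 'layer' feeds this
// slot, and the producer's TensorInfo is propagated to the inserted layer.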
void InputSlot::Insert(Layer& layer)
{
    ARMNN_ASSERT(layer.GetNumOutputSlots() == 1);

    OutputSlot* const prevSlot = GetConnectedOutputSlot();

    if (prevSlot != nullptr)
    {
        // Disconnects parent from this.
        prevSlot->Disconnect(*this);

        // Connects inserted layer to parent.
        ARMNN_ASSERT(layer.GetNumInputSlots() == 1);
        int idx = prevSlot->Connect(layer.GetInputSlot(0));
        prevSlot->SetEdgeStrategy(armnn::numeric_cast<unsigned int>(idx), EdgeStrategy::Undefined);

        // Sets tensor info for inserted layer.
        const TensorInfo& tensorInfo = prevSlot->GetTensorInfo();
        layer.GetOutputHandler().SetTensorInfo(tensorInfo);
    }

    // Connects inserted layer to this.
    layer.GetOutputSlot(0).Connect(*this);
    layer.GetOutputSlot(0).SetEdgeStrategy(0, EdgeStrategy::Undefined);
}

const InputSlot* OutputSlot::GetConnection(unsigned int index) const
{
    ValidateConnectionIndex(index);
    return m_Connections[index];
}

InputSlot* OutputSlot::GetConnection(unsigned int index)
{
    ValidateConnectionIndex(index);
    return m_Connections[index];
}

void OutputSlot::SetTensorInfo(const TensorInfo& tensorInfo)
{
    GetOutputHandler().SetTensorInfo(tensorInfo);
}

const TensorInfo& OutputSlot::GetTensorInfo() const
{
    return GetOutputHandler().GetTensorInfo();
}

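// When the owning layer uses ShapeInferenceMethod::InferAndValidate, this query
// first (re)validates the layer's shapes from its inputs before reporting whether
// the tensor info has been set.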
bool OutputSlot::IsTensorInfoSet() const
{
    if (GetOwningLayer().GetShapeInferenceMethod() == ShapeInferenceMethod::InferAndValidate)
    {
        GetOwningLayer().ValidateTensorShapesFromInputs();
    }
    return GetOutputHandler().IsTensorInfoSet();
}

bool OutputSlot::ValidateTensorShape(const TensorShape& shape) const
{
    ARMNN_ASSERT_MSG(IsTensorInfoSet(), "TensorInfo must be set in order to validate the shape.");
    return shape == m_OutputHandler.GetTensorInfo().GetShape();
}

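// Appends 'destination' to the connection list (with an Undefined edge strategy)
// and returns the index of the new connection.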
int OutputSlot::Connect(InputSlot& destination)
{
    destination.SetConnection(this);
    m_Connections.push_back(&destination);
    m_EdgeStrategies.push_back(EdgeStrategy::Undefined);
    return armnn::numeric_cast<int>(m_Connections.size() - 1);
}

void OutputSlot::Disconnect(InputSlot& slot)
{
    slot.SetConnection(nullptr);
    auto it = std::find(m_Connections.begin(), m_Connections.end(), &slot);

    if (it == m_Connections.end())
    {
        return;
    }

    auto idx = std::distance(m_Connections.begin(), it);
    m_Connections.erase(std::remove(m_Connections.begin(), m_Connections.end(), &slot), m_Connections.end());

    m_EdgeStrategies.erase(m_EdgeStrategies.begin() + idx);
}

void OutputSlot::DisconnectAll()
{
    while (GetNumConnections() > 0)
    {
        InputSlot& connection = *GetConnection(0);
        Disconnect(connection);
    }
}

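// Re-homes every remaining connection of this slot onto 'destination', copying the
// tensor info across. Only valid while all edge strategies are still Undefined.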
void OutputSlot::MoveAllConnections(OutputSlot& destination)
{
    while (GetNumConnections() > 0)
    {
        ARMNN_ASSERT_MSG(m_EdgeStrategies[0] == EdgeStrategy::Undefined,
                         "Cannot move connections once memory strategies have been established.");

        InputSlot& connection = *GetConnection(0);
        Disconnect(connection);
        destination.Connect(connection);
        destination.GetOutputHandler().SetTensorInfo(GetOutputHandler().GetTensorInfo());
    }
}

unsigned int OutputSlot::CalculateIndexOnOwner() const
{
    for (unsigned int i = 0; i < GetOwningLayer().GetNumOutputSlots(); i++)
    {
        if (GetOwningLayer().GetOutputSlot(i) == (*this))
        {
            return i;
        }
    }
    ARMNN_ASSERT_MSG(false, "Did not find slot on owner.");
    return 0; // Error
}

bool OutputSlot::operator==(const OutputSlot& other) const
{
    bool isSame = other.GetNumConnections() == GetNumConnections();
    if (!isSame)
    {
        return false;
    }

    for (unsigned int i = 0; i < GetNumConnections(); i++)
    {
        isSame &= other.GetConnection(i) == GetConnection(i);
    }
    return isSame;
}

void OutputSlot::ValidateConnectionIndex(unsigned int index) const
{
    if (armnn::numeric_cast<std::size_t>(index) >= m_Connections.size())
    {
        throw InvalidArgumentException(fmt::format("GetConnection: Invalid index {} provided", index));
    }
}

LayerGuid OutputSlot::GetOwningLayerGuid() const
{
    return GetOwningLayer().GetGuid();
}

void OutputSlot::SetTensorHandleFactory(const ITensorHandleFactory::FactoryId& id)
{
    m_TensorHandleFactoryId = id;
}

ITensorHandleFactory::FactoryId OutputSlot::GetTensorHandleFactoryId() const
{
    return m_TensorHandleFactoryId;
}

void OutputSlot::SetEdgeStrategy(unsigned int connectionIndex, EdgeStrategy strategy)
{
    m_EdgeStrategies[connectionIndex] = strategy;
}

EdgeStrategy OutputSlot::GetEdgeStrategyForConnection(unsigned int connectionIdx) const
{
    return m_EdgeStrategies[connectionIdx];
}

Layer::Layer(unsigned int numInputSlots,
             unsigned int numOutputSlots,
             LayerType type,
             DataLayout layout,
             const char* name)
: m_OutputHandlers(numOutputSlots)
, m_ShapeInferenceMethod(ShapeInferenceMethod::ValidateOnly)
, m_LayerName(name ? name : "")
, m_Type(type)
, m_BackendId()
, m_BackendHint(EmptyOptional())
, m_Guid(profiling::ProfilingService::GetNextGuid())
{
    IgnoreUnused(layout);
    m_InputSlots.reserve(numInputSlots);
    for (unsigned int i = 0; i < numInputSlots; ++i)
    {
        m_InputSlots.emplace_back(*this, i);
    }

    m_OutputSlots.reserve(numOutputSlots);
    for (unsigned int i = 0; i < numOutputSlots; ++i)
    {
        m_OutputSlots.emplace_back(*this, m_OutputHandlers[i]);
    }
}

Layer::Layer(unsigned int numInputSlots,
             unsigned int numOutputSlots,
             LayerType type,
             const char* name)
: Layer(numInputSlots, numOutputSlots, type, DataLayout::NCHW, name)
{
}

void Layer::CollectWorkloadInputs(WorkloadDataCollector& dataCollector) const
{
    for (auto&& inputSlot : GetInputSlots())
    {
        // The graph must be well-formed at this point.
        ARMNN_ASSERT(inputSlot.GetConnection());
        const OutputHandler& outputHandler = inputSlot.GetConnectedOutputSlot()->GetOutputHandler();
        dataCollector.Push(outputHandler.GetData(), outputHandler.GetTensorInfo());
    }
}

void Layer::CollectWorkloadOutputs(WorkloadDataCollector& dataCollector) const
{
    for (auto&& outputHandler : m_OutputHandlers)
    {
        outputHandler.CollectWorkloadOutputs(dataCollector);
    }
}

void Layer::SetAdditionalInfo(QueueDescriptor& descriptor) const
{
    descriptor.m_AdditionalInfoObject = m_AdditionalInfoObject.get();
}

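// Creates the output tensor handles for this layer: slots assigned a backend-specific
// ITensorHandleFactory use the registered factory, while slots still carrying the
// legacy factory id fall back to the IWorkloadFactory path.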
void Layer::CreateTensorHandles(const TensorHandleFactoryRegistry& registry,
                                const IWorkloadFactory& workloadFactory,
                                const bool IsMemoryManaged)
{
    for (unsigned int idx = 0; idx < GetNumOutputSlots(); idx++)
    {
        OutputSlot& slot = GetOutputSlot(idx);
        ITensorHandleFactory::FactoryId factoryId = slot.GetTensorHandleFactoryId();

        OutputHandler& handler = GetOutputHandler(idx);
        if (factoryId == ITensorHandleFactory::LegacyFactoryId)
        {
            handler.CreateTensorHandles(workloadFactory, IsMemoryManaged);
        }
        else
        {
            ITensorHandleFactory* handleFactory = registry.GetFactory(factoryId);
            ARMNN_ASSERT(handleFactory);
            handler.CreateTensorHandles(*handleFactory, IsMemoryManaged);
        }
    }
}

void Layer::ReleaseConstantData()
{
    // Now free up the static data.
    OperateOnConstantTensors([](std::shared_ptr<ConstTensorHandle>& handle)
                             {
                                 handle.reset();
                             });
}

DataType Layer::GetDataType() const
{
    if (GetNumInputSlots() > 0) // Ignore the input layer.
    {
        return GetInputSlot(0).GetConnection()->GetTensorInfo().GetDataType();
    }
    return GetOutputSlot(0).GetTensorInfo().GetDataType();
}

void Layer::ResetPriority() const
{
    m_Priority = 0;
    m_Visiting = false;
}

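// Computes a topological priority for scheduling: Input layers take the lowest value,
// Output layers the highest, and every other layer takes one more than the maximum
// priority of its parents. The result is memoized in m_Priority (until ResetPriority()
// is called), and m_Visiting detects cycles during the recursive walk.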
LayerPriority Layer::GetPriority() const
{
    constexpr LayerPriority inputPrio = std::numeric_limits<LayerPriority>::lowest();
    constexpr LayerPriority outputPrio = std::numeric_limits<LayerPriority>::max();

    if (GetType() == LayerType::Input)
    {
        m_Priority = inputPrio;
    }
    else if (GetType() == LayerType::Output)
    {
        m_Priority = outputPrio;
    }
    else if (m_Priority == 0)
    {
        if (m_Visiting)
        {
            throw GraphValidationException("Graph has circular dependencies: cannot walk");
        }

        auto maxPrio = [](const LayerPriority prio, const InputSlot& slot) -> LayerPriority
        {
            const OutputSlot* outputSlot = slot.GetConnectedOutputSlot();
            if (outputSlot)
            {
                const Layer& input = outputSlot->GetOwningLayer();
                return std::max(prio, input.GetPriority());
            }
            else
            {
                // Unconnected input slot.
                return prio;
            }
        };

        m_Visiting = true;
        LayerPriority parentPrio = std::accumulate(GetInputSlots().cbegin(), GetInputSlots().cend(), 0U, maxPrio);
        m_Visiting = false;

        if (parentPrio >= outputPrio)
        {
            throw GraphValidationException("Graph has too many edges");
        }

        m_Priority = parentPrio + 1U;
    }

    return m_Priority;
}

void Layer::VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation& location) const
{
    ARMNN_ASSERT(GetNumInputSlots() == expectedConnections);

    for (unsigned int i = 0; i < expectedConnections; ++i)
    {
        if (GetInputSlot(i).GetConnection() == nullptr)
        {
            throw LayerValidationException(
                fmt::format("Input connection #{0} must be connected "
                            "for {1} layer {2} {3}",
                            i,
                            GetLayerTypeAsCString(this->GetType()),
                            GetNameStr(),
                            location.AsString()));
        }
    }
}

std::vector<TensorShape> Layer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
{
    ARMNN_ASSERT(GetNumInputSlots() != 0);
    ARMNN_ASSERT(GetNumOutputSlots() != 0);

    // By default we return what we got, meaning the output shape(s) are the same as the input(s).
    // This only works if the number of inputs and outputs are the same. Since we are in the Layer
    // base class, this means the implementation needs to be overridden in the specific layers for
    // the other cases. So the missing implementation justifies the UnimplementedException.

    if (GetNumInputSlots() != GetNumOutputSlots())
    {
        throw UnimplementedException(
            fmt::format("Default implementation for InferOutputShapes can only be used for "
                        "layers with the same number of input and output slots. This doesn't "
                        "hold for {0} layer {1} (#inputs={2} #outputs={3}) {4}",
                        GetLayerTypeAsCString(this->GetType()),
                        GetNameStr(),
                        GetNumInputSlots(),
                        GetNumOutputSlots(),
                        CHECK_LOCATION().AsString()));
    }
    return inputShapes;
}

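// Compares the shape set on the output slot with the shape inferred from the inputs.
// Under ValidateOnly the two must match exactly; otherwise only the dimensions the
// user explicitly specified are checked, and the inferred shape (with the existing
// quantization parameters) is written back to the output slot.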
void Layer::ValidateAndCopyShape(const TensorShape& outputShape,
                                 const TensorShape& inferredShape,
                                 const ShapeInferenceMethod shapeInferenceMethod,
                                 const std::string& layerName,
                                 const unsigned int outputSlotIndex)
{
    if (shapeInferenceMethod == ShapeInferenceMethod::ValidateOnly)
    {
        ConditionalThrowIfNotEqual<LayerValidationException>(
            layerName + ": TensorShape set on OutputSlot[0] does not match the inferred shape.",
            outputShape,
            inferredShape);
        return;
    }

    if (outputShape.GetDimensionality() == Dimensionality::Specified)
    {
        for (unsigned int i = 0; i < outputShape.GetNumDimensions(); ++i)
        {
            if (outputShape.GetDimensionSpecificity(i) && outputShape[i] != inferredShape[i])
            {
                std::stringstream ss;
                ss << layerName << ": TensorShape set on OutputSlot[" << outputSlotIndex <<
                      "] does not match the inferred shape at dimension index [";
                ss << i << "] " << outputShape << " != " << inferredShape;
                throw LayerValidationException(ss.str());
            }
        }
    }

    TensorInfo info = GetOutputSlot(outputSlotIndex).GetTensorInfo();

    armnn::TensorInfo inferredTensorInfo(inferredShape,
                                         info.GetDataType(),
                                         info.GetQuantizationScale(),
                                         info.GetQuantizationOffset());

    GetOutputSlot(outputSlotIndex).SetTensorInfo(inferredTensorInfo);
}

void Layer::VerifyShapeInferenceType(const TensorShape& outputShape, ShapeInferenceMethod shapeInferenceMethod)
{
    if (shapeInferenceMethod == ShapeInferenceMethod::ValidateOnly)
    {
        ConditionalThrow<LayerValidationException>(
            outputShape.GetDimensionality() != Dimensionality::NotSpecified,
            "Dimensionality cannot be NotSpecified while using ShapeInferenceMethod::ValidateOnly");

        ConditionalThrow<LayerValidationException>(
            outputShape.AreAllDimensionsSpecified(),
            "Unspecified dimension while using ShapeInferenceMethod::ValidateOnly");
    }
}

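// Emits this layer's identifying parameters (guid, name, type, backend id) through
// 'fn', followed by any fused ActivationDescriptor attached as additional information.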
void Layer::SerializeLayerParameters(ParameterStringifyFunction& fn) const
{
    std::string guid = std::to_string(m_Guid);
    std::string layerType = GetLayerTypeAsCString(m_Type);
    std::string backendId = std::string(m_BackendId);
    if (!guid.empty())
    {
        fn("Guid", guid);
    }
    if (!m_LayerName.empty())
    {
        fn("LayerName", m_LayerName);
    }
    if (!layerType.empty())
    {
        fn("LayerType", layerType);
    }
    if (!backendId.empty())
    {
        fn("BackendID", backendId);
    }
    std::shared_ptr<ActivationDescriptor>
        activationDescPtr = GetAdditionalInformation<ActivationDescriptor>();

    if (activationDescPtr)
    {
        StringifyLayerParameters<ActivationDescriptor>::Serialize(fn, *activationDescPtr);
    }
}

// Default implementation of ExecuteStrategy.
void Layer::ExecuteStrategy(IStrategy& strategy) const
{
    strategy.ExecuteStrategy(this, BaseDescriptor(), {}, GetName());
}

const IConnectableLayer& OutputSlot::GetOwningIConnectableLayer() const
{
    return m_OwningLayer;
}

const IConnectableLayer& InputSlot::GetOwningIConnectableLayer() const
{
    return m_OwningLayer;
}

} // namespace armnn