//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include "LayerFwd.hpp"

#include <backendsCommon/OutputHandler.hpp>
#include <backendsCommon/WorkloadDataCollector.hpp>
#include <backendsCommon/WorkloadInfo.hpp>
#include "InternalTypes.hpp"
#include "SerializeLayerParameters.hpp"

#include <armnn/Types.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/INetwork.hpp>

#include <algorithm>
#include <memory>
#include <string>
#include <vector>
#include <iostream>
#include <functional>
#include <list>

#include <boost/numeric/conversion/cast.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/cast.hpp>

namespace armnn
{

class IWorkload;
class IWorkloadFactory;
class Layer;
class Graph;

class InputSlot final : public IInputSlot
{
public:
    explicit InputSlot(Layer& owner, unsigned int slotIndex)
    : m_OwningLayer(owner)
    , m_Connection(nullptr)
    , m_SlotIndex(slotIndex)
    {}

    ~InputSlot();

    Layer& GetOwningLayer() const { return m_OwningLayer; }
    unsigned int GetSlotIndex() const { return m_SlotIndex; }

    const OutputSlot* GetConnectedOutputSlot() const { return m_Connection; }
    OutputSlot* GetConnectedOutputSlot() { return m_Connection; }
    /// Links the slot to an output slot or breaks an existing link if passing nullptr.
    void SetConnection(OutputSlot* source)
    {
        if (m_Connection != nullptr && source != nullptr)
        {
            throw InvalidArgumentException("Tried to connect an output slot to an input slot, "
                "but the latter already has a connection");
        }
        m_Connection = source;
    }
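
    // Example (illustrative sketch): OutputSlot::Connect is the usual entry point and
    // keeps both sides of the link in sync; calling SetConnection directly looks like:
    //
    //     inputSlot.SetConnection(&outputSlot); // link to a source
    //     inputSlot.SetConnection(nullptr);     // break the link again
    //
    // Passing a second non-null source while still connected throws InvalidArgumentException.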

    /// Inserts a single-output existing layer at this point in the graph.
    void Insert(Layer& layer);

    // IInputSlot

    const IOutputSlot* GetConnection() const override;
    IOutputSlot* GetConnection() override;

private:
    Layer& m_OwningLayer;
    OutputSlot* m_Connection;
    const unsigned int m_SlotIndex;
};

class OutputSlot final : public IOutputSlot
{
public:
    explicit OutputSlot(Layer& owner, OutputHandler& outputHandler)
    : m_OwningLayer(owner)
    , m_OutputHandler(outputHandler)
    {}

    ~OutputSlot()
    {
        try
        {
            // Coverity fix: DisconnectAll() may throw uncaught exceptions.
            DisconnectAll();
        }
        catch (const std::exception& e)
        {
            // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
            // exception of type std::length_error.
            // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
            std::cerr << "WARNING: An error has occurred when disconnecting all output slots: "
                      << e.what() << std::endl;
        }
    }

    Layer& GetOwningLayer() const { return m_OwningLayer; }

    LayerGuid GetOwningLayerGuid() const override;

    const OutputHandler& GetOutputHandler() const { return m_OutputHandler; }
    OutputHandler& GetOutputHandler() { return m_OutputHandler; }

    int Connect(InputSlot& destination);
    void Disconnect(InputSlot& slot);
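
    // Example (illustrative sketch; "producer" and "consumer" stand for any two layers
    // in the same graph):
    //
    //     OutputSlot& out = producer->GetOutputSlot(0);
    //     out.Connect(consumer->GetInputSlot(0));    // establish the connection
    //     out.Disconnect(consumer->GetInputSlot(0)); // and break it again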

    const std::vector<InputSlot*>& GetConnections() const { return m_Connections; }

    bool ValidateTensorShape(const TensorShape& shape) const;

    /// Disconnects all connections.
    void DisconnectAll();

    /// Moves all connections to another OutputSlot.
    void MoveAllConnections(OutputSlot& destination);
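
    // Example (illustrative sketch): when an optimization substitutes "newLayer" for
    // "oldLayer", the replacement can inherit every downstream connection in one call:
    //
    //     oldLayer->GetOutputSlot(0).MoveAllConnections(newLayer->GetOutputSlot(0));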

    // IOutputSlot

    unsigned int GetNumConnections() const override { return boost::numeric_cast<unsigned int>(m_Connections.size()); }
    const InputSlot* GetConnection(unsigned int index) const override;
    InputSlot* GetConnection(unsigned int index) override;

    void SetTensorInfo(const TensorInfo& tensorInfo) override;
    const TensorInfo& GetTensorInfo() const override;
    bool IsTensorInfoSet() const override;

    int Connect(IInputSlot& destination) override
    {
        return Connect(*boost::polymorphic_downcast<InputSlot*>(&destination));
    }

    void Disconnect(IInputSlot& slot) override
    {
        return Disconnect(*boost::polymorphic_downcast<InputSlot*>(&slot));
    }

    unsigned int CalculateIndexOnOwner() const override;

    bool operator==(const OutputSlot& other) const;

private:
    void ValidateConnectionIndex(unsigned int index) const;

    Layer& m_OwningLayer;
    OutputHandler& m_OutputHandler;
    std::vector<InputSlot*> m_Connections;
};

// InputSlot inlines that need the OutputSlot declaration.

inline InputSlot::~InputSlot()
{
    if (m_Connection != nullptr)
    {
        try
        {
            // Coverity fix: Disconnect() may throw uncaught exceptions.
            m_Connection->Disconnect(*this);
        }
        catch (const std::exception& e)
        {
            // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
            // exception of type std::length_error.
            // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
            std::cerr << "WARNING: An error has occurred when disconnecting an input slot: "
                      << e.what() << std::endl;
        }
    }
}

inline const IOutputSlot* InputSlot::GetConnection() const { return GetConnectedOutputSlot(); }
inline IOutputSlot* InputSlot::GetConnection() { return GetConnectedOutputSlot(); }

class ScopedCpuTensorHandle;

// Base layer class

using LayerPriority = unsigned int;

class Layer : public IConnectableLayer
{
public:
    /// @param name - Optional name for the layer (may be nullptr).
    Layer(unsigned int numInputSlots, unsigned int numOutputSlots, LayerType type, const char* name);
    Layer(unsigned int numInputSlots, unsigned int numOutputSlots, LayerType type, DataLayout layout, const char* name);

    const std::string& GetNameStr() const
    {
        return m_LayerName;
    }

    const OutputHandler& GetOutputHandler(unsigned int i = 0) const
    {
        return m_OutputHandlers[i];
    }

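    // Non-const overload implemented in terms of the const overload above (a common C++
    // idiom); the const_cast is safe because this object is known to be non-const here.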
    OutputHandler& GetOutputHandler(unsigned int i = 0)
    {
        return const_cast<OutputHandler&>(const_cast<const Layer*>(this)->GetOutputHandler(i));
    }

    const std::vector<InputSlot>& GetInputSlots() const { return m_InputSlots; }
    const std::vector<OutputSlot>& GetOutputSlots() const { return m_OutputSlots; }

    // Allows non-const access to the input slots without exposing the vector (its size is fixed at layer construction).
    std::vector<InputSlot>::iterator BeginInputSlots() { return m_InputSlots.begin(); }
    std::vector<InputSlot>::iterator EndInputSlots() { return m_InputSlots.end(); }

    // Allows non-const access to the output slots without exposing the vector (its size is fixed at layer construction).
    std::vector<OutputSlot>::iterator BeginOutputSlots() { return m_OutputSlots.begin(); }
    std::vector<OutputSlot>::iterator EndOutputSlots() { return m_OutputSlots.end(); }

    // Checks whether the outputs of this layer have no connections at all.
    bool IsOutputUnconnected()
    {
        unsigned int numConnections = 0;

        for (auto&& output : GetOutputSlots())
        {
            numConnections += output.GetNumConnections();
        }

        return (GetNumOutputSlots() > 0) && (numConnections == 0);
    }

    // Used for sorting.
    void ResetPriority() const;
    LayerPriority GetPriority() const;

    LayerType GetType() const { return m_Type; }

    DataType GetDataType() const;

    const BackendId& GetBackendId() const { return m_BackendId; }
    void SetBackendId(const BackendId& id) { m_BackendId = id; }

    // Virtuals

    virtual std::unique_ptr<IWorkload> CreateWorkload(const Graph& graph, const IWorkloadFactory& factory) const = 0;

    virtual void CreateTensorHandles(Graph& graph, const IWorkloadFactory& factory);

    /// Creates a dynamically-allocated copy of this layer.
    /// @param graph - The Graph into which this Layer is being cloned.
    virtual Layer* Clone(Graph& graph) const = 0;

    void VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation& location) const;

    virtual void ValidateTensorShapesFromInputs() = 0;

    std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape>& inputShapes) const override;

    /// Helper to serialize the layer parameters to a string
    /// (currently used in DotSerializer and company).
    virtual void SerializeLayerParameters(ParameterStringifyFunction&) const {}

    // Frees up the constant source data.
    virtual void ReleaseConstantData();

    template<typename Op>
    void OperateOnConstantTensors(Op op)
    {
        for (auto constant : GetConstantTensorsByRef())
        {
            if (constant.get())
            {
                op(constant);
            }
        }
    }
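
    // Example (illustrative sketch): a ReleaseConstantData implementation can reset every
    // constant tensor handle the layer holds:
    //
    //     OperateOnConstantTensors([](std::unique_ptr<ScopedCpuTensorHandle>& handle)
    //     {
    //         handle.reset();
    //     });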

    // IConnectableLayer

    const char* GetName() const override { return m_LayerName.c_str(); }

    unsigned int GetNumInputSlots() const override { return static_cast<unsigned int>(m_InputSlots.size()); }
    unsigned int GetNumOutputSlots() const override { return static_cast<unsigned int>(m_OutputSlots.size()); }

    const InputSlot& GetInputSlot(unsigned int index) const override { return m_InputSlots.at(index); }
    InputSlot& GetInputSlot(unsigned int index) override { return m_InputSlots.at(index); }
    const OutputSlot& GetOutputSlot(unsigned int index = 0) const override { return m_OutputSlots.at(index); }
    OutputSlot& GetOutputSlot(unsigned int index = 0) override { return m_OutputSlots.at(index); }

    void SetGuid(LayerGuid guid) { m_Guid = guid; }
    LayerGuid GetGuid() const final { return m_Guid; }

    void AddRelatedLayerName(const std::string& layerName) { m_RelatedLayerNames.emplace_back(layerName); }

    const std::list<std::string>& GetRelatedLayerNames() { return m_RelatedLayerNames; }

protected:
    // Graph needs access to the virtual destructor.
    friend class Graph;
    virtual ~Layer() = default;

    template <typename QueueDescriptor>
    void CollectQueueDescriptorInputs(QueueDescriptor& descriptor, WorkloadInfo& info, const Graph& graph) const
    {
        WorkloadDataCollector dataCollector(descriptor.m_Inputs, info.m_InputTensorInfos);
        CollectWorkloadInputs(dataCollector, graph);
    }

    template <typename QueueDescriptor>
    void CollectQueueDescriptorOutputs(QueueDescriptor& descriptor, WorkloadInfo& info, const Graph& graph) const
    {
        WorkloadDataCollector dataCollector(descriptor.m_Outputs, info.m_OutputTensorInfos);
        CollectWorkloadOutputs(dataCollector, graph);
    }

    /// Helper function to reduce duplication in *Layer::CreateWorkload.
    template <typename QueueDescriptor>
    WorkloadInfo PrepInfoAndDesc(QueueDescriptor& descriptor, const Graph& graph) const
    {
        WorkloadInfo info;
        CollectQueueDescriptorInputs(descriptor, info, graph);
        CollectQueueDescriptorOutputs(descriptor, info, graph);
        return info;
    }
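
    // Example (illustrative sketch of the pattern a derived layer's CreateWorkload
    // typically follows; ActivationLayer is just one case):
    //
    //     ActivationQueueDescriptor descriptor;
    //     return factory.CreateActivation(descriptor, PrepInfoAndDesc(descriptor, graph));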

    template <typename LayerType, typename ... Params>
    LayerType* CloneBase(Graph& graph, Params&& ... params) const;
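
    // Example (illustrative sketch of how a derived layer usually implements Clone):
    //
    //     ActivationLayer* ActivationLayer::Clone(Graph& graph) const
    //     {
    //         return CloneBase<ActivationLayer>(graph, m_Param, GetName());
    //     }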

    // Retrieves the handles to the constant tensors.
    using ConstantTensors = std::vector<std::reference_wrapper<std::unique_ptr<ScopedCpuTensorHandle>>>;
    virtual ConstantTensors GetConstantTensorsByRef() { return ConstantTensors(); }

private:
    void CollectWorkloadInputs(WorkloadDataCollector& dataCollector, const Graph& graph) const;
    void CollectWorkloadOutputs(WorkloadDataCollector& dataCollector, const Graph& graph) const;

protected:
    std::vector<OutputHandler> m_OutputHandlers;

private:
    const std::string m_LayerName;

    std::vector<InputSlot> m_InputSlots;
    std::vector<OutputSlot> m_OutputSlots;

    const LayerType m_Type;
    BackendId m_BackendId;

    /// Used for sorting.
    mutable LayerPriority m_Priority = 0;
    mutable bool m_Visiting = false;

    LayerGuid m_Guid;

    std::list<std::string> m_RelatedLayerNames;
};

// A layer that user-provided data can be bound to (e.g. inputs, outputs).
class BindableLayer : public Layer
{
public:
    BindableLayer(unsigned int numInputSlots,
        unsigned int numOutputSlots,
        LayerType type,
        const char* name,
        LayerBindingId id)
    : Layer(numInputSlots, numOutputSlots, type, name)
    , m_Id(id)
    {
    }

    LayerBindingId GetBindingId() const { return m_Id; }

protected:
    ~BindableLayer() = default;

private:
    LayerBindingId m_Id;
};
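
// Example (illustrative sketch): input and output layers are the bindable layers in
// practice; the LayerBindingId they carry is the id callers use to pair their input and
// output tensors with the network's endpoints at runtime:
//
//     LayerBindingId id = bindableLayer->GetBindingId();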

} // namespace armnn