//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include "LayerFwd.hpp"

#include <backendsCommon/OutputHandler.hpp>
#include <backendsCommon/WorkloadDataCollector.hpp>
#include <backendsCommon/WorkloadInfo.hpp>
#include "InternalTypes.hpp"
#include "SerializeLayerParameters.hpp"

#include <armnn/Types.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/INetwork.hpp>

#include <algorithm>
#include <memory>
#include <string>
#include <vector>
#include <iostream>
#include <functional>
#include <list>

#include <boost/numeric/conversion/cast.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/cast.hpp>

namespace armnn
{

class IWorkload;
class IWorkloadFactory;
class Layer;
class Graph;

class InputSlot final : public IInputSlot
{
public:
    explicit InputSlot(Layer& owner, unsigned int slotIndex)
        : m_OwningLayer(owner)
        , m_Connection(nullptr)
        , m_SlotIndex(slotIndex)
    {}

    ~InputSlot();

    Layer& GetOwningLayer() const { return m_OwningLayer; }
    unsigned int GetSlotIndex() const { return m_SlotIndex; }

    const OutputSlot* GetConnectedOutputSlot() const { return m_Connection; }
    OutputSlot* GetConnectedOutputSlot() { return m_Connection; }

    /// Links the slot to an output slot or breaks an existing link if passing nullptr.
    void SetConnection(OutputSlot* source)
    {
        if (m_Connection != nullptr && source != nullptr)
        {
            throw InvalidArgumentException("Tried to connect an output slot to an input slot, "
                                           "but the latter already has a connection");
        }
        m_Connection = source;
    }

    // Inserts a single-output existing layer at this point in the graph.
    void Insert(Layer& layer);

    // IInputSlot

    const IOutputSlot* GetConnection() const override;
    IOutputSlot* GetConnection() override;

private:
    Layer& m_OwningLayer;
    OutputSlot* m_Connection;
    const unsigned int m_SlotIndex;
};

class OutputSlot final : public IOutputSlot
{
public:
    explicit OutputSlot(Layer& owner, OutputHandler& outputHandler)
        : m_OwningLayer(owner)
        , m_OutputHandler(outputHandler)
    {}

    ~OutputSlot()
    {
        try
        {
            // Coverity fix: DisconnectAll() may throw uncaught exceptions.
            DisconnectAll();
        }
        catch (const std::exception& e)
        {
            // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
            // exception of type std::length_error.
            // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
            std::cerr << "WARNING: An error has occurred when disconnecting all output slots: "
                      << e.what() << std::endl;
        }
    }

    Layer& GetOwningLayer() const { return m_OwningLayer; }

    const OutputHandler& GetOutputHandler() const { return m_OutputHandler; }
    OutputHandler& GetOutputHandler() { return m_OutputHandler; }

    int Connect(InputSlot& destination);
    void Disconnect(InputSlot& slot);

    const std::vector<InputSlot*>& GetConnections() const { return m_Connections; }

    bool ValidateTensorShape(const TensorShape& shape) const;

    /// Disconnects all connections.
    void DisconnectAll();

    /// Moves all connections to another OutputSlot.
    void MoveAllConnections(OutputSlot& destination);

    // IOutputSlot

    unsigned int GetNumConnections() const override { return boost::numeric_cast<unsigned int>(m_Connections.size()); }
    const InputSlot* GetConnection(unsigned int index) const override;
    InputSlot* GetConnection(unsigned int index) override;

    void SetTensorInfo(const TensorInfo& tensorInfo) override;
    const TensorInfo& GetTensorInfo() const override;
    bool IsTensorInfoSet() const override;

    int Connect(IInputSlot& destination) override
    {
        return Connect(*boost::polymorphic_downcast<InputSlot*>(&destination));
    }

    void Disconnect(IInputSlot& slot) override
    {
        return Disconnect(*boost::polymorphic_downcast<InputSlot*>(&slot));
    }

private:
    void ValidateConnectionIndex(unsigned int index) const;

    Layer& m_OwningLayer;
    OutputHandler& m_OutputHandler;
    std::vector<InputSlot*> m_Connections;
};
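
// A minimal usage sketch (not part of this header; producer/consumer stand in
// for any two connected layers): OutputSlot::Connect links the two slots in
// both directions, registering the InputSlot in m_Connections and setting the
// input slot's back-pointer via InputSlot::SetConnection.
//
//     producer.GetOutputSlot(0).Connect(consumer.GetInputSlot(0));
//     assert(consumer.GetInputSlot(0).GetConnectedOutputSlot() ==
//            &producer.GetOutputSlot(0));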

// InputSlot inlines that need OutputSlot declaration.

inline InputSlot::~InputSlot()
{
    if (m_Connection != nullptr)
    {
        try
        {
            // Coverity fix: Disconnect() may throw uncaught exceptions.
            m_Connection->Disconnect(*this);
        }
        catch (const std::exception& e)
        {
            // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
            // exception of type std::length_error.
            // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
            std::cerr << "WARNING: An error has occurred when disconnecting an input slot: "
                      << e.what() << std::endl;
        }
    }
}

inline const IOutputSlot* InputSlot::GetConnection() const { return GetConnectedOutputSlot(); }
inline IOutputSlot* InputSlot::GetConnection() { return GetConnectedOutputSlot(); }

class ScopedCpuTensorHandle;

// Base layer class

using LayerPriority = unsigned int;

class Layer : public IConnectableLayer
{
public:
    /// @param name - Optional name for the layer (may be nullptr).
    Layer(unsigned int numInputSlots, unsigned int numOutputSlots, LayerType type, const char* name);
    Layer(unsigned int numInputSlots, unsigned int numOutputSlots, LayerType type, DataLayout layout, const char* name);

    const std::string& GetNameStr() const
    {
        return m_LayerName;
    }

    const OutputHandler& GetOutputHandler(unsigned int i = 0) const
    {
        return m_OutputHandlers[i];
    }

    OutputHandler& GetOutputHandler(unsigned int i = 0)
    {
        return const_cast<OutputHandler&>(const_cast<const Layer*>(this)->GetOutputHandler(i));
    }
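    // (The const_cast pair above is the standard idiom for forwarding a
    // non-const accessor to its const overload instead of duplicating the body;
    // it is safe because the object is known to be non-const in this context.)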

    const std::vector<InputSlot>& GetInputSlots() const { return m_InputSlots; }
    const std::vector<OutputSlot>& GetOutputSlots() const { return m_OutputSlots; }

    // Allows non-const access to input slots, but doesn't expose the vector (its size is fixed at layer construction).
    std::vector<InputSlot>::iterator BeginInputSlots() { return m_InputSlots.begin(); }
    std::vector<InputSlot>::iterator EndInputSlots() { return m_InputSlots.end(); }

    // Allows non-const access to output slots, but doesn't expose the vector (its size is fixed at layer construction).
    std::vector<OutputSlot>::iterator BeginOutputSlots() { return m_OutputSlots.begin(); }
    std::vector<OutputSlot>::iterator EndOutputSlots() { return m_OutputSlots.end(); }

    // Checks whether none of this layer's outputs has a connection.
    bool IsOutputUnconnected()
    {
        unsigned int numConnections = 0;

        for (auto&& output : GetOutputSlots())
        {
            numConnections += output.GetNumConnections();
        }

        return (GetNumOutputSlots() > 0) && (numConnections == 0);
    }

    // Used for sorting.
    void ResetPriority() const;
    LayerPriority GetPriority() const;
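
    // A sketch of how these can drive a sort over the graph's layers (assuming
    // a std::vector<Layer*> named layers; GetPriority computes and caches the
    // value on demand after a ResetPriority pass):
    //
    //     for (const Layer* layer : layers) { layer->ResetPriority(); }
    //     std::stable_sort(layers.begin(), layers.end(),
    //                      [](const Layer* a, const Layer* b)
    //                      { return a->GetPriority() < b->GetPriority(); });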

    LayerType GetType() const { return m_Type; }

    DataType GetDataType() const;

    const BackendId& GetBackendId() const { return m_BackendId; }
    void SetBackendId(const BackendId& id) { m_BackendId = id; }

    // Virtuals

    virtual std::unique_ptr<IWorkload> CreateWorkload(const Graph& graph, const IWorkloadFactory& factory) const = 0;

    virtual void CreateTensorHandles(Graph& graph, const IWorkloadFactory& factory);

    /// Creates a dynamically-allocated copy of this layer.
    /// @param graph - The Graph into which this Layer is being cloned.
    virtual Layer* Clone(Graph& graph) const = 0;

    void VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation& location) const;

    virtual void ValidateTensorShapesFromInputs() = 0;

    std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape>& inputShapes) const override;

    /// Helper to serialize the layer parameters to string
    /// (currently used in DotSerializer and company).
    virtual void SerializeLayerParameters(ParameterStringifyFunction&) const {}

    // Frees up the constant source data.
    virtual void ReleaseConstantData();

    template<typename Op>
    void OperateOnConstantTensors(Op op)
    {
        for (auto constant : GetConstantTensorsByRef())
        {
            if (constant.get())
            {
                op(constant);
            }
        }
    }
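
    // For example, ReleaseConstantData can be implemented on top of this
    // helper; a sketch (ScopedCpuTensorHandle is the handle type used by
    // ConstantTensors below):
    //
    //     OperateOnConstantTensors([](std::unique_ptr<ScopedCpuTensorHandle>& handle)
    //     {
    //         handle.reset();
    //     });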

    // IConnectableLayer

    const char* GetName() const override { return m_LayerName.c_str(); }

    unsigned int GetNumInputSlots() const override { return static_cast<unsigned int>(m_InputSlots.size()); }
    unsigned int GetNumOutputSlots() const override { return static_cast<unsigned int>(m_OutputSlots.size()); }

    const InputSlot& GetInputSlot(unsigned int index) const override { return m_InputSlots.at(index); }
    InputSlot& GetInputSlot(unsigned int index) override { return m_InputSlots.at(index); }
    const OutputSlot& GetOutputSlot(unsigned int index = 0) const override { return m_OutputSlots.at(index); }
    OutputSlot& GetOutputSlot(unsigned int index = 0) override { return m_OutputSlots.at(index); }

    void SetGuid(LayerGuid guid) { m_Guid = guid; }
    LayerGuid GetGuid() const final { return m_Guid; }

    void AddRelatedLayerName(const std::string& layerName) { m_RelatedLayerNames.emplace_back(layerName); }

    const std::list<std::string>& GetRelatedLayerNames() { return m_RelatedLayerNames; }

protected:
    // Graph needs access to the virtual destructor.
    friend class Graph;
    virtual ~Layer() = default;

    template <typename QueueDescriptor>
    void CollectQueueDescriptorInputs(QueueDescriptor& descriptor, WorkloadInfo& info, const Graph& graph) const
    {
        WorkloadDataCollector dataCollector(descriptor.m_Inputs, info.m_InputTensorInfos);
        CollectWorkloadInputs(dataCollector, graph);
    }

    template <typename QueueDescriptor>
    void CollectQueueDescriptorOutputs(QueueDescriptor& descriptor, WorkloadInfo& info, const Graph& graph) const
    {
        WorkloadDataCollector dataCollector(descriptor.m_Outputs, info.m_OutputTensorInfos);
        CollectWorkloadOutputs(dataCollector, graph);
    }

    /// Helper function to reduce duplication in *Layer::CreateWorkload.
    template <typename QueueDescriptor>
    WorkloadInfo PrepInfoAndDesc(QueueDescriptor& descriptor, const Graph& graph) const
    {
        WorkloadInfo info;
        CollectQueueDescriptorInputs(descriptor, info, graph);
        CollectQueueDescriptorOutputs(descriptor, info, graph);
        return info;
    }
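
    // A sketch of the typical use in a derived layer's CreateWorkload override
    // (ExampleLayer, ExampleQueueDescriptor and CreateExample are hypothetical
    // names, not part of this header):
    //
    //     std::unique_ptr<IWorkload> ExampleLayer::CreateWorkload(
    //         const Graph& graph, const IWorkloadFactory& factory) const
    //     {
    //         ExampleQueueDescriptor descriptor;
    //         return factory.CreateExample(descriptor, PrepInfoAndDesc(descriptor, graph));
    //     }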

    template <typename LayerType, typename ... Params>
    LayerType* CloneBase(Graph& graph, Params&& ... params) const;
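
    // Typical use in a derived class's Clone (ExampleLayer and m_Param are
    // hypothetical):
    //
    //     ExampleLayer* ExampleLayer::Clone(Graph& graph) const
    //     {
    //         return CloneBase<ExampleLayer>(graph, m_Param, GetName());
    //     }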

    // Retrieves handles to the constant tensors.
    using ConstantTensors = std::vector<std::reference_wrapper<std::unique_ptr<ScopedCpuTensorHandle>>>;
    virtual ConstantTensors GetConstantTensorsByRef() { return ConstantTensors(); }

private:
    void CollectWorkloadInputs(WorkloadDataCollector& dataCollector, const Graph& graph) const;
    void CollectWorkloadOutputs(WorkloadDataCollector& dataCollector, const Graph& graph) const;

protected:
    std::vector<OutputHandler> m_OutputHandlers;

private:
    const std::string m_LayerName;

    std::vector<InputSlot> m_InputSlots;
    std::vector<OutputSlot> m_OutputSlots;

    const LayerType m_Type;
    BackendId m_BackendId;

    /// Used for sorting.
    mutable LayerPriority m_Priority = 0;
    mutable bool m_Visiting = false;

    LayerGuid m_Guid;

    std::list<std::string> m_RelatedLayerNames;
};

// A layer to which user-provided data can be bound (e.g. inputs, outputs).
class BindableLayer : public Layer
{
public:
    BindableLayer(unsigned int numInputSlots,
                  unsigned int numOutputSlots,
                  LayerType type,
                  const char* name,
                  LayerBindingId id)
        : Layer(numInputSlots, numOutputSlots, type, name)
        , m_Id(id)
    {
    }

    LayerBindingId GetBindingId() const { return m_Id; }

protected:
    ~BindableLayer() = default;

private:
    LayerBindingId m_Id;
};
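
// A sketch of how the binding id is used (not part of this header): the
// LayerBindingId is the identifier user code passes alongside input/output
// tensors when running a network, so a bindable layer can be matched to the
// user-provided data.
//
//     const BindableLayer& boundLayer = ...; // e.g. an input layer
//     LayerBindingId id = boundLayer.GetBindingId();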

} // namespace armnn