//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//


#include "ClTensorHandleFactory.hpp"
#include "ClTensorHandle.hpp"

#include <armnn/utility/NumericCast.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <arm_compute/runtime/CL/CLTensor.h>
#include <arm_compute/core/Coordinates.h>
#include <arm_compute/runtime/CL/CLSubTensor.h>


namespace armnn
{

using FactoryId = ITensorHandleFactory::FactoryId;

std::unique_ptr<ITensorHandle> ClTensorHandleFactory::CreateSubTensorHandle(ITensorHandle& parent,
                                                                            const TensorShape& subTensorShape,
                                                                            const unsigned int* subTensorOrigin) const
{
    arm_compute::Coordinates coords;
    arm_compute::TensorShape shape = armcomputetensorutils::BuildArmComputeTensorShape(subTensorShape);

    coords.set_num_dimensions(subTensorShape.GetNumDimensions());
    for (unsigned int i = 0; i < subTensorShape.GetNumDimensions(); ++i)
    {
        // Arm compute indexes tensor coords in reverse order.
        unsigned int revertedIndex = subTensorShape.GetNumDimensions() - i - 1;
        coords.set(i, armnn::numeric_cast<int>(subTensorOrigin[revertedIndex]));
    }

    const arm_compute::TensorShape parentShape = armcomputetensorutils::BuildArmComputeTensorShape(
            parent.GetShape());

    // For ACL to support sub-tensors, the concat axis cannot be on x or y, and the x and y values
    // must match the parent shape.
    if (coords.x() != 0 || coords.y() != 0)
    {
        return nullptr;
    }
    if ((parentShape.x() != shape.x()) || (parentShape.y() != shape.y()))
    {
        return nullptr;
    }
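    // For example (shapes are illustrative only), splitting a [1, 4, 16, 16] parent into two
    // [1, 2, 16, 16] sub-tensors along the channel axis passes these checks, since x and y
    // (width and height) are untouched; a split along width or height returns nullptr here and
    // no sub-tensor view is created.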

    if (!::arm_compute::error_on_invalid_subtensor(__func__, __FILE__, __LINE__, parentShape, coords, shape))
    {
        return nullptr;
    }

    return std::make_unique<ClSubTensorHandle>(
            PolymorphicDowncast<IClTensorHandle*>(&parent), shape, coords);
}
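// Hypothetical call site (not part of this file): a concat or splitter workload that only touches
// an outer dimension can request a view into the parent handle instead of a separate allocation.
// The names factory, parentHandle and origin below are assumptions for illustration:
//
//     // origin {0, 2, 0, 0} selects channels 2-3 of a [1, 4, 16, 16] parent
//     std::unique_ptr<ITensorHandle> view =
//         factory.CreateSubTensorHandle(parentHandle, TensorShape({1, 2, 16, 16}), origin);
//     // view is nullptr if the region cannot be expressed as an ACL sub-tensor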

std::unique_ptr<ITensorHandle> ClTensorHandleFactory::CreateTensorHandle(const TensorInfo& tensorInfo) const
{
    return ClTensorHandleFactory::CreateTensorHandle(tensorInfo, true);
}

std::unique_ptr<ITensorHandle> ClTensorHandleFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                         DataLayout dataLayout) const
{
    return ClTensorHandleFactory::CreateTensorHandle(tensorInfo, dataLayout, true);
}

std::unique_ptr<ITensorHandle> ClTensorHandleFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                         const bool IsMemoryManaged) const
{
    std::unique_ptr<ClTensorHandle> tensorHandle = std::make_unique<ClTensorHandle>(tensorInfo);
    if (!IsMemoryManaged)
    {
        ARMNN_LOG(warning) << "ClTensorHandleFactory only has support for memory managed.";
    }
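    // Note: the handle is added to the inter-layer memory group even when IsMemoryManaged is
    // false, which is why the unmanaged case only emits the warning above.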
    tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());
    return tensorHandle;
}

std::unique_ptr<ITensorHandle> ClTensorHandleFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                         DataLayout dataLayout,
                                                                         const bool IsMemoryManaged) const
{
    std::unique_ptr<ClTensorHandle> tensorHandle = std::make_unique<ClTensorHandle>(tensorInfo, dataLayout);
    if (!IsMemoryManaged)
    {
        ARMNN_LOG(warning) << "ClTensorHandleFactory only has support for memory managed.";
    }
    tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());
    return tensorHandle;
}

const FactoryId& ClTensorHandleFactory::GetIdStatic()
{
    static const FactoryId s_Id(ClTensorHandleFactoryId());
    return s_Id;
}

const FactoryId& ClTensorHandleFactory::GetId() const
{
    return GetIdStatic();
}

bool ClTensorHandleFactory::SupportsSubTensors() const
{
    return true;
}

MemorySourceFlags ClTensorHandleFactory::GetExportFlags() const
{
    return m_ExportFlags;
}

MemorySourceFlags ClTensorHandleFactory::GetImportFlags() const
{
    return m_ImportFlags;
}

} // namespace armnn