//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "RefTensorHandle.hpp"

#include <armnn/utility/Assert.hpp> // ARMNN_ASSERT, ARMNN_ASSERT_MSG
#include <cstring>                  // memcpy

namespace armnn
{

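// Managed-memory constructor: storage is obtained later through Manage()/Allocate()
// (pooled via RefMemoryManager, or heap-allocated if Manage() is never called).
// Import is disabled in this mode.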
RefTensorHandle::RefTensorHandle(const TensorInfo& tensorInfo, std::shared_ptr<RefMemoryManager>& memoryManager):
    m_TensorInfo(tensorInfo),
    m_MemoryManager(memoryManager),
    m_Pool(nullptr),
    m_UnmanagedMemory(nullptr),
    m_ImportFlags(static_cast<MemorySourceFlags>(MemorySource::Undefined)),
    m_Imported(false),
    m_IsImportEnabled(false)
{
}

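// Import-enabled constructor: the handle does not allocate storage itself; the caller
// provides a buffer through Import() that matches one of the given MemorySourceFlags.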
RefTensorHandle::RefTensorHandle(const TensorInfo& tensorInfo,
                                 MemorySourceFlags importFlags)
    : m_TensorInfo(tensorInfo),
      m_Pool(nullptr),
      m_UnmanagedMemory(nullptr),
      m_ImportFlags(importFlags),
      m_Imported(false),
      m_IsImportEnabled(true)
{
}

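// Only memory that this handle allocated itself is released here; imported buffers and
// pool-managed memory remain owned by the caller and the RefMemoryManager respectively.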
RefTensorHandle::~RefTensorHandle()
{
    if (!m_Pool)
    {
        // unmanaged
        if (!m_Imported)
        {
            ::operator delete(m_UnmanagedMemory);
        }
    }
}

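// Registers the tensor with the memory manager so it can be placed in a shared pool.
// Must be called at most once, before Allocate(), and is a no-op when import is enabled.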
void RefTensorHandle::Manage()
{
    if (!m_IsImportEnabled)
    {
        ARMNN_ASSERT_MSG(!m_Pool, "RefTensorHandle::Manage() called twice");
        ARMNN_ASSERT_MSG(!m_UnmanagedMemory, "RefTensorHandle::Manage() called after Allocate()");

        m_Pool = m_MemoryManager->Manage(m_TensorInfo.GetNumBytes());
    }
}

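// Backs the tensor with real memory: either a fresh heap block (unmanaged) or the pool
// slot previously reserved by Manage(). Throws if the handle already holds unmanaged memory.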
void RefTensorHandle::Allocate()
{
    // If import is enabled, do not allocate the tensor
    if (!m_IsImportEnabled)
    {
        if (!m_UnmanagedMemory)
        {
            if (!m_Pool)
            {
                // unmanaged
                m_UnmanagedMemory = ::operator new(m_TensorInfo.GetNumBytes());
            }
            else
            {
                m_MemoryManager->Allocate(m_Pool);
            }
        }
        else
        {
            throw InvalidArgumentException("RefTensorHandle::Allocate Trying to allocate a RefTensorHandle "
                                           "that already has allocated memory.");
        }
    }
}

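// Map() simply exposes the backing pointer; CPU memory needs no separate mapping step.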
const void* RefTensorHandle::Map(bool /*unused*/) const
{
    return GetPointer();
}

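// Resolves the backing storage: unmanaged or imported memory takes precedence over the
// memory-manager pool; throws if the handle has neither.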
void* RefTensorHandle::GetPointer() const
{
    if (m_UnmanagedMemory)
    {
        return m_UnmanagedMemory;
    }
    else if (m_Pool)
    {
        return m_MemoryManager->GetPointer(m_Pool);
    }
    else
    {
        throw NullPointerException("RefTensorHandle::GetPointer called on unmanaged, unallocated tensor handle");
    }
}

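// Copy helpers for moving tensor data in and out of this handle; both buffers are
// assumed to hold at least m_TensorInfo.GetNumBytes() bytes.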
void RefTensorHandle::CopyOutTo(void* dest) const
{
    const void* src = GetPointer();
    ARMNN_ASSERT(src);
    memcpy(dest, src, m_TensorInfo.GetNumBytes());
}

void RefTensorHandle::CopyInFrom(const void* src)
{
    void* dest = GetPointer();
    ARMNN_ASSERT(dest);
    memcpy(dest, src, m_TensorInfo.GetNumBytes());
}

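// Adopts a caller-owned buffer as the tensor's storage. Succeeds only when import is
// enabled, the source is MemorySource::Malloc, the pointer is suitably aligned, and the
// handle has not already allocated its own memory. Re-importing over a previous import
// replaces the stored pointer.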
bool RefTensorHandle::Import(void* memory, MemorySource source)
{
    if (m_ImportFlags & static_cast<MemorySourceFlags>(source))
    {
        if (m_IsImportEnabled && source == MemorySource::Malloc)
        {
            // Check memory alignment
            if (!CanBeImported(memory, source))
            {
                if (m_Imported)
                {
                    m_Imported = false;
                    m_UnmanagedMemory = nullptr;
                }
                return false;
            }

            // m_UnmanagedMemory not yet allocated.
            if (!m_Imported && !m_UnmanagedMemory)
            {
                m_UnmanagedMemory = memory;
                m_Imported = true;
                return true;
            }

            // m_UnmanagedMemory initially allocated with Allocate().
            if (!m_Imported && m_UnmanagedMemory)
            {
                return false;
            }

            // m_UnmanagedMemory previously imported.
            if (m_Imported)
            {
                m_UnmanagedMemory = memory;
                return true;
            }
        }
    }

    return false;
}

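// Pre-flight check for Import(): import must be enabled, the source must be
// MemorySource::Malloc and permitted by the import flags, and the pointer must be
// aligned to the tensor's element size.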
bool RefTensorHandle::CanBeImported(void* memory, MemorySource source)
{
    if (m_ImportFlags & static_cast<MemorySourceFlags>(source))
    {
        if (m_IsImportEnabled && source == MemorySource::Malloc)
        {
            uintptr_t alignment = GetDataTypeSize(m_TensorInfo.GetDataType());
            if (reinterpret_cast<uintptr_t>(memory) % alignment)
            {
                return false;
            }
            return true;
        }
    }
    return false;
}

} // namespace armnn
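
// A minimal usage sketch for the import path, using only the functions defined above and
// assuming a TensorInfo (`info`) that describes the caller-owned buffer `userBuffer`:
//
//   RefTensorHandle handle(info, static_cast<MemorySourceFlags>(MemorySource::Malloc));
//   if (handle.CanBeImported(userBuffer, MemorySource::Malloc) &&
//       handle.Import(userBuffer, MemorySource::Malloc))
//   {
//       const void* data = handle.Map(true); // reads and writes go straight to userBuffer
//   }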