//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "RefTensorHandle.hpp"

#include <armnn/utility/Assert.hpp> // ARMNN_ASSERT, ARMNN_ASSERT_MSG
#include <armnn/TypesUtils.hpp>     // GetDataTypeSize
#include <cstdint>                  // uintptr_t
#include <cstring>                  // memcpy

namespace armnn
{

RefTensorHandle::RefTensorHandle(const TensorInfo& tensorInfo, std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_TensorInfo(tensorInfo),
      m_MemoryManager(memoryManager),
      m_Pool(nullptr),
      m_UnmanagedMemory(nullptr),
      m_Imported(false),
      m_IsImportEnabled(false)
{
}

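// Constructor for the import-enabled path: no memory manager is attached, so
// backing memory is expected to be imported from the caller rather than pooled.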
RefTensorHandle::RefTensorHandle(const TensorInfo& tensorInfo)
    : m_TensorInfo(tensorInfo),
      m_Pool(nullptr),
      m_UnmanagedMemory(nullptr),
      m_Imported(false),
      m_IsImportEnabled(true)
{
}

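// Release unmanaged memory only if this handle allocated it itself; imported
// memory is owned by the caller, and pooled memory by the memory manager.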
RefTensorHandle::~RefTensorHandle()
{
    if (!m_Pool)
    {
        // unmanaged
        if (!m_Imported)
        {
            ::operator delete(m_UnmanagedMemory);
        }
    }
}

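// Reserve a slot in the memory manager's pool. Only valid when import is
// disabled; the actual backing memory is acquired later via Allocate().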
void RefTensorHandle::Manage()
{
    if (!m_IsImportEnabled)
    {
        ARMNN_ASSERT_MSG(!m_Pool, "RefTensorHandle::Manage() called twice");
        ARMNN_ASSERT_MSG(!m_UnmanagedMemory, "RefTensorHandle::Manage() called after Allocate()");

        m_Pool = m_MemoryManager->Manage(m_TensorInfo.GetNumBytes());
    }
}

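// Acquire backing memory: through the memory manager if Manage() reserved a
// pool slot, otherwise as a one-off unmanaged allocation.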
void RefTensorHandle::Allocate()
{
    // If import is enabled, do not allocate the tensor.
    if (!m_IsImportEnabled)
    {
        if (!m_UnmanagedMemory)
        {
            if (!m_Pool)
            {
                // unmanaged
                m_UnmanagedMemory = ::operator new(m_TensorInfo.GetNumBytes());
            }
            else
            {
                m_MemoryManager->Allocate(m_Pool);
            }
        }
        else
        {
            throw InvalidArgumentException("RefTensorHandle::Allocate Trying to allocate a RefTensorHandle "
                                           "that already has allocated memory.");
        }
    }
}

const void* RefTensorHandle::Map(bool /*unused*/) const
{
    return GetPointer();
}

void* RefTensorHandle::GetPointer() const
{
    if (m_UnmanagedMemory)
    {
        return m_UnmanagedMemory;
    }
    else if (m_Pool)
    {
        return m_MemoryManager->GetPointer(m_Pool);
    }
    else
    {
        throw NullPointerException("RefTensorHandle::GetPointer called on unmanaged, unallocated tensor handle");
    }
}

void RefTensorHandle::CopyOutTo(void* dest) const
{
    const void* src = GetPointer();
    ARMNN_ASSERT(src);
    memcpy(dest, src, m_TensorInfo.GetNumBytes());
}

void RefTensorHandle::CopyInFrom(const void* src)
{
    void* dest = GetPointer();
    ARMNN_ASSERT(dest);
    memcpy(dest, src, m_TensorInfo.GetNumBytes());
}

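// Import is only supported from Malloc (CPU) memory, and only when this
// handle was constructed with import enabled.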
MemorySourceFlags RefTensorHandle::GetImportFlags() const
{
    if (m_IsImportEnabled)
    {
        return static_cast<MemorySourceFlags>(MemorySource::Malloc);
    }
    else
    {
        return static_cast<MemorySourceFlags>(MemorySource::Undefined);
    }
}

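// Adopt caller-owned memory as this handle's backing store. Fails (and clears
// any previous import) if the pointer is not suitably aligned.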
bool RefTensorHandle::Import(void* memory, MemorySource source)
{
    if (m_IsImportEnabled && source == MemorySource::Malloc)
    {
        // Check memory alignment.
        if (!CanBeImported(memory, source))
        {
            if (m_Imported)
            {
                m_Imported = false;
                m_UnmanagedMemory = nullptr;
            }
            return false;
        }

        // m_UnmanagedMemory not yet allocated.
        if (!m_Imported && !m_UnmanagedMemory)
        {
            m_UnmanagedMemory = memory;
            m_Imported = true;
            return true;
        }

        // m_UnmanagedMemory initially allocated with Allocate().
        if (!m_Imported && m_UnmanagedMemory)
        {
            return false;
        }

        // m_UnmanagedMemory previously imported.
        if (m_Imported)
        {
            m_UnmanagedMemory = memory;
            return true;
        }
    }

    return false;
}

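// A pointer can be imported only if it is aligned to the tensor's element size.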
bool RefTensorHandle::CanBeImported(void* memory, MemorySource source)
{
    if (m_IsImportEnabled && source == MemorySource::Malloc)
    {
        uintptr_t alignment = GetDataTypeSize(m_TensorInfo.GetDataType());
        if (reinterpret_cast<uintptr_t>(memory) % alignment)
        {
            return false;
        }
        return true;
    }
    return false;
}

} // namespace armnn
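
// Lifecycle sketch (illustrative comment only, not built; the TensorInfo
// `info`, the std::shared_ptr<RefMemoryManager> `memoryManager`, and the
// caller buffer `buffer` are assumptions, not part of this file):
//
//   // Managed path: reserve a slot in the pool, then allocate through it
//   // (the pool itself is acquired by the memory manager elsewhere).
//   RefTensorHandle managed(info, memoryManager);
//   managed.Manage();
//   managed.Allocate();
//
//   // Import path: adopt suitably aligned, caller-owned memory.
//   RefTensorHandle imported(info);
//   if (imported.Import(buffer, MemorySource::Malloc))
//   {
//       // `buffer` now backs the handle; ownership stays with the caller.
//   }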