Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 1 | // |
| 2 | // Copyright © 2017 Arm Ltd. All rights reserved. |
| 3 | // SPDX-License-Identifier: MIT |
| 4 | // |
#include "RefTensorHandle.hpp"

#include <cstdint>
#include <cstring>

| 7 | namespace armnn |
| 8 | { |
| 9 | |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 10 | RefTensorHandle::RefTensorHandle(const TensorInfo &tensorInfo, std::shared_ptr<RefMemoryManager> &memoryManager): |
Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 11 | m_TensorInfo(tensorInfo), |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 12 | m_MemoryManager(memoryManager), |
| 13 | m_Pool(nullptr), |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame^] | 14 | m_UnmanagedMemory(nullptr), |
| 15 | m_ImportFlags(static_cast<MemorySourceFlags>(MemorySource::Undefined)), |
| 16 | m_Imported(false) |
| 17 | { |
| 18 | |
| 19 | } |
| 20 | |
| 21 | RefTensorHandle::RefTensorHandle(const TensorInfo& tensorInfo, std::shared_ptr<RefMemoryManager> &memoryManager, |
| 22 | MemorySourceFlags importFlags) |
| 23 | : m_TensorInfo(tensorInfo), |
| 24 | m_MemoryManager(memoryManager), |
| 25 | m_Pool(nullptr), |
| 26 | m_UnmanagedMemory(nullptr), |
| 27 | m_ImportFlags(importFlags), |
| 28 | m_Imported(false) |
Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 29 | { |
| 30 | |
| 31 | } |
| 32 | |
| 33 | RefTensorHandle::~RefTensorHandle() |
| 34 | { |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 35 | if (!m_Pool) |
| 36 | { |
| 37 | // unmanaged |
| 38 | ::operator delete(m_UnmanagedMemory); |
| 39 | } |
| 40 | } |
| 41 | |
| 42 | void RefTensorHandle::Manage() |
| 43 | { |
| 44 | BOOST_ASSERT_MSG(!m_Pool, "RefTensorHandle::Manage() called twice"); |
| 45 | BOOST_ASSERT_MSG(!m_UnmanagedMemory, "RefTensorHandle::Manage() called after Allocate()"); |
| 46 | |
| 47 | m_Pool = m_MemoryManager->Manage(m_TensorInfo.GetNumBytes()); |
Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 48 | } |
| 49 | |
| 50 | void RefTensorHandle::Allocate() |
| 51 | { |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 52 | if (!m_UnmanagedMemory) |
Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 53 | { |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 54 | if (!m_Pool) |
| 55 | { |
| 56 | // unmanaged |
| 57 | m_UnmanagedMemory = ::operator new(m_TensorInfo.GetNumBytes()); |
| 58 | } |
| 59 | else |
| 60 | { |
| 61 | m_MemoryManager->Allocate(m_Pool); |
| 62 | } |
Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 63 | } |
| 64 | else |
| 65 | { |
| 66 | throw InvalidArgumentException("RefTensorHandle::Allocate Trying to allocate a RefTensorHandle" |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 67 | "that already has allocated memory."); |
Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 68 | } |
| 69 | } |
| 70 | |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 71 | const void* RefTensorHandle::Map(bool /*unused*/) const |
Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 72 | { |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 73 | return GetPointer(); |
Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 74 | } |
| 75 | |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 76 | void* RefTensorHandle::GetPointer() const |
Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 77 | { |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 78 | if (m_UnmanagedMemory) |
| 79 | { |
| 80 | return m_UnmanagedMemory; |
| 81 | } |
| 82 | else |
| 83 | { |
| 84 | BOOST_ASSERT_MSG(m_Pool, "RefTensorHandle::GetPointer called on unmanaged, unallocated tensor handle"); |
| 85 | return m_MemoryManager->GetPointer(m_Pool); |
| 86 | } |
Matthew Bentham | 4cefc41 | 2019-06-18 16:14:34 +0100 | [diff] [blame] | 87 | } |
| 88 | |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 89 | void RefTensorHandle::CopyOutTo(void* dest) const |
| 90 | { |
| 91 | const void *src = GetPointer(); |
| 92 | BOOST_ASSERT(src); |
| 93 | memcpy(dest, src, m_TensorInfo.GetNumBytes()); |
| 94 | } |
| 95 | |
| 96 | void RefTensorHandle::CopyInFrom(const void* src) |
| 97 | { |
| 98 | void *dest = GetPointer(); |
| 99 | BOOST_ASSERT(dest); |
| 100 | memcpy(dest, src, m_TensorInfo.GetNumBytes()); |
| 101 | } |
| 102 | |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame^] | 103 | bool RefTensorHandle::Import(void* memory, MemorySource source) |
| 104 | { |
| 105 | |
| 106 | if (m_ImportFlags & static_cast<MemorySourceFlags>(source)) |
| 107 | { |
| 108 | if (source == MemorySource::Malloc) |
| 109 | { |
| 110 | // Checks the 16 byte memory alignment. |
| 111 | if (reinterpret_cast<uint64_t>(memory) % 16) |
| 112 | { |
| 113 | return false; |
| 114 | } |
| 115 | |
| 116 | // m_UnmanagedMemory not yet allocated. |
| 117 | if (!m_Imported && !m_UnmanagedMemory) |
| 118 | { |
| 119 | m_UnmanagedMemory = memory; |
| 120 | m_Imported = true; |
| 121 | return true; |
| 122 | } |
| 123 | |
| 124 | // m_UnmanagedMemory initially allocated with Allocate(). |
| 125 | if (!m_Imported && m_UnmanagedMemory) |
| 126 | { |
| 127 | return false; |
| 128 | } |
| 129 | |
| 130 | // m_UnmanagedMemory previously imported. |
| 131 | if (m_Imported) |
| 132 | { |
| 133 | m_UnmanagedMemory = memory; |
| 134 | return true; |
| 135 | } |
| 136 | } |
| 137 | } |
| 138 | |
| 139 | return false; |
| 140 | } |
| 141 | |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 142 | } |