blob: 7d86b110a76a5f57688213f7eec9840007ec5bfc [file] [log] [blame]
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
5#include "RefTensorHandle.hpp"
6
7namespace armnn
8{
9
Matthew Bentham7c1603a2019-06-21 17:22:23 +010010RefTensorHandle::RefTensorHandle(const TensorInfo &tensorInfo, std::shared_ptr<RefMemoryManager> &memoryManager):
Matthew Bentham4cefc412019-06-18 16:14:34 +010011 m_TensorInfo(tensorInfo),
Matthew Bentham7c1603a2019-06-21 17:22:23 +010012 m_MemoryManager(memoryManager),
13 m_Pool(nullptr),
Ferran Balaguerbfeb2712019-08-07 15:14:56 +010014 m_UnmanagedMemory(nullptr),
15 m_ImportFlags(static_cast<MemorySourceFlags>(MemorySource::Undefined)),
16 m_Imported(false)
17{
18
19}
20
21RefTensorHandle::RefTensorHandle(const TensorInfo& tensorInfo, std::shared_ptr<RefMemoryManager> &memoryManager,
22 MemorySourceFlags importFlags)
23 : m_TensorInfo(tensorInfo),
24 m_MemoryManager(memoryManager),
25 m_Pool(nullptr),
26 m_UnmanagedMemory(nullptr),
27 m_ImportFlags(importFlags),
28 m_Imported(false)
Matthew Bentham4cefc412019-06-18 16:14:34 +010029{
30
31}
32
33RefTensorHandle::~RefTensorHandle()
34{
Matthew Bentham7c1603a2019-06-21 17:22:23 +010035 if (!m_Pool)
36 {
37 // unmanaged
Ferran Balaguer1cd451c2019-08-22 14:09:44 +010038 if (!m_Imported)
39 {
40 ::operator delete(m_UnmanagedMemory);
41 }
Matthew Bentham7c1603a2019-06-21 17:22:23 +010042 }
43}
44
45void RefTensorHandle::Manage()
46{
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010047 ARMNN_ASSERT_MSG(!m_Pool, "RefTensorHandle::Manage() called twice");
48 ARMNN_ASSERT_MSG(!m_UnmanagedMemory, "RefTensorHandle::Manage() called after Allocate()");
Matthew Bentham7c1603a2019-06-21 17:22:23 +010049
50 m_Pool = m_MemoryManager->Manage(m_TensorInfo.GetNumBytes());
Matthew Bentham4cefc412019-06-18 16:14:34 +010051}
52
53void RefTensorHandle::Allocate()
54{
Matthew Bentham7c1603a2019-06-21 17:22:23 +010055 if (!m_UnmanagedMemory)
Matthew Bentham4cefc412019-06-18 16:14:34 +010056 {
Matthew Bentham7c1603a2019-06-21 17:22:23 +010057 if (!m_Pool)
58 {
59 // unmanaged
60 m_UnmanagedMemory = ::operator new(m_TensorInfo.GetNumBytes());
61 }
62 else
63 {
64 m_MemoryManager->Allocate(m_Pool);
65 }
Matthew Bentham4cefc412019-06-18 16:14:34 +010066 }
67 else
68 {
69 throw InvalidArgumentException("RefTensorHandle::Allocate Trying to allocate a RefTensorHandle"
Matthew Bentham7c1603a2019-06-21 17:22:23 +010070 "that already has allocated memory.");
Matthew Bentham4cefc412019-06-18 16:14:34 +010071 }
72}
73
Matthew Bentham7c1603a2019-06-21 17:22:23 +010074const void* RefTensorHandle::Map(bool /*unused*/) const
Matthew Bentham4cefc412019-06-18 16:14:34 +010075{
Matthew Bentham7c1603a2019-06-21 17:22:23 +010076 return GetPointer();
Matthew Bentham4cefc412019-06-18 16:14:34 +010077}
78
Matthew Bentham7c1603a2019-06-21 17:22:23 +010079void* RefTensorHandle::GetPointer() const
Matthew Bentham4cefc412019-06-18 16:14:34 +010080{
Matthew Bentham7c1603a2019-06-21 17:22:23 +010081 if (m_UnmanagedMemory)
82 {
83 return m_UnmanagedMemory;
84 }
85 else
86 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010087 ARMNN_ASSERT_MSG(m_Pool, "RefTensorHandle::GetPointer called on unmanaged, unallocated tensor handle");
Matthew Bentham7c1603a2019-06-21 17:22:23 +010088 return m_MemoryManager->GetPointer(m_Pool);
89 }
Matthew Bentham4cefc412019-06-18 16:14:34 +010090}
91
Matthew Bentham7c1603a2019-06-21 17:22:23 +010092void RefTensorHandle::CopyOutTo(void* dest) const
93{
94 const void *src = GetPointer();
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010095 ARMNN_ASSERT(src);
Matthew Bentham7c1603a2019-06-21 17:22:23 +010096 memcpy(dest, src, m_TensorInfo.GetNumBytes());
97}
98
99void RefTensorHandle::CopyInFrom(const void* src)
100{
101 void *dest = GetPointer();
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100102 ARMNN_ASSERT(dest);
Matthew Bentham7c1603a2019-06-21 17:22:23 +0100103 memcpy(dest, src, m_TensorInfo.GetNumBytes());
104}
105
Ferran Balaguerbfeb2712019-08-07 15:14:56 +0100106bool RefTensorHandle::Import(void* memory, MemorySource source)
107{
108
109 if (m_ImportFlags & static_cast<MemorySourceFlags>(source))
110 {
111 if (source == MemorySource::Malloc)
112 {
Aron Virginas-Tard9f7c8b2019-09-13 13:37:03 +0100113 // Check memory alignment
114 constexpr uintptr_t alignment = sizeof(size_t);
115 if (reinterpret_cast<uintptr_t>(memory) % alignment)
Ferran Balaguerbfeb2712019-08-07 15:14:56 +0100116 {
Ferran Balaguer1cd451c2019-08-22 14:09:44 +0100117 if (m_Imported)
118 {
119 m_Imported = false;
120 m_UnmanagedMemory = nullptr;
121 }
122
Ferran Balaguerbfeb2712019-08-07 15:14:56 +0100123 return false;
124 }
125
126 // m_UnmanagedMemory not yet allocated.
127 if (!m_Imported && !m_UnmanagedMemory)
128 {
129 m_UnmanagedMemory = memory;
130 m_Imported = true;
131 return true;
132 }
133
134 // m_UnmanagedMemory initially allocated with Allocate().
135 if (!m_Imported && m_UnmanagedMemory)
136 {
137 return false;
138 }
139
140 // m_UnmanagedMemory previously imported.
141 if (m_Imported)
142 {
143 m_UnmanagedMemory = memory;
144 return true;
145 }
146 }
147 }
148
149 return false;
150}
151
Matthew Bentham7c1603a2019-06-21 17:22:23 +0100152}