Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 1 | /* |
Georgios Pinitas | 035004e | 2021-04-13 19:44:17 +0100 | [diff] [blame] | 2 | * Copyright (c) 2016-2021 Arm Limited. |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 3 | * |
| 4 | * SPDX-License-Identifier: MIT |
| 5 | * |
| 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy |
| 7 | * of this software and associated documentation files (the "Software"), to |
| 8 | * deal in the Software without restriction, including without limitation the |
| 9 | * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or |
| 10 | * sell copies of the Software, and to permit persons to whom the Software is |
| 11 | * furnished to do so, subject to the following conditions: |
| 12 | * |
| 13 | * The above copyright notice and this permission notice shall be included in all |
| 14 | * copies or substantial portions of the Software. |
| 15 | * |
| 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
| 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
| 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
| 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
| 22 | * SOFTWARE. |
| 23 | */ |
| 24 | #include "arm_compute/runtime/CL/CLTensorAllocator.h" |
| 25 | |
| 26 | #include "arm_compute/core/Error.h" |
| 27 | #include "arm_compute/core/TensorInfo.h" |
Pablo Tello | db8485a | 2019-09-24 11:03:47 +0100 | [diff] [blame] | 28 | #include "arm_compute/runtime/CL/CLRuntimeContext.h" |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 29 | #include "arm_compute/runtime/CL/CLScheduler.h" |
| 30 | |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 31 | namespace arm_compute |
| 32 | { |
// Shared default-constructed (null) buffer returned by cl_data() when a tensor has no backing memory
const cl::Buffer CLTensorAllocator::_empty_buffer = cl::Buffer();
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 34 | namespace |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 35 | { |
Georgios Pinitas | 035004e | 2021-04-13 19:44:17 +0100 | [diff] [blame] | 36 | /** Global user-defined allocator that can be used for all internal allocations of a CLTensor */ |
| 37 | static IAllocator *static_global_cl_allocator = nullptr; |
| 38 | |
Georgios Pinitas | 4c5469b | 2019-05-21 13:32:43 +0100 | [diff] [blame] | 39 | /** Helper function used to allocate the backing memory of a tensor |
| 40 | * |
| 41 | * @param[in] context OpenCL context to use |
| 42 | * @param[in] size Size of the allocation |
| 43 | * @param[in] alignment Alignment of the allocation |
| 44 | * |
| 45 | * @return A wrapped memory region |
| 46 | */ |
Pablo Tello | db8485a | 2019-09-24 11:03:47 +0100 | [diff] [blame] | 47 | std::unique_ptr<ICLMemoryRegion> allocate_region(CLCoreRuntimeContext *ctx, size_t size, cl_uint alignment) |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 48 | { |
| 49 | // Try fine-grain SVM |
Georgios Pinitas | 40f51a6 | 2020-11-21 03:04:18 +0000 | [diff] [blame] | 50 | std::unique_ptr<ICLMemoryRegion> region = std::make_unique<CLFineSVMMemoryRegion>(ctx, |
| 51 | CL_MEM_READ_WRITE | CL_MEM_SVM_FINE_GRAIN_BUFFER, |
| 52 | size, |
| 53 | alignment); |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 54 | |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 55 | // Try coarse-grain SVM in case of failure |
| 56 | if(region != nullptr && region->ptr() == nullptr) |
| 57 | { |
Georgios Pinitas | 40f51a6 | 2020-11-21 03:04:18 +0000 | [diff] [blame] | 58 | region = std::make_unique<CLCoarseSVMMemoryRegion>(ctx, CL_MEM_READ_WRITE, size, alignment); |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 59 | } |
| 60 | // Try legacy buffer memory in case of failure |
| 61 | if(region != nullptr && region->ptr() == nullptr) |
| 62 | { |
Georgios Pinitas | 40f51a6 | 2020-11-21 03:04:18 +0000 | [diff] [blame] | 63 | region = std::make_unique<CLBufferMemoryRegion>(ctx, CL_MEM_ALLOC_HOST_PTR | CL_MEM_READ_WRITE, size); |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 64 | } |
| 65 | return region; |
| 66 | } |
Georgios Pinitas | 4c5469b | 2019-05-21 13:32:43 +0100 | [diff] [blame] | 67 | /** Clears quantization arrays |
| 68 | * |
| 69 | * @param[in, out] scale Quantization scale array |
| 70 | * @param[in, out] offset Quantization offset array |
| 71 | */ |
| 72 | void clear_quantization_arrays(CLFloatArray &scale, CLInt32Array &offset) |
| 73 | { |
| 74 | // Clear arrays |
| 75 | scale = CLFloatArray(); |
| 76 | offset = CLInt32Array(); |
| 77 | } |
| 78 | /** Helper function used to create quantization data arrays |
| 79 | * |
| 80 | * @param[in, out] scale Quantization scale array |
| 81 | * @param[in, out] offset Quantization offset array |
| 82 | * @param[in] qinfo Quantization info |
| 83 | * @param[in] pad_size Pad size to use in case array needs to be padded for computation purposes |
Georgios Pinitas | 4c5469b | 2019-05-21 13:32:43 +0100 | [diff] [blame] | 84 | */ |
| 85 | void populate_quantization_info(CLFloatArray &scale, CLInt32Array &offset, const QuantizationInfo &qinfo, size_t pad_size) |
| 86 | { |
| 87 | clear_quantization_arrays(scale, offset); |
| 88 | |
| 89 | // Create scale array |
Georgios Pinitas | 3d13af8 | 2019-06-04 13:04:16 +0100 | [diff] [blame] | 90 | const std::vector<float> &qscale = qinfo.scale(); |
| 91 | const size_t num_elements = qscale.size(); |
| 92 | const size_t element_size = sizeof(std::remove_reference<decltype(qscale)>::type::value_type); |
| 93 | scale = CLFloatArray(num_elements + pad_size); |
Georgios Pinitas | 4c5469b | 2019-05-21 13:32:43 +0100 | [diff] [blame] | 94 | scale.resize(num_elements); |
Georgios Pinitas | 3d13af8 | 2019-06-04 13:04:16 +0100 | [diff] [blame] | 95 | CLScheduler::get().queue().enqueueWriteBuffer(scale.cl_buffer(), CL_TRUE, 0, num_elements * element_size, qinfo.scale().data()); |
Michalis Spyrou | 3f632f3 | 2019-08-22 16:52:00 +0100 | [diff] [blame] | 96 | |
| 97 | if(!qinfo.offset().empty()) |
| 98 | { |
| 99 | // Create offset array |
| 100 | const std::vector<int32_t> &qoffset = qinfo.offset(); |
| 101 | const size_t offset_element_size = sizeof(std::remove_reference<decltype(qoffset)>::type::value_type); |
| 102 | offset = CLInt32Array(num_elements + pad_size); |
| 103 | offset.resize(num_elements); |
| 104 | CLScheduler::get().queue().enqueueWriteBuffer(offset.cl_buffer(), CL_TRUE, 0, num_elements * offset_element_size, qinfo.offset().data()); |
| 105 | } |
Georgios Pinitas | 4c5469b | 2019-05-21 13:32:43 +0100 | [diff] [blame] | 106 | } |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 107 | } // namespace |
| 108 | |
/** Construct an allocator, optionally bound to a memory-manageable owner and a runtime context.
 *
 * No device memory is acquired here; allocation is deferred to allocate().
 * A null ctx selects the legacy CLScheduler singleton code paths later on.
 */
CLTensorAllocator::CLTensorAllocator(IMemoryManageable *owner, CLRuntimeContext *ctx)
    : _ctx(ctx), _owner(owner), _associated_memory_group(nullptr), _memory(), _mapping(nullptr), _scale(), _offset()
{
}
| 113 | |
Georgios Pinitas | 4c5469b | 2019-05-21 13:32:43 +0100 | [diff] [blame] | 114 | CLQuantization CLTensorAllocator::quantization() const |
| 115 | { |
| 116 | return { &_scale, &_offset }; |
| 117 | } |
| 118 | |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 119 | uint8_t *CLTensorAllocator::data() |
| 120 | { |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 121 | return _mapping; |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 122 | } |
| 123 | |
| 124 | const cl::Buffer &CLTensorAllocator::cl_data() const |
| 125 | { |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 126 | return _memory.region() == nullptr ? _empty_buffer : _memory.cl_region()->cl_data(); |
Pablo Tello | e86a09f | 2018-01-11 15:44:48 +0000 | [diff] [blame] | 127 | } |
| 128 | |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 129 | void CLTensorAllocator::allocate() |
| 130 | { |
Georgios Pinitas | 4c5469b | 2019-05-21 13:32:43 +0100 | [diff] [blame] | 131 | // Allocate tensor backing memory |
Georgios Pinitas | baf174e | 2017-09-08 19:47:30 +0100 | [diff] [blame] | 132 | if(_associated_memory_group == nullptr) |
| 133 | { |
Georgios Pinitas | b785dd4 | 2019-09-19 12:09:32 +0100 | [diff] [blame] | 134 | // Perform memory allocation |
Georgios Pinitas | 035004e | 2021-04-13 19:44:17 +0100 | [diff] [blame] | 135 | if(static_global_cl_allocator != nullptr) |
| 136 | { |
| 137 | _memory.set_owned_region(static_global_cl_allocator->make_region(info().total_size(), 0)); |
| 138 | } |
| 139 | else if(_ctx == nullptr) |
Pablo Tello | db8485a | 2019-09-24 11:03:47 +0100 | [diff] [blame] | 140 | { |
| 141 | auto legacy_ctx = CLCoreRuntimeContext(nullptr, CLScheduler::get().context(), CLScheduler::get().queue()); |
| 142 | _memory.set_owned_region(allocate_region(&legacy_ctx, info().total_size(), 0)); |
| 143 | } |
| 144 | else |
| 145 | { |
| 146 | _memory.set_owned_region(allocate_region(_ctx->core_runtime_context(), info().total_size(), 0)); |
| 147 | } |
Georgios Pinitas | baf174e | 2017-09-08 19:47:30 +0100 | [diff] [blame] | 148 | } |
| 149 | else |
| 150 | { |
Georgios Pinitas | 035004e | 2021-04-13 19:44:17 +0100 | [diff] [blame] | 151 | // Finalize memory management instead |
Georgios Pinitas | 26014cf | 2019-09-09 19:00:57 +0100 | [diff] [blame] | 152 | _associated_memory_group->finalize_memory(_owner, _memory, info().total_size(), alignment()); |
Georgios Pinitas | baf174e | 2017-09-08 19:47:30 +0100 | [diff] [blame] | 153 | } |
Georgios Pinitas | 4c5469b | 2019-05-21 13:32:43 +0100 | [diff] [blame] | 154 | |
| 155 | // Allocate and fill the quantization parameter arrays |
Michalis Spyrou | c853021 | 2019-08-22 11:44:04 +0100 | [diff] [blame] | 156 | if(is_data_type_quantized_per_channel(info().data_type())) |
Georgios Pinitas | 4c5469b | 2019-05-21 13:32:43 +0100 | [diff] [blame] | 157 | { |
| 158 | const size_t pad_size = 0; |
| 159 | populate_quantization_info(_scale, _offset, info().quantization_info(), pad_size); |
| 160 | } |
| 161 | |
| 162 | // Lock allocator |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 163 | info().set_is_resizable(false); |
| 164 | } |
| 165 | |
/** Release the backing memory and reset the allocator to its unallocated state. */
void CLTensorAllocator::free()
{
    // Drop the host-side mapping pointer
    _mapping = nullptr;
    // Unbind/release the backing memory region
    _memory.set_region(nullptr);
    // Discard per-channel quantization arrays tied to the old buffer
    clear_quantization_arrays(_scale, _offset);
    // Tensor may be reconfigured again
    info().set_is_resizable(true);
}
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 173 | |
Georgios Pinitas | 4d0351c | 2019-04-03 15:11:16 +0100 | [diff] [blame] | 174 | Status CLTensorAllocator::import_memory(cl::Buffer buffer) |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 175 | { |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 176 | ARM_COMPUTE_RETURN_ERROR_ON(buffer.get() == nullptr); |
Georgios Pinitas | 4d0351c | 2019-04-03 15:11:16 +0100 | [diff] [blame] | 177 | ARM_COMPUTE_RETURN_ERROR_ON(buffer.getInfo<CL_MEM_SIZE>() < info().total_size()); |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 178 | ARM_COMPUTE_RETURN_ERROR_ON(buffer.getInfo<CL_MEM_CONTEXT>().get() != CLScheduler::get().context().get()); |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 179 | ARM_COMPUTE_RETURN_ERROR_ON(_associated_memory_group != nullptr); |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 180 | |
Pablo Tello | db8485a | 2019-09-24 11:03:47 +0100 | [diff] [blame] | 181 | if(_ctx == nullptr) |
| 182 | { |
| 183 | auto legacy_ctx = CLCoreRuntimeContext(nullptr, CLScheduler::get().context(), CLScheduler::get().queue()); |
Georgios Pinitas | 40f51a6 | 2020-11-21 03:04:18 +0000 | [diff] [blame] | 184 | _memory.set_owned_region(std::make_unique<CLBufferMemoryRegion>(buffer, &legacy_ctx)); |
Pablo Tello | db8485a | 2019-09-24 11:03:47 +0100 | [diff] [blame] | 185 | } |
| 186 | else |
| 187 | { |
Georgios Pinitas | 40f51a6 | 2020-11-21 03:04:18 +0000 | [diff] [blame] | 188 | _memory.set_owned_region(std::make_unique<CLBufferMemoryRegion>(buffer, _ctx->core_runtime_context())); |
Pablo Tello | db8485a | 2019-09-24 11:03:47 +0100 | [diff] [blame] | 189 | } |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 190 | |
Pablo Tello | db8485a | 2019-09-24 11:03:47 +0100 | [diff] [blame] | 191 | info().set_is_resizable(false); |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 192 | return Status{}; |
| 193 | } |
| 194 | |
/** Bind this allocator to a memory group that will manage its allocation lifetime. */
void CLTensorAllocator::set_associated_memory_group(IMemoryGroup *associated_memory_group)
{
    ARM_COMPUTE_ERROR_ON(associated_memory_group == nullptr);
    // Re-binding to a *different* group is not allowed once one is set
    ARM_COMPUTE_ERROR_ON(_associated_memory_group != nullptr && _associated_memory_group != associated_memory_group);
    // Cannot hand over management after backing memory has already been allocated
    ARM_COMPUTE_ERROR_ON(_memory.region() != nullptr && _memory.cl_region()->cl_data().get() != nullptr);

    _associated_memory_group = associated_memory_group;
}
| 203 | |
/** Install a process-wide allocator used for all subsequent CLTensor allocations.
 *
 * Pass nullptr to restore the default allocation strategy.
 * NOTE(review): unsynchronized write to a global — presumably intended to be
 * called once during setup, before tensors are allocated; confirm with callers.
 */
void CLTensorAllocator::set_global_allocator(IAllocator *allocator)
{
    static_global_cl_allocator = allocator;
}
| 208 | |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 209 | uint8_t *CLTensorAllocator::lock() |
| 210 | { |
Pablo Tello | db8485a | 2019-09-24 11:03:47 +0100 | [diff] [blame] | 211 | if(_ctx) |
| 212 | { |
| 213 | return map(_ctx->gpu_scheduler()->queue(), true); |
| 214 | } |
| 215 | else |
| 216 | { |
| 217 | return map(CLScheduler::get().queue(), true); |
| 218 | } |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 219 | } |
| 220 | |
| 221 | void CLTensorAllocator::unlock() |
| 222 | { |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 223 | ARM_COMPUTE_ERROR_ON(_memory.region() == nullptr); |
Pablo Tello | db8485a | 2019-09-24 11:03:47 +0100 | [diff] [blame] | 224 | if(_ctx) |
| 225 | { |
| 226 | unmap(_ctx->gpu_scheduler()->queue(), reinterpret_cast<uint8_t *>(_memory.region()->buffer())); |
| 227 | } |
| 228 | else |
| 229 | { |
| 230 | //Legacy singleton api |
| 231 | unmap(CLScheduler::get().queue(), reinterpret_cast<uint8_t *>(_memory.region()->buffer())); |
| 232 | } |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 233 | } |
| 234 | |
| 235 | uint8_t *CLTensorAllocator::map(cl::CommandQueue &q, bool blocking) |
| 236 | { |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 237 | ARM_COMPUTE_ERROR_ON(_mapping != nullptr); |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 238 | ARM_COMPUTE_ERROR_ON(_memory.region() == nullptr); |
| 239 | ARM_COMPUTE_ERROR_ON(_memory.region()->buffer() != nullptr); |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 240 | |
| 241 | _mapping = reinterpret_cast<uint8_t *>(_memory.cl_region()->map(q, blocking)); |
| 242 | return _mapping; |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 243 | } |
| 244 | |
| 245 | void CLTensorAllocator::unmap(cl::CommandQueue &q, uint8_t *mapping) |
| 246 | { |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 247 | ARM_COMPUTE_ERROR_ON(_mapping == nullptr); |
| 248 | ARM_COMPUTE_ERROR_ON(_mapping != mapping); |
Georgios Pinitas | 99d4095 | 2018-04-23 16:26:46 +0100 | [diff] [blame] | 249 | ARM_COMPUTE_ERROR_ON(_memory.region() == nullptr); |
| 250 | ARM_COMPUTE_ERROR_ON(_memory.region()->buffer() == nullptr); |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 251 | ARM_COMPUTE_UNUSED(mapping); |
| 252 | |
| 253 | _memory.cl_region()->unmap(q); |
| 254 | _mapping = nullptr; |
Anthony Barbier | 6ff3b19 | 2017-09-04 18:44:23 +0100 | [diff] [blame] | 255 | } |
Georgios Pinitas | df31036 | 2018-11-14 13:16:56 +0000 | [diff] [blame] | 256 | } // namespace arm_compute |