/*
 * Copyright (c) 2017-2020 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/runtime/CL/functions/CLCannyEdge.h"

#include "arm_compute/core/CL/ICLTensor.h"
#include "arm_compute/core/CL/OpenCL.h"
#include "arm_compute/core/Error.h"
#include "arm_compute/core/Validate.h"
#include "arm_compute/runtime/CL/CLScheduler.h"
#include "arm_compute/runtime/CL/functions/CLSobel3x3.h"
#include "arm_compute/runtime/CL/functions/CLSobel5x5.h"
#include "arm_compute/runtime/CL/functions/CLSobel7x7.h"
#include "src/core/CL/kernels/CLCannyEdgeKernel.h"
#include "src/core/CL/kernels/CLFillBorderKernel.h"
#include "src/core/CL/kernels/CLSobel5x5Kernel.h"
#include "src/core/CL/kernels/CLSobel7x7Kernel.h"

using namespace arm_compute;

CLCannyEdge::CLCannyEdge(std::shared_ptr<IMemoryManager> memory_manager) // NOLINT
    : _memory_group(std::move(memory_manager)),
      _sobel(),
      _gradient(std::make_unique<CLGradientKernel>()),
      _border_mag_gradient(std::make_unique<CLFillBorderKernel>()),
      _non_max_suppr(std::make_unique<CLEdgeNonMaxSuppressionKernel>()),
      _edge_trace(std::make_unique<CLEdgeTraceKernel>()),
      _gx(),
      _gy(),
      _mag(),
      _phase(),
      _nonmax(),
      _visited(),
      _recorded(),
      _l1_list_counter(),
      _l1_stack(),
      _output(nullptr)
{
}

CLCannyEdge::~CLCannyEdge() = default;

void CLCannyEdge::configure(ICLTensor *input, ICLTensor *output, int32_t upper_thr, int32_t lower_thr, int32_t gradient_size, int32_t norm_type, BorderMode border_mode,
                            uint8_t constant_border_value)
{
    configure(CLKernelLibrary::get().get_compile_context(), input, output, upper_thr, lower_thr, gradient_size, norm_type, border_mode, constant_border_value);
}

void CLCannyEdge::configure(const CLCompileContext &compile_context, ICLTensor *input, ICLTensor *output, int32_t upper_thr, int32_t lower_thr, int32_t gradient_size, int32_t norm_type,
                            BorderMode border_mode,
                            uint8_t constant_border_value)
{
    ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
    ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8);
    ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8);
    ARM_COMPUTE_ERROR_ON((1 != norm_type) && (2 != norm_type));
    ARM_COMPUTE_ERROR_ON((gradient_size != 3) && (gradient_size != 5) && (gradient_size != 7));
    ARM_COMPUTE_ERROR_ON((lower_thr < 0) || (lower_thr >= upper_thr));

    _output = output;

    const unsigned int L1_hysteresis_stack_size = 8;
    const TensorShape  shape                    = input->info()->tensor_shape();

    TensorInfo gradient_info;
    TensorInfo info;

    // Initialize images
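    // Sobel 3x3 and 5x5 gradients on U8 input fit in 16-bit images, while Sobel 7x7 can
    // exceed that range, so 32-bit gradient and magnitude images are used for gradient_size == 7.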
    if(gradient_size < 7)
    {
        gradient_info.init(shape, 1, arm_compute::DataType::S16);
        info.init(shape, 1, arm_compute::DataType::U16);
    }
    else
    {
        gradient_info.init(shape, 1, arm_compute::DataType::S32);
        info.init(shape, 1, arm_compute::DataType::U32);
    }

    _gx.allocator()->init(gradient_info);
    _gy.allocator()->init(gradient_info);
    _mag.allocator()->init(info);
    _nonmax.allocator()->init(info);

    TensorInfo info_u8(shape, 1, arm_compute::DataType::U8);
    _phase.allocator()->init(info_u8);
    _l1_list_counter.allocator()->init(info_u8);

    TensorInfo info_u32(shape, 1, arm_compute::DataType::U32);
    _visited.allocator()->init(info_u32);
    _recorded.allocator()->init(info_u32);

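    // Reserve L1_hysteresis_stack_size (8) S32 stack entries per input pixel for the
    // hysteresis edge-tracing pass.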
    TensorShape shape_l1_stack = input->info()->tensor_shape();
    shape_l1_stack.set(0, input->info()->dimension(0) * L1_hysteresis_stack_size);
    TensorInfo info_s32(shape_l1_stack, 1, arm_compute::DataType::S32);
    _l1_stack.allocator()->init(info_s32);

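    // Intermediate tensors are registered with the memory group before the kernels that
    // use them are configured, and allocated as soon as no further kernel needs them at
    // configure time, so the memory manager can reuse their backing memory.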
    // Manage intermediate buffers
    _memory_group.manage(&_gx);
    _memory_group.manage(&_gy);

    // Configure/Init sobelNxN
    if(gradient_size == 3)
    {
        auto k = std::make_unique<CLSobel3x3>();
        k->configure(compile_context, input, &_gx, &_gy, border_mode, constant_border_value);
        _sobel = std::move(k);
    }
    else if(gradient_size == 5)
    {
        auto k = std::make_unique<CLSobel5x5>();
        k->configure(compile_context, input, &_gx, &_gy, border_mode, constant_border_value);
        _sobel = std::move(k);
    }
    else if(gradient_size == 7)
    {
        auto k = std::make_unique<CLSobel7x7>();
        k->configure(compile_context, input, &_gx, &_gy, border_mode, constant_border_value);
        _sobel = std::move(k);
    }
    else
    {
        ARM_COMPUTE_ERROR_VAR("Gradient size %d not supported", gradient_size);
    }

    // Manage intermediate buffers
    _memory_group.manage(&_mag);
    _memory_group.manage(&_phase);

    // Configure gradient
    _gradient->configure(compile_context, &_gx, &_gy, &_mag, &_phase, norm_type);

    // Allocate intermediate buffers
    _gx.allocator()->allocate();
    _gy.allocator()->allocate();

    // Manage intermediate buffers
    _memory_group.manage(&_nonmax);

    // Configure non-maxima suppression
    _non_max_suppr->configure(compile_context, &_mag, &_phase, &_nonmax, lower_thr, border_mode == BorderMode::UNDEFINED);

    // Allocate intermediate buffers
    _phase.allocator()->allocate();

    // Fill border around magnitude image as non-maxima suppression will access
    // it. If border mode is undefined filling the border is a nop.
    _border_mag_gradient->configure(compile_context, &_mag, _non_max_suppr->border_size(), border_mode, constant_border_value);

    // Allocate intermediate buffers
    _mag.allocator()->allocate();

    // Manage intermediate buffers
    _memory_group.manage(&_visited);
    _memory_group.manage(&_recorded);
    _memory_group.manage(&_l1_stack);
    _memory_group.manage(&_l1_list_counter);

    // Configure edge tracing
    _edge_trace->configure(compile_context, &_nonmax, output, upper_thr, lower_thr, &_visited, &_recorded, &_l1_stack, &_l1_list_counter);

    // Allocate intermediate buffers
    _visited.allocator()->allocate();
    _recorded.allocator()->allocate();
    _l1_stack.allocator()->allocate();
    _l1_list_counter.allocator()->allocate();
    _nonmax.allocator()->allocate();
}

void CLCannyEdge::run()
{
    MemoryGroupResourceScope scope_mg(_memory_group);

    // Run sobel
    _sobel->run();

    // Run phase and magnitude calculation
    CLScheduler::get().enqueue(*_gradient, false);

    // Fill border before non-maxima suppression. Nop for border mode undefined.
    CLScheduler::get().enqueue(*_border_mag_gradient, false);

    // Run non-maxima suppression
    _nonmax.clear(CLScheduler::get().queue());
    CLScheduler::get().enqueue(*_non_max_suppr, false);

    // Clear temporary structures and run edge trace
    _output->clear(CLScheduler::get().queue());
    _visited.clear(CLScheduler::get().queue());
    _recorded.clear(CLScheduler::get().queue());
    _l1_list_counter.clear(CLScheduler::get().queue());
    _l1_stack.clear(CLScheduler::get().queue());
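    // Only this final enqueue requests a flush of the command queue; the earlier
    // enqueues pass flush = false.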
    CLScheduler::get().enqueue(*_edge_trace, true);
}