blob: e558e84e2879a503f6448bf53dc29fb51c001053 [file] [log] [blame]
Richard Burtondc0c6ed2020-04-08 16:39:05 +01001# Copyright © 2020 Arm Ltd. All rights reserved.
2# SPDX-License-Identifier: MIT
3import os
4
5import pytest
Jan Eilers1b2654f2021-09-24 15:45:46 +01006import warnings
Richard Burtondc0c6ed2020-04-08 16:39:05 +01007import numpy as np
8
9import pyarmnn as ann
10
11
@pytest.fixture(scope="function")
def random_runtime(shared_data_folder):
    """Yield a CpuRef runtime for mock_model.tflite together with random
    uint8 input tensors and empty output tensors for its last subgraph.

    Yields: (preferred_backends, network, runtime, input_tensors, output_tensors)
    """
    tflite_parser = ann.ITfLiteParser()
    network = tflite_parser.CreateNetworkFromBinaryFile(os.path.join(shared_data_folder, 'mock_model.tflite'))
    preferred_backends = [ann.BackendId('CpuRef')]
    options = ann.CreationOptions()

    runtime = ann.IRuntime(options)

    # Use the last subgraph in the model.
    graph_id = tflite_parser.GetSubgraphCount() - 1
    input_names = tflite_parser.GetSubgraphInputTensorNames(graph_id)

    input_binding_info = tflite_parser.GetNetworkInputBindingInfo(graph_id, input_names[0])
    input_tensor_id = input_binding_info[0]
    input_tensor_info = input_binding_info[1]
    # Mark the input tensor info as constant so it can back a ConstTensor.
    input_tensor_info.SetConstant()

    # Random uint8 payload sized to the input tensor.
    input_data = np.random.randint(255, size=input_tensor_info.GetNumElements(), dtype=np.uint8)
    input_tensors = [(input_tensor_id, ann.ConstTensor(input_tensor_info, input_data))]

    # One (id, Tensor) pair per subgraph output.
    output_tensors = []
    for output_name in tflite_parser.GetSubgraphOutputTensorNames(graph_id):
        out_bind_info = tflite_parser.GetNetworkOutputBindingInfo(graph_id, output_name)
        output_tensors.append((out_bind_info[0], ann.Tensor(out_bind_info[1])))

    yield preferred_backends, network, runtime, input_tensors, output_tensors
52
53
@pytest.fixture(scope='function')
def mock_model_runtime(shared_data_folder):
    """Load mock_model.tflite onto a CpuRef runtime with recorded input data.

    Yields: (runtime, net_id, input_tensors, output_tensors) ready for
    EnqueueWorkload.
    """
    tflite_parser = ann.ITfLiteParser()
    network = tflite_parser.CreateNetworkFromBinaryFile(os.path.join(shared_data_folder, 'mock_model.tflite'))
    graph_id = 0

    input_binding_info = tflite_parser.GetNetworkInputBindingInfo(graph_id, "input_1")
    input_tensor_data = np.load(os.path.join(shared_data_folder, 'tflite_parser/input_lite.npy'))

    preferred_backends = [ann.BackendId('CpuRef')]

    options = ann.CreationOptions()
    runtime = ann.IRuntime(options)

    opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions())
    # Surface optimizer/load diagnostics in the pytest captured output.
    print(messages)

    net_id, messages = runtime.LoadNetwork(opt_network)
    print(messages)

    input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data])

    outputs_binding_info = [tflite_parser.GetNetworkOutputBindingInfo(graph_id, name)
                            for name in tflite_parser.GetSubgraphOutputTensorNames(graph_id)]
    output_tensors = ann.make_output_tensors(outputs_binding_info)

    yield runtime, net_id, input_tensors, output_tensors
88
89
def test_python_disowns_network(random_runtime):
    """After LoadNetwork, SWIG ownership of the optimized network must move
    to the C++ runtime (thisown flag cleared on the Python proxy)."""
    preferred_backends, network, runtime = random_runtime[:3]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    runtime.LoadNetwork(opt_network)

    assert not opt_network.thisown
100
alexander73010782021-10-18 19:17:24 +0100101
def test_load_network(random_runtime):
    """Loading an optimized network succeeds with no error message and the
    first network id (0)."""
    preferred_backends, network, runtime = random_runtime[:3]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    net_id, messages = runtime.LoadNetwork(opt_network)
    assert messages == ""
    assert 0 == net_id
113
alexander73010782021-10-18 19:17:24 +0100114
def test_create_runtime_with_external_profiling_enabled():
    """A runtime can be created with valid file-only external profiling
    options (timeline reporting on, profiling enabled)."""
    options = ann.CreationOptions()

    for attr, value in (("m_FileOnly", True),
                        ("m_EnableProfiling", True),
                        ("m_OutgoingCaptureFile", "/tmp/outgoing.txt"),
                        ("m_IncomingCaptureFile", "/tmp/incoming.txt"),
                        ("m_TimelineEnabled", True),
                        ("m_CapturePeriod", 1000),
                        ("m_FileFormat", "JSON")):
        setattr(options.m_ProfilingOptions, attr, value)

    runtime = ann.IRuntime(options)

    assert runtime is not None
130
alexander73010782021-10-18 19:17:24 +0100131
def test_create_runtime_with_external_profiling_enabled_invalid_options():
    """Timeline reporting without profiling enabled is an invalid combination
    and must make IRuntime construction raise."""
    options = ann.CreationOptions()

    for attr, value in (("m_FileOnly", True),
                        ("m_EnableProfiling", False),  # invalid: timeline needs profiling
                        ("m_OutgoingCaptureFile", "/tmp/outgoing.txt"),
                        ("m_IncomingCaptureFile", "/tmp/incoming.txt"),
                        ("m_TimelineEnabled", True),
                        ("m_CapturePeriod", 1000),
                        ("m_FileFormat", "JSON")):
        setattr(options.m_ProfilingOptions, attr, value)

    with pytest.raises(RuntimeError) as err:
        runtime = ann.IRuntime(options)

    expected_error_message = "It is not possible to enable timeline reporting without profiling being enabled"
    assert expected_error_message in str(err.value)
149
Richard Burtondc0c6ed2020-04-08 16:39:05 +0100150
def test_load_network_properties_provided(random_runtime):
    """LoadNetwork also succeeds when explicit INetworkProperties are given."""
    preferred_backends, network, runtime = random_runtime[:3]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    properties = ann.INetworkProperties(True, True)
    net_id, messages = runtime.LoadNetwork(opt_network, properties)
    assert messages == ""
    assert 0 == net_id
163
alexander73010782021-10-18 19:17:24 +0100164
def test_network_properties_constructor(random_runtime):
    """The MemorySource-based INetworkProperties constructor must set the
    expected defaults, and the resulting properties must load successfully.
    """
    preferred_backends = random_runtime[0]
    network = random_runtime[1]
    runtime = random_runtime[2]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    inputSource = ann.MemorySource_Undefined
    outputSource = ann.MemorySource_Undefined
    properties = ann.INetworkProperties(True, inputSource, outputSource)
    # Idiom fix: assert truthiness directly instead of `== True` / `== False`
    # comparisons (PEP 8 / flake8 E712).
    assert properties.m_AsyncEnabled
    assert not properties.m_ProfilingEnabled
    assert properties.m_OutputNetworkDetailsMethod == ann.ProfilingDetailsMethod_Undefined
    assert properties.m_InputSource == ann.MemorySource_Undefined
    assert properties.m_OutputSource == ann.MemorySource_Undefined

    net_id, messages = runtime.LoadNetwork(opt_network, properties)
    assert "" == messages
    assert net_id == 0
185
alexander73010782021-10-18 19:17:24 +0100186
def test_network_properties_deprecated_constructor():
    # NOTE(review): this test only emits a DeprecationWarning itself and then
    # asserts that pytest observed it — it never calls the deprecated
    # INetworkProperties overload, so it cannot fail if that deprecation is ever
    # removed. Presumably a placeholder; consider invoking the deprecated
    # constructor form under pytest.warns instead — TODO confirm intent.
    with pytest.warns(DeprecationWarning):
        warnings.warn("Deprecated: Use constructor with MemorySource argument instead.", DeprecationWarning)
Richard Burtondc0c6ed2020-04-08 16:39:05 +0100190
alexander73010782021-10-18 19:17:24 +0100191
def test_unload_network_fails_for_invalid_net_id(random_runtime):
    """Unloading a network id that was never loaded raises RuntimeError."""
    preferred_backends, network, runtime = random_runtime[:3]

    ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions())

    # Nothing was loaded, so any id is invalid.
    with pytest.raises(RuntimeError) as err:
        runtime.UnloadNetwork(9)

    assert "Failed to unload network." in str(err.value)
204
205
def test_enqueue_workload(random_runtime):
    """A loaded network executes a single inference without raising."""
    preferred_backends, network, runtime, input_tensors, output_tensors = random_runtime

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    net_id, _ = runtime.LoadNetwork(opt_network)
    runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)
218
219
def test_enqueue_workload_fails_with_empty_input_tensors(random_runtime):
    """EnqueueWorkload must reject an input-tensor list whose length does not
    match the network's inputs (here: an empty list)."""
    preferred_backends, network, runtime = random_runtime[:3]
    output_tensors = random_runtime[4]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    net_id, _ = runtime.LoadNetwork(opt_network)
    with pytest.raises(RuntimeError) as err:
        runtime.EnqueueWorkload(net_id, [], output_tensors)

    assert "Number of inputs provided does not match network." in str(err.value)
236
237
@pytest.mark.x86_64
@pytest.mark.parametrize('count', [5])
def test_multiple_inference_runs_yield_same_result(count, mock_model_runtime):
    """
    Test that results remain consistent among multiple runs of the same inference.
    """
    runtime = mock_model_runtime[0]
    net_id = mock_model_runtime[1]
    input_tensors = mock_model_runtime[2]
    output_tensors = mock_model_runtime[3]

    expected_results = np.array([[4, 85, 108, 29, 8, 16, 0, 2, 5, 0]])

    for _ in range(count):
        runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)

        output_vectors = ann.workload_tensors_to_ndarray(output_tensors)

        # Bug fix: the original `out.all() == expected.all()` reduced each array
        # to a single truth value before comparing, so the assertion passed for
        # almost any output. Compare the arrays element-wise instead.
        for out, expected in zip(output_vectors, expected_results):
            assert np.array_equal(out, expected)
258
259
@pytest.mark.aarch64
def test_aarch64_inference_results(mock_model_runtime):
    """A single inference on aarch64 must reproduce the known-good output."""
    runtime = mock_model_runtime[0]
    net_id = mock_model_runtime[1]
    input_tensors = mock_model_runtime[2]
    output_tensors = mock_model_runtime[3]

    runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)

    output_vectors = ann.workload_tensors_to_ndarray(output_tensors)

    # Bug fix: dropped the redundant chained assignment
    # (`expected_outputs = expected_results = ...`) — only one name was needed.
    expected_results = np.array([[4, 85, 108, 29, 8, 16, 0, 2, 5, 0]])

    # Bug fix: `out.all() == expected.all()` compared two scalar truth values,
    # passing for almost any output; compare the arrays element-wise instead.
    for out, expected in zip(output_vectors, expected_results):
        assert np.array_equal(out, expected)
276
277
def test_enqueue_workload_with_profiler(random_runtime):
    """
    Tests ArmNN's profiling extension
    """
    preferred_backends, network, runtime, input_tensors, output_tensors = random_runtime

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())
    net_id, _ = runtime.LoadNetwork(opt_network)

    profiler = runtime.GetProfiler(net_id)

    # Profiling defaults to off; enable it before running the inference.
    assert profiler.IsProfilingEnabled() is False
    profiler.EnableProfiling(True)
    assert profiler.IsProfilingEnabled() is True

    runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)

    # The JSON profile must be non-empty and contain the ArmNN marker.
    str_profile = profiler.as_json()
    assert len(str_profile) != 0
    assert str_profile.find('\"ArmNN\": {') > 0

    # The event-log analysis must contain its table header.
    str_events_analysis = profiler.event_log()
    assert "Event Sequence - Name | Duration (ms) | Start (ms) | Stop (ms) | Device" in str_events_analysis

    # The profiler proxy is owned by the C++ runtime, not by Python/SWIG.
    assert profiler.thisown == 0
316
317
def test_check_runtime_swig_ownership(random_runtime):
    """The runtime proxy must be owned by SWIG (thisown set) so the underlying
    C++ object is released automatically when Python garbage-collects it."""
    _, _, runtime, _, _ = random_runtime
    assert runtime.thisown