# Copyright © 2020 Arm Ltd. All rights reserved.
# SPDX-License-Identifier: MIT
import os

import pytest
import warnings
import numpy as np

import pyarmnn as ann


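# Fixture: parses the mock TFLite model, creates a CpuRef runtime, and builds
# random uint8 input tensors plus empty output tensors for the model's last subgraph.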
@pytest.fixture(scope="function")
def random_runtime(shared_data_folder):
    parser = ann.ITfLiteParser()
    network = parser.CreateNetworkFromBinaryFile(os.path.join(shared_data_folder, 'mock_model.tflite'))
    preferred_backends = [ann.BackendId('CpuRef')]
    options = ann.CreationOptions()

    runtime = ann.IRuntime(options)

    graphs_count = parser.GetSubgraphCount()

    graph_id = graphs_count - 1
    input_names = parser.GetSubgraphInputTensorNames(graph_id)

    input_binding_info = parser.GetNetworkInputBindingInfo(graph_id, input_names[0])
    input_tensor_id = input_binding_info[0]

    input_tensor_info = input_binding_info[1]

    output_names = parser.GetSubgraphOutputTensorNames(graph_id)

    input_data = np.random.randint(255, size=input_tensor_info.GetNumElements(), dtype=np.uint8)

    const_tensor_pair = (input_tensor_id, ann.ConstTensor(input_tensor_info, input_data))

    input_tensors = [const_tensor_pair]

    output_tensors = []

    for index, output_name in enumerate(output_names):
        out_bind_info = parser.GetNetworkOutputBindingInfo(graph_id, output_name)

        out_tensor_info = out_bind_info[1]
        out_tensor_id = out_bind_info[0]

        output_tensors.append((out_tensor_id,
                               ann.Tensor(out_tensor_info)))

    yield preferred_backends, network, runtime, input_tensors, output_tensors


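# Fixture: parses the mock TFLite model, optimizes and loads it onto a CpuRef
# runtime, and prepares input/output tensors from the saved reference input.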
@pytest.fixture(scope='function')
def mock_model_runtime(shared_data_folder):
    parser = ann.ITfLiteParser()
    network = parser.CreateNetworkFromBinaryFile(os.path.join(shared_data_folder, 'mock_model.tflite'))
    graph_id = 0

    input_binding_info = parser.GetNetworkInputBindingInfo(graph_id, "input_1")

    input_tensor_data = np.load(os.path.join(shared_data_folder, 'tflite_parser/input_lite.npy'))

    preferred_backends = [ann.BackendId('CpuRef')]

    options = ann.CreationOptions()
    runtime = ann.IRuntime(options)

    opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions())

    print(messages)

    net_id, messages = runtime.LoadNetwork(opt_network)

    print(messages)

    input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data])

    output_names = parser.GetSubgraphOutputTensorNames(graph_id)
    outputs_binding_info = []

    for output_name in output_names:
        outputs_binding_info.append(parser.GetNetworkOutputBindingInfo(graph_id, output_name))

    output_tensors = ann.make_output_tensors(outputs_binding_info)

    yield runtime, net_id, input_tensors, output_tensors


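# Loading a network hands ownership of the optimized network to the runtime,
# so the Python-side SWIG proxy should no longer own it (thisown is False).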
def test_python_disowns_network(random_runtime):
    preferred_backends = random_runtime[0]
    network = random_runtime[1]
    runtime = random_runtime[2]
    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    runtime.LoadNetwork(opt_network)

    assert not opt_network.thisown


def test_load_network(random_runtime):
    preferred_backends = random_runtime[0]
    network = random_runtime[1]
    runtime = random_runtime[2]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    net_id, messages = runtime.LoadNetwork(opt_network)
    assert "" == messages
    assert net_id == 0


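# The next two tests configure external profiling through
# CreationOptions.m_ProfilingOptions; timeline reporting is only valid when
# profiling itself is enabled, which the second test checks.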
def test_create_runtime_with_external_profiling_enabled():

    options = ann.CreationOptions()

    options.m_ProfilingOptions.m_FileOnly = True
    options.m_ProfilingOptions.m_EnableProfiling = True
    options.m_ProfilingOptions.m_OutgoingCaptureFile = "/tmp/outgoing.txt"
    options.m_ProfilingOptions.m_IncomingCaptureFile = "/tmp/incoming.txt"
    options.m_ProfilingOptions.m_TimelineEnabled = True
    options.m_ProfilingOptions.m_CapturePeriod = 1000
    options.m_ProfilingOptions.m_FileFormat = "JSON"

    runtime = ann.IRuntime(options)

    assert runtime is not None


def test_create_runtime_with_external_profiling_enabled_invalid_options():

    options = ann.CreationOptions()

    options.m_ProfilingOptions.m_FileOnly = True
    options.m_ProfilingOptions.m_EnableProfiling = False
    options.m_ProfilingOptions.m_OutgoingCaptureFile = "/tmp/outgoing.txt"
    options.m_ProfilingOptions.m_IncomingCaptureFile = "/tmp/incoming.txt"
    options.m_ProfilingOptions.m_TimelineEnabled = True
    options.m_ProfilingOptions.m_CapturePeriod = 1000
    options.m_ProfilingOptions.m_FileFormat = "JSON"

    with pytest.raises(RuntimeError) as err:
        runtime = ann.IRuntime(options)

    expected_error_message = "It is not possible to enable timeline reporting without profiling being enabled"
    assert expected_error_message in str(err.value)


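# This test uses the two-boolean overload of INetworkProperties; the
# MemorySource-based overload is covered by test_network_properties_constructor below.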
def test_load_network_properties_provided(random_runtime):
    preferred_backends = random_runtime[0]
    network = random_runtime[1]
    runtime = random_runtime[2]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    properties = ann.INetworkProperties(True, True)
    net_id, messages = runtime.LoadNetwork(opt_network, properties)
    assert "" == messages
    assert net_id == 0


def test_network_properties_constructor(random_runtime):
    preferred_backends = random_runtime[0]
    network = random_runtime[1]
    runtime = random_runtime[2]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    inputSource = ann.MemorySource_Undefined
    outputSource = ann.MemorySource_Undefined
    properties = ann.INetworkProperties(True, inputSource, outputSource)
    assert properties.m_AsyncEnabled == True
    assert properties.m_ProfilingEnabled == False
    assert properties.m_OutputNetworkDetailsMethod == ann.ProfilingDetailsMethod_Undefined
    assert properties.m_InputSource == ann.MemorySource_Undefined
    assert properties.m_OutputSource == ann.MemorySource_Undefined

    net_id, messages = runtime.LoadNetwork(opt_network, properties)
    assert "" == messages
    assert net_id == 0


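# This test only checks that a DeprecationWarning pointing users at the
# MemorySource constructor is caught by pytest.warns; the warning is raised manually here.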
def test_network_properties_deprecated_constructor():
    with pytest.warns(DeprecationWarning):
        warnings.warn("Deprecated: Use constructor with MemorySource argument instead.", DeprecationWarning)


def test_unload_network_fails_for_invalid_net_id(random_runtime):
    preferred_backends = random_runtime[0]
    network = random_runtime[1]
    runtime = random_runtime[2]

    ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions())

    with pytest.raises(RuntimeError) as err:
        runtime.UnloadNetwork(9)

    expected_error_message = "Failed to unload network."
    assert expected_error_message in str(err.value)


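# EnqueueWorkload executes one inference with the input/output tensors prepared by the fixture.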
def test_enqueue_workload(random_runtime):
    preferred_backends = random_runtime[0]
    network = random_runtime[1]
    runtime = random_runtime[2]
    input_tensors = random_runtime[3]
    output_tensors = random_runtime[4]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    net_id, _ = runtime.LoadNetwork(opt_network)
    runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)


def test_enqueue_workload_fails_with_empty_input_tensors(random_runtime):
    preferred_backends = random_runtime[0]
    network = random_runtime[1]
    runtime = random_runtime[2]
    input_tensors = []
    output_tensors = random_runtime[4]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())

    net_id, _ = runtime.LoadNetwork(opt_network)
    with pytest.raises(RuntimeError) as err:
        runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)

    expected_error_message = "Number of inputs provided does not match network."
    assert expected_error_message in str(err.value)


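# Note: x86_64 and aarch64 are custom pytest marks, presumably used to select the
# reference-output checks below for the matching host architecture.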
@pytest.mark.x86_64
@pytest.mark.parametrize('count', [5])
def test_multiple_inference_runs_yield_same_result(count, mock_model_runtime):
    """
    Test that results remain consistent among multiple runs of the same inference.
    """
    runtime = mock_model_runtime[0]
    net_id = mock_model_runtime[1]
    input_tensors = mock_model_runtime[2]
    output_tensors = mock_model_runtime[3]

    expected_results = np.array([[4, 85, 108, 29, 8, 16, 0, 2, 5, 0]])

    for _ in range(count):
        runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)

    output_vectors = ann.workload_tensors_to_ndarray(output_tensors)

    # Compare element-wise; `.all() == .all()` would only compare two booleans.
    for i in range(len(expected_results)):
        assert np.array_equal(output_vectors[i], expected_results[i])


@pytest.mark.aarch64
def test_aarch64_inference_results(mock_model_runtime):

    runtime = mock_model_runtime[0]
    net_id = mock_model_runtime[1]
    input_tensors = mock_model_runtime[2]
    output_tensors = mock_model_runtime[3]

    runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)

    output_vectors = ann.workload_tensors_to_ndarray(output_tensors)

    expected_results = np.array([[4, 85, 108, 29, 8, 16, 0, 2, 5, 0]])

    # Compare element-wise; `.all() == .all()` would only compare two booleans.
    for i in range(len(expected_results)):
        assert np.array_equal(output_vectors[i], expected_results[i])


def test_enqueue_workload_with_profiler(random_runtime):
    """
    Tests ArmNN's profiling extension
    """
    preferred_backends = random_runtime[0]
    network = random_runtime[1]
    runtime = random_runtime[2]
    input_tensors = random_runtime[3]
    output_tensors = random_runtime[4]

    opt_network, _ = ann.Optimize(network, preferred_backends,
                                  runtime.GetDeviceSpec(), ann.OptimizerOptions())
    net_id, _ = runtime.LoadNetwork(opt_network)

    profiler = runtime.GetProfiler(net_id)
    # By default profiling should be turned off:
    assert profiler.IsProfilingEnabled() is False

    # Enable profiling:
    profiler.EnableProfiling(True)
    assert profiler.IsProfilingEnabled() is True

    # Run the inference:
    runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)

    # Get profile output as a string:
    str_profile = profiler.as_json()

    # Verify that certain markers are present:
    assert len(str_profile) != 0
    assert str_profile.find('"ArmNN": {') > 0

    # Get events analysis output as a string:
    str_events_analysis = profiler.event_log()

    assert "Event Sequence - Name | Duration (ms) | Start (ms) | Stop (ms) | Device" in str_events_analysis

    assert profiler.thisown == 0


def test_check_runtime_swig_ownership(random_runtime):
    # Check that SWIG owns the runtime object. Giving SWIG ownership of the
    # returned value allows it to be garbage-collected automatically once it
    # is no longer in use.
    runtime = random_runtime[2]
    assert runtime.thisown