blob: fb4a8115d047038c03b401a91c8788b81a59c28a [file] [log] [blame]
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +00001"""Tests for tosa_verif_run_tests.py."""
Jeremy Johnsone2b5e872023-09-14 17:02:09 +01002# Copyright (c) 2021-2023, ARM Limited.
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +00003# SPDX-License-Identifier: Apache-2.0
import json
from collections.abc import Iterator
from copy import deepcopy
from pathlib import Path
from xml.dom import minidom

import pytest
from runner.tosa_test_presets import TOSA_REFCOMPLIANCE_RUNNER
from runner.tosa_verif_run_tests import main
12
13
# Template desc.json content for a mock test: one TOSA file, two input
# tensors/files and a single expected output file.
TEST_DESC = {
    "tosa_file": "pytest.json",
    "ifm_name": ["test-0", "test-1"],
    "ifm_file": ["test-0.npy", "test-1.npy"],
    "ofm_name": ["test-result-0"],
    "ofm_file": ["test-result-0.npy"],
    "expected_failure": False,
}
# Graph result values passed to the mock SUT to force pass/fail outcomes.
GRAPH_RESULT_VALID = "valid"
GRAPH_RESULT_ERROR = "error"

# Dummy file handed to the runner as the reference model path.
FAKE_REF_MODEL_PATH = Path(__file__).parent / "__fake_ref_model__"
26
27
def _create_fake_ref_model():
    """Create a fake ref model to fool the runner."""
    # The content is irrelevant; only the file's existence matters.
    FAKE_REF_MODEL_PATH.write_text("Fake ref model for mock testing\n")
32
33
def _delete_fake_ref_model():
    """Clean up fake ref model."""
    # Raises FileNotFoundError if set-up never created the file.
    FAKE_REF_MODEL_PATH.unlink()
37
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +000038
def _create_desc_json(json_object) -> Path:
    """Write json_object to desc.json beside this file and return its path."""
    desc_path = Path(__file__).parent / "desc.json"
    desc_path.write_text(json.dumps(json_object, indent=2))
    return desc_path
45
46
def _delete_desc_json(file: Path):
    """Clean up desc.json, printing its contents for the test log."""
    # A binary-conversion run leaves a desc_binary.json; prefer removing it.
    # NOTE(review): in the binary case the original desc.json is not removed
    # here - confirm whether something else cleans it up.
    binary_file = file.parent / "desc_binary.json"
    target = binary_file if binary_file.exists() else file
    print(target.read_text())
    target.unlink()
56
57
@pytest.fixture
def testDir() -> Iterator[Path]:
    """Set up a mock expected pass test.

    Yields the directory containing the generated desc.json.
    (Annotation fixed: this is a yield fixture producing a Path, not a str.)
    """
    print("SET UP - testDir")
    _create_fake_ref_model()
    file = _create_desc_json(TEST_DESC)
    yield file.parent
    print("TEAR DOWN - testDir")
    _delete_desc_json(file)
    _delete_fake_ref_model()
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +000068
69
@pytest.fixture
def testDirExpectedFail() -> Iterator[Path]:
    """Set up a mock expected fail test.

    Yields the directory containing a desc.json with expected_failure=True.
    (Annotation fixed: this is a yield fixture producing a Path, not a str.)
    """
    print("SET UP - testDirExpectedFail")
    _create_fake_ref_model()
    fail = deepcopy(TEST_DESC)
    fail["expected_failure"] = True
    file = _create_desc_json(fail)
    yield file.parent
    print("TEAR DOWN - testDirExpectedFail")
    _delete_desc_json(file)
    _delete_fake_ref_model()
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +000082
83
@pytest.fixture
def testDirMultiOutputs() -> Iterator[Path]:
    """Set up a mock multiple results output test.

    Yields the directory containing a desc.json with two output tensors.
    (Annotation fixed: this is a yield fixture producing a Path, not a str.)
    """
    print("SET UP - testDirMultiOutputs")
    _create_fake_ref_model()
    out = deepcopy(TEST_DESC)
    out["ofm_name"].append("tr1")
    out["ofm_file"].append("test-result-1.npy")
    file = _create_desc_json(out)
    yield file.parent
    print("TEAR DOWN - testDirMultiOutputs")
    _delete_desc_json(file)
    _delete_fake_ref_model()
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +000097
98
def _get_default_argv(testDir: Path, graphResult: str) -> list:
    """Create default args based on test directory and graph result."""
    args = [
        "--ref-model-path",
        str(FAKE_REF_MODEL_PATH),
        "--sut-module",
        "tests.tosa_mock_sut_run",
        "--test",
        str(testDir),
        "--xunit-file",
        str(testDir / "result.xml"),
    ]
    # Must be last argument to allow easy extension with extra args
    args += ["--sut-module-args", f"tests.tosa_mock_sut_run:graph={graphResult}"]
    return args
114
115
def _get_xml_results(argv: list):
    """Parse the xunit results file named in argv and delete it."""
    xml_path = Path(argv[argv.index("--xunit-file") + 1])
    parsed = minidom.parse(str(xml_path))
    xml_path.unlink()
    return parsed
122
123
Jeremy Johnsone2b5e872023-09-14 17:02:09 +0100124def _get_xml_testsuites_from_results(results, numExpectedTestSuites: int):
125 """Get XML testsuites from results."""
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +0000126 testSuites = results.getElementsByTagName("testsuite")
Jeremy Johnsone2b5e872023-09-14 17:02:09 +0100127 assert len(testSuites) == numExpectedTestSuites
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +0000128 return testSuites
129
130
Jeremy Johnsone2b5e872023-09-14 17:02:09 +0100131def _check_xml_testsuites_in_results(results, expectedTestSuites: list):
132 """Check XML testsuites in results."""
133 # Add compliance to expected list
134 expectedTestSuites.append(TOSA_REFCOMPLIANCE_RUNNER)
135 testSuites = _get_xml_testsuites_from_results(results, len(expectedTestSuites))
136 for suite in testSuites:
137 assert suite.getAttribute("name") in expectedTestSuites
138
139
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +0000140def _get_xml_testcases_from_results(results, expectedTestCases: int):
141 """Get XML testcases from results."""
142 testCases = results.getElementsByTagName("testcase")
143 assert len(testCases) == expectedTestCases
144 return testCases
145
146
def _get_xml_failure(argv: list):
    """Get the results and single testcase with the failure result entry if there is one."""
    results = _get_xml_results(argv)
    testCases = _get_xml_testcases_from_results(results, 1)
    failures = testCases[0].getElementsByTagName("failure")
    return failures[0].firstChild.data if failures else None
155
156
def test_mock_sut_expected_pass(testDir: Path):
    """Run expected pass SUT test."""
    try:
        argv = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        # pytest.fail is not stripped under `python -O` (unlike bare assert)
        # and reports more clearly than "assert False".
        pytest.fail(f"Unexpected exception {e}")
    assert not fail
166
167
# Failure-message prefixes produced by the runner when the actual result
# does not match the test's expected_failure setting.
UNEXPECTED_PASS_PREFIX_STR = "UNEXPECTED_PASS"
UNEXPECTED_FAIL_PREFIX_STR = "UNEXPECTED_FAIL"
170
171
def test_mock_sut_unexpected_pass(testDirExpectedFail: Path):
    """Run unexpected pass SUT test."""
    try:
        argv = _get_default_argv(testDirExpectedFail, GRAPH_RESULT_VALID)
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        # pytest.fail is not stripped under `python -O` (unlike bare assert).
        pytest.fail(f"Unexpected exception {e}")
    assert fail.startswith(UNEXPECTED_PASS_PREFIX_STR)
181
182
def test_mock_sut_expected_failure(testDirExpectedFail: Path):
    """Run expected failure SUT test."""
    try:
        argv = _get_default_argv(testDirExpectedFail, GRAPH_RESULT_ERROR)
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        # pytest.fail is not stripped under `python -O` (unlike bare assert).
        pytest.fail(f"Unexpected exception {e}")
    assert not fail
192
193
def test_mock_sut_unexpected_failure(testDir: Path):
    """Run unexpected failure SUT test."""
    try:
        argv = _get_default_argv(testDir, GRAPH_RESULT_ERROR)
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        # pytest.fail is not stripped under `python -O` (unlike bare assert).
        pytest.fail(f"Unexpected exception {e}")
    assert fail.startswith(UNEXPECTED_FAIL_PREFIX_STR)
203
204
def test_mock_sut_binary_conversion(testDir: Path):
    """Run binary conversion SUT test and check desc_binary.json is created."""
    # Docstring fixed: it was copy-pasted from the unexpected-failure test.
    try:
        argv = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        argv.extend(["--binary", "--flatc-path", str(testDir / "mock_flatc.py")])
        main(argv)
        binary_desc = testDir / "desc_binary.json"
        assert binary_desc.exists()
        fail = _get_xml_failure(argv)
    except Exception as e:
        # pytest.fail is not stripped under `python -O` (unlike bare assert).
        pytest.fail(f"Unexpected exception {e}")
    assert not fail
217
218
def test_mock_and_dummy_sut_results(testDir: Path):
    """Run two SUTs and check they both return results."""
    try:
        suts = ["tests.tosa_dummy_sut_run", "tests.tosa_mock_sut_run"]
        argv = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        # Override sut-module setting with both SUTs
        argv.extend(["--sut-module"] + suts)
        main(argv)
        results = _get_xml_results(argv)
        _check_xml_testsuites_in_results(results, suts)
        _get_xml_testcases_from_results(results, 2)
    except Exception as e:
        # pytest.fail is not stripped under `python -O` (unlike bare assert).
        pytest.fail(f"Unexpected exception {e}")
232
233
def test_two_mock_suts(testDir: Path):
    """Test that a duplicated SUT is ignored."""
    try:
        sut = ["tests.tosa_mock_sut_run"]
        argv = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        # Override sut-module setting with duplicated SUT
        argv.extend(["--sut-module"] + sut * 2)
        main(argv)
        results = _get_xml_results(argv)
        _check_xml_testsuites_in_results(results, sut)
        _get_xml_testcases_from_results(results, 1)
    except Exception as e:
        # pytest.fail is not stripped under `python -O` (unlike bare assert).
        pytest.fail(f"Unexpected exception {e}")
247
248
def test_mock_sut_multi_outputs_expected_pass(testDirMultiOutputs: Path):
    """Run expected pass SUT test with multiple outputs."""
    try:
        argv = _get_default_argv(testDirMultiOutputs, GRAPH_RESULT_VALID)
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        # pytest.fail is not stripped under `python -O` (unlike bare assert).
        pytest.fail(f"Unexpected exception {e}")
    assert not fail
258
259
def test_mock_sut_multi_outputs_unexpected_failure(testDirMultiOutputs: Path):
    """Run SUT test which expects multiple outputs, but last one is missing."""
    try:
        argv = _get_default_argv(testDirMultiOutputs, GRAPH_RESULT_VALID)
        # Relies on --sut-module-args being last in the default argv so this
        # extra value is consumed as an additional module argument.
        argv.append("tests.tosa_mock_sut_run:num_results=1")
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        # pytest.fail is not stripped under `python -O` (unlike bare assert).
        pytest.fail(f"Unexpected exception {e}")
    assert fail.startswith(UNEXPECTED_FAIL_PREFIX_STR)