"""Mock SUT tests for tosa_verif_run_tests.py."""
# Copyright (c) 2021-2023, ARM Limited.
# SPDX-License-Identifier: Apache-2.0
import json
from copy import deepcopy
from pathlib import Path
from xml.dom import minidom

import pytest
from runner.tosa_test_presets import TOSA_REFCOMPLIANCE_RUNNER
from runner.tosa_verif_run_tests import main


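# Descriptor used to build each test's desc.json. The keys mirror the fields
# the runner reads from a test descriptor: the TOSA graph file, the input
# (ifm) and output (ofm) tensor names and files, and whether the test is
# expected to fail. The files named here are created as placeholders by the
# fixtures below.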
TEST_DESC = {
    "tosa_file": "pytest.json",
    "ifm_name": ["test-0", "test-1"],
    "ifm_file": ["test-0.npy", "test-1.npy"],
    "ofm_name": ["test-result-0"],
    "ofm_file": ["test-result-0.npy"],
    "expected_failure": False,
}
GRAPH_RESULT_VALID = "valid"
GRAPH_RESULT_ERROR = "error"

FAKE_REF_MODEL_PATH = Path(__file__).parent / "__fake_ref_model__"


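# The fake ref model is a plain text file: the mock SUT never invokes it, so
# the runner only needs --ref-model-path to point at an existing file.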
def _create_fake_ref_model():
    """Create a fake ref model to fool the runner."""
    with FAKE_REF_MODEL_PATH.open("w") as fd:
        print("Fake ref model for mock testing", file=fd)


def _delete_fake_ref_model():
    """Clean up fake ref model."""
    FAKE_REF_MODEL_PATH.unlink()


def _create_desc_json(json_object) -> Path:
    """Create test desc.json."""
    file = Path(__file__).parent / "desc.json"
    with file.open("w") as fd:
        json.dump(json_object, fd, indent=2)
    return file


def _delete_desc_json(file: Path):
    """Clean up desc.json."""
    binary_file = file.parent / "desc_binary.json"
    if binary_file.exists():
        print(binary_file.read_text())
        binary_file.unlink()
    else:
        print(file.read_text())
    file.unlink()


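# The "input" files below hold placeholder text rather than real .npy data;
# that is enough for runs against the mock SUT, which is not expected to
# parse them.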
def _create_ifm_files(files):
    """Create empty input files."""
    for name in files:
        file = Path(__file__).parent / name
        with open(file, "w") as fd:
            fd.write("empty")


def _delete_ifm_files(files):
    """Delete empty input files."""
    for name in files:
        file = Path(__file__).parent / name
        file.unlink()


@pytest.fixture
def testDir() -> Path:
    """Set up a mock expected pass test."""
    print("SET UP - testDir")
    _create_fake_ref_model()
    _create_ifm_files(TEST_DESC["ifm_file"])
    file = _create_desc_json(TEST_DESC)
    yield file.parent
    print("TEAR DOWN - testDir")
    _delete_desc_json(file)
    _delete_fake_ref_model()
    _delete_ifm_files(TEST_DESC["ifm_file"])


@pytest.fixture
def testDirExpectedFail() -> Path:
    """Set up a mock expected fail test."""
    print("SET UP - testDirExpectedFail")
    _create_fake_ref_model()
    fail = deepcopy(TEST_DESC)
    fail["expected_failure"] = True
    _create_ifm_files(TEST_DESC["ifm_file"])
    file = _create_desc_json(fail)
    yield file.parent
    print("TEAR DOWN - testDirExpectedFail")
    _delete_desc_json(file)
    _delete_fake_ref_model()
    _delete_ifm_files(TEST_DESC["ifm_file"])


@pytest.fixture
def testDirMultiOutputs() -> Path:
    """Set up a mock multiple results output test."""
    print("SET UP - testDirMultiOutputs")
    _create_fake_ref_model()
    out = deepcopy(TEST_DESC)
    out["ofm_name"].append("tr1")
    out["ofm_file"].append("test-result-1.npy")
    _create_ifm_files(TEST_DESC["ifm_file"])
    file = _create_desc_json(out)
    yield file.parent
    print("TEAR DOWN - testDirMultiOutputs")
    _delete_desc_json(file)
    _delete_fake_ref_model()
    _delete_ifm_files(TEST_DESC["ifm_file"])


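# For reference, the argv built below corresponds roughly to this command
# line (paths illustrative):
#
#   tosa_verif_run_tests --ref-model-path <tests>/__fake_ref_model__ \
#       --sut-module tests.tosa_mock_sut_run \
#       --test <testDir> --xunit-file <testDir>/result.xml \
#       --sut-module-args tests.tosa_mock_sut_run:graph=valid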
def _get_default_argv(testDir: Path, graphResult: str) -> list:
    """Create default args based on test directory and graph result."""
    return [
        "--ref-model-path",
        str(FAKE_REF_MODEL_PATH),
        "--sut-module",
        "tests.tosa_mock_sut_run",
        "--test",
        str(testDir),
        "--xunit-file",
        str(testDir / "result.xml"),
        # Must be last argument to allow easy extension with extra args
        "--sut-module-args",
        f"tests.tosa_mock_sut_run:graph={graphResult}",
    ]


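# The helpers below assume the xunit results file has the usual JUnit-style
# shape, along these lines (values illustrative):
#
#   <testsuites>
#     <testsuite name="tests.tosa_mock_sut_run">
#       <testcase name="...">
#         <failure>UNEXPECTED_FAIL ...</failure>  <!-- only when failing -->
#       </testcase>
#     </testsuite>
#   </testsuites>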
def _get_xml_results(argv: list):
    """Get XML results and remove file."""
    resultsFile = Path(argv[argv.index("--xunit-file") + 1])
    results = minidom.parse(str(resultsFile))
    resultsFile.unlink()
    return results


def _get_xml_testsuites_from_results(results, numExpectedTestSuites: int):
    """Get XML testsuites from results."""
    testSuites = results.getElementsByTagName("testsuite")
    assert len(testSuites) == numExpectedTestSuites
    return testSuites


def _check_xml_testsuites_in_results(results, expectedTestSuites: list):
    """Check XML testsuites in results."""
    # Add compliance to expected list
    expectedTestSuites.append(TOSA_REFCOMPLIANCE_RUNNER)
    testSuites = _get_xml_testsuites_from_results(results, len(expectedTestSuites))
    for suite in testSuites:
        assert suite.getAttribute("name") in expectedTestSuites


def _get_xml_testcases_from_results(results, expectedTestCases: int):
    """Get XML testcases from results."""
    testCases = results.getElementsByTagName("testcase")
    assert len(testCases) == expectedTestCases
    return testCases


def _get_xml_failure(argv: list):
    """Return the failure entry of the single testcase, if there is one."""
    results = _get_xml_results(argv)
    testCases = _get_xml_testcases_from_results(results, 1)
    fail = testCases[0].getElementsByTagName("failure")
    if fail:
        return fail[0].firstChild.data
    return None


def test_mock_sut_expected_pass(testDir: Path):
    """Run expected pass SUT test."""
    try:
        argv = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        assert False, f"Unexpected exception {e}"
    assert not fail


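# Prefixes of the <failure> text the runner records when a test's actual
# result disagrees with its expected_failure setting.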
UNEXPECTED_PASS_PREFIX_STR = "UNEXPECTED_PASS"
UNEXPECTED_FAIL_PREFIX_STR = "UNEXPECTED_FAIL"


def test_mock_sut_unexpected_pass(testDirExpectedFail: Path):
    """Run unexpected pass SUT test."""
    try:
        argv = _get_default_argv(testDirExpectedFail, GRAPH_RESULT_VALID)
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        assert False, f"Unexpected exception {e}"
    assert fail.startswith(UNEXPECTED_PASS_PREFIX_STR)


def test_mock_sut_expected_failure(testDirExpectedFail: Path):
    """Run expected failure SUT test."""
    try:
        argv = _get_default_argv(testDirExpectedFail, GRAPH_RESULT_ERROR)
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        assert False, f"Unexpected exception {e}"
    assert not fail


def test_mock_sut_unexpected_failure(testDir: Path):
    """Run unexpected failure SUT test."""
    try:
        argv = _get_default_argv(testDir, GRAPH_RESULT_ERROR)
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        assert False, f"Unexpected exception {e}"
    assert fail.startswith(UNEXPECTED_FAIL_PREFIX_STR)


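# mock_flatc.py (expected alongside these tests) stands in for the real
# flatc compiler so the runner's --binary conversion step can complete and
# write the desc_binary.json checked below.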
def test_mock_sut_binary_conversion(testDir: Path):
    """Run binary conversion SUT test."""
    try:
        argv = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        argv.extend(["--binary", "--flatc-path", str(testDir / "mock_flatc.py")])
        main(argv)
        binary_desc = testDir / "desc_binary.json"
        assert binary_desc.exists()
        fail = _get_xml_failure(argv)
    except Exception as e:
        assert False, f"Unexpected exception {e}"
    assert not fail


def test_mock_and_dummy_sut_results(testDir: Path):
    """Run two SUTs and check they both return results."""
    try:
        suts = ["tests.tosa_dummy_sut_run", "tests.tosa_mock_sut_run"]
        argv = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        # Override sut-module setting with both SUTs
        argv.extend(["--sut-module"] + suts)
        main(argv)
        results = _get_xml_results(argv)
        _check_xml_testsuites_in_results(results, suts)
        _get_xml_testcases_from_results(results, 2)
    except Exception as e:
        assert False, f"Unexpected exception {e}"


def test_two_mock_suts(testDir: Path):
    """Test that a duplicated SUT is ignored."""
    try:
        sut = ["tests.tosa_mock_sut_run"]
        argv = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        # Override sut-module setting with duplicated SUT
        argv.extend(["--sut-module"] + sut * 2)
        main(argv)
        results = _get_xml_results(argv)
        _check_xml_testsuites_in_results(results, sut)
        _get_xml_testcases_from_results(results, 1)
    except Exception as e:
        assert False, f"Unexpected exception {e}"


def test_mock_sut_multi_outputs_expected_pass(testDirMultiOutputs: Path):
    """Run expected pass SUT test with multiple outputs."""
    try:
        argv = _get_default_argv(testDirMultiOutputs, GRAPH_RESULT_VALID)
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        assert False, f"Unexpected exception {e}"
    assert not fail


def test_mock_sut_multi_outputs_unexpected_failure(testDirMultiOutputs: Path):
    """Run SUT test which expects multiple outputs, but the last one is missing."""
    try:
        argv = _get_default_argv(testDirMultiOutputs, GRAPH_RESULT_VALID)
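        # Appending here extends --sut-module-args, which _get_default_argv
        # deliberately keeps as the last option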
        argv.append("tests.tosa_mock_sut_run:num_results=1")
        main(argv)
        fail = _get_xml_failure(argv)
    except Exception as e:
        assert False, f"Unexpected exception {e}"
    assert fail.startswith(UNEXPECTED_FAIL_PREFIX_STR)