blob: 234f156ed7ace0f5aa3cc6e1131e50109bdca951 [file] [log] [blame]
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +00001"""Tests for tosa_verif_run_tests.py."""
2# Copyright (c) 2021-2022, ARM Limited.
3# SPDX-License-Identifier: Apache-2.0
4import json
5from copy import deepcopy
6from pathlib import Path
7from xml.dom import minidom
8
9import pytest
Jeremy Johnsonbe1a9402021-12-15 17:14:56 +000010from runner.tosa_verif_run_tests import main
11
12
# Template desc.json contents for a mock test: two inputs, one output,
# expected to pass. Fixtures deep-copy and tweak this as needed.
TEST_DESC = {
    "tosa_file": "pytest.json",
    "ifm_name": ["test-0", "test-1"],
    "ifm_file": ["test-0.npy", "test-1.npy"],
    "ofm_name": ["test-result-0"],
    "ofm_file": ["test-result-0.npy"],
    "expected_failure": False,
}
# Graph result values passed to the mock SUT to control its reported outcome.
GRAPH_RESULT_VALID = "valid"
GRAPH_RESULT_ERROR = "error"
23
24
25def _create_desc_json(json_object) -> Path:
26 """Create test desc.json."""
27 file = Path(__file__).parent / "desc.json"
28 with open(file, "w") as fd:
29 json.dump(json_object, fd, indent=2)
30 return file
31
32
33def _delete_desc_json(file: Path):
34 """Clean up desc.json."""
35 binary_file = file.parent / "desc_binary.json"
36 if binary_file.exists():
37 print(binary_file.read_text())
38 binary_file.unlink()
39 else:
40 print(file.read_text())
41 file.unlink()
42
43
@pytest.fixture
def testDir() -> Path:
    """Set up a mock expected pass test.

    Yields the directory (a Path, not a str — annotation fixed) containing
    the generated desc.json; the file is deleted again on teardown.
    """
    print("SET UP - testDir")
    file = _create_desc_json(TEST_DESC)
    yield file.parent
    print("TEAR DOWN - testDir")
    _delete_desc_json(file)
52
53
@pytest.fixture
def testDirExpectedFail() -> Path:
    """Set up a mock expected fail test.

    Yields the directory (a Path, not a str — annotation fixed) containing a
    desc.json with "expected_failure" set; the file is deleted on teardown.
    """
    print("SET UP - testDirExpectedFail")
    fail = deepcopy(TEST_DESC)
    fail["expected_failure"] = True
    file = _create_desc_json(fail)
    yield file.parent
    print("TEAR DOWN - testDirExpectedFail")
    _delete_desc_json(file)
64
65
@pytest.fixture
def testDirMultiOutputs() -> Path:
    """Set up a mock multiple results output test.

    Yields the directory (a Path, not a str — annotation fixed) containing a
    desc.json extended with a second output; the file is deleted on teardown.
    """
    print("SET UP - testDirMultiOutputs")
    out = deepcopy(TEST_DESC)
    out["ofm_name"].append("tr1")
    out["ofm_file"].append("test-result-1.npy")
    file = _create_desc_json(out)
    yield file.parent
    print("TEAR DOWN - testDirMultiOutputs")
    _delete_desc_json(file)
77
78
79def _get_default_argv(testDir: Path, graphResult: str) -> list:
80 """Create default args based on test directory and graph result."""
81 return [
82 "--sut-module",
83 "tests.tosa_mock_sut_run",
84 "--test",
85 str(testDir),
86 "--xunit-file",
87 str(testDir / "result.xml"),
88 # Must be last argument to allow easy extension with extra args
89 "--sut-module-args",
90 f"tests.tosa_mock_sut_run:graph={graphResult}",
91 ]
92
93
94def _get_xml_results(argv: list):
95 """Get XML results and remove file."""
96 resultsFile = Path(argv[argv.index("--xunit-file") + 1])
97 results = minidom.parse(str(resultsFile))
98 resultsFile.unlink()
99 return results
100
101
102def _get_xml_testsuites_from_results(results, expectedTestSuites: int):
103 """Get XML testcases from results."""
104 testSuites = results.getElementsByTagName("testsuite")
105 assert len(testSuites) == expectedTestSuites
106 return testSuites
107
108
109def _get_xml_testcases_from_results(results, expectedTestCases: int):
110 """Get XML testcases from results."""
111 testCases = results.getElementsByTagName("testcase")
112 assert len(testCases) == expectedTestCases
113 return testCases
114
115
def _get_xml_failure(argv: list):
    """Get the results and single testcase with the failure result entry if there is one."""
    testCases = _get_xml_testcases_from_results(_get_xml_results(argv), 1)
    failures = testCases[0].getElementsByTagName("failure")
    # No <failure> element means the testcase passed.
    return failures[0].firstChild.data if failures else None
124
125
def test_mock_sut_expected_pass(testDir: Path):
    """Run expected pass SUT test."""
    try:
        args = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        main(args)
        fail = _get_xml_failure(args)
    except Exception as err:
        assert False, f"Unexpected exception {err}"
    else:
        # A passing test records no failure entry in the xunit output.
        assert not fail
135
136
# Prefixes asserted against the failure text in the xunit results when a
# test's outcome does not match its "expected_failure" setting.
UNEXPECTED_PASS_PREFIX_STR = "UNEXPECTED_PASS"
UNEXPECTED_FAIL_PREFIX_STR = "UNEXPECTED_FAIL"
139
140
def test_mock_sut_unexpected_pass(testDirExpectedFail: Path):
    """Run unexpected pass SUT test."""
    try:
        args = _get_default_argv(testDirExpectedFail, GRAPH_RESULT_VALID)
        main(args)
        fail = _get_xml_failure(args)
    except Exception as err:
        assert False, f"Unexpected exception {err}"
    else:
        # Test expected to fail but passed: failure text flags this.
        assert fail.startswith(UNEXPECTED_PASS_PREFIX_STR)
150
151
def test_mock_sut_expected_failure(testDirExpectedFail: Path):
    """Run expected failure SUT test."""
    try:
        args = _get_default_argv(testDirExpectedFail, GRAPH_RESULT_ERROR)
        main(args)
        fail = _get_xml_failure(args)
    except Exception as err:
        assert False, f"Unexpected exception {err}"
    else:
        # An expected failure counts as a pass: no failure entry recorded.
        assert not fail
161
162
def test_mock_sut_unexpected_failure(testDir: Path):
    """Run unexpected failure SUT test."""
    try:
        args = _get_default_argv(testDir, GRAPH_RESULT_ERROR)
        main(args)
        fail = _get_xml_failure(args)
    except Exception as err:
        assert False, f"Unexpected exception {err}"
    else:
        # Test expected to pass but failed: failure text flags this.
        assert fail.startswith(UNEXPECTED_FAIL_PREFIX_STR)
172
173
def test_mock_sut_binary_conversion(testDir: Path):
    """Run SUT test with binary conversion of the test descriptor."""
    try:
        args = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        args += ["--binary", "--flatc-path", str(testDir / "mock_flatc.py")]
        main(args)
        # The runner should have produced a binary-converted descriptor.
        binary_desc = testDir / "desc_binary.json"
        assert binary_desc.exists()
        fail = _get_xml_failure(args)
    except Exception as err:
        assert False, f"Unexpected exception {err}"
    else:
        assert not fail
186
187
def test_mock_and_dummy_sut_results(testDir: Path):
    """Run two SUTs and check they both return results."""
    try:
        args = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        # Override sut-module setting with both SUTs
        args += ["--sut-module", "tests.tosa_dummy_sut_run", "tests.tosa_mock_sut_run"]
        main(args)
        xml = _get_xml_results(args)
        # One testsuite and one testcase per SUT.
        _get_xml_testsuites_from_results(xml, 2)
        _get_xml_testcases_from_results(xml, 2)
    except Exception as err:
        assert False, f"Unexpected exception {err}"
202
203
def test_two_mock_suts(testDir: Path):
    """Test that a duplicated SUT is ignored."""
    try:
        args = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        # Override sut-module setting with duplicated SUT
        args += ["--sut-module", "tests.tosa_mock_sut_run", "tests.tosa_mock_sut_run"]
        main(args)
        xml = _get_xml_results(args)
        # The duplicate is collapsed: only one suite/case appears.
        _get_xml_testsuites_from_results(xml, 1)
        _get_xml_testcases_from_results(xml, 1)
    except Exception as err:
        assert False, f"Unexpected exception {err}"
218
219
def test_mock_sut_multi_outputs_expected_pass(testDirMultiOutputs: Path):
    """Run expected pass SUT test with multiple outputs."""
    try:
        args = _get_default_argv(testDirMultiOutputs, GRAPH_RESULT_VALID)
        main(args)
        fail = _get_xml_failure(args)
    except Exception as err:
        assert False, f"Unexpected exception {err}"
    else:
        assert not fail
229
230
def test_mock_sut_multi_outputs_unexpected_failure(testDirMultiOutputs: Path):
    """Run SUT test which expects multiple outputs, but last one is missing."""
    try:
        args = _get_default_argv(testDirMultiOutputs, GRAPH_RESULT_VALID)
        # Relies on --sut-module-args being last in the default argv.
        args.append("tests.tosa_mock_sut_run:num_results=1")
        main(args)
        fail = _get_xml_failure(args)
    except Exception as err:
        assert False, f"Unexpected exception {err}"
    else:
        assert fail.startswith(UNEXPECTED_FAIL_PREFIX_STR)