Jeremy Johnson | 65ba809 | 2023-10-09 16:31:13 +0100 | [diff] [blame] | 1 | """Mock SUT tests for tosa_verif_run_tests.py.""" |
Jeremy Johnson | e2b5e87 | 2023-09-14 17:02:09 +0100 | [diff] [blame] | 2 | # Copyright (c) 2021-2023, ARM Limited. |
Jeremy Johnson | be1a940 | 2021-12-15 17:14:56 +0000 | [diff] [blame] | 3 | # SPDX-License-Identifier: Apache-2.0 |
| 4 | import json |
| 5 | from copy import deepcopy |
| 6 | from pathlib import Path |
| 7 | from xml.dom import minidom |
| 8 | |
| 9 | import pytest |
Jeremy Johnson | e2b5e87 | 2023-09-14 17:02:09 +0100 | [diff] [blame] | 10 | from runner.tosa_test_presets import TOSA_REFCOMPLIANCE_RUNNER |
Jeremy Johnson | be1a940 | 2021-12-15 17:14:56 +0000 | [diff] [blame] | 11 | from runner.tosa_verif_run_tests import main |
| 12 | |
| 13 | |
# Template test descriptor (the desc.json contents) shared by all mock tests.
# Fixtures deepcopy this before mutating, so the module-level dict itself is
# never modified by any test.
TEST_DESC = {
    "tosa_file": "pytest.json",
    "ifm_name": ["test-0", "test-1"],
    "ifm_file": ["test-0.npy", "test-1.npy"],
    "ofm_name": ["test-result-0"],
    "ofm_file": ["test-result-0.npy"],
    "expected_failure": False,
}
# Graph result strings passed to the mock SUT via --sut-module-args.
GRAPH_RESULT_VALID = "valid"
GRAPH_RESULT_ERROR = "error"

# Placeholder file handed to the runner as --ref-model-path; presumably it
# only needs to exist to satisfy the runner's path check (see
# _create_fake_ref_model's "fool the runner" purpose).
FAKE_REF_MODEL_PATH = Path(__file__).parent / "__fake_ref_model__"
| 26 | |
| 27 | |
def _create_fake_ref_model():
    """Create a fake ref model to fool the runner."""
    # Content is irrelevant; the file merely has to exist.
    FAKE_REF_MODEL_PATH.write_text("Fake ref model for mock testing\n")
| 32 | |
| 33 | |
def _delete_fake_ref_model():
    """Clean up fake ref model.

    Uses missing_ok so fixture teardown does not raise FileNotFoundError
    (masking the original failure) if set-up aborted before the file was
    written.
    """
    FAKE_REF_MODEL_PATH.unlink(missing_ok=True)
| 37 | |
Jeremy Johnson | be1a940 | 2021-12-15 17:14:56 +0000 | [diff] [blame] | 38 | |
| 39 | def _create_desc_json(json_object) -> Path: |
| 40 | """Create test desc.json.""" |
| 41 | file = Path(__file__).parent / "desc.json" |
Jeremy Johnson | e2b5e87 | 2023-09-14 17:02:09 +0100 | [diff] [blame] | 42 | with file.open("w") as fd: |
Jeremy Johnson | be1a940 | 2021-12-15 17:14:56 +0000 | [diff] [blame] | 43 | json.dump(json_object, fd, indent=2) |
| 44 | return file |
| 45 | |
| 46 | |
| 47 | def _delete_desc_json(file: Path): |
| 48 | """Clean up desc.json.""" |
| 49 | binary_file = file.parent / "desc_binary.json" |
| 50 | if binary_file.exists(): |
| 51 | print(binary_file.read_text()) |
| 52 | binary_file.unlink() |
| 53 | else: |
| 54 | print(file.read_text()) |
| 55 | file.unlink() |
| 56 | |
| 57 | |
Jeremy Johnson | 65ba809 | 2023-10-09 16:31:13 +0100 | [diff] [blame] | 58 | def _create_ifm_files(files): |
| 59 | """Create empty input files.""" |
| 60 | for name in files: |
| 61 | file = Path(__file__).parent / name |
| 62 | with open(file, "w") as fd: |
| 63 | fd.write("empty") |
| 64 | |
| 65 | |
| 66 | def _delete_ifm_files(files): |
| 67 | """Delete empty input files.""" |
| 68 | for name in files: |
| 69 | file = Path(__file__).parent / name |
| 70 | file.unlink() |
| 71 | |
| 72 | |
@pytest.fixture
def testDir() -> Path:
    """Set up a mock expected pass test.

    Creates the fake ref model, the input files and desc.json, yields the
    directory containing them, then removes everything on teardown.
    """
    print("SET UP - testDir")
    _create_fake_ref_model()
    _create_ifm_files(TEST_DESC["ifm_file"])
    file = _create_desc_json(TEST_DESC)
    # Yield the directory holding desc.json (used as the --test argument).
    yield file.parent
    print("TEAR DOWN - testDir")
    _delete_desc_json(file)
    _delete_fake_ref_model()
    _delete_ifm_files(TEST_DESC["ifm_file"])
Jeremy Johnson | be1a940 | 2021-12-15 17:14:56 +0000 | [diff] [blame] | 85 | |
| 86 | |
@pytest.fixture
def testDirExpectedFail() -> Path:
    """Set up a mock expected fail test.

    Like testDir, but writes a descriptor copy with expected_failure set to
    True; TEST_DESC itself is left untouched via deepcopy.
    """
    print("SET UP - testDirExpectedFail")
    _create_fake_ref_model()
    fail = deepcopy(TEST_DESC)
    fail["expected_failure"] = True
    _create_ifm_files(TEST_DESC["ifm_file"])
    file = _create_desc_json(fail)
    # Yield the directory holding desc.json (used as the --test argument).
    yield file.parent
    print("TEAR DOWN - testDirExpectedFail")
    _delete_desc_json(file)
    _delete_fake_ref_model()
    _delete_ifm_files(TEST_DESC["ifm_file"])
Jeremy Johnson | be1a940 | 2021-12-15 17:14:56 +0000 | [diff] [blame] | 101 | |
| 102 | |
@pytest.fixture
def testDirMultiOutputs() -> Path:
    """Set up a mock multiple results output test.

    Like testDir, but extends a descriptor copy with a second output
    name/file pair; TEST_DESC itself is left untouched via deepcopy.
    """
    print("SET UP - testDirMultiOutputs")
    _create_fake_ref_model()
    out = deepcopy(TEST_DESC)
    out["ofm_name"].append("tr1")
    out["ofm_file"].append("test-result-1.npy")
    _create_ifm_files(TEST_DESC["ifm_file"])
    file = _create_desc_json(out)
    # Yield the directory holding desc.json (used as the --test argument).
    yield file.parent
    print("TEAR DOWN - testDirMultiOutputs")
    _delete_desc_json(file)
    _delete_fake_ref_model()
    _delete_ifm_files(TEST_DESC["ifm_file"])
Jeremy Johnson | be1a940 | 2021-12-15 17:14:56 +0000 | [diff] [blame] | 118 | |
| 119 | |
def _get_default_argv(testDir: Path, graphResult: str) -> list:
    """Build the default runner command line for a test directory and graph result."""
    return [
        "--ref-model-path",
        str(FAKE_REF_MODEL_PATH),
        "--sut-module",
        "tests.tosa_mock_sut_run",
        "--test",
        str(testDir),
        "--xunit-file",
        str(testDir / "result.xml"),
        # Keep --sut-module-args last so callers can extend it by appending.
        "--sut-module-args",
        f"tests.tosa_mock_sut_run:graph={graphResult}",
    ]
| 135 | |
| 136 | |
| 137 | def _get_xml_results(argv: list): |
| 138 | """Get XML results and remove file.""" |
| 139 | resultsFile = Path(argv[argv.index("--xunit-file") + 1]) |
| 140 | results = minidom.parse(str(resultsFile)) |
| 141 | resultsFile.unlink() |
| 142 | return results |
| 143 | |
| 144 | |
Jeremy Johnson | e2b5e87 | 2023-09-14 17:02:09 +0100 | [diff] [blame] | 145 | def _get_xml_testsuites_from_results(results, numExpectedTestSuites: int): |
| 146 | """Get XML testsuites from results.""" |
Jeremy Johnson | be1a940 | 2021-12-15 17:14:56 +0000 | [diff] [blame] | 147 | testSuites = results.getElementsByTagName("testsuite") |
Jeremy Johnson | e2b5e87 | 2023-09-14 17:02:09 +0100 | [diff] [blame] | 148 | assert len(testSuites) == numExpectedTestSuites |
Jeremy Johnson | be1a940 | 2021-12-15 17:14:56 +0000 | [diff] [blame] | 149 | return testSuites |
| 150 | |
| 151 | |
def _check_xml_testsuites_in_results(results, expectedTestSuites: list):
    """Check every XML testsuite in results is one of the expected suites.

    The compliance runner suite is always produced alongside the SUT
    suites, so it is added to the expected set here. Builds a new list
    rather than appending, so the caller's list is no longer mutated as a
    side effect.
    """
    expected = expectedTestSuites + [TOSA_REFCOMPLIANCE_RUNNER]
    testSuites = _get_xml_testsuites_from_results(results, len(expected))
    for suite in testSuites:
        assert suite.getAttribute("name") in expected
| 159 | |
| 160 | |
Jeremy Johnson | be1a940 | 2021-12-15 17:14:56 +0000 | [diff] [blame] | 161 | def _get_xml_testcases_from_results(results, expectedTestCases: int): |
| 162 | """Get XML testcases from results.""" |
| 163 | testCases = results.getElementsByTagName("testcase") |
| 164 | assert len(testCases) == expectedTestCases |
| 165 | return testCases |
| 166 | |
| 167 | |
def _get_xml_failure(argv: list):
    """Return the failure text of the single testcase, or None if it passed."""
    results = _get_xml_results(argv)
    testCases = _get_xml_testcases_from_results(results, 1)
    failures = testCases[0].getElementsByTagName("failure")
    # No <failure> element means the testcase passed.
    return failures[0].firstChild.data if failures else None
| 176 | |
| 177 | |
def test_mock_sut_expected_pass(testDir: Path):
    """Run expected pass SUT test."""
    failure = None
    try:
        args = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        main(args)
        failure = _get_xml_failure(args)
    except Exception as exc:
        assert False, f"Unexpected exception {exc}"
    assert not failure
| 187 | |
| 188 | |
# Prefixes the failure text is expected to start with when a test's actual
# result contradicts its expected_failure setting (see the startswith
# assertions in the tests below).
UNEXPECTED_PASS_PREFIX_STR = "UNEXPECTED_PASS"
UNEXPECTED_FAIL_PREFIX_STR = "UNEXPECTED_FAIL"
| 191 | |
| 192 | |
def test_mock_sut_unexpected_pass(testDirExpectedFail: Path):
    """Run unexpected pass SUT test."""
    try:
        # Descriptor expects failure, but the graph result is valid.
        args = _get_default_argv(testDirExpectedFail, GRAPH_RESULT_VALID)
        main(args)
        failure = _get_xml_failure(args)
    except Exception as exc:
        assert False, f"Unexpected exception {exc}"
    assert failure.startswith(UNEXPECTED_PASS_PREFIX_STR)
| 202 | |
| 203 | |
def test_mock_sut_expected_failure(testDirExpectedFail: Path):
    """Run expected failure SUT test."""
    failure = None
    try:
        # Descriptor expects failure and the graph result is an error: pass.
        args = _get_default_argv(testDirExpectedFail, GRAPH_RESULT_ERROR)
        main(args)
        failure = _get_xml_failure(args)
    except Exception as exc:
        assert False, f"Unexpected exception {exc}"
    assert not failure
| 213 | |
| 214 | |
def test_mock_sut_unexpected_failure(testDir: Path):
    """Run unexpected failure SUT test."""
    try:
        # Descriptor expects a pass, but the graph result is an error.
        args = _get_default_argv(testDir, GRAPH_RESULT_ERROR)
        main(args)
        failure = _get_xml_failure(args)
    except Exception as exc:
        assert False, f"Unexpected exception {exc}"
    assert failure.startswith(UNEXPECTED_FAIL_PREFIX_STR)
| 224 | |
| 225 | |
def test_mock_sut_binary_conversion(testDir: Path):
    """Run SUT test with flatbuffer binary conversion of the descriptor."""
    failure = None
    try:
        args = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        args += ["--binary", "--flatc-path", str(testDir / "mock_flatc.py")]
        main(args)
        # Conversion must have produced a binary descriptor file.
        assert (testDir / "desc_binary.json").exists()
        failure = _get_xml_failure(args)
    except Exception as exc:
        assert False, f"Unexpected exception {exc}"
    assert not failure
| 238 | |
| 239 | |
def test_mock_and_dummy_sut_results(testDir: Path):
    """Run two SUTs and check they both return results."""
    try:
        sut_modules = ["tests.tosa_dummy_sut_run", "tests.tosa_mock_sut_run"]
        args = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        # Override the default single sut-module setting with both SUTs.
        args += ["--sut-module", *sut_modules]
        main(args)
        results = _get_xml_results(args)
        _check_xml_testsuites_in_results(results, sut_modules)
        _get_xml_testcases_from_results(results, 2)
    except Exception as exc:
        assert False, f"Unexpected exception {exc}"
| 253 | |
| 254 | |
def test_two_mock_suts(testDir: Path):
    """Test that a duplicated SUT is ignored."""
    try:
        sut_module = "tests.tosa_mock_sut_run"
        args = _get_default_argv(testDir, GRAPH_RESULT_VALID)
        # Supply the same SUT twice; the runner should de-duplicate it.
        args += ["--sut-module", sut_module, sut_module]
        main(args)
        results = _get_xml_results(args)
        _check_xml_testsuites_in_results(results, [sut_module])
        _get_xml_testcases_from_results(results, 1)
    except Exception as exc:
        assert False, f"Unexpected exception {exc}"
| 268 | |
| 269 | |
def test_mock_sut_multi_outputs_expected_pass(testDirMultiOutputs: Path):
    """Run expected pass SUT test with multiple outputs."""
    failure = None
    try:
        args = _get_default_argv(testDirMultiOutputs, GRAPH_RESULT_VALID)
        main(args)
        failure = _get_xml_failure(args)
    except Exception as exc:
        assert False, f"Unexpected exception {exc}"
    assert not failure
| 279 | |
| 280 | |
def test_mock_sut_multi_outputs_unexpected_failure(testDirMultiOutputs: Path):
    """Run SUT test which expects multiple outputs, but last one is missing."""
    try:
        args = _get_default_argv(testDirMultiOutputs, GRAPH_RESULT_VALID)
        # Extends --sut-module-args (kept last in the default argv) so the
        # mock SUT produces only one of the two expected results.
        args.append("tests.tosa_mock_sut_run:num_results=1")
        main(args)
        failure = _get_xml_failure(args)
    except Exception as exc:
        assert False, f"Unexpected exception {exc}"
    assert failure.startswith(UNEXPECTED_FAIL_PREFIX_STR)