Tim Hall | e6ccd87 | 2020-11-09 16:46:37 +0000 | [diff] [blame] | 1 | # Copyright (C) 2020 Arm Limited or its affiliates. All rights reserved. |
| 2 | # |
| 3 | # SPDX-License-Identifier: Apache-2.0 |
| 4 | # |
| 5 | # Licensed under the Apache License, Version 2.0 (the License); you may |
| 6 | # not use this file except in compliance with the License. |
| 7 | # You may obtain a copy of the License at |
| 8 | # |
| 9 | # www.apache.org/licenses/LICENSE-2.0 |
| 10 | # |
| 11 | # Unless required by applicable law or agreed to in writing, software |
| 12 | # distributed under the License is distributed on an AS IS BASIS, WITHOUT |
| 13 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 14 | # See the License for the specific language governing permissions and |
| 15 | # limitations under the License. |
| 16 | import csv |
| 17 | import io |
| 18 | |
| 19 | import lxml.etree as xml |
| 20 | |
| 21 | from . import numeric_util |
| 22 | from .operation import Operation |
| 23 | |
| 24 | |
class DebugDatabase:
    """Class-level registry that maps operators in the source network to their
    optimised counterparts and command-stream positions, then writes the
    collected tables as CSV payloads embedded in an XML debug file.

    All state is stored on the class itself (classmethod API, no instances),
    so the database is effectively a process-wide singleton.
    """

    # Placeholder source reference for ops whose parent cannot be resolved.
    NULLREF = -1
    # When True, print a warning for unresolvable parents in add_optimised().
    show_warnings = False

    # Operators of the source (input) network.
    SOURCE_TABLE = "source"
    _sourceUID = {}  # Operation -> uid
    _sourceHeaders = ["id", "operator", "kernel_w", "kernel_h", "ofm_w", "ofm_h", "ofm_d"]
    _sourceTable = []

    # Operators of the optimised network, each linked back to a source op.
    OPTIMISED_TABLE = "optimised"
    _optimisedUID = {}  # Operation -> (uid, source uid)
    _optimisedHeaders = ["id", "source_id", "operator", "kernel_w", "kernel_h", "ofm_w", "ofm_h", "ofm_d"]
    _optimisedTable = []

    # Command-stream offsets of generated commands, linked to optimised ops.
    QUEUE_TABLE = "queue"
    _queueHeaders = ["offset", "cmdstream_id", "optimised_id"]
    _queueTable = []

    # Command streams and their file offsets in the written output.
    STREAM_TABLE = "cmdstream"
    _streamUID = {}  # stream key -> uid
    _streamHeaders = ["id", "file_offset"]
    _streamTable = []

    @classmethod
    def add_source(cls, op: Operation):
        """Register *op* as a source-network operator and record its row.

        Assigns the next sequential uid and captures the op type, kernel size
        and OFM dimensions.
        """
        assert isinstance(op, Operation)
        uid = len(cls._sourceUID)
        cls._sourceUID[op] = uid
        ofm_shape = numeric_util.full_shape(3, op.outputs[0].shape, 1)
        # NOTE(review): indices assume full_shape yields (..., h, w, depth) so
        # that [-2]=w, [-3]=h, [-1]=d — confirm against numeric_util.full_shape.
        cls._sourceTable.append(
            [uid, op.type, op.kernel.width, op.kernel.height, ofm_shape[-2], ofm_shape[-3], ofm_shape[-1]]
        )

    @classmethod
    def add_optimised(cls, parent: Operation, op: Operation):
        """Register *op* as an optimised operator derived from *parent*.

        The parent is looked up in the source network first; if absent there,
        in the optimised network (in which case that op's source uid is
        inherited). Unresolvable parents are recorded as NULLREF, with an
        optional warning. Re-registering an already-known op is a no-op.
        """
        assert isinstance(parent, Operation) and isinstance(op, Operation)
        if op not in cls._optimisedUID:
            if parent not in cls._sourceUID:
                # If the parent wasn't in the source network try to look it
                # up in the optimised network and use that op's source parent.
                if parent in cls._optimisedUID:
                    src_uid = cls._optimisedUID[parent][1]
                else:
                    if DebugDatabase.show_warnings:
                        print("Debug Database: Associated parent '{0}' not in network".format(parent.type))
                    src_uid = DebugDatabase.NULLREF
            else:
                src_uid = cls._sourceUID[parent]
            uid = len(cls._optimisedUID)
            cls._optimisedUID[op] = (uid, src_uid)
            ofm_shape = numeric_util.full_shape(3, op.outputs[0].shape, 1)
            # Same axis-order assumption as in add_source above.
            cls._optimisedTable.append(
                [uid, src_uid, op.type, op.kernel.width, op.kernel.height, ofm_shape[-2], ofm_shape[-3], ofm_shape[-1]]
            )

    @classmethod
    def add_stream(cls, key):
        """Register a command stream under *key* and return its uid.

        Fix: the uid was previously only bound inside the "new key" branch,
        so registering an already-known key returned an unusable value
        (UnboundLocalError or None) instead of the existing uid. Now the
        call is idempotent and always returns the stream's uid.
        """
        if key not in cls._streamUID:
            cls._streamUID[key] = len(cls._streamUID)
        return cls._streamUID[key]

    @classmethod
    def set_stream_offset(cls, key, file_offset):
        """Record the output-file offset of a previously added stream."""
        assert key in cls._streamUID
        uid = cls._streamUID[key]
        cls._streamTable.append([uid, file_offset])

    @classmethod
    def add_command(cls, stream_id, offset, op: Operation):
        """Record that the command at *offset* in *stream_id* realises *op*."""
        assert stream_id < len(cls._streamUID)
        assert op in cls._optimisedUID, "Optimised operator must exist before code generation"
        optimised_id = cls._optimisedUID[op][0]
        cls._queueTable.append([offset, stream_id, optimised_id])

    @classmethod
    def _write_table(cls, root, name, headers, table):
        """Serialise one table as CSV and attach it to *root* as a CDATA node."""
        # Convert table to CSV
        out = io.StringIO()
        writer = csv.writer(out, quoting=csv.QUOTE_NONNUMERIC)
        writer.writerow(headers)
        writer.writerows(table)

        # Package table into XML output
        table = xml.SubElement(root, "table", {"name": name})
        table.text = xml.CDATA(out.getvalue())

    @classmethod
    def write(cls, file_path, input_file, output_file):
        """Write all collected tables to *file_path* as a pretty-printed XML
        document, recording the source and optimised network file names."""
        root = xml.Element("debug", {"source": input_file, "optimised": output_file})

        cls._write_table(root, cls.SOURCE_TABLE, cls._sourceHeaders, cls._sourceTable)
        cls._write_table(root, cls.OPTIMISED_TABLE, cls._optimisedHeaders, cls._optimisedTable)
        cls._write_table(root, cls.QUEUE_TABLE, cls._queueHeaders, cls._queueTable)
        cls._write_table(root, cls.STREAM_TABLE, cls._streamHeaders, cls._streamTable)

        xml.ElementTree(root).write(file_path, encoding="utf-8", xml_declaration=True, pretty_print=True)