# Copyright (C) 2020 Arm Limited or its affiliates. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the License); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import io
from typing import Any
from typing import Dict
from typing import List

import lxml.etree as xml

from . import numeric_util
from .operation import Operation


UntypedDict = Dict[Any, Any]
UntypedList = List[Any]


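# Collects debug cross-references as the network is compiled: operators in the
# source (input) network, operators in the optimised network (each linked back
# to its source operator), command streams, and the offset of every command.
# write() serialises the result as CSV tables embedded in an XML debug file.
# All state is held in class attributes, so the database acts as a process-wide
# singleton accessed through classmethods.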
class DebugDatabase:
    NULLREF = -1
    show_warnings = False

    SOURCE_TABLE = "source"
    _sourceUID: UntypedDict = {}
    _sourceHeaders = ["id", "operator", "kernel_w", "kernel_h", "ofm_w", "ofm_h", "ofm_d"]
    _sourceTable: UntypedList = []

    OPTIMISED_TABLE = "optimised"
    _optimisedUID: UntypedDict = {}
    _optimisedHeaders = ["id", "source_id", "operator", "kernel_w", "kernel_h", "ofm_w", "ofm_h", "ofm_d"]
    _optimisedTable: UntypedList = []

    QUEUE_TABLE = "queue"
    _queueHeaders = ["offset", "cmdstream_id", "optimised_id"]
    _queueTable: UntypedList = []

    STREAM_TABLE = "cmdstream"
    _streamUID: UntypedDict = {}
    _streamHeaders = ["id", "file_offset"]
    _streamTable: UntypedList = []

    @classmethod
    def add_source(cls, op: Operation):
        assert isinstance(op, Operation)
        uid = len(cls._sourceUID)
        cls._sourceUID[op] = uid
        ofm_shape = numeric_util.full_shape(3, op.outputs[0].shape, 1)
        cls._sourceTable.append(
            [uid, op.type, op.kernel.width, op.kernel.height, ofm_shape[-2], ofm_shape[-3], ofm_shape[-1]]
        )

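    # Record an operator from the optimised network and link it to the source
    # operator it was derived from; if the parent cannot be resolved, the source
    # reference is stored as NULLREF.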
    @classmethod
    def add_optimised(cls, parent: Operation, op: Operation):
        assert isinstance(parent, Operation) and isinstance(op, Operation)
        if op not in cls._optimisedUID:
            if parent not in cls._sourceUID:
                # If the parent wasn't in the source network, try to look it
                # up in the optimised network and use that op's source parent.
                if parent in cls._optimisedUID:
                    src_uid = cls._optimisedUID[parent][1]
                else:
                    if DebugDatabase.show_warnings:
                        print("Debug Database: Associated parent '{0}' not in network".format(parent.type))
                    src_uid = DebugDatabase.NULLREF
            else:
                src_uid = cls._sourceUID[parent]
            uid = len(cls._optimisedUID)
            cls._optimisedUID[op] = (uid, src_uid)
            ofm_shape = numeric_util.full_shape(3, op.outputs[0].shape, 1)
            cls._optimisedTable.append(
                [uid, src_uid, op.type, op.kernel.width, op.kernel.height, ofm_shape[-2], ofm_shape[-3], ofm_shape[-1]]
            )

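    # Register a command stream and return its unique id; the id doubles as the
    # cmdstream_id passed to add_command().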
    @classmethod
    def add_stream(cls, key):
        if key not in cls._streamUID:
            uid = len(cls._streamUID)
            cls._streamUID[key] = uid
        return cls._streamUID[key]

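    # Record where a previously registered command stream ends up in the output
    # file; the stream must have been added with add_stream() first.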
    @classmethod
    def set_stream_offset(cls, key, file_offset):
        assert key in cls._streamUID
        uid = cls._streamUID[key]
        cls._streamTable.append([uid, file_offset])

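    # Record a single command: its offset within the given command stream and
    # the optimised operator that generated it.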
    @classmethod
    def add_command(cls, stream_id, offset, op: Operation):
        assert stream_id < len(cls._streamUID)
        assert op in cls._optimisedUID, "Optimised operator must exist before code generation"
        optimised_id = cls._optimisedUID[op][0]
        cls._queueTable.append([offset, stream_id, optimised_id])

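    # Serialise one table as CSV text wrapped in a CDATA section of a <table>
    # element under the given XML root.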
    @classmethod
    def _write_table(cls, root, name, headers, table):
        # Convert table to CSV
        out = io.StringIO()
        writer = csv.writer(out, quoting=csv.QUOTE_NONNUMERIC)
        writer.writerow(headers)
        writer.writerows(table)

        # Package table into XML output
        table = xml.SubElement(root, "table", {"name": name})
        table.text = xml.CDATA(out.getvalue())

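    # Assemble the XML debug database, embedding all four tables, and write it
    # to file_path.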
    @classmethod
    def write(cls, file_path, input_file, output_file):
        root = xml.Element("debug", {"source": input_file, "optimised": output_file})

        cls._write_table(root, cls.SOURCE_TABLE, cls._sourceHeaders, cls._sourceTable)
        cls._write_table(root, cls.OPTIMISED_TABLE, cls._optimisedHeaders, cls._optimisedTable)
        cls._write_table(root, cls.QUEUE_TABLE, cls._queueHeaders, cls._queueTable)
        cls._write_table(root, cls.STREAM_TABLE, cls._streamHeaders, cls._streamTable)

        xml.ElementTree(root).write(file_path, encoding="utf-8", xml_declaration=True, pretty_print=True)