#!/usr/bin/env python3
# SPDX-FileCopyrightText: Copyright 2021-2023 Arm Limited and/or its affiliates <open-source-office@arm.com>
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Script to set up default resources for ML Embedded Evaluation Kit
"""
import dataclasses
import errno
import fnmatch
import json
import logging
import os
import re
import shutil
import subprocess
import sys
import typing
import urllib.request
import venv
from argparse import ArgumentParser
from argparse import ArgumentTypeError
from collections import namedtuple
from dataclasses import dataclass
from pathlib import Path
from urllib.error import URLError

from scripts.py.check_update_resources_downloaded import get_md5sum_for_file

# Supported versions of Python and Vela

VELA_VERSION = "3.10.0"
py3_version_minimum = (3, 10)

# Valid NPU configurations:
valid_npu_config_names = [
    "ethos-u55-32",
    "ethos-u55-64",
    "ethos-u55-128",
    "ethos-u55-256",
    "ethos-u65-256",
    "ethos-u65-512",
]

# Default NPU configurations (these are always run when the models are optimised)
default_npu_config_names = [valid_npu_config_names[2], valid_npu_config_names[4]]

# NPU config named tuple
NPUConfig = namedtuple(
    "NPUConfig",
    [
        "config_name",
        "memory_mode",
        "system_config",
        "ethos_u_npu_id",
        "ethos_u_config_id",
        "arena_cache_size",
    ],
)


@dataclass(frozen=True)
class UseCaseResource:
    """
    Represent a use case's resource
    """
    name: str
    url: str
    sub_folder: typing.Optional[str] = None


@dataclass(frozen=True)
class UseCase:
    """
    Represent a use case
    """
    name: str
    url_prefix: typing.List[str]
    resources: typing.List[UseCaseResource]


# The internal SRAM size for the Corstone-300 implementation on MPS3 is specified by AN552.
# The internal SRAM size for the Corstone-310 implementation on MPS3, specified by AN555,
# is 4MB, but we are content with the 2MB specified below.
MPS3_MAX_SRAM_SZ = 2 * 1024 * 1024  # 2 MiB (2 banks of 1 MiB each)


def load_use_case_resources(current_file_dir: Path) -> typing.List[UseCase]:
    """
    Load the use case metadata resources

    Parameters
    ----------
    current_file_dir:   Directory of the current script

    Returns
    -------
    A list of UseCase objects parsed from use_case_resources.json
    """

    resources_path = current_file_dir / "scripts" / "py" / "use_case_resources.json"
    with open(resources_path, encoding="utf8") as f:
        use_cases = json.load(f)
        return [
            UseCase(
                name=u["name"],
                url_prefix=u["url_prefix"],
                resources=[UseCaseResource(**r) for r in u["resources"]],
            )
            for u in use_cases
        ]
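# Note: the example below is illustrative only (placeholder names and URLs); it is not
# the actual content of scripts/py/use_case_resources.json. It sketches the JSON shape
# that load_use_case_resources() expects, mirroring the UseCase/UseCaseResource
# dataclasses above:
# [
#     {
#         "name": "some_use_case",
#         "url_prefix": ["https://example.com/some_use_case/"],
#         "resources": [
#             {"name": "model.tflite", "url": "{url_prefix:0}model.tflite"},
#             {"name": "input.bin", "url": "{url_prefix:0}input.bin", "sub_folder": "samples"}
#         ]
#     }
# ]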


def call_command(command: str, verbose: bool = True) -> str:
    """
    Helper function that calls subprocess and returns the output.

    Parameters:
    ----------
    command (string):  Specifies the command to run.
    """
    if verbose:
        logging.info(command)
    try:
        proc = subprocess.run(
            command, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True
        )
        log = proc.stdout.decode("utf-8")
        logging.info(log)
        return log
    except subprocess.CalledProcessError as err:
        log = err.stdout.decode("utf-8")
        logging.error(log)
        raise err
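# For example, call_command("echo hello") logs the command, runs it through the shell,
# then logs and returns its combined stdout/stderr ("hello\n" on a POSIX shell); a
# non-zero exit status raises subprocess.CalledProcessError instead.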


def get_default_npu_config_from_name(
        config_name: str, arena_cache_size: int = 0
) -> typing.Optional[NPUConfig]:
    """
    Gets the default NPU configuration (as an NPUConfig named tuple) for the
    given `config_name` string.

    Parameters:
    ----------
    config_name (str):      Ethos-U NPU configuration from valid_npu_config_names

    arena_cache_size (int): Specifies arena cache size in bytes. If a value
                            greater than 0 is provided, this will be taken
                            as the cache size. If 0, the default values, as per
                            the NPU config requirements, are used.

    Returns:
    -------
    NPUConfig: An NPU config named tuple populated with defaults for the given
               config name
    """
    if config_name not in valid_npu_config_names:
        raise ValueError(
            f"""
            Invalid Ethos-U NPU configuration.
            Select one from {valid_npu_config_names}.
            """
        )

    strings_ids = ["ethos-u55-", "ethos-u65-"]
    processor_ids = ["U55", "U65"]
    prefix_ids = ["H", "Y"]
    memory_modes = ["Shared_Sram", "Dedicated_Sram"]
    system_configs = ["Ethos_U55_High_End_Embedded", "Ethos_U65_High_End"]
    memory_modes_arena = {
        # For shared SRAM memory mode, we use the MPS3 SRAM size by default.
        "Shared_Sram": MPS3_MAX_SRAM_SZ if arena_cache_size <= 0 else arena_cache_size,
        # For dedicated SRAM memory mode, we do not override the arena size. This is
        # expected to be defined in the Vela configuration file instead.
        "Dedicated_Sram": None if arena_cache_size <= 0 else arena_cache_size,
    }

    for i, string_id in enumerate(strings_ids):
        if config_name.startswith(string_id):
            npu_config_id = config_name.replace(string_id, prefix_ids[i])
            return NPUConfig(
                config_name=config_name,
                memory_mode=memory_modes[i],
                system_config=system_configs[i],
                ethos_u_npu_id=processor_ids[i],
                ethos_u_config_id=npu_config_id,
                arena_cache_size=memory_modes_arena[memory_modes[i]],
            )

    return None
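# Example (derived from the mappings above, shown for illustration):
#   get_default_npu_config_from_name("ethos-u55-128") returns
#   NPUConfig(config_name="ethos-u55-128", memory_mode="Shared_Sram",
#             system_config="Ethos_U55_High_End_Embedded", ethos_u_npu_id="U55",
#             ethos_u_config_id="H128", arena_cache_size=2097152)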


def remove_tree_dir(dir_path: Path):
    """
    Delete and re-create a directory

    Parameters
    ----------
    dir_path : The directory path
    """
    try:
        # Remove the full directory.
        shutil.rmtree(dir_path)
        # Re-create an empty one.
        os.mkdir(dir_path)
    except OSError:
        logging.error("Failed to delete %s.", dir_path)


def initialize_use_case_resources_directory(
        use_case: UseCase,
        metadata: typing.Dict,
        download_dir: Path,
        check_clean_folder: bool,
        setup_script_hash_verified: bool,
):
    """
    Initialize the resources_downloaded directory for a use case

    @param use_case:                    The use case
    @param metadata:                    The metadata
    @param download_dir:                The parent directory
    @param check_clean_folder:          Whether to clean the folder
    @param setup_script_hash_verified:  Whether the hash of this script is verified
    """
    try:
        # Does the usecase_name download dir exist?
        (download_dir / use_case.name).mkdir()
    except OSError as err:
        if err.errno == errno.EEXIST:
            # The usecase_name download dir exists.
            if check_clean_folder and not setup_script_hash_verified:
                for idx, metadata_uc_url_prefix in enumerate(
                        [
                            f
                            for f in metadata["resources_info"]
                            if f["name"] == use_case.name
                        ][0]["url_prefix"]
                ):
                    if metadata_uc_url_prefix != use_case.url_prefix[idx]:
                        logging.info("Removing %s resources.", use_case.name)
                        remove_tree_dir(download_dir / use_case.name)
                        break
        else:
            logging.error("Error creating %s directory.", use_case.name)
            raise


def download_file(url: str, dest: Path):
    """
    Download a file

    @param url:     The URL of the file to download
    @param dest:    The destination of the downloaded file
    """
    try:
        with urllib.request.urlopen(url) as g:
            with open(dest, "b+w") as f:
                f.write(g.read())
                logging.info("- Downloaded %s to %s.", url, dest)
    except URLError:
        logging.error("URLError while downloading %s.", url)
        raise


def download_resources(
        use_case: UseCase,
        metadata: typing.Dict,
        download_dir: Path,
        check_clean_folder: bool,
        setup_script_hash_verified: bool,
):
    """
    Download the resources associated with a use case

    @param use_case:                    The use case
    @param metadata:                    The metadata
    @param download_dir:                The parent directory
    @param check_clean_folder:          Whether to clean the folder
    @param setup_script_hash_verified:  Whether the hash is already verified
    """
    initialize_use_case_resources_directory(
        use_case,
        metadata,
        download_dir,
        check_clean_folder,
        setup_script_hash_verified
    )

    reg_expr_str = r"{url_prefix:(.*\d)}"
    reg_expr_pattern = re.compile(reg_expr_str)
    for res in use_case.resources:
        res_name = res.name
        url_prefix_idx = int(reg_expr_pattern.search(res.url).group(1))
        res_url = use_case.url_prefix[url_prefix_idx] + re.sub(
            reg_expr_str, "", res.url
        )

        sub_folder = ""
        if res.sub_folder is not None:
            try:
                # Does the usecase_name/sub_folder download dir exist?
                (download_dir / use_case.name / res.sub_folder).mkdir()
            except OSError as err:
                if err.errno != errno.EEXIST:
                    logging.error(
                        "Error creating %s/%s directory.",
                        use_case.name,
                        res.sub_folder
                    )
                    raise
            sub_folder = res.sub_folder

        res_dst = download_dir / use_case.name / sub_folder / res_name

        if res_dst.is_file():
            logging.info("File %s exists, skipping download.", res_dst)
        else:
            download_file(res_url, res_dst)
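# To illustrate the URL template handling above (placeholder values, not a real resource):
# a resource whose url is "{url_prefix:0}model.tflite", belonging to a use case whose
# url_prefix[0] is "https://example.com/some_use_case/", resolves to
# "https://example.com/some_use_case/model.tflite" and is saved to
# <download_dir>/<use_case.name>/<sub_folder>/model.tflite (or directly in the use case
# directory when no sub_folder is given).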


def run_vela(
        config: NPUConfig,
        env_activate_cmd: str,
        model: Path,
        config_file: Path,
        output_dir: Path
) -> bool:
    """
    Run vela on the specified model
    @param config:              The NPU configuration
    @param env_activate_cmd:    The Python venv activation command
    @param model:               The model
    @param config_file:         The vela config file
    @param output_dir:          The output directory
    @return:                    True if the optimisation was skipped, False otherwise
    """
    # The model name after compiling with Vela is the initial model name plus a
    # "_vela" suffix.
    vela_optimised_model_path = model.parent / (model.stem + "_vela.tflite")

    vela_command_arena_cache_size = ""

    if config.arena_cache_size:
        vela_command_arena_cache_size = (
            f"--arena-cache-size={config.arena_cache_size}"
        )

    vela_command = (
        f"{env_activate_cmd} && vela {model} "
        + f"--accelerator-config={config.config_name} "
        + "--optimise Performance "
        + f"--config {config_file} "
        + f"--memory-mode={config.memory_mode} "
        + f"--system-config={config.system_config} "
        + f"--output-dir={output_dir} "
        + f"{vela_command_arena_cache_size}"
    )

    # We want the name to include the configuration suffix. For example: vela_H128,
    # vela_Y512 etc.
    new_suffix = "_vela_" + config.ethos_u_config_id + ".tflite"
    new_vela_optimised_model_path = model.parent / (model.stem + new_suffix)

    skip_optimisation = new_vela_optimised_model_path.is_file()

    if skip_optimisation:
        logging.info(
            "File %s exists, skipping optimisation.",
            new_vela_optimised_model_path
        )
    else:
        call_command(vela_command)

        # Rename the default Vela output model.
        vela_optimised_model_path.rename(new_vela_optimised_model_path)
        logging.info(
            "Renaming %s to %s.",
            vela_optimised_model_path,
            new_vela_optimised_model_path
        )

    return skip_optimisation
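# For example, optimising kws_micronet_m.tflite with run_vela() for the "ethos-u55-128"
# configuration (ethos_u_config_id "H128") leaves the original file untouched and
# produces kws_micronet_m_vela_H128.tflite in the same directory.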


def run_vela_on_all_models(
        current_file_dir: Path,
        download_dir: Path,
        env_activate_cmd: str,
        arena_cache_size: int,
        npu_config_names: typing.List[str]
):
    """
    Run vela on downloaded models for the specified NPU configurations

    @param current_file_dir:    Path to the current directory
    @param download_dir:        Path to the downloaded resources directory
    @param env_activate_cmd:    Command used to activate Python venv
    @param arena_cache_size:    The arena cache size
    @param npu_config_names:    Names of NPU configurations for which to run Vela
    """
    config_file = current_file_dir / "scripts" / "vela" / "default_vela.ini"
    models = [
        Path(dirpath) / f
        for dirpath, dirnames, files in os.walk(download_dir)
        for f in fnmatch.filter(files, "*.tflite")
        if "vela" not in f
    ]

    # Get the NPU config named tuple for each config name in the list:
    npu_configs = [
        get_default_npu_config_from_name(name, arena_cache_size)
        for name in npu_config_names
    ]

    logging.info("All models will be optimised for these configs:")
    for config in npu_configs:
        logging.info(config)

    optimisation_skipped = False

    for model in models:
        for config in npu_configs:
            optimisation_skipped = run_vela(
                config,
                env_activate_cmd,
                model,
                config_file,
                output_dir=model.parent
            ) or optimisation_skipped

    # If any optimisation was skipped, show how to regenerate:
    if optimisation_skipped:
        logging.warning("One or more optimisations were skipped.")
        logging.warning(
            "To optimise all the models, please remove the directory %s.",
            download_dir
        )


def initialize_resources_directory(
        download_dir: Path,
        check_clean_folder: bool,
        metadata_file_path: Path,
        setup_script_hash: str
) -> typing.Tuple[typing.Dict, bool]:
    """
    Sets up the resources_downloaded directory and checks whether this script
    has been modified since the last time resources were downloaded

    @param download_dir:        Path to the resources_downloaded directory
    @param check_clean_folder:  Determines whether to clean the downloads directory
    @param metadata_file_path:  Path to the metadata file
    @param setup_script_hash:   The md5 hash of this script
    @return:                    The metadata and a boolean to indicate whether this
                                script has changed since it was last run
    """
    metadata_dict = {}
    setup_script_hash_verified = False

    if download_dir.is_dir():
        logging.info("'resources_downloaded' directory exists.")
        # Check and clean?
        if check_clean_folder and metadata_file_path.is_file():
            with open(metadata_file_path, encoding="utf8") as metadata_file:
                metadata_dict = json.load(metadata_file)

            vela_in_metadata = metadata_dict["ethosu_vela_version"]
            if vela_in_metadata != VELA_VERSION:
                # All the resources need to be removed and regenerated when the
                # Vela version has changed.
                logging.info(
                    "Vela version in metadata is %s, current %s."
                    " Removing the resources to re-download them.",
                    vela_in_metadata,
                    VELA_VERSION
                )
                remove_tree_dir(download_dir)
                metadata_dict = {}
            else:
                # Check if set_up_default_resources.py has changed since the last setup
                setup_script_hash_verified = (
                    metadata_dict.get("set_up_script_md5sum")
                    == setup_script_hash
                )
    else:
        download_dir.mkdir()

    return metadata_dict, setup_script_hash_verified


def set_up_python_venv(
        download_dir: Path,
        additional_requirements_file: Path = ""
):
    """
    Set up the Python virtual environment with which to set up the resources

    @param download_dir:                    Path to the resources_downloaded directory
    @param additional_requirements_file:    Optional additional requirements file
    @return:                                Path to the venv directory and the command
                                            used to activate it
    """
    env_dirname = "env"
    env_path = download_dir / env_dirname

    venv_builder = venv.EnvBuilder(with_pip=True, upgrade_deps=True)
    venv_context = venv_builder.ensure_directories(env_dir=env_path)

    env_python = Path(venv_context.env_exe)

    if not env_python.is_file():
        # Create the virtual environment using the current interpreter's venv
        # (not necessarily the system's Python3).
        venv_builder.create(env_dir=env_path)

    if sys.platform == "win32":
        env_activate = Path(f"{venv_context.bin_path}/activate.bat")
        env_activate_cmd = str(env_activate)
    else:
        env_activate = Path(f"{venv_context.bin_path}/activate")
        env_activate_cmd = f". {env_activate}"

    if not env_activate.is_file():
        venv_builder.install_scripts(venv_context, venv_context.bin_path)

    # Install additional requirements first, if a valid file has been provided.
    if additional_requirements_file and os.path.isfile(additional_requirements_file):
        command = f"{env_python} -m pip install -r {additional_requirements_file}"
        call_command(command)

    # Make sure all the main requirements are present.
    requirements = [f"ethos-u-vela=={VELA_VERSION}"]
    command = f"{env_python} -m pip freeze"
    packages = call_command(command)
    for req in requirements:
        if req not in packages:
            command = f"{env_python} -m pip install {req}"
            call_command(command)

    return env_path, env_activate_cmd
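# On a Linux host, for example, the pair returned above would look like
# (<download_dir>/env, ". <download_dir>/env/bin/activate"); on Windows the activate
# command is the path to the venv's activate.bat script instead.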


def update_metadata(
        metadata_dict: typing.Dict,
        setup_script_hash: str,
        json_uc_res: typing.List[UseCase],
        metadata_file_path: Path
):
    """
    Update the metadata file

    @param metadata_dict:       The metadata dictionary to update
    @param setup_script_hash:   The setup script hash
    @param json_uc_res:         The use case resources metadata
    @param metadata_file_path:  The metadata file path
    """
    metadata_dict["ethosu_vela_version"] = VELA_VERSION
    metadata_dict["set_up_script_md5sum"] = setup_script_hash.strip("\n")
    metadata_dict["resources_info"] = [dataclasses.asdict(uc) for uc in json_uc_res]

    with open(metadata_file_path, "w", encoding="utf8") as metadata_file:
        json.dump(metadata_dict, metadata_file, indent=4)
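# The resources_downloaded_metadata.json file written above has this general shape
# (placeholder values shown for illustration):
# {
#     "ethosu_vela_version": "3.10.0",
#     "set_up_script_md5sum": "<md5 of this script>",
#     "resources_info": [
#         {"name": "...", "url_prefix": ["..."], "resources": [...]}
#     ]
# }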


def set_up_resources(
        run_vela_on_models: bool = False,
        additional_npu_config_names: tuple = (),
        arena_cache_size: int = 0,
        check_clean_folder: bool = False,
        additional_requirements_file: Path = ""
) -> Path:
    """
    Download the default resources and, optionally, optimise the downloaded
    models with Vela.

    Parameters:
    ----------
    run_vela_on_models (bool): Specifies whether to run Vela on the downloaded models.
    additional_npu_config_names (list): List of strings of Ethos-U NPU configs.
    arena_cache_size (int): Specifies arena cache size in bytes. If a value
                            greater than 0 is provided, this will be taken
                            as the cache size. If 0, the default values, as per
                            the NPU config requirements, are used.
    check_clean_folder (bool): Indicates whether the resources folder needs to
                               be checked for updates and cleaned.
    additional_requirements_file (str): Path to a requirements.txt file if
                                        additional packages need to be
                                        installed.

    Returns
    -------

    Path to the root of the virtual environment.
    """
    # Paths.
    current_file_dir = Path(__file__).parent.resolve()
    download_dir = current_file_dir / "resources_downloaded"
    metadata_file_path = download_dir / "resources_downloaded_metadata.json"

    # Is the Python minimum requirement matched?
    if sys.version_info < py3_version_minimum:
        raise RuntimeError(
            f"ERROR: Python {'.'.join(str(i) for i in py3_version_minimum)}+ is required,"
            f" please see the documentation on how to update it."
        )
    logging.info("Using Python version: %s", sys.version_info)

    json_uc_res = load_use_case_resources(current_file_dir)
    setup_script_hash = get_md5sum_for_file(Path(__file__).resolve())

    metadata_dict, setup_script_hash_verified = initialize_resources_directory(
        download_dir,
        check_clean_folder,
        metadata_file_path,
        setup_script_hash
    )

    env_path, env_activate = set_up_python_venv(
        download_dir,
        additional_requirements_file
    )

    # 2. Download models
    logging.info("Downloading resources.")
    for use_case in json_uc_res:
        download_resources(
            use_case,
            metadata_dict,
            download_dir,
            check_clean_folder,
            setup_script_hash_verified
        )

    # 3. Run vela on models in resources_downloaded
    # New models will have the same name with '_vela' appended.
    # For example:
    # original model: kws_micronet_m.tflite
    # after vela model: kws_micronet_m_vela_H128.tflite
    #
    # Note: to avoid running Vela twice on the same model, it is assumed that
    # downloaded model names do not contain the word 'vela'.
    if run_vela_on_models is True:
        # Consolidate all config names while discarding duplicates:
        run_vela_on_all_models(
            current_file_dir,
            download_dir,
            env_activate,
            arena_cache_size,
            npu_config_names=list(set(default_npu_config_names + list(additional_npu_config_names)))
        )

    # 4. Collect and write metadata
    logging.info("Collecting and writing metadata.")
    update_metadata(
        metadata_dict,
        setup_script_hash.strip("\n"),
        json_uc_res,
        metadata_file_path
    )

    return env_path


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument(
        "--skip-vela",
        help="Do not run Vela optimizer on downloaded models.",
        action="store_true",
    )
    parser.add_argument(
        "--additional-ethos-u-config-name",
        help=f"""Additional (non-default) configurations for Vela:
                        {valid_npu_config_names}""",
        default=[],
        action="append",
    )
    parser.add_argument(
        "--arena-cache-size",
        help="Arena cache size in bytes (if overriding the defaults)",
        type=int,
        default=0,
    )
    parser.add_argument(
        "--clean",
        help="Clean the directory and optimise the downloaded resources",
        action="store_true",
    )
    parser.add_argument(
        "--requirements-file",
        help="Path to a requirements.txt file to install additional packages",
        type=str,
        default=Path(__file__).parent.resolve() / 'scripts' / 'py' / 'requirements.txt'
    )

    args = parser.parse_args()

    if args.arena_cache_size < 0:
        raise ArgumentTypeError("Arena cache size cannot be less than 0")

    if not Path(args.requirements_file).is_file():
        raise ArgumentTypeError(f"Invalid requirements file: {args.requirements_file}")

    logging.basicConfig(filename="log_build_default.log", level=logging.DEBUG)
    logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))

    set_up_resources(
        not args.skip_vela,
        args.additional_ethos_u_config_name,
        args.arena_cache_size,
        args.clean,
        args.requirements_file,
    )