adding metrics for computing average compiled size
@@ -6,6 +6,8 @@ import sys
 import time
 import subprocess
 import shlex
+import glob
+import metrics
 from pathlib import Path
 from multiprocessing import Pool

@@ -111,18 +113,18 @@ def cmake_board(board, build_args, build_flags_on):
             ])
             ret[0 if rcmd.returncode == 0 else 1] += 1
     else:
-        rcmd = run_cmd([
-            'cmake', 'examples', '-B', build_dir, '-GNinja',
-            f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', '-DLINKERMAP_OPTION=-q -f tinyusb/src',
-            *build_args, *build_flags
-        ])
+        rcmd = run_cmd(['cmake', 'examples', '-B', build_dir, '-GNinja',
+                        f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', '-DLINKERMAP_OPTION=-q -f tinyusb/src',
+                        *build_args, *build_flags])
         if rcmd.returncode == 0:
-            cmd = [
-                "cmake", "--build", build_dir,
-                '--parallel', str(parallel_jobs)
-            ]
+            cmd = ["cmake", "--build", build_dir, '--parallel', str(parallel_jobs)]
             rcmd = run_cmd(cmd)
-            ret[0 if rcmd.returncode == 0 else 1] += 1
+            if rcmd.returncode == 0:
+                ret[0] += 1
+                rcmd = run_cmd(["cmake", "--build", build_dir, '--target', 'tinyusb_examples_metrics'])
+                # print(rcmd.stdout.decode("utf-8"))
+            else:
+                ret[1] += 1

     example = 'all'
     print_build_result(board, example, 0 if ret[1] == 0 else 1, time.monotonic() - start_time)
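The metrics hook rides on the normal CMake flow: configure with -DLINKERMAP_OPTION so only objects under tinyusb/src are counted, build the examples, then build the extra tinyusb_examples_metrics target. A standalone sketch of that sequence, assuming a Ninja toolchain is set up; the board name and build directory layout are placeholders, not taken from the diff:

import subprocess

def build_with_metrics(board: str) -> int:
    # Hypothetical layout; the real script computes its own build_dir.
    build_dir = f'cmake-build/cmake-build-{board}'
    configure = subprocess.run(['cmake', 'examples', '-B', build_dir, '-GNinja',
                                f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel',
                                '-DLINKERMAP_OPTION=-q -f tinyusb/src'])
    if configure.returncode != 0:
        return configure.returncode
    build = subprocess.run(['cmake', '--build', build_dir, '--parallel'])
    if build.returncode != 0:
        return build.returncode
    # Extra target introduced by this commit; it generates the compiled-size metrics.
    return subprocess.run(['cmake', '--build', build_dir,
                           '--target', 'tinyusb_examples_metrics']).returncode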
@@ -195,8 +197,18 @@ def build_boards_list(boards, build_defines, build_system, build_flags_on):
     return ret


-def build_family(family, build_defines, build_system, build_flags_on, one_per_family, boards):
-    skip_ci = ['pico_sdk']
+def get_family_boards(family, one_per_family, boards):
+    """Get list of boards for a family.
+
+    Args:
+        family: Family name
+        one_per_family: If True, return only one random board
+        boards: List of boards already specified via -b flag
+
+    Returns:
+        List of board names
+    """
+    skip_ci = []
     if os.getenv('GITHUB_ACTIONS') or os.getenv('CIRCLECI'):
         skip_ci_file = Path(f"hw/bsp/{family}/skip_ci.txt")
         if skip_ci_file.exists():
@@ -207,17 +219,15 @@ def build_family(family, build_defines, build_system, build_flags_on, one_per_fa
             all_boards.append(entry.name)
     all_boards.sort()

-    ret = [0, 0, 0]
     # If only-one flag is set, select one random board
     if one_per_family:
         for b in boards:
             # skip if -b already specify one in this family
             if find_family(b) == family:
-                return ret
+                return []
         all_boards = [random.choice(all_boards)]

-    ret = build_boards_list(all_boards, build_defines, build_system, build_flags_on)
-    return ret
+    return all_boards


 # -----------------------------
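get_family_boards() now only selects boards and leaves the building to build_boards_list(). A usage sketch of the one_per_family behaviour, assuming the functions from this script are in scope; the family and board names are examples only, not taken from the diff:

# If a board from this family was already requested via -b, the family
# contributes nothing extra; otherwise one random board is picked from it.
requested = ['raspberry_pi_pico']                      # e.g. given on the command line with -b
extra = get_family_boards('rp2040', True, requested)   # -> [], family already covered by -b
extra = get_family_boards('stm32f4', True, [])         # -> one randomly chosen stm32f4 board
all_targets = requested + extra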
@@ -258,9 +268,8 @@ def main():
     print(build_separator)
     print(build_format.format('Board', 'Example', '\033[39mResult\033[0m', 'Time'))
     total_time = time.monotonic()
-    result = [0, 0, 0]

-    # build families
+    # get all families
     all_families = []
     if 'all' in families:
         for entry in os.scandir("hw/bsp"):
@@ -270,23 +279,19 @@ def main():
         all_families = list(families)
     all_families.sort()

     # succeeded, failed, skipped
+    # get boards from families and append to boards list
+    all_boards = list(boards)
     for f in all_families:
-        r = build_family(f, build_defines, build_system, build_flags_on, one_per_family, boards)
-        result[0] += r[0]
-        result[1] += r[1]
-        result[2] += r[2]
+        all_boards.extend(get_family_boards(f, one_per_family, boards))

-    # build boards
-    r = build_boards_list(boards, build_defines, build_system, build_flags_on)
-    result[0] += r[0]
-    result[1] += r[1]
-    result[2] += r[2]
+    # build all boards
+    result = build_boards_list(all_boards, build_defines, build_system, build_flags_on)

     total_time = time.monotonic() - total_time
     print(build_separator)
     print(f"Build Summary: {result[0]} {STATUS_OK}, {result[1]} {STATUS_FAILED} and took {total_time:.2f}s")
     print(build_separator)

     return result[1]

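With this change, result comes straight from build_boards_list(), which per the counters above holds [succeeded, failed, skipped], and main() returns the failure count. A minimal sketch, assuming the script keeps the conventional entry-point wrapper:

if __name__ == '__main__':
    sys.exit(main())  # exit status = number of failed builds, so CI fails on any failure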
@@ -15,7 +15,7 @@ deps_mandatory = {
                 '159e31b689577dbf69cf0683bbaffbd71fa5ee10',
                 'all'],
     'tools/linkermap': ['https://github.com/hathach/linkermap.git',
-                        'ac1228d5bbde1e54cb2e17e928662094ae19c51d',
+                        '75d9d2c9e0f83297ddbc0da899f6cc0ab21076f0',
                         'all'],
     'tools/uf2': ['https://github.com/microsoft/uf2.git',
                   'c594542b2faa01cc33a2b97c9fbebc38549df80a',

@@ -2,6 +2,7 @@
 """Calculate average size from multiple linker map files."""

 import argparse
+import glob
 import sys
 import os

@@ -10,6 +11,24 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'linkermap'))
 import linkermap


+def expand_files(file_patterns):
+    """Expand file patterns (globs) to list of files.
+
+    Args:
+        file_patterns: List of file paths or glob patterns
+
+    Returns:
+        List of expanded file paths
+    """
+    expanded = []
+    for pattern in file_patterns:
+        if '*' in pattern or '?' in pattern:
+            expanded.extend(glob.glob(pattern))
+        else:
+            expanded.append(pattern)
+    return expanded
+
+
 def combine_maps(map_files, filters=None):
     """Combine multiple map files into a list of json_data.

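expand_files() only touches arguments that contain wildcard characters, so explicit paths pass through untouched. A quick usage sketch with expand_files() from this file in scope; the map paths are hypothetical:

expand_files(['cmake-build/*/example.map'])   # wildcard: expanded with glob.glob()
expand_files(['device.map', 'host.map'])      # plain paths: returned unchanged
expand_files(['build/?.map', 'extra.map'])    # a mix of both is fine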
@@ -109,25 +128,37 @@ def compute_avg(all_json_data):
     return json_average


-def main():
+def main(argv=None):
     parser = argparse.ArgumentParser(description='Calculate average size from linker map files')
-    parser.add_argument('files', nargs='+', help='Path to map file(s)')
+    parser.add_argument('files', nargs='+', help='Path to map file(s) or glob pattern(s)')
     parser.add_argument('-f', '--filter', dest='filters', action='append', default=[],
                         help='Only include object files whose path contains this substring (can be repeated)')
     parser.add_argument('-o', '--out', dest='out', default='metrics',
                         help='Output path basename for JSON and Markdown files (default: metrics)')
-    args = parser.parse_args()
+    parser.add_argument('-j', '--json', dest='json_out', action='store_true',
+                        help='Write JSON output file')
+    parser.add_argument('-m', '--markdown', dest='markdown_out', action='store_true',
+                        help='Write Markdown output file')
+    parser.add_argument('-q', '--quiet', dest='quiet', action='store_true',
+                        help='Suppress summary output')
+    args = parser.parse_args(argv)

-    all_json_data = combine_maps(args.files, args.filters)
+    # Expand glob patterns
+    map_files = expand_files(args.files)
+
+    all_json_data = combine_maps(map_files, args.filters)
     json_average = compute_avg(all_json_data)

     if json_average is None:
         print("No valid map files found", file=sys.stderr)
         sys.exit(1)

-    linkermap.print_summary(json_average, False)
-    linkermap.write_json(json_average, args.out + '.json')
-    linkermap.write_markdown(json_average, args.out + '.md')
+    if not args.quiet:
+        linkermap.print_summary(json_average, False)
+    if args.json_out:
+        linkermap.write_json(json_average, args.out + '.json')
+    if args.markdown_out:
+        linkermap.write_markdown(json_average, args.out + '.md')


 if __name__ == '__main__':
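Taken together, the new flags make the report opt-in: nothing is written unless -j/--json or -m/--markdown is passed, and -q/--quiet drops the console summary, while the argv parameter lets other tooling call the parser directly. A usage sketch, assuming the script is the tools metrics module imported by the build script above; the map paths are placeholders:

#   python metrics.py 'cmake-build/*/example.map' -f tinyusb/src -j -m -o metrics
#
# or programmatically (the quoted glob is expanded by expand_files(), not the shell):
import metrics
metrics.main(['cmake-build/*/example.map', '-f', 'tinyusb/src', '--json', '--quiet'])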