adding metrics for computing average compiled size
@@ -5,7 +5,24 @@ include(${CMAKE_CURRENT_SOURCE_DIR}/../hw/bsp/family_support.cmake)
 project(tinyusb_examples C CXX ASM)

-add_subdirectory(device)
-add_subdirectory(dual)
-add_subdirectory(host)
-add_subdirectory(typec)
+set(EXAMPLES_LIST
+  device
+  dual
+  host
+  typec
+  )
+
+set(MAPJSON_PATTERNS "")
+
+foreach (example ${EXAMPLES_LIST})
+  add_subdirectory(${example})
+  list(APPEND MAPJSON_PATTERNS "${CMAKE_BINARY_DIR}/${example}/*/*.map.json")
+endforeach ()
+
+# Post-build: run metrics.py on all map.json files
+add_custom_target(tinyusb_examples_metrics
+  COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/../tools/metrics.py
+    -f tinyusb/src -j -o ${CMAKE_BINARY_DIR}/metrics
+    ${MAPJSON_PATTERNS}
+  COMMENT "Generating average code size metrics"
+  VERBATIM
+  )
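Note: because the custom target is VERBATIM, the *.map.json glob patterns collected in MAPJSON_PATTERNS reach metrics.py unexpanded and the script expands them itself (see the expand_files() hunk further down). A minimal sketch of an equivalent invocation from Python; the build directory and layout below are placeholders, not taken from the diff:

import subprocess
import sys

# Assumed build directory layout; the real path depends on how CMake was invoked.
build_dir = 'cmake-build/rp2040-pico'
patterns = [f'{build_dir}/{ex}/*/*.map.json' for ex in ('device', 'dual', 'host', 'typec')]

subprocess.run(
    [sys.executable, 'tools/metrics.py',
     '-f', 'tinyusb/src',            # count only objects under tinyusb/src
     '-j',                           # write <out>.json
     '-o', f'{build_dir}/metrics',   # output basename
     *patterns],                     # literal globs; metrics.py expands them itself
    check=True,
)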
@@ -232,13 +232,13 @@ function(family_add_linkermap TARGET)
   endif ()

   add_custom_target(${TARGET}-linkermap
-    COMMAND python ${LINKERMAP_PY} -j -m ${LINKERMAP_OPTION_LIST} $<TARGET_FILE:${TARGET}>.map
+    COMMAND python ${LINKERMAP_PY} -j ${LINKERMAP_OPTION_LIST} $<TARGET_FILE:${TARGET}>.map
     VERBATIM
     )

   # post build
   add_custom_command(TARGET ${TARGET} POST_BUILD
-    COMMAND python ${LINKERMAP_PY} -j -m ${LINKERMAP_OPTION_LIST} $<TARGET_FILE:${TARGET}>.map
+    COMMAND python ${LINKERMAP_PY} -j ${LINKERMAP_OPTION_LIST} $<TARGET_FILE:${TARGET}>.map
     VERBATIM)
 endfunction()

@@ -6,6 +6,8 @@ import sys
 import time
 import subprocess
 import shlex
+import glob
+import metrics
 from pathlib import Path
 from multiprocessing import Pool

@@ -111,18 +113,18 @@ def cmake_board(board, build_args, build_flags_on):
         ])
         ret[0 if rcmd.returncode == 0 else 1] += 1
     else:
-        rcmd = run_cmd([
-            'cmake', 'examples', '-B', build_dir, '-GNinja',
-            f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', '-DLINKERMAP_OPTION=-q -f tinyusb/src',
-            *build_args, *build_flags
-        ])
+        rcmd = run_cmd(['cmake', 'examples', '-B', build_dir, '-GNinja',
+                        f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', '-DLINKERMAP_OPTION=-q -f tinyusb/src',
+                        *build_args, *build_flags])
         if rcmd.returncode == 0:
-            cmd = [
-                "cmake", "--build", build_dir,
-                '--parallel', str(parallel_jobs)
-            ]
+            cmd = ["cmake", "--build", build_dir, '--parallel', str(parallel_jobs)]
             rcmd = run_cmd(cmd)
-            ret[0 if rcmd.returncode == 0 else 1] += 1
+            if rcmd.returncode == 0:
+                ret[0] += 1
+                rcmd = run_cmd(["cmake", "--build", build_dir, '--target', 'tinyusb_examples_metrics'])
+                # print(rcmd.stdout.decode("utf-8"))
+            else:
+                ret[1] += 1

     example = 'all'
     print_build_result(board, example, 0 if ret[1] == 0 else 1, time.monotonic() - start_time)
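run_cmd() is used throughout this hunk but is not part of the diff; a minimal sketch of what such a helper presumably looks like, assuming it wraps subprocess.run and captures combined output:

import subprocess

def run_cmd(cmd):
    # Assumed helper (not shown in this diff): run the command, capture
    # stdout+stderr together, and let the caller inspect the result,
    # as the hunk above does with rcmd.returncode and rcmd.stdout.decode().
    return subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)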
@@ -195,8 +197,18 @@ def build_boards_list(boards, build_defines, build_system, build_flags_on):
     return ret


-def build_family(family, build_defines, build_system, build_flags_on, one_per_family, boards):
-    skip_ci = ['pico_sdk']
+def get_family_boards(family, one_per_family, boards):
+    """Get list of boards for a family.
+
+    Args:
+        family: Family name
+        one_per_family: If True, return only one random board
+        boards: List of boards already specified via -b flag
+
+    Returns:
+        List of board names
+    """
+    skip_ci = []
     if os.getenv('GITHUB_ACTIONS') or os.getenv('CIRCLECI'):
         skip_ci_file = Path(f"hw/bsp/{family}/skip_ci.txt")
         if skip_ci_file.exists():
@@ -207,17 +219,15 @@ def build_family(family, build_defines, build_system, build_flags_on, one_per_family, boards):
             all_boards.append(entry.name)
     all_boards.sort()

-    ret = [0, 0, 0]
     # If only-one flag is set, select one random board
     if one_per_family:
         for b in boards:
             # skip if -b already specify one in this family
             if find_family(b) == family:
-                return ret
+                return []
         all_boards = [random.choice(all_boards)]

-    ret = build_boards_list(all_boards, build_defines, build_system, build_flags_on)
-    return ret
+    return all_boards


 # -----------------------------
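A usage sketch of the refactored helper as it would be called from inside the build script; the family and board names below are placeholders:

# Hypothetical call: 'rp2040' family, one board already given via -b.
family_boards = get_family_boards('rp2040', one_per_family=True,
                                  boards=['raspberry_pi_pico'])
# Returns [] here because a board passed via -b already belongs to this family;
# otherwise it returns one random board (one_per_family=True) or every board
# directory found for the family (skip_ci.txt entries are presumably excluded on CI).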
@@ -258,9 +268,8 @@ def main():
     print(build_separator)
     print(build_format.format('Board', 'Example', '\033[39mResult\033[0m', 'Time'))
     total_time = time.monotonic()
-    result = [0, 0, 0]

-    # build families
+    # get all families
     all_families = []
     if 'all' in families:
         for entry in os.scandir("hw/bsp"):
@@ -270,23 +279,19 @@ def main():
         all_families = list(families)
     all_families.sort()

-    # succeeded, failed, skipped
+    # get boards from families and append to boards list
+    all_boards = list(boards)
     for f in all_families:
-        r = build_family(f, build_defines, build_system, build_flags_on, one_per_family, boards)
-        result[0] += r[0]
-        result[1] += r[1]
-        result[2] += r[2]
+        all_boards.extend(get_family_boards(f, one_per_family, boards))

-    # build boards
-    r = build_boards_list(boards, build_defines, build_system, build_flags_on)
-    result[0] += r[0]
-    result[1] += r[1]
-    result[2] += r[2]
+    # build all boards
+    result = build_boards_list(all_boards, build_defines, build_system, build_flags_on)

     total_time = time.monotonic() - total_time
     print(build_separator)
     print(f"Build Summary: {result[0]} {STATUS_OK}, {result[1]} {STATUS_FAILED} and took {total_time:.2f}s")
     print(build_separator)

     return result[1]
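For reference, the list returned by build_boards_list() keeps the meaning documented by the comment this hunk removes; a small illustrative sketch (everything other than the call itself is an assumption):

# result == [succeeded, failed, skipped], per the removed
# '# succeeded, failed, skipped' comment.
result = build_boards_list(all_boards, build_defines, build_system, build_flags_on)
ok, failed, skipped = result
# The summary line prints result[0] and result[1], and main() returns result[1],
# which the caller presumably turns into the process exit status.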
@@ -15,7 +15,7 @@ deps_mandatory = {
                      '159e31b689577dbf69cf0683bbaffbd71fa5ee10',
                      'all'],
     'tools/linkermap': ['https://github.com/hathach/linkermap.git',
-                        'ac1228d5bbde1e54cb2e17e928662094ae19c51d',
+                        '75d9d2c9e0f83297ddbc0da899f6cc0ab21076f0',
                         'all'],
     'tools/uf2': ['https://github.com/microsoft/uf2.git',
                   'c594542b2faa01cc33a2b97c9fbebc38549df80a',
@@ -2,6 +2,7 @@
 """Calculate average size from multiple linker map files."""

 import argparse
+import glob
 import sys
 import os

@@ -10,6 +11,24 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'linkermap'))
 import linkermap


+def expand_files(file_patterns):
+    """Expand file patterns (globs) to list of files.
+
+    Args:
+        file_patterns: List of file paths or glob patterns
+
+    Returns:
+        List of expanded file paths
+    """
+    expanded = []
+    for pattern in file_patterns:
+        if '*' in pattern or '?' in pattern:
+            expanded.extend(glob.glob(pattern))
+        else:
+            expanded.append(pattern)
+    return expanded
+
+
 def combine_maps(map_files, filters=None):
     """Combine multiple map files into a list of json_data.
|
|||||||
return json_average
|
return json_average
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main(argv=None):
|
||||||
parser = argparse.ArgumentParser(description='Calculate average size from linker map files')
|
parser = argparse.ArgumentParser(description='Calculate average size from linker map files')
|
||||||
parser.add_argument('files', nargs='+', help='Path to map file(s)')
|
parser.add_argument('files', nargs='+', help='Path to map file(s) or glob pattern(s)')
|
||||||
parser.add_argument('-f', '--filter', dest='filters', action='append', default=[],
|
parser.add_argument('-f', '--filter', dest='filters', action='append', default=[],
|
||||||
help='Only include object files whose path contains this substring (can be repeated)')
|
help='Only include object files whose path contains this substring (can be repeated)')
|
||||||
parser.add_argument('-o', '--out', dest='out', default='metrics',
|
parser.add_argument('-o', '--out', dest='out', default='metrics',
|
||||||
help='Output path basename for JSON and Markdown files (default: metrics)')
|
help='Output path basename for JSON and Markdown files (default: metrics)')
|
||||||
args = parser.parse_args()
|
parser.add_argument('-j', '--json', dest='json_out', action='store_true',
|
||||||
|
help='Write JSON output file')
|
||||||
|
parser.add_argument('-m', '--markdown', dest='markdown_out', action='store_true',
|
||||||
|
help='Write Markdown output file')
|
||||||
|
parser.add_argument('-q', '--quiet', dest='quiet', action='store_true',
|
||||||
|
help='Suppress summary output')
|
||||||
|
args = parser.parse_args(argv)
|
||||||
|
|
||||||
all_json_data = combine_maps(args.files, args.filters)
|
# Expand glob patterns
|
||||||
|
map_files = expand_files(args.files)
|
||||||
|
|
||||||
|
all_json_data = combine_maps(map_files, args.filters)
|
||||||
json_average = compute_avg(all_json_data)
|
json_average = compute_avg(all_json_data)
|
||||||
|
|
||||||
if json_average is None:
|
if json_average is None:
|
||||||
print("No valid map files found", file=sys.stderr)
|
print("No valid map files found", file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
linkermap.print_summary(json_average, False)
|
if not args.quiet:
|
||||||
linkermap.write_json(json_average, args.out + '.json')
|
linkermap.print_summary(json_average, False)
|
||||||
linkermap.write_markdown(json_average, args.out + '.md')
|
if args.json_out:
|
||||||
|
linkermap.write_json(json_average, args.out + '.json')
|
||||||
|
if args.markdown_out:
|
||||||
|
linkermap.write_markdown(json_average, args.out + '.md')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
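Since main() now accepts an argv list, the averaging step can also be driven programmatically, which is presumably why the build script now imports metrics; a small sketch with placeholder paths:

import metrics

# Roughly equivalent to:
#   python tools/metrics.py -f tinyusb/src -j -o build/metrics 'build/*/*/*.map.json'
# Paths are placeholders; main() calls sys.exit(1) if no valid map files are found.
metrics.main(['-f', 'tinyusb/src',
              '-j',
              '-o', 'build/metrics',
              'build/*/*/*.map.json'])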