From df6f13600324b42710ae71d5320a9f2eae8303a5 Mon Sep 17 00:00:00 2001 From: hathach Date: Mon, 1 Dec 2025 14:39:45 +0700 Subject: [PATCH 1/9] add linkermap to deps and linkermap taget --- .gitignore | 1 + .idea/cmake.xml | 1 + .../build_system/cmake/toolchain/arm_iar.cmake | 3 ++- hw/bsp/family_support.cmake | 15 +++++++++++++++ src/portable/synopsys/dwc2/dwc2_info.py | 1 - test/hil/hil_test.py | 2 +- tools/get_deps.py | 3 +++ 7 files changed, 23 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 977911dff..93d13503f 100644 --- a/.gitignore +++ b/.gitignore @@ -42,6 +42,7 @@ cov-int *-build-dir /_bin/ __pycache__ +cmake-build/ cmake-build-* sdkconfig .PVS-Studio diff --git a/.idea/cmake.xml b/.idea/cmake.xml index 677aaa662..0754253ad 100644 --- a/.idea/cmake.xml +++ b/.idea/cmake.xml @@ -124,6 +124,7 @@ + diff --git a/examples/build_system/cmake/toolchain/arm_iar.cmake b/examples/build_system/cmake/toolchain/arm_iar.cmake index 0b7e0b585..67d100bbc 100644 --- a/examples/build_system/cmake/toolchain/arm_iar.cmake +++ b/examples/build_system/cmake/toolchain/arm_iar.cmake @@ -24,7 +24,8 @@ set(CMAKE_C_ICSTAT ${CMAKE_IAR_CSTAT} --checks=${CMAKE_CURRENT_LIST_DIR}/cstat_sel_checks.txt --db=${CMAKE_BINARY_DIR}/cstat.db --sarif_dir=${CMAKE_BINARY_DIR}/cstat_sarif - --exclude ${TOP}/hw/mcu --exclude ${TOP}/lib + --exclude=${TOP}/hw/mcu + --exclude=${TOP}/lib ) endif () diff --git a/hw/bsp/family_support.cmake b/hw/bsp/family_support.cmake index 2b9612186..5afec32c2 100644 --- a/hw/bsp/family_support.cmake +++ b/hw/bsp/family_support.cmake @@ -9,6 +9,7 @@ set(TOP "${CMAKE_CURRENT_LIST_DIR}/../..") get_filename_component(TOP ${TOP} ABSOLUTE) set(UF2CONV_PY ${TOP}/tools/uf2/utils/uf2conv.py) +set(LINKERMAP_PY ${TOP}/tools/linkermap/linkermap.py) function(family_resolve_board BOARD_NAME BOARD_PATH_OUT) if ("${BOARD_NAME}" STREQUAL "") @@ -223,6 +224,18 @@ function(family_initialize_project PROJECT DIR) endif() endfunction() +# Add linkermap target 
(https://github.com/hathach/linkermap) +function(family_add_linkermap TARGET) + set(LINKERMAP_OPTION "") + if (ARGC GREATER 1) + set(LINKERMAP_OPTION "${ARGV1}") + endif () + add_custom_target(${TARGET}-linkermap + COMMAND python ${LINKERMAP_PY} -j -m ${LINKERMAP_OPTION} $.map + VERBATIM + ) +endfunction() + #------------------------------------------------------------- # Common Target Configure # Most families use these settings except rp2040 and espressif @@ -332,6 +345,8 @@ function(family_configure_common TARGET RTOS) endif () endif () + family_add_linkermap(${TARGET}) + # run size after build # find_program(SIZE_EXE ${CMAKE_SIZE}) # if(NOT ${SIZE_EXE} STREQUAL SIZE_EXE-NOTFOUND) diff --git a/src/portable/synopsys/dwc2/dwc2_info.py b/src/portable/synopsys/dwc2/dwc2_info.py index f6bd2785a..8fbbc00a0 100755 --- a/src/portable/synopsys/dwc2/dwc2_info.py +++ b/src/portable/synopsys/dwc2/dwc2_info.py @@ -2,7 +2,6 @@ import ctypes import argparse -import click import pandas as pd # hex value for register: guid, gsnpsid, ghwcfg1, ghwcfg2, ghwcfg3, ghwcfg4 diff --git a/test/hil/hil_test.py b/test/hil/hil_test.py index ba0826bd3..b2e883119 100755 --- a/test/hil/hil_test.py +++ b/test/hil/hil_test.py @@ -662,7 +662,7 @@ def test_example(board, f1, example): print(f'Flashing {fw_name}.elf') # flash firmware. 
It may fail randomly, retry a few times - max_rety = 1 + max_rety = 3 start_s = time.time() for i in range(max_rety): ret = globals()[f'flash_{board["flasher"]["name"].lower()}'](board, fw_name) diff --git a/tools/get_deps.py b/tools/get_deps.py index d749e4c84..c60766e50 100755 --- a/tools/get_deps.py +++ b/tools/get_deps.py @@ -14,6 +14,9 @@ deps_mandatory = { 'lib/lwip': ['https://github.com/lwip-tcpip/lwip.git', '159e31b689577dbf69cf0683bbaffbd71fa5ee10', 'all'], + 'tools/linkermap': ['https://github.com/hathach/linkermap.git', + 'e1a7a990fcd6eb1dbae13c2eb9fb0ca9db7ac483', + 'all'], 'tools/uf2': ['https://github.com/microsoft/uf2.git', 'c594542b2faa01cc33a2b97c9fbebc38549df80a', 'all'], From a337a6d337c0cdd50981ba2040aee99966ae3152 Mon Sep 17 00:00:00 2001 From: hathach Date: Mon, 1 Dec 2025 17:31:43 +0700 Subject: [PATCH 2/9] run linkermap as post build for size analyze --- .circleci/config2.yml | 4 ++- hw/bsp/family_support.cmake | 21 +++++++++++--- hw/bsp/rp2040/family.cmake | 7 +++++ tools/build.py | 58 ++++++++++++++++++++----------------- tools/get_deps.py | 2 +- 5 files changed, 59 insertions(+), 33 deletions(-) diff --git a/.circleci/config2.yml b/.circleci/config2.yml index ab0fd7ba1..869597289 100644 --- a/.circleci/config2.yml +++ b/.circleci/config2.yml @@ -119,7 +119,9 @@ commands: TOOLCHAIN_OPTION="--toolchain gcc" fi - python tools/build.py -s << parameters.build-system >> $TOOLCHAIN_OPTION << parameters.family >> + # circleci docker return $nproc as 36 core, limit parallel to 4 (resource-class = large) + # Required for IAR, also prevent crashed/killed by docker + python tools/build.py -s << parameters.build-system >> $TOOLCHAIN_OPTION -j 4 << parameters.family >> fi jobs: diff --git a/hw/bsp/family_support.cmake b/hw/bsp/family_support.cmake index 5afec32c2..1f91d0910 100644 --- a/hw/bsp/family_support.cmake +++ b/hw/bsp/family_support.cmake @@ -226,14 +226,26 @@ endfunction() # Add linkermap target (https://github.com/hathach/linkermap) 
function(family_add_linkermap TARGET) - set(LINKERMAP_OPTION "") - if (ARGC GREATER 1) - set(LINKERMAP_OPTION "${ARGV1}") + set(LINKERMAP_OPTION_LIST) + if (DEFINED LINKERMAP_OPTION) + separate_arguments(LINKERMAP_OPTION_LIST UNIX_COMMAND ${LINKERMAP_OPTION}) endif () + + if (ARGC GREATER 1) + separate_arguments(ARG_OPTION_LIST UNIX_COMMAND ${ARGV1}) + list(APPEND LINKERMAP_OPTION_LIST ${ARG_OPTION_LIST}) + endif () + + # target add_custom_target(${TARGET}-linkermap - COMMAND python ${LINKERMAP_PY} -j -m ${LINKERMAP_OPTION} $.map + COMMAND python ${LINKERMAP_PY} -j -m ${LINKERMAP_OPTION_LIST} $.map VERBATIM ) + + # post build + add_custom_command(TARGET ${TARGET} POST_BUILD + COMMAND python ${LINKERMAP_PY} -j -m ${LINKERMAP_OPTION_LIST} $.map + VERBATIM) endfunction() #------------------------------------------------------------- @@ -345,6 +357,7 @@ function(family_configure_common TARGET RTOS) endif () endif () + # Generate linkermap target and post build. LINKERMAP_OPTION can be set with -D to change default options family_add_linkermap(${TARGET}) # run size after build diff --git a/hw/bsp/rp2040/family.cmake b/hw/bsp/rp2040/family.cmake index 5d6d8b40e..390d6072c 100644 --- a/hw/bsp/rp2040/family.cmake +++ b/hw/bsp/rp2040/family.cmake @@ -222,6 +222,8 @@ function(family_add_default_example_warnings TARGET) endif() endfunction() + +# TODO merge with family_configure_common from family_support.cmake function(family_configure_target TARGET RTOS) if (RTOS STREQUAL noos OR RTOS STREQUAL "") set(RTOS_SUFFIX "") @@ -239,10 +241,15 @@ function(family_configure_target TARGET RTOS) pico_add_extra_outputs(${TARGET}) pico_enable_stdio_uart(${TARGET} 1) + + target_link_options(${TARGET} PUBLIC "LINKER:-Map=$.map") target_link_libraries(${TARGET} PUBLIC pico_stdlib tinyusb_board${RTOS_SUFFIX} tinyusb_additions) family_flash_openocd(${TARGET}) family_flash_jlink(${TARGET}) + + # Generate linkermap target and post build. 
LINKERMAP_OPTION can be set with -D to change default options + family_add_linkermap(${TARGET}) endfunction() diff --git a/tools/build.py b/tools/build.py index ce4d0ef1a..5328a987f 100755 --- a/tools/build.py +++ b/tools/build.py @@ -5,6 +5,7 @@ import os import sys import time import subprocess +import shlex from pathlib import Path from multiprocessing import Pool @@ -29,9 +30,12 @@ parallel_jobs = os.cpu_count() # Helper # ----------------------------- def run_cmd(cmd): - #print(cmd) - r = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - title = f'Command Error: {cmd}' + if isinstance(cmd, str): + raise TypeError("run_cmd expects a list/tuple of args, not a string") + args = cmd + cmd_display = " ".join(args) + r = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + title = f'Command Error: {cmd_display}' if r.returncode != 0: # print build output if failed if os.getenv('GITHUB_ACTIONS'): @@ -42,7 +46,7 @@ def run_cmd(cmd): print(title) print(r.stdout.decode("utf-8")) elif verbose: - print(cmd) + print(cmd_display) print(r.stdout.decode("utf-8")) return r @@ -87,10 +91,10 @@ def cmake_board(board, build_args, build_flags_on): start_time = time.monotonic() build_dir = f'cmake-build/cmake-build-{board}' - build_flags = '' + build_flags = [] if len(build_flags_on) > 0: - build_flags = ' '.join(f'-D{flag}=1' for flag in build_flags_on) - build_flags = f'-DCFLAGS_CLI="{build_flags}"' + cli_flags = ' '.join(f'-D{flag}=1' for flag in build_flags_on) + build_flags.append(f'-DCFLAGS_CLI={cli_flags}') build_dir += '-f1_' + '_'.join(build_flags_on) family = find_family(board) @@ -101,25 +105,22 @@ def cmake_board(board, build_args, build_flags_on): if build_utils.skip_example(example, board): ret[2] += 1 else: - rcmd = run_cmd(f'idf.py -C examples/{example} -B {build_dir}/{example} -G Ninja ' - f'-DBOARD={board} {build_flags} build') + rcmd = run_cmd([ + 'idf.py', '-C', f'examples/{example}', '-B', 
f'{build_dir}/{example}', '-GNinja', + f'-DBOARD={board}', *build_flags, 'build' + ]) ret[0 if rcmd.returncode == 0 else 1] += 1 else: - rcmd = run_cmd(f'cmake examples -B {build_dir} -G Ninja -DBOARD={board} -DCMAKE_BUILD_TYPE=MinSizeRel ' - f'{build_args} {build_flags}') + rcmd = run_cmd([ + 'cmake', 'examples', '-B', build_dir, '-GNinja', + f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', + '-DLINKERMAP_OPTION=-q -f tinyusb/src', *build_args, *build_flags + ]) if rcmd.returncode == 0: - cmd = f"cmake --build {build_dir}" - njobs = parallel_jobs - - # circleci docker return $nproc as 36 core, limit parallel according to resource class. - # Required for IAR, also prevent crashed/killed by docker - if os.getenv('CIRCLECI'): - resource_class = { 'small': 1, 'medium': 2, 'medium+': 3, 'large': 4 } - for rc in resource_class: - if rc in os.getenv('CIRCLE_JOB'): - njobs = resource_class[rc] - break - cmd += f' --parallel {njobs}' + cmd = [ + "cmake", "--build", build_dir, + '--parallel', str(parallel_jobs) + ] rcmd = run_cmd(cmd) ret[0 if rcmd.returncode == 0 else 1] += 1 @@ -141,9 +142,12 @@ def make_one_example(example, board, make_option): # skip -j for circleci if not os.getenv('CIRCLECI'): make_option += ' -j' - make_cmd = f"make -C examples/{example} BOARD={board} {make_option}" - # run_cmd(f"{make_cmd} clean") - build_result = run_cmd(f"{make_cmd} all") + make_args = ["make", "-C", f"examples/{example}", f"BOARD={board}"] + if make_option: + make_args += shlex.split(make_option) + make_args.append("all") + # run_cmd(make_args + ["clean"]) + build_result = run_cmd(make_args) r = 0 if build_result.returncode == 0 else 1 print_build_result(board, example, r, time.monotonic() - start_time) @@ -180,7 +184,7 @@ def build_boards_list(boards, build_defines, build_system, build_flags_on): for b in boards: r = [0, 0, 0] if build_system == 'cmake': - build_args = ' '.join(f'-D{d}' for d in build_defines) + build_args = [f'-D{d}' for d in build_defines] r = 
cmake_board(b, build_args, build_flags_on) elif build_system == 'make': build_args = ' '.join(f'{d}' for d in build_defines) diff --git a/tools/get_deps.py b/tools/get_deps.py index c60766e50..47cc5c7dd 100755 --- a/tools/get_deps.py +++ b/tools/get_deps.py @@ -15,7 +15,7 @@ deps_mandatory = { '159e31b689577dbf69cf0683bbaffbd71fa5ee10', 'all'], 'tools/linkermap': ['https://github.com/hathach/linkermap.git', - 'e1a7a990fcd6eb1dbae13c2eb9fb0ca9db7ac483', + '1f47651142646398c7746e109ae0481732aeb564', 'all'], 'tools/uf2': ['https://github.com/microsoft/uf2.git', 'c594542b2faa01cc33a2b97c9fbebc38549df80a', From c859744784cc396ae0993a16a1935b10fbd9b797 Mon Sep 17 00:00:00 2001 From: hathach Date: Tue, 2 Dec 2025 12:50:31 +0700 Subject: [PATCH 3/9] adding metrics for computing average compiled size --- examples/device/CMakeLists.txt | 63 +++++++++------- examples/dual/CMakeLists.txt | 10 ++- examples/host/CMakeLists.txt | 20 +++-- hw/bsp/family_support.cmake | 6 -- tools/build.py | 4 +- tools/get_deps.py | 2 +- tools/metrics.py | 134 +++++++++++++++++++++++++++++++++ 7 files changed, 193 insertions(+), 46 deletions(-) create mode 100644 tools/metrics.py diff --git a/examples/device/CMakeLists.txt b/examples/device/CMakeLists.txt index eb625ea51..660df67cb 100644 --- a/examples/device/CMakeLists.txt +++ b/examples/device/CMakeLists.txt @@ -6,31 +6,38 @@ project(tinyusb_device_examples C CXX ASM) family_initialize_project(tinyusb_device_examples ${CMAKE_CURRENT_LIST_DIR}) # family_add_subdirectory will filter what to actually add based on selected FAMILY -family_add_subdirectory(audio_4_channel_mic) -family_add_subdirectory(audio_test) -family_add_subdirectory(audio_4_channel_mic_freertos) -family_add_subdirectory(audio_test_freertos) -family_add_subdirectory(audio_test_multi_rate) -family_add_subdirectory(board_test) -family_add_subdirectory(cdc_dual_ports) -family_add_subdirectory(cdc_msc) -family_add_subdirectory(cdc_msc_freertos) -family_add_subdirectory(cdc_uac2) 
-family_add_subdirectory(dfu) -family_add_subdirectory(dfu_runtime) -family_add_subdirectory(dynamic_configuration) -family_add_subdirectory(hid_boot_interface) -family_add_subdirectory(hid_composite) -family_add_subdirectory(hid_composite_freertos) -family_add_subdirectory(hid_generic_inout) -family_add_subdirectory(hid_multiple_interface) -family_add_subdirectory(midi_test) -family_add_subdirectory(msc_dual_lun) -family_add_subdirectory(mtp) -family_add_subdirectory(net_lwip_webserver) -family_add_subdirectory(uac2_headset) -family_add_subdirectory(uac2_speaker_fb) -family_add_subdirectory(usbtmc) -family_add_subdirectory(video_capture) -family_add_subdirectory(video_capture_2ch) -family_add_subdirectory(webusb_serial) +set(EXAMPLE_LIST + audio_4_channel_mic + audio_4_channel_mic_freertos + audio_test + audio_test_freertos + audio_test_multi_rate + board_test + cdc_dual_ports + cdc_msc + cdc_msc_freertos + cdc_uac2 + dfu + dfu_runtime + dynamic_configuration + hid_boot_interface + hid_composite + hid_composite_freertos + hid_generic_inout + hid_multiple_interface + midi_test + midi_test_freertos + msc_dual_lun + mtp + net_lwip_webserver + uac2_headset + uac2_speaker_fb + usbtmc + video_capture + video_capture_2ch + webusb_serial + ) + +foreach (example ${EXAMPLE_LIST}) + family_add_subdirectory(${example}) +endforeach () diff --git a/examples/dual/CMakeLists.txt b/examples/dual/CMakeLists.txt index c5e3ffce4..4978f1fab 100644 --- a/examples/dual/CMakeLists.txt +++ b/examples/dual/CMakeLists.txt @@ -9,6 +9,12 @@ if (FAMILY STREQUAL "rp2040" AND NOT TARGET tinyusb_pico_pio_usb) message("Skipping dual host/device mode examples as Pico-PIO-USB is not available") else () # family_add_subdirectory will filter what to actually add based on selected FAMILY - family_add_subdirectory(host_hid_to_device_cdc) - family_add_subdirectory(host_info_to_device_cdc) + set(EXAMPLE_LIST + host_hid_to_device_cdc + host_info_to_device_cdc + ) + + foreach (example ${EXAMPLE_LIST}) + 
family_add_subdirectory(${example}) + endforeach () endif () diff --git a/examples/host/CMakeLists.txt b/examples/host/CMakeLists.txt index 2783dd84e..f8e0ce692 100644 --- a/examples/host/CMakeLists.txt +++ b/examples/host/CMakeLists.txt @@ -6,10 +6,16 @@ project(tinyusb_host_examples C CXX ASM) family_initialize_project(tinyusb_host_examples ${CMAKE_CURRENT_LIST_DIR}) # family_add_subdirectory will filter what to actually add based on selected FAMILY -family_add_subdirectory(bare_api) -family_add_subdirectory(cdc_msc_hid) -family_add_subdirectory(cdc_msc_hid_freertos) -family_add_subdirectory(device_info) -family_add_subdirectory(hid_controller) -family_add_subdirectory(midi_rx) -family_add_subdirectory(msc_file_explorer) +set(EXAMPLE_LIST + bare_api + cdc_msc_hid + cdc_msc_hid_freertos + device_info + hid_controller + midi_rx + msc_file_explorer + ) + +foreach (example ${EXAMPLE_LIST}) + family_add_subdirectory(${example}) +endforeach () diff --git a/hw/bsp/family_support.cmake b/hw/bsp/family_support.cmake index 1f91d0910..e7dfc19c8 100644 --- a/hw/bsp/family_support.cmake +++ b/hw/bsp/family_support.cmake @@ -231,12 +231,6 @@ function(family_add_linkermap TARGET) separate_arguments(LINKERMAP_OPTION_LIST UNIX_COMMAND ${LINKERMAP_OPTION}) endif () - if (ARGC GREATER 1) - separate_arguments(ARG_OPTION_LIST UNIX_COMMAND ${ARGV1}) - list(APPEND LINKERMAP_OPTION_LIST ${ARG_OPTION_LIST}) - endif () - - # target add_custom_target(${TARGET}-linkermap COMMAND python ${LINKERMAP_PY} -j -m ${LINKERMAP_OPTION_LIST} $.map VERBATIM diff --git a/tools/build.py b/tools/build.py index 5328a987f..692853297 100755 --- a/tools/build.py +++ b/tools/build.py @@ -113,8 +113,8 @@ def cmake_board(board, build_args, build_flags_on): else: rcmd = run_cmd([ 'cmake', 'examples', '-B', build_dir, '-GNinja', - f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', - '-DLINKERMAP_OPTION=-q -f tinyusb/src', *build_args, *build_flags + f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', 
'-DLINKERMAP_OPTION=-q -f tinyusb/src', + *build_args, *build_flags ]) if rcmd.returncode == 0: cmd = [ diff --git a/tools/get_deps.py b/tools/get_deps.py index 47cc5c7dd..5fb7e022c 100755 --- a/tools/get_deps.py +++ b/tools/get_deps.py @@ -15,7 +15,7 @@ deps_mandatory = { '159e31b689577dbf69cf0683bbaffbd71fa5ee10', 'all'], 'tools/linkermap': ['https://github.com/hathach/linkermap.git', - '1f47651142646398c7746e109ae0481732aeb564', + 'ac1228d5bbde1e54cb2e17e928662094ae19c51d', 'all'], 'tools/uf2': ['https://github.com/microsoft/uf2.git', 'c594542b2faa01cc33a2b97c9fbebc38549df80a', diff --git a/tools/metrics.py b/tools/metrics.py new file mode 100644 index 000000000..d972d3681 --- /dev/null +++ b/tools/metrics.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python3 +"""Calculate average size from multiple linker map files.""" + +import argparse +import sys +import os + +# Add linkermap module to path +sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'linkermap')) +import linkermap + + +def combine_maps(map_files, filters=None): + """Combine multiple map files into a list of json_data. 
+ + Args: + map_files: List of paths to linker map files or JSON files + filters: List of path substrings to filter object files (default: []) + + Returns: + all_json_data: Dictionary with mapfiles list and data from each map file + """ + import json + + filters = filters or [] + all_json_data = {"mapfiles": [], "data": []} + + for map_file in map_files: + if not os.path.exists(map_file): + print(f"Warning: {map_file} not found, skipping", file=sys.stderr) + continue + + try: + if map_file.endswith('.json'): + with open(map_file, 'r', encoding='utf-8') as f: + json_data = json.load(f) + # Apply path filters to JSON data + if filters: + filtered_files = [ + f for f in json_data["files"] + if f.get("path") and any(filt in f["path"] for filt in filters) + ] + json_data["files"] = filtered_files + else: + json_data = linkermap.analyze_map(map_file, filters=filters) + all_json_data["mapfiles"].append(map_file) + all_json_data["data"].append(json_data) + except Exception as e: + print(f"Warning: Failed to analyze {map_file}: {e}", file=sys.stderr) + continue + + return all_json_data + + +def compute_avg(all_json_data): + """Compute average sizes from combined json_data. 
+ + Args: + all_json_data: Dictionary with mapfiles and data from combine_maps() + + Returns: + json_average: Dictionary with averaged size data + """ + if not all_json_data["data"]: + return None + + # Collect all sections preserving order + all_sections = [] + for json_data in all_json_data["data"]: + for s in json_data["sections"]: + if s not in all_sections: + all_sections.append(s) + + # Merge files with the same 'file' value and compute averages + file_accumulator = {} # key: file name, value: {"sections": {section: [sizes]}, "totals": [totals]} + + for json_data in all_json_data["data"]: + for f in json_data["files"]: + fname = f["file"] + if fname not in file_accumulator: + file_accumulator[fname] = {"sections": {}, "totals": [], "path": f.get("path")} + file_accumulator[fname]["totals"].append(f["total"]) + for section, size in f["sections"].items(): + if section in file_accumulator[fname]["sections"]: + file_accumulator[fname]["sections"][section].append(size) + else: + file_accumulator[fname]["sections"][section] = [size] + + # Build json_average with averaged values + files_average = [] + for fname, data in file_accumulator.items(): + avg_total = round(sum(data["totals"]) / len(data["totals"])) + avg_sections = {} + for section, sizes in data["sections"].items(): + avg_sections[section] = round(sum(sizes) / len(sizes)) + files_average.append({ + "file": fname, + "path": data["path"], + "sections": avg_sections, + "total": avg_total + }) + + json_average = { + "mapfiles": all_json_data["mapfiles"], + "sections": all_sections, + "files": files_average + } + + return json_average + + +def main(): + parser = argparse.ArgumentParser(description='Calculate average size from linker map files') + parser.add_argument('files', nargs='+', help='Path to map file(s)') + parser.add_argument('-f', '--filter', dest='filters', action='append', default=[], + help='Only include object files whose path contains this substring (can be repeated)') + parser.add_argument('-o', 
'--out', dest='out', default='metrics', + help='Output path basename for JSON and Markdown files (default: metrics)') + args = parser.parse_args() + + all_json_data = combine_maps(args.files, args.filters) + json_average = compute_avg(all_json_data) + + if json_average is None: + print("No valid map files found", file=sys.stderr) + sys.exit(1) + + linkermap.print_summary(json_average, False) + linkermap.write_json(json_average, args.out + '.json') + linkermap.write_markdown(json_average, args.out + '.md') + + +if __name__ == '__main__': + main() From 09e1113aaf1b2618ffe42e9638d68e6047b6f1ef Mon Sep 17 00:00:00 2001 From: hathach Date: Tue, 2 Dec 2025 14:22:52 +0700 Subject: [PATCH 4/9] adding metrics for computing average compiled size --- examples/CMakeLists.txt | 25 ++++++++++++--- hw/bsp/family_support.cmake | 4 +-- tools/build.py | 61 ++++++++++++++++++++----------------- tools/get_deps.py | 2 +- tools/metrics.py | 45 ++++++++++++++++++++++----- 5 files changed, 95 insertions(+), 42 deletions(-) diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index d34c6ed5d..d9f97d598 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -5,7 +5,24 @@ include(${CMAKE_CURRENT_SOURCE_DIR}/../hw/bsp/family_support.cmake) project(tinyusb_examples C CXX ASM) -add_subdirectory(device) -add_subdirectory(dual) -add_subdirectory(host) -add_subdirectory(typec) +set(EXAMPLES_LIST + device + dual + host + typec + ) +set(MAPJSON_PATTERNS "") + +foreach (example ${EXAMPLES_LIST}) + add_subdirectory(${example}) + list(APPEND MAPJSON_PATTERNS "${CMAKE_BINARY_DIR}/${example}/*/*.map.json") +endforeach () + +# Post-build: run metrics.py on all map.json files +add_custom_target(tinyusb_examples_metrics + COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/../tools/metrics.py + -f tinyusb/src -j -o ${CMAKE_BINARY_DIR}/metrics + ${MAPJSON_PATTERNS} + COMMENT "Generating average code size metrics" + VERBATIM + ) diff --git a/hw/bsp/family_support.cmake 
b/hw/bsp/family_support.cmake index e7dfc19c8..3ede95e3f 100644 --- a/hw/bsp/family_support.cmake +++ b/hw/bsp/family_support.cmake @@ -232,13 +232,13 @@ function(family_add_linkermap TARGET) endif () add_custom_target(${TARGET}-linkermap - COMMAND python ${LINKERMAP_PY} -j -m ${LINKERMAP_OPTION_LIST} $.map + COMMAND python ${LINKERMAP_PY} -j ${LINKERMAP_OPTION_LIST} $.map VERBATIM ) # post build add_custom_command(TARGET ${TARGET} POST_BUILD - COMMAND python ${LINKERMAP_PY} -j -m ${LINKERMAP_OPTION_LIST} $.map + COMMAND python ${LINKERMAP_PY} -j ${LINKERMAP_OPTION_LIST} $.map VERBATIM) endfunction() diff --git a/tools/build.py b/tools/build.py index 692853297..5392a9aa4 100755 --- a/tools/build.py +++ b/tools/build.py @@ -6,6 +6,8 @@ import sys import time import subprocess import shlex +import glob +import metrics from pathlib import Path from multiprocessing import Pool @@ -111,18 +113,18 @@ def cmake_board(board, build_args, build_flags_on): ]) ret[0 if rcmd.returncode == 0 else 1] += 1 else: - rcmd = run_cmd([ - 'cmake', 'examples', '-B', build_dir, '-GNinja', - f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', '-DLINKERMAP_OPTION=-q -f tinyusb/src', - *build_args, *build_flags - ]) + rcmd = run_cmd(['cmake', 'examples', '-B', build_dir, '-GNinja', + f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', '-DLINKERMAP_OPTION=-q -f tinyusb/src', + *build_args, *build_flags]) if rcmd.returncode == 0: - cmd = [ - "cmake", "--build", build_dir, - '--parallel', str(parallel_jobs) - ] + cmd = ["cmake", "--build", build_dir, '--parallel', str(parallel_jobs)] rcmd = run_cmd(cmd) - ret[0 if rcmd.returncode == 0 else 1] += 1 + if rcmd.returncode == 0: + ret[0] += 1 + rcmd = run_cmd(["cmake", "--build", build_dir, '--target', 'tinyusb_examples_metrics']) + # print(rcmd.stdout.decode("utf-8")) + else: + ret[1] += 1 example = 'all' print_build_result(board, example, 0 if ret[1] == 0 else 1, time.monotonic() - start_time) @@ -195,8 +197,18 @@ def build_boards_list(boards, 
build_defines, build_system, build_flags_on): return ret -def build_family(family, build_defines, build_system, build_flags_on, one_per_family, boards): - skip_ci = ['pico_sdk'] +def get_family_boards(family, one_per_family, boards): + """Get list of boards for a family. + + Args: + family: Family name + one_per_family: If True, return only one random board + boards: List of boards already specified via -b flag + + Returns: + List of board names + """ + skip_ci = [] if os.getenv('GITHUB_ACTIONS') or os.getenv('CIRCLECI'): skip_ci_file = Path(f"hw/bsp/{family}/skip_ci.txt") if skip_ci_file.exists(): @@ -207,17 +219,15 @@ def build_family(family, build_defines, build_system, build_flags_on, one_per_fa all_boards.append(entry.name) all_boards.sort() - ret = [0, 0, 0] # If only-one flag is set, select one random board if one_per_family: for b in boards: # skip if -b already specify one in this family if find_family(b) == family: - return ret + return [] all_boards = [random.choice(all_boards)] - ret = build_boards_list(all_boards, build_defines, build_system, build_flags_on) - return ret + return all_boards # ----------------------------- @@ -258,9 +268,8 @@ def main(): print(build_separator) print(build_format.format('Board', 'Example', '\033[39mResult\033[0m', 'Time')) total_time = time.monotonic() - result = [0, 0, 0] - # build families + # get all families all_families = [] if 'all' in families: for entry in os.scandir("hw/bsp"): @@ -270,23 +279,19 @@ def main(): all_families = list(families) all_families.sort() - # succeeded, failed, skipped + # get boards from families and append to boards list + all_boards = list(boards) for f in all_families: - r = build_family(f, build_defines, build_system, build_flags_on, one_per_family, boards) - result[0] += r[0] - result[1] += r[1] - result[2] += r[2] + all_boards.extend(get_family_boards(f, one_per_family, boards)) - # build boards - r = build_boards_list(boards, build_defines, build_system, build_flags_on) - result[0] 
+= r[0] - result[1] += r[1] - result[2] += r[2] + # build all boards + result = build_boards_list(all_boards, build_defines, build_system, build_flags_on) total_time = time.monotonic() - total_time print(build_separator) print(f"Build Summary: {result[0]} {STATUS_OK}, {result[1]} {STATUS_FAILED} and took {total_time:.2f}s") print(build_separator) + return result[1] diff --git a/tools/get_deps.py b/tools/get_deps.py index 5fb7e022c..029c33607 100755 --- a/tools/get_deps.py +++ b/tools/get_deps.py @@ -15,7 +15,7 @@ deps_mandatory = { '159e31b689577dbf69cf0683bbaffbd71fa5ee10', 'all'], 'tools/linkermap': ['https://github.com/hathach/linkermap.git', - 'ac1228d5bbde1e54cb2e17e928662094ae19c51d', + '75d9d2c9e0f83297ddbc0da899f6cc0ab21076f0', 'all'], 'tools/uf2': ['https://github.com/microsoft/uf2.git', 'c594542b2faa01cc33a2b97c9fbebc38549df80a', diff --git a/tools/metrics.py b/tools/metrics.py index d972d3681..c6cd49d57 100644 --- a/tools/metrics.py +++ b/tools/metrics.py @@ -2,6 +2,7 @@ """Calculate average size from multiple linker map files.""" import argparse +import glob import sys import os @@ -10,6 +11,24 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'linkermap')) import linkermap +def expand_files(file_patterns): + """Expand file patterns (globs) to list of files. + + Args: + file_patterns: List of file paths or glob patterns + + Returns: + List of expanded file paths + """ + expanded = [] + for pattern in file_patterns: + if '*' in pattern or '?' in pattern: + expanded.extend(glob.glob(pattern)) + else: + expanded.append(pattern) + return expanded + + def combine_maps(map_files, filters=None): """Combine multiple map files into a list of json_data. 
@@ -109,25 +128,37 @@ def compute_avg(all_json_data): return json_average -def main(): +def main(argv=None): parser = argparse.ArgumentParser(description='Calculate average size from linker map files') - parser.add_argument('files', nargs='+', help='Path to map file(s)') + parser.add_argument('files', nargs='+', help='Path to map file(s) or glob pattern(s)') parser.add_argument('-f', '--filter', dest='filters', action='append', default=[], help='Only include object files whose path contains this substring (can be repeated)') parser.add_argument('-o', '--out', dest='out', default='metrics', help='Output path basename for JSON and Markdown files (default: metrics)') - args = parser.parse_args() + parser.add_argument('-j', '--json', dest='json_out', action='store_true', + help='Write JSON output file') + parser.add_argument('-m', '--markdown', dest='markdown_out', action='store_true', + help='Write Markdown output file') + parser.add_argument('-q', '--quiet', dest='quiet', action='store_true', + help='Suppress summary output') + args = parser.parse_args(argv) - all_json_data = combine_maps(args.files, args.filters) + # Expand glob patterns + map_files = expand_files(args.files) + + all_json_data = combine_maps(map_files, args.filters) json_average = compute_avg(all_json_data) if json_average is None: print("No valid map files found", file=sys.stderr) sys.exit(1) - linkermap.print_summary(json_average, False) - linkermap.write_json(json_average, args.out + '.json') - linkermap.write_markdown(json_average, args.out + '.md') + if not args.quiet: + linkermap.print_summary(json_average, False) + if args.json_out: + linkermap.write_json(json_average, args.out + '.json') + if args.markdown_out: + linkermap.write_markdown(json_average, args.out + '.md') if __name__ == '__main__': From 3d190475ad2b71d913e059dfc6f5cc5dafe6555d Mon Sep 17 00:00:00 2001 From: hathach Date: Wed, 3 Dec 2025 00:08:45 +0700 Subject: [PATCH 5/9] upload metrics.json, test ci --- 
.github/workflows/build.yml | 374 +++++++++++++++---------------- .github/workflows/build_util.yml | 9 +- examples/CMakeLists.txt | 1 + tools/get_deps.py | 2 +- 4 files changed, 197 insertions(+), 189 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f1b134b8a..7d7901c3a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -83,190 +83,190 @@ jobs: # --------------------------------------- # Build Make: only build on push with one-per-family # --------------------------------------- - make: - if: github.event_name == 'push' - needs: set-matrix - uses: ./.github/workflows/build_util.yml - strategy: - fail-fast: false - matrix: - toolchain: - - 'aarch64-gcc' - #- 'arm-clang' - - 'arm-gcc' - - 'msp430-gcc' - - 'riscv-gcc' - - 'rx-gcc' - with: - build-system: 'make' - toolchain: ${{ matrix.toolchain }} - build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain]) }} - one-per-family: true - - # --------------------------------------- - # Build IAR - # Since IAR Token secret is not passed to forked PR, only build non-forked PR with make. - # cmake is built by circle-ci. 
Due to IAR limit capacity, only build oe per family - # --------------------------------------- - arm-iar: - if: false # disable for now since we got reach capacity limit too often - #if: github.event_name == 'push' && github.repository_owner == 'hathach' - needs: set-matrix - uses: ./.github/workflows/build_util.yml - secrets: inherit - strategy: - fail-fast: false - matrix: - build-system: - - 'make' - with: - build-system: ${{ matrix.build-system }} - toolchain: 'arm-iar' - build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)['arm-iar']) }} - one-per-family: true - - # --------------------------------------- - # Build Make/CMake on Windows/MacOS - # --------------------------------------- - build-os: - if: github.event_name == 'pull_request' - uses: ./.github/workflows/build_util.yml - strategy: - fail-fast: false - matrix: - os: [windows-latest, macos-latest] - build-system: [ 'make', 'cmake' ] - with: - os: ${{ matrix.os }} - build-system: ${{ matrix.build-system }} - toolchain: 'arm-gcc-${{ matrix.os }}' - build-args: '["stm32h7"]' - one-per-family: true - - # --------------------------------------- - # Zephyr - # --------------------------------------- - zephyr: - if: github.event_name == 'push' - runs-on: ubuntu-latest - steps: - - name: Checkout TinyUSB - uses: actions/checkout@v4 - - - name: Setup Zephyr project - uses: zephyrproject-rtos/action-zephyr-setup@v1 - with: - app-path: examples - toolchains: arm-zephyr-eabi - - - name: Build - run: | - west build -b nrf52840dk -d examples/device/cdc_msc/build examples/device/cdc_msc -- -DRTOS=zephyr - west build -b nrf52840dk -d examples/device/msc_dual_lun/build examples/device/msc_dual_lun -- -DRTOS=zephyr - - # --------------------------------------- - # Hardware in the loop (HIL) - # Run on PR only (hil-tinyusb), hil-hfp only run on non-forked PR - # --------------------------------------- - hil-build: - if: | - github.repository_owner == 'hathach' && - (github.event_name == 'pull_request' || 
github.event_name == 'workflow_dispatch') - needs: set-matrix - uses: ./.github/workflows/build_util.yml - strategy: - fail-fast: false - matrix: - toolchain: - - 'arm-gcc' - - 'esp-idf' - with: - build-system: 'cmake' - toolchain: ${{ matrix.toolchain }} - build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.hil_json)[matrix.toolchain]) }} - one-per-family: true - upload-artifacts: true - - # --------------------------------------- - # Hardware in the loop (HIL) - # self-hosted on local VM, for attached hardware checkout HIL_JSON - # --------------------------------------- - hil-tinyusb: - if: | - github.repository_owner == 'hathach' && - (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') - needs: hil-build - runs-on: [self-hosted, X64, hathach, hardware-in-the-loop] - steps: - - name: Get Skip Boards from previous run - if: github.run_attempt != '1' - run: | - if [ -f "${{ env.HIL_JSON }}.skip" ]; then - SKIP_BOARDS=$(cat "${{ env.HIL_JSON }}.skip") - else - SKIP_BOARDS="" - fi - echo "SKIP_BOARDS=$SKIP_BOARDS" - echo "SKIP_BOARDS=$SKIP_BOARDS" >> $GITHUB_ENV - - - name: Clean workspace - run: | - echo "Cleaning up for the first run" - rm -rf "${{ github.workspace }}" - mkdir -p "${{ github.workspace }}" - - - name: Checkout TinyUSB - uses: actions/checkout@v4 - - - name: Download Artifacts - uses: actions/download-artifact@v5 - with: - path: cmake-build - merge-multiple: true - - - name: Test on actual hardware - run: | - python3 test/hil/hil_test.py ${{ env.HIL_JSON }} $SKIP_BOARDS - - # --------------------------------------- - # Hardware in the loop (HIL) - # self-hosted by HFP, build with IAR toolchain, for attached hardware checkout test/hil/hfp.json - # Since IAR Token secret is not passed to forked PR, only build non-forked PR - # --------------------------------------- - hil-hfp: - if: | - github.repository_owner == 'hathach' && - github.event.pull_request.head.repo.fork == false && - (github.event_name == 
'pull_request' || github.event_name == 'workflow_dispatch') - runs-on: [self-hosted, Linux, X64, hifiphile] - env: - IAR_LMS_BEARER_TOKEN: ${{ secrets.IAR_LMS_BEARER_TOKEN }} - steps: - - name: Clean workspace - run: | - echo "Cleaning up previous run" - rm -rf "${{ github.workspace }}"3 - mkdir -p "${{ github.workspace }}" - - - name: Toolchain version - run: | - iccarm --version - - - name: Checkout TinyUSB - uses: actions/checkout@v4 - - - name: Get build boards - run: | - MATRIX_JSON=$(python test/hil/hil_ci_set_matrix.py test/hil/hfp.json) - BUILD_ARGS=$(echo $MATRIX_JSON | jq -r '.["arm-gcc"] | join(" ")') - echo "BUILD_ARGS=$BUILD_ARGS" - echo "BUILD_ARGS=$BUILD_ARGS" >> $GITHUB_ENV - - - name: Get Dependencies - run: python3 tools/get_deps.py $BUILD_ARGS - - - name: Build - run: python3 tools/build.py -j 4 --toolchain iar $BUILD_ARGS - - - name: Test on actual hardware (hardware in the loop) - run: python3 test/hil/hil_test.py hfp.json +# make: +# if: github.event_name == 'push' +# needs: set-matrix +# uses: ./.github/workflows/build_util.yml +# strategy: +# fail-fast: false +# matrix: +# toolchain: +# - 'aarch64-gcc' +# #- 'arm-clang' +# - 'arm-gcc' +# - 'msp430-gcc' +# - 'riscv-gcc' +# - 'rx-gcc' +# with: +# build-system: 'make' +# toolchain: ${{ matrix.toolchain }} +# build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain]) }} +# one-per-family: true +# +# # --------------------------------------- +# # Build IAR +# # Since IAR Token secret is not passed to forked PR, only build non-forked PR with make. +# # cmake is built by circle-ci. 
Due to IAR limit capacity, only build oe per family +# # --------------------------------------- +# arm-iar: +# if: false # disable for now since we got reach capacity limit too often +# #if: github.event_name == 'push' && github.repository_owner == 'hathach' +# needs: set-matrix +# uses: ./.github/workflows/build_util.yml +# secrets: inherit +# strategy: +# fail-fast: false +# matrix: +# build-system: +# - 'make' +# with: +# build-system: ${{ matrix.build-system }} +# toolchain: 'arm-iar' +# build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)['arm-iar']) }} +# one-per-family: true +# +# # --------------------------------------- +# # Build Make/CMake on Windows/MacOS +# # --------------------------------------- +# build-os: +# if: github.event_name == 'pull_request' +# uses: ./.github/workflows/build_util.yml +# strategy: +# fail-fast: false +# matrix: +# os: [windows-latest, macos-latest] +# build-system: [ 'make', 'cmake' ] +# with: +# os: ${{ matrix.os }} +# build-system: ${{ matrix.build-system }} +# toolchain: 'arm-gcc-${{ matrix.os }}' +# build-args: '["stm32h7"]' +# one-per-family: true +# +# # --------------------------------------- +# # Zephyr +# # --------------------------------------- +# zephyr: +# if: github.event_name == 'push' +# runs-on: ubuntu-latest +# steps: +# - name: Checkout TinyUSB +# uses: actions/checkout@v4 +# +# - name: Setup Zephyr project +# uses: zephyrproject-rtos/action-zephyr-setup@v1 +# with: +# app-path: examples +# toolchains: arm-zephyr-eabi +# +# - name: Build +# run: | +# west build -b nrf52840dk -d examples/device/cdc_msc/build examples/device/cdc_msc -- -DRTOS=zephyr +# west build -b nrf52840dk -d examples/device/msc_dual_lun/build examples/device/msc_dual_lun -- -DRTOS=zephyr +# +# # --------------------------------------- +# # Hardware in the loop (HIL) +# # Run on PR only (hil-tinyusb), hil-hfp only run on non-forked PR +# # --------------------------------------- +# hil-build: +# if: | +# 
github.repository_owner == 'hathach' && +# (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') +# needs: set-matrix +# uses: ./.github/workflows/build_util.yml +# strategy: +# fail-fast: false +# matrix: +# toolchain: +# - 'arm-gcc' +# - 'esp-idf' +# with: +# build-system: 'cmake' +# toolchain: ${{ matrix.toolchain }} +# build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.hil_json)[matrix.toolchain]) }} +# one-per-family: true +# upload-artifacts: true +# +# # --------------------------------------- +# # Hardware in the loop (HIL) +# # self-hosted on local VM, for attached hardware checkout HIL_JSON +# # --------------------------------------- +# hil-tinyusb: +# if: | +# github.repository_owner == 'hathach' && +# (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') +# needs: hil-build +# runs-on: [self-hosted, X64, hathach, hardware-in-the-loop] +# steps: +# - name: Get Skip Boards from previous run +# if: github.run_attempt != '1' +# run: | +# if [ -f "${{ env.HIL_JSON }}.skip" ]; then +# SKIP_BOARDS=$(cat "${{ env.HIL_JSON }}.skip") +# else +# SKIP_BOARDS="" +# fi +# echo "SKIP_BOARDS=$SKIP_BOARDS" +# echo "SKIP_BOARDS=$SKIP_BOARDS" >> $GITHUB_ENV +# +# - name: Clean workspace +# run: | +# echo "Cleaning up for the first run" +# rm -rf "${{ github.workspace }}" +# mkdir -p "${{ github.workspace }}" +# +# - name: Checkout TinyUSB +# uses: actions/checkout@v4 +# +# - name: Download Artifacts +# uses: actions/download-artifact@v5 +# with: +# path: cmake-build +# merge-multiple: true +# +# - name: Test on actual hardware +# run: | +# python3 test/hil/hil_test.py ${{ env.HIL_JSON }} $SKIP_BOARDS +# +# # --------------------------------------- +# # Hardware in the loop (HIL) +# # self-hosted by HFP, build with IAR toolchain, for attached hardware checkout test/hil/hfp.json +# # Since IAR Token secret is not passed to forked PR, only build non-forked PR +# # --------------------------------------- +# 
hil-hfp: +# if: | +# github.repository_owner == 'hathach' && +# github.event.pull_request.head.repo.fork == false && +# (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') +# runs-on: [self-hosted, Linux, X64, hifiphile] +# env: +# IAR_LMS_BEARER_TOKEN: ${{ secrets.IAR_LMS_BEARER_TOKEN }} +# steps: +# - name: Clean workspace +# run: | +# echo "Cleaning up previous run" +# rm -rf "${{ github.workspace }}"3 +# mkdir -p "${{ github.workspace }}" +# +# - name: Toolchain version +# run: | +# iccarm --version +# +# - name: Checkout TinyUSB +# uses: actions/checkout@v4 +# +# - name: Get build boards +# run: | +# MATRIX_JSON=$(python test/hil/hil_ci_set_matrix.py test/hil/hfp.json) +# BUILD_ARGS=$(echo $MATRIX_JSON | jq -r '.["arm-gcc"] | join(" ")') +# echo "BUILD_ARGS=$BUILD_ARGS" +# echo "BUILD_ARGS=$BUILD_ARGS" >> $GITHUB_ENV +# +# - name: Get Dependencies +# run: python3 tools/get_deps.py $BUILD_ARGS +# +# - name: Build +# run: python3 tools/build.py -j 4 --toolchain iar $BUILD_ARGS +# +# - name: Test on actual hardware (hardware in the loop) +# run: python3 test/hil/hil_test.py hfp.json diff --git a/.github/workflows/build_util.yml b/.github/workflows/build_util.yml index 55901b838..848694597 100644 --- a/.github/workflows/build_util.yml +++ b/.github/workflows/build_util.yml @@ -69,11 +69,18 @@ jobs: fi shell: bash + - name: Upload Artifacts for Metrics + if: inputs.build-system == 'cmake' + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.arg }}-metrics + path: cmake-build/cmake-build-*/metrics.json + - name: Upload Artifacts for Hardware Testing if: ${{ inputs.upload-artifacts }} uses: actions/upload-artifact@v4 with: - name: ${{ matrix.arg }} + name: ${{ matrix.arg }}-binaries path: | cmake-build/cmake-build-*/*/*/*.elf cmake-build/cmake-build-*/*/*/*.bin diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index d9f97d598..694681467 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -19,6 
+19,7 @@ foreach (example ${EXAMPLES_LIST}) endforeach () # Post-build: run metrics.py on all map.json files +find_package(Python3 REQUIRED COMPONENTS Interpreter) add_custom_target(tinyusb_examples_metrics COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/../tools/metrics.py -f tinyusb/src -j -o ${CMAKE_BINARY_DIR}/metrics diff --git a/tools/get_deps.py b/tools/get_deps.py index 029c33607..fe2f51e01 100755 --- a/tools/get_deps.py +++ b/tools/get_deps.py @@ -15,7 +15,7 @@ deps_mandatory = { '159e31b689577dbf69cf0683bbaffbd71fa5ee10', 'all'], 'tools/linkermap': ['https://github.com/hathach/linkermap.git', - '75d9d2c9e0f83297ddbc0da899f6cc0ab21076f0', + '87f94869f9ff828812f4551138f82c3bfcaf2620', 'all'], 'tools/uf2': ['https://github.com/microsoft/uf2.git', 'c594542b2faa01cc33a2b97c9fbebc38549df80a', From ee3d3e3551f95757b85de1c2c9777a1daed8f78d Mon Sep 17 00:00:00 2001 From: hathach Date: Wed, 3 Dec 2025 09:57:49 +0700 Subject: [PATCH 6/9] upload metrics.json and aggregate code metrics, fine tune ci matrix run --- .github/workflows/build.yml | 26 +++++++++++++++++++++++--- .github/workflows/build_util.yml | 12 ++++++++---- .github/workflows/ci_set_matrix.py | 18 +++++------------- tools/build.py | 13 +++++++++---- tools/get_deps.py | 2 +- 5 files changed, 46 insertions(+), 25 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7d7901c3a..5e996d9d9 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -57,11 +57,10 @@ jobs: echo "hil_matrix=$HIL_MATRIX_JSON" >> $GITHUB_OUTPUT # --------------------------------------- - # Build CMake: only build on push with one-per-family. + # Build CMake: only one-per-family. 
# Full built is done by CircleCI in PR # --------------------------------------- cmake: - if: github.event_name == 'push' needs: set-matrix uses: ./.github/workflows/build_util.yml strategy: @@ -71,7 +70,7 @@ jobs: - 'aarch64-gcc' #- 'arm-clang' - 'arm-gcc' - - 'esp-idf' + # - 'esp-idf' - 'msp430-gcc' - 'riscv-gcc' with: @@ -79,6 +78,27 @@ jobs: toolchain: ${{ matrix.toolchain }} build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain]) }} one-per-family: true + upload-metrics: true + + code-metrics: + needs: cmake + runs-on: ubuntu-latest + steps: + - name: Checkout TinyUSB + uses: actions/checkout@v4 + + - name: Download Artifacts + uses: actions/download-artifact@v5 + with: + pattern: metrics-* + path: cmake-build + merge-multiple: true + + - name: Aggregate Code Metrics + run: | + tree cmake-build + python tools/get_deps.py + python tools/metrics.py -f tinyusb/src cmake-build/*/metrics.json # --------------------------------------- # Build Make: only build on push with one-per-family diff --git a/.github/workflows/build_util.yml b/.github/workflows/build_util.yml index 848694597..2de0ed229 100644 --- a/.github/workflows/build_util.yml +++ b/.github/workflows/build_util.yml @@ -20,6 +20,10 @@ on: required: false default: false type: boolean + upload-metrics: + required: false + default: false + type: boolean os: required: false type: string @@ -70,17 +74,17 @@ jobs: shell: bash - name: Upload Artifacts for Metrics - if: inputs.build-system == 'cmake' + if: ${{ inputs.upload-metrics }} uses: actions/upload-artifact@v4 with: - name: ${{ matrix.arg }}-metrics + name: metrics-${{ matrix.arg }} path: cmake-build/cmake-build-*/metrics.json - name: Upload Artifacts for Hardware Testing if: ${{ inputs.upload-artifacts }} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: - name: ${{ matrix.arg }}-binaries + name: binaries-${{ matrix.arg }} path: | cmake-build/cmake-build-*/*/*/*.elf cmake-build/cmake-build-*/*/*/*.bin 
diff --git a/.github/workflows/ci_set_matrix.py b/.github/workflows/ci_set_matrix.py index 9d0e42c2e..5032c83ae 100755 --- a/.github/workflows/ci_set_matrix.py +++ b/.github/workflows/ci_set_matrix.py @@ -15,28 +15,22 @@ toolchain_list = [ # family: [supported toolchain] family_list = { - "at32f402_405 at32f403a_407 at32f413 at32f415 at32f423 at32f425 at32f435_437": ["arm-gcc"], - "broadcom_32bit": ["arm-gcc"], + "at32f402_405 at32f403a_407 at32f413 at32f415 at32f423 at32f425 at32f435_437 broadcom_32bit da1469x": ["arm-gcc"], "broadcom_64bit": ["aarch64-gcc"], "ch32v10x ch32v20x ch32v30x fomu gd32vf103": ["riscv-gcc"], - "da1469x": ["arm-gcc"], "imxrt": ["arm-gcc", "arm-clang"], "kinetis_k kinetis_kl kinetis_k32l2": ["arm-gcc", "arm-clang"], "lpc11 lpc13 lpc15": ["arm-gcc", "arm-clang"], "lpc17 lpc18 lpc40 lpc43": ["arm-gcc", "arm-clang"], "lpc51 lpc54 lpc55": ["arm-gcc", "arm-clang"], - "maxim": ["arm-gcc"], - "mcx": ["arm-gcc"], - "mm32": ["arm-gcc"], + "maxim mcx mm32 msp432e4 tm4c": ["arm-gcc"], "msp430": ["msp430-gcc"], - "msp432e4 tm4c": ["arm-gcc"], "nrf": ["arm-gcc", "arm-clang"], - "nuc100_120 nuc121_125 nuc126 nuc505": ["arm-gcc"], + "nuc100_120 nuc121_125 nuc126 nuc505 xmc4000": ["arm-gcc"], "ra": ["arm-gcc"], "rp2040": ["arm-gcc"], "rx": ["rx-gcc"], - "samd11 samd2x_l2x": ["arm-gcc", "arm-clang"], - "samd5x_e5x samg": ["arm-gcc", "arm-clang"], + "samd11 samd2x_l2x samd5x_e5x samg": ["arm-gcc", "arm-clang"], "stm32c0 stm32f0 stm32f1 stm32f2 stm32f3": ["arm-gcc", "arm-clang", "arm-iar"], "stm32f4": ["arm-gcc", "arm-clang", "arm-iar"], "stm32f7": ["arm-gcc", "arm-clang", "arm-iar"], @@ -45,9 +39,7 @@ family_list = { "stm32h7rs": ["arm-gcc", "arm-clang", "arm-iar"], "stm32l0 stm32l4": ["arm-gcc", "arm-clang", "arm-iar"], "stm32n6": ["arm-gcc"], - "stm32u0 stm32u5 stm32wb": ["arm-gcc", "arm-clang", "arm-iar"], - "stm32wba": ["arm-gcc", "arm-clang"], - "xmc4000": ["arm-gcc"], + "stm32u0 stm32u5 stm32wb stm32wba": ["arm-gcc", "arm-clang", "arm-iar"], 
"-bespressif_s2_devkitc": ["esp-idf"], # S3, P4 will be built by hil test # "-bespressif_s3_devkitm": ["esp-idf"], diff --git a/tools/build.py b/tools/build.py index 5392a9aa4..b87af6c6a 100755 --- a/tools/build.py +++ b/tools/build.py @@ -6,8 +6,6 @@ import sys import time import subprocess import shlex -import glob -import metrics from pathlib import Path from multiprocessing import Pool @@ -26,6 +24,7 @@ build_separator = '-' * 95 build_status = [STATUS_OK, STATUS_FAILED, STATUS_SKIPPED] verbose = False +clean_build = False parallel_jobs = os.cpu_count() # ----------------------------- @@ -117,11 +116,13 @@ def cmake_board(board, build_args, build_flags_on): f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', '-DLINKERMAP_OPTION=-q -f tinyusb/src', *build_args, *build_flags]) if rcmd.returncode == 0: + if clean_build: + run_cmd(["cmake", "--build", build_dir, '--target', 'clean']) cmd = ["cmake", "--build", build_dir, '--parallel', str(parallel_jobs)] rcmd = run_cmd(cmd) if rcmd.returncode == 0: ret[0] += 1 - rcmd = run_cmd(["cmake", "--build", build_dir, '--target', 'tinyusb_examples_metrics']) + run_cmd(["cmake", "--build", build_dir, '--target', 'tinyusb_examples_metrics']) # print(rcmd.stdout.decode("utf-8")) else: ret[1] += 1 @@ -148,7 +149,8 @@ def make_one_example(example, board, make_option): if make_option: make_args += shlex.split(make_option) make_args.append("all") - # run_cmd(make_args + ["clean"]) + if clean_build: + run_cmd(make_args + ["clean"]) build_result = run_cmd(make_args) r = 0 if build_result.returncode == 0 else 1 print_build_result(board, example, r, time.monotonic() - start_time) @@ -235,11 +237,13 @@ def get_family_boards(family, one_per_family, boards): # ----------------------------- def main(): global verbose + global clean_build global parallel_jobs parser = argparse.ArgumentParser() parser.add_argument('families', nargs='*', default=[], help='Families to build') parser.add_argument('-b', '--board', action='append', default=[], 
help='Boards to build') + parser.add_argument('-c', '--clean', action='store_true', default=False, help='Clean before build') parser.add_argument('-t', '--toolchain', default='gcc', help='Toolchain to use, default is gcc') parser.add_argument('-s', '--build-system', default='cmake', help='Build system to use, default is cmake') parser.add_argument('-D', '--define-symbol', action='append', default=[], help='Define to pass to build system') @@ -257,6 +261,7 @@ def main(): build_flags_on = args.build_flags_on one_per_family = args.one_per_family verbose = args.verbose + clean_build = args.clean parallel_jobs = args.jobs build_defines.append(f'TOOLCHAIN={toolchain}') diff --git a/tools/get_deps.py b/tools/get_deps.py index fe2f51e01..9634451e2 100755 --- a/tools/get_deps.py +++ b/tools/get_deps.py @@ -15,7 +15,7 @@ deps_mandatory = { '159e31b689577dbf69cf0683bbaffbd71fa5ee10', 'all'], 'tools/linkermap': ['https://github.com/hathach/linkermap.git', - '87f94869f9ff828812f4551138f82c3bfcaf2620', + '46c3c2947db366fb66af6723709febf80d860bc1', 'all'], 'tools/uf2': ['https://github.com/microsoft/uf2.git', 'c594542b2faa01cc33a2b97c9fbebc38549df80a', From f51ca33f25841147e93c72458c927261806cdc0e Mon Sep 17 00:00:00 2001 From: hathach Date: Wed, 3 Dec 2025 11:09:41 +0700 Subject: [PATCH 7/9] upload metrics.json and aggregate code metrics, post metrics comment fine tune ci matrix run --- .github/workflows/build.yml | 416 ++++++++++++++++------------- .github/workflows/build_util.yml | 2 +- .github/workflows/ci_set_matrix.py | 6 +- examples/CMakeLists.txt | 4 +- hw/bsp/family_support.cmake | 6 +- tools/build.py | 2 +- tools/get_deps.py | 2 +- tools/metrics.py | 254 ++++++++++++++++-- 8 files changed, 472 insertions(+), 220 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5e996d9d9..b0b636c65 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -83,6 +83,8 @@ jobs: code-metrics: needs: cmake runs-on: ubuntu-latest + 
permissions: + pull-requests: write steps: - name: Checkout TinyUSB uses: actions/checkout@v4 @@ -96,197 +98,233 @@ jobs: - name: Aggregate Code Metrics run: | - tree cmake-build python tools/get_deps.py - python tools/metrics.py -f tinyusb/src cmake-build/*/metrics.json + pip install tools/linkermap/ + python tools/metrics.py combine -j -m -f tinyusb/src cmake-build/*/metrics.json + + - name: Upload Metrics Artifact + if: github.event_name == 'push' + uses: actions/upload-artifact@v5 + with: + name: metrics-tinyusb + path: metrics.json + + - name: Download Base Branch Metrics + if: github.event_name == 'pull_request' + uses: dawidd6/action-download-artifact@v11 + with: + workflow: build.yml + branch: ${{ github.base_ref }} + name: metrics-tinyusb + path: base-metrics + continue-on-error: true + + - name: Compare with Base Branch + if: github.event_name == 'pull_request' + run: | + if [ -f base-metrics/metrics.json ]; then + python tools/metrics.py compare -f tinyusb/src base-metrics/metrics.json metrics.json + cat metrics_compare.md + else + echo "No base metrics found, skipping comparison" + cp metrics.md metrics_compare.md + fi + + - name: Post Code Metrics as PR Comment + if: github.event_name == 'pull_request' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: code-metrics + path: metrics_compare.md + # --------------------------------------- # Build Make: only build on push with one-per-family # --------------------------------------- -# make: -# if: github.event_name == 'push' -# needs: set-matrix -# uses: ./.github/workflows/build_util.yml -# strategy: -# fail-fast: false -# matrix: -# toolchain: -# - 'aarch64-gcc' -# #- 'arm-clang' -# - 'arm-gcc' -# - 'msp430-gcc' -# - 'riscv-gcc' -# - 'rx-gcc' -# with: -# build-system: 'make' -# toolchain: ${{ matrix.toolchain }} -# build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain]) }} -# one-per-family: true -# -# # --------------------------------------- -# # Build IAR -# 
# Since IAR Token secret is not passed to forked PR, only build non-forked PR with make. -# # cmake is built by circle-ci. Due to IAR limit capacity, only build oe per family -# # --------------------------------------- -# arm-iar: -# if: false # disable for now since we got reach capacity limit too often -# #if: github.event_name == 'push' && github.repository_owner == 'hathach' -# needs: set-matrix -# uses: ./.github/workflows/build_util.yml -# secrets: inherit -# strategy: -# fail-fast: false -# matrix: -# build-system: -# - 'make' -# with: -# build-system: ${{ matrix.build-system }} -# toolchain: 'arm-iar' -# build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)['arm-iar']) }} -# one-per-family: true -# -# # --------------------------------------- -# # Build Make/CMake on Windows/MacOS -# # --------------------------------------- -# build-os: -# if: github.event_name == 'pull_request' -# uses: ./.github/workflows/build_util.yml -# strategy: -# fail-fast: false -# matrix: -# os: [windows-latest, macos-latest] -# build-system: [ 'make', 'cmake' ] -# with: -# os: ${{ matrix.os }} -# build-system: ${{ matrix.build-system }} -# toolchain: 'arm-gcc-${{ matrix.os }}' -# build-args: '["stm32h7"]' -# one-per-family: true -# -# # --------------------------------------- -# # Zephyr -# # --------------------------------------- -# zephyr: -# if: github.event_name == 'push' -# runs-on: ubuntu-latest -# steps: -# - name: Checkout TinyUSB -# uses: actions/checkout@v4 -# -# - name: Setup Zephyr project -# uses: zephyrproject-rtos/action-zephyr-setup@v1 -# with: -# app-path: examples -# toolchains: arm-zephyr-eabi -# -# - name: Build -# run: | -# west build -b nrf52840dk -d examples/device/cdc_msc/build examples/device/cdc_msc -- -DRTOS=zephyr -# west build -b nrf52840dk -d examples/device/msc_dual_lun/build examples/device/msc_dual_lun -- -DRTOS=zephyr -# -# # --------------------------------------- -# # Hardware in the loop (HIL) -# # Run on PR only (hil-tinyusb), 
hil-hfp only run on non-forked PR -# # --------------------------------------- -# hil-build: -# if: | -# github.repository_owner == 'hathach' && -# (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') -# needs: set-matrix -# uses: ./.github/workflows/build_util.yml -# strategy: -# fail-fast: false -# matrix: -# toolchain: -# - 'arm-gcc' -# - 'esp-idf' -# with: -# build-system: 'cmake' -# toolchain: ${{ matrix.toolchain }} -# build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.hil_json)[matrix.toolchain]) }} -# one-per-family: true -# upload-artifacts: true -# -# # --------------------------------------- -# # Hardware in the loop (HIL) -# # self-hosted on local VM, for attached hardware checkout HIL_JSON -# # --------------------------------------- -# hil-tinyusb: -# if: | -# github.repository_owner == 'hathach' && -# (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') -# needs: hil-build -# runs-on: [self-hosted, X64, hathach, hardware-in-the-loop] -# steps: -# - name: Get Skip Boards from previous run -# if: github.run_attempt != '1' -# run: | -# if [ -f "${{ env.HIL_JSON }}.skip" ]; then -# SKIP_BOARDS=$(cat "${{ env.HIL_JSON }}.skip") -# else -# SKIP_BOARDS="" -# fi -# echo "SKIP_BOARDS=$SKIP_BOARDS" -# echo "SKIP_BOARDS=$SKIP_BOARDS" >> $GITHUB_ENV -# -# - name: Clean workspace -# run: | -# echo "Cleaning up for the first run" -# rm -rf "${{ github.workspace }}" -# mkdir -p "${{ github.workspace }}" -# -# - name: Checkout TinyUSB -# uses: actions/checkout@v4 -# -# - name: Download Artifacts -# uses: actions/download-artifact@v5 -# with: -# path: cmake-build -# merge-multiple: true -# -# - name: Test on actual hardware -# run: | -# python3 test/hil/hil_test.py ${{ env.HIL_JSON }} $SKIP_BOARDS -# -# # --------------------------------------- -# # Hardware in the loop (HIL) -# # self-hosted by HFP, build with IAR toolchain, for attached hardware checkout test/hil/hfp.json -# # Since IAR Token secret is 
not passed to forked PR, only build non-forked PR -# # --------------------------------------- -# hil-hfp: -# if: | -# github.repository_owner == 'hathach' && -# github.event.pull_request.head.repo.fork == false && -# (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') -# runs-on: [self-hosted, Linux, X64, hifiphile] -# env: -# IAR_LMS_BEARER_TOKEN: ${{ secrets.IAR_LMS_BEARER_TOKEN }} -# steps: -# - name: Clean workspace -# run: | -# echo "Cleaning up previous run" -# rm -rf "${{ github.workspace }}"3 -# mkdir -p "${{ github.workspace }}" -# -# - name: Toolchain version -# run: | -# iccarm --version -# -# - name: Checkout TinyUSB -# uses: actions/checkout@v4 -# -# - name: Get build boards -# run: | -# MATRIX_JSON=$(python test/hil/hil_ci_set_matrix.py test/hil/hfp.json) -# BUILD_ARGS=$(echo $MATRIX_JSON | jq -r '.["arm-gcc"] | join(" ")') -# echo "BUILD_ARGS=$BUILD_ARGS" -# echo "BUILD_ARGS=$BUILD_ARGS" >> $GITHUB_ENV -# -# - name: Get Dependencies -# run: python3 tools/get_deps.py $BUILD_ARGS -# -# - name: Build -# run: python3 tools/build.py -j 4 --toolchain iar $BUILD_ARGS -# -# - name: Test on actual hardware (hardware in the loop) -# run: python3 test/hil/hil_test.py hfp.json + make: + if: github.event_name == 'push' + needs: set-matrix + uses: ./.github/workflows/build_util.yml + strategy: + fail-fast: false + matrix: + toolchain: + - 'aarch64-gcc' + #- 'arm-clang' + - 'arm-gcc' + - 'msp430-gcc' + - 'riscv-gcc' + - 'rx-gcc' + with: + build-system: 'make' + toolchain: ${{ matrix.toolchain }} + build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain]) }} + one-per-family: true + + # --------------------------------------- + # Build IAR + # Since IAR Token secret is not passed to forked PR, only build non-forked PR with make. + # cmake is built by circle-ci. 
Due to IAR limit capacity, only build one per family + # --------------------------------------- + arm-iar: + if: false # disable for now since we got reach capacity limit too often + #if: github.event_name == 'push' && github.repository_owner == 'hathach' + needs: set-matrix + uses: ./.github/workflows/build_util.yml + secrets: inherit + strategy: + fail-fast: false + matrix: + build-system: + - 'make' + with: + build-system: ${{ matrix.build-system }} + toolchain: 'arm-iar' + build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)['arm-iar']) }} + one-per-family: true + + # --------------------------------------- + # Build Make/CMake on Windows/MacOS + # --------------------------------------- + build-os: + if: github.event_name == 'pull_request' + uses: ./.github/workflows/build_util.yml + strategy: + fail-fast: false + matrix: + os: [ windows-latest, macos-latest ] + build-system: [ 'make', 'cmake' ] + with: + os: ${{ matrix.os }} + build-system: ${{ matrix.build-system }} + toolchain: 'arm-gcc-${{ matrix.os }}' + build-args: '["stm32h7"]' + one-per-family: true + + # --------------------------------------- + # Zephyr + # --------------------------------------- + zephyr: + if: github.event_name == 'push' + runs-on: ubuntu-latest + steps: + - name: Checkout TinyUSB + uses: actions/checkout@v4 + + - name: Setup Zephyr project + uses: zephyrproject-rtos/action-zephyr-setup@v1 + with: + app-path: examples + toolchains: arm-zephyr-eabi + + - name: Build + run: | + west build -b nrf52840dk -d examples/device/cdc_msc/build examples/device/cdc_msc -- -DRTOS=zephyr + west build -b nrf52840dk -d examples/device/msc_dual_lun/build examples/device/msc_dual_lun -- -DRTOS=zephyr + + # --------------------------------------- + # Hardware in the loop (HIL) + # Run on PR only (hil-tinyusb), hil-hfp only run on non-forked PR + # --------------------------------------- + hil-build: + if: | + github.repository_owner == 'hathach' && + (github.event_name == 'pull_request' || 
github.event_name == 'workflow_dispatch') + needs: set-matrix + uses: ./.github/workflows/build_util.yml + strategy: + fail-fast: false + matrix: + toolchain: + - 'arm-gcc' + - 'esp-idf' + with: + build-system: 'cmake' + toolchain: ${{ matrix.toolchain }} + build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.hil_json)[matrix.toolchain]) }} + one-per-family: true + upload-artifacts: true + + # --------------------------------------- + # Hardware in the loop (HIL) + # self-hosted on local VM, for attached hardware checkout HIL_JSON + # --------------------------------------- + hil-tinyusb: + if: | + github.repository_owner == 'hathach' && + (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') + needs: hil-build + runs-on: [ self-hosted, X64, hathach, hardware-in-the-loop ] + steps: + - name: Get Skip Boards from previous run + if: github.run_attempt != '1' + run: | + if [ -f "${{ env.HIL_JSON }}.skip" ]; then + SKIP_BOARDS=$(cat "${{ env.HIL_JSON }}.skip") + else + SKIP_BOARDS="" + fi + echo "SKIP_BOARDS=$SKIP_BOARDS" + echo "SKIP_BOARDS=$SKIP_BOARDS" >> $GITHUB_ENV + + - name: Clean workspace + run: | + echo "Cleaning up for the first run" + rm -rf "${{ github.workspace }}" + mkdir -p "${{ github.workspace }}" + + - name: Checkout TinyUSB + uses: actions/checkout@v4 + + - name: Download Artifacts + uses: actions/download-artifact@v5 + with: + path: cmake-build + merge-multiple: true + + - name: Test on actual hardware + run: | + python3 test/hil/hil_test.py ${{ env.HIL_JSON }} $SKIP_BOARDS + + # --------------------------------------- + # Hardware in the loop (HIL) + # self-hosted by HFP, build with IAR toolchain, for attached hardware checkout test/hil/hfp.json + # Since IAR Token secret is not passed to forked PR, only build non-forked PR + # --------------------------------------- + hil-hfp: + if: | + github.repository_owner == 'hathach' && + github.event.pull_request.head.repo.fork == false && + (github.event_name == 
'pull_request' || github.event_name == 'workflow_dispatch') + runs-on: [ self-hosted, Linux, X64, hifiphile ] + env: + IAR_LMS_BEARER_TOKEN: ${{ secrets.IAR_LMS_BEARER_TOKEN }} + steps: + - name: Clean workspace + run: | + echo "Cleaning up previous run" + rm -rf "${{ github.workspace }}" + mkdir -p "${{ github.workspace }}" + + - name: Toolchain version + run: | + iccarm --version + + - name: Checkout TinyUSB + uses: actions/checkout@v4 + + - name: Get build boards + run: | + MATRIX_JSON=$(python test/hil/hil_ci_set_matrix.py test/hil/hfp.json) + BUILD_ARGS=$(echo $MATRIX_JSON | jq -r '.["arm-gcc"] | join(" ")') + echo "BUILD_ARGS=$BUILD_ARGS" + echo "BUILD_ARGS=$BUILD_ARGS" >> $GITHUB_ENV + + - name: Get Dependencies + run: python3 tools/get_deps.py $BUILD_ARGS + + - name: Build + run: python3 tools/build.py -j 4 --toolchain iar $BUILD_ARGS + + - name: Test on actual hardware (hardware in the loop) + run: python3 test/hil/hil_test.py hfp.json diff --git a/.github/workflows/build_util.yml b/.github/workflows/build_util.yml index 2de0ed229..36043a1d5 100644 --- a/.github/workflows/build_util.yml +++ b/.github/workflows/build_util.yml @@ -75,7 +75,7 @@ jobs: - name: Upload Artifacts for Metrics if: ${{ inputs.upload-metrics }} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: metrics-${{ matrix.arg }} path: cmake-build/cmake-build-*/metrics.json diff --git a/.github/workflows/ci_set_matrix.py b/.github/workflows/ci_set_matrix.py index 5032c83ae..933a8375f 100755 --- a/.github/workflows/ci_set_matrix.py +++ b/.github/workflows/ci_set_matrix.py @@ -20,8 +20,7 @@ family_list = { "ch32v10x ch32v20x ch32v30x fomu gd32vf103": ["riscv-gcc"], "imxrt": ["arm-gcc", "arm-clang"], "kinetis_k kinetis_kl kinetis_k32l2": ["arm-gcc", "arm-clang"], - "lpc11 lpc13 lpc15": ["arm-gcc", "arm-clang"], - "lpc17 lpc18 lpc40 lpc43": ["arm-gcc", "arm-clang"], + "lpc11 lpc13 lpc15 lpc17 lpc18 lpc40 lpc43": ["arm-gcc", "arm-clang"], "lpc51 lpc54 lpc55": 
["arm-gcc", "arm-clang"], "maxim mcx mm32 msp432e4 tm4c": ["arm-gcc"], "msp430": ["msp430-gcc"], @@ -36,8 +35,7 @@ family_list = { "stm32f7": ["arm-gcc", "arm-clang", "arm-iar"], "stm32g0 stm32g4 stm32h5": ["arm-gcc", "arm-clang", "arm-iar"], "stm32h7": ["arm-gcc", "arm-clang", "arm-iar"], - "stm32h7rs": ["arm-gcc", "arm-clang", "arm-iar"], - "stm32l0 stm32l4": ["arm-gcc", "arm-clang", "arm-iar"], + "stm32h7rs stm32l0 stm32l4": ["arm-gcc", "arm-clang", "arm-iar"], "stm32n6": ["arm-gcc"], "stm32u0 stm32u5 stm32wb stm32wba": ["arm-gcc", "arm-clang", "arm-iar"], "-bespressif_s2_devkitc": ["esp-idf"], diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 694681467..b34131c2b 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -20,9 +20,9 @@ endforeach () # Post-build: run metrics.py on all map.json files find_package(Python3 REQUIRED COMPONENTS Interpreter) -add_custom_target(tinyusb_examples_metrics +add_custom_target(tinyusb_metrics COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/../tools/metrics.py - -f tinyusb/src -j -o ${CMAKE_BINARY_DIR}/metrics + combine -f tinyusb/src -j -o ${CMAKE_BINARY_DIR}/metrics ${MAPJSON_PATTERNS} COMMENT "Generating average code size metrics" VERBATIM diff --git a/hw/bsp/family_support.cmake b/hw/bsp/family_support.cmake index 3ede95e3f..15d9f1eae 100644 --- a/hw/bsp/family_support.cmake +++ b/hw/bsp/family_support.cmake @@ -351,8 +351,10 @@ function(family_configure_common TARGET RTOS) endif () endif () - # Generate linkermap target and post build. LINKERMAP_OPTION can be set with -D to change default options - family_add_linkermap(${TARGET}) + if (NOT RTOS STREQUAL zephyr) + # Generate linkermap target and post build. 
LINKERMAP_OPTION can be set with -D to change default options + family_add_linkermap(${TARGET}) + endif () # run size after build # find_program(SIZE_EXE ${CMAKE_SIZE}) diff --git a/tools/build.py b/tools/build.py index b87af6c6a..e4909f45f 100755 --- a/tools/build.py +++ b/tools/build.py @@ -122,7 +122,7 @@ def cmake_board(board, build_args, build_flags_on): rcmd = run_cmd(cmd) if rcmd.returncode == 0: ret[0] += 1 - run_cmd(["cmake", "--build", build_dir, '--target', 'tinyusb_examples_metrics']) + run_cmd(["cmake", "--build", build_dir, '--target', 'tinyusb_metrics']) # print(rcmd.stdout.decode("utf-8")) else: ret[1] += 1 diff --git a/tools/get_deps.py b/tools/get_deps.py index 9634451e2..99e406ce7 100755 --- a/tools/get_deps.py +++ b/tools/get_deps.py @@ -15,7 +15,7 @@ deps_mandatory = { '159e31b689577dbf69cf0683bbaffbd71fa5ee10', 'all'], 'tools/linkermap': ['https://github.com/hathach/linkermap.git', - '46c3c2947db366fb66af6723709febf80d860bc1', + '8a8206c39d0dfd7abfa615a676b3291165fcd65c', 'all'], 'tools/uf2': ['https://github.com/microsoft/uf2.git', 'c594542b2faa01cc33a2b97c9fbebc38549df80a', diff --git a/tools/metrics.py b/tools/metrics.py index c6cd49d57..7e54531f5 100644 --- a/tools/metrics.py +++ b/tools/metrics.py @@ -3,6 +3,7 @@ import argparse import glob +import json import sys import os @@ -39,8 +40,6 @@ def combine_maps(map_files, filters=None): Returns: all_json_data: Dictionary with mapfiles list and data from each map file """ - import json - filters = filters or [] all_json_data = {"mapfiles": [], "data": []} @@ -128,24 +127,185 @@ def compute_avg(all_json_data): return json_average -def main(argv=None): - parser = argparse.ArgumentParser(description='Calculate average size from linker map files') - parser.add_argument('files', nargs='+', help='Path to map file(s) or glob pattern(s)') - parser.add_argument('-f', '--filter', dest='filters', action='append', default=[], - help='Only include object files whose path contains this substring (can be 
repeated)') - parser.add_argument('-o', '--out', dest='out', default='metrics', - help='Output path basename for JSON and Markdown files (default: metrics)') - parser.add_argument('-j', '--json', dest='json_out', action='store_true', - help='Write JSON output file') - parser.add_argument('-m', '--markdown', dest='markdown_out', action='store_true', - help='Write Markdown output file') - parser.add_argument('-q', '--quiet', dest='quiet', action='store_true', - help='Suppress summary output') - args = parser.parse_args(argv) +def compare_maps(base_file, new_file, filters=None): + """Compare two map/json files and generate difference report. - # Expand glob patterns + Args: + base_file: Path to base map/json file + new_file: Path to new map/json file + filters: List of path substrings to filter object files + + Returns: + Dictionary with comparison data + """ + filters = filters or [] + + # Load both files + base_data = combine_maps([base_file], filters) + new_data = combine_maps([new_file], filters) + + if not base_data["data"] or not new_data["data"]: + return None + + base_avg = compute_avg(base_data) + new_avg = compute_avg(new_data) + + if not base_avg or not new_avg: + return None + + # Collect all sections from both + all_sections = list(base_avg["sections"]) + for s in new_avg["sections"]: + if s not in all_sections: + all_sections.append(s) + + # Build file lookup + base_files = {f["file"]: f for f in base_avg["files"]} + new_files = {f["file"]: f for f in new_avg["files"]} + + # Get all file names + all_file_names = set(base_files.keys()) | set(new_files.keys()) + + # Build comparison data + comparison = [] + for fname in sorted(all_file_names): + base_f = base_files.get(fname) + new_f = new_files.get(fname) + + row = {"file": fname, "sections": {}, "total": {}} + + for section in all_sections: + base_val = base_f["sections"].get(section, 0) if base_f else 0 + new_val = new_f["sections"].get(section, 0) if new_f else 0 + row["sections"][section] = {"base": 
base_val, "new": new_val, "diff": new_val - base_val} + + base_total = base_f["total"] if base_f else 0 + new_total = new_f["total"] if new_f else 0 + row["total"] = {"base": base_total, "new": new_total, "diff": new_total - base_total} + + comparison.append(row) + + return { + "base_file": base_file, + "new_file": new_file, + "sections": all_sections, + "files": comparison + } + + +def format_diff(base, new, diff): + """Format a diff value with percentage.""" + if base == 0 and new == 0: + return "0" + if base == 0: + return f"{new} (new)" + if new == 0: + return f"{base} ➡ 0" + if diff == 0: + return f"{base} ➡ {new}" + pct = (diff / base) * 100 + sign = "+" if diff > 0 else "" + return f"{base} ➡ {new} ({sign}{diff}, {sign}{pct:.1f}%)" + + +def get_sort_key(sort_order): + """Get sort key function based on sort order. + + Args: + sort_order: One of 'size-', 'size+', 'name-', 'name+' + + Returns: + Tuple of (key_func, reverse) + """ + if sort_order == 'size-': + return lambda x: x.get('total', 0) if isinstance(x.get('total'), int) else x['total']['new'], True + elif sort_order == 'size+': + return lambda x: x.get('total', 0) if isinstance(x.get('total'), int) else x['total']['new'], False + elif sort_order == 'name-': + return lambda x: x.get('file', ''), True + else: # name+ + return lambda x: x.get('file', ''), False + + +def write_compare_markdown(comparison, path, sort_order='size'): + """Write comparison data to markdown file.""" + sections = comparison["sections"] + + md_lines = [ + "# TinyUSB Code Size Different Report", + "", + f"**Base:** `{comparison['base_file']}`", + f"**New:** `{comparison['new_file']}`", + "", + ] + + # Build header + header = "| File |" + separator = "|:-----|" + for s in sections: + header += f" {s} |" + separator += "-----:|" + header += " Total |" + separator += "------:|" + + md_lines.append(header) + md_lines.append(separator) + + # Sort files based on sort_order + if sort_order == 'size-': + key_func = lambda x: 
abs(x["total"]["diff"]) + reverse = True + elif sort_order in ('size', 'size+'): + key_func = lambda x: abs(x["total"]["diff"]) + reverse = False + elif sort_order == 'name-': + key_func = lambda x: x['file'] + reverse = True + else: # name or name+ + key_func = lambda x: x['file'] + reverse = False + sorted_files = sorted(comparison["files"], key=key_func, reverse=reverse) + + sum_base = {s: 0 for s in sections} + sum_base["total"] = 0 + sum_new = {s: 0 for s in sections} + sum_new["total"] = 0 + + for f in sorted_files: + # Skip files with no changes + if f["total"]["diff"] == 0 and all(f["sections"][s]["diff"] == 0 for s in sections): + continue + + row = f"| {f['file']} |" + for s in sections: + sd = f["sections"][s] + sum_base[s] += sd["base"] + sum_new[s] += sd["new"] + row += f" {format_diff(sd['base'], sd['new'], sd['diff'])} |" + + td = f["total"] + sum_base["total"] += td["base"] + sum_new["total"] += td["new"] + row += f" {format_diff(td['base'], td['new'], td['diff'])} |" + + md_lines.append(row) + + # Add sum row + sum_row = "| **SUM** |" + for s in sections: + diff = sum_new[s] - sum_base[s] + sum_row += f" {format_diff(sum_base[s], sum_new[s], diff)} |" + total_diff = sum_new["total"] - sum_base["total"] + sum_row += f" {format_diff(sum_base['total'], sum_new['total'], total_diff)} |" + md_lines.append(sum_row) + + with open(path, "w", encoding="utf-8") as f: + f.write("\n".join(md_lines)) + + +def cmd_combine(args): + """Handle combine subcommand.""" map_files = expand_files(args.files) - all_json_data = combine_maps(map_files, args.filters) json_average = compute_avg(all_json_data) @@ -154,11 +314,65 @@ def main(argv=None): sys.exit(1) if not args.quiet: - linkermap.print_summary(json_average, False) + linkermap.print_summary(json_average, False, args.sort) if args.json_out: linkermap.write_json(json_average, args.out + '.json') if args.markdown_out: - linkermap.write_markdown(json_average, args.out + '.md') + linkermap.write_markdown(json_average, 
args.out + '.md', sort_opt=args.sort, + title="TinyUSB Average Code Size Metrics") + + +def cmd_compare(args): + """Handle compare subcommand.""" + comparison = compare_maps(args.base, args.new, args.filters) + + if comparison is None: + print("Failed to compare files", file=sys.stderr) + sys.exit(1) + + write_compare_markdown(comparison, args.out + '.md', args.sort) + print(f"Comparison written to {args.out}.md") + + +def main(argv=None): + parser = argparse.ArgumentParser(description='Code size metrics tool') + subparsers = parser.add_subparsers(dest='command', required=True, help='Available commands') + + # Combine subcommand + combine_parser = subparsers.add_parser('combine', help='Combine and average multiple map files') + combine_parser.add_argument('files', nargs='+', help='Path to map file(s) or glob pattern(s)') + combine_parser.add_argument('-f', '--filter', dest='filters', action='append', default=[], + help='Only include object files whose path contains this substring (can be repeated)') + combine_parser.add_argument('-o', '--out', dest='out', default='metrics', + help='Output path basename for JSON and Markdown files (default: metrics)') + combine_parser.add_argument('-j', '--json', dest='json_out', action='store_true', + help='Write JSON output file') + combine_parser.add_argument('-m', '--markdown', dest='markdown_out', action='store_true', + help='Write Markdown output file') + combine_parser.add_argument('-q', '--quiet', dest='quiet', action='store_true', + help='Suppress summary output') + combine_parser.add_argument('-S', '--sort', dest='sort', default='name+', + choices=['size', 'size-', 'size+', 'name', 'name-', 'name+'], + help='Sort order: size/size- (descending), size+ (ascending), name/name+ (ascending), name- (descending). 
Default: name+') + + # Compare subcommand + compare_parser = subparsers.add_parser('compare', help='Compare two map files') + compare_parser.add_argument('base', help='Base map/json file') + compare_parser.add_argument('new', help='New map/json file') + compare_parser.add_argument('-f', '--filter', dest='filters', action='append', default=[], + help='Only include object files whose path contains this substring (can be repeated)') + compare_parser.add_argument('-o', '--out', dest='out', default='metrics_compare', + help='Output path basename for Markdown file (default: metrics_compare)') + compare_parser.add_argument('-S', '--sort', dest='sort', default='name+', + choices=['size', 'size-', 'size+', 'name', 'name-', 'name+'], + help='Sort order: size/size- (descending), size+ (ascending), name/name+ (ascending), name- (descending). Default: name+') + + args = parser.parse_args(argv) + + if args.command == 'combine': + cmd_combine(args) + elif args.command == 'compare': + cmd_compare(args) if __name__ == '__main__': From b0093ff067c1d3728d21a2e0756bc234cf2599c5 Mon Sep 17 00:00:00 2001 From: Ha Thach Date: Wed, 3 Dec 2025 23:08:58 +0700 Subject: [PATCH 8/9] Apply suggestion from @Copilot Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- tools/metrics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/metrics.py b/tools/metrics.py index 7e54531f5..bb84f803e 100644 --- a/tools/metrics.py +++ b/tools/metrics.py @@ -55,7 +55,7 @@ def combine_maps(map_files, filters=None): # Apply path filters to JSON data if filters: filtered_files = [ - f for f in json_data["files"] + f for f in json_data.get("files", []) if f.get("path") and any(filt in f["path"] for filt in filters) ] json_data["files"] = filtered_files From 1b6f2b90a45e172a2eb45b3dea2fa2d11382c2b0 Mon Sep 17 00:00:00 2001 From: hathach Date: Wed, 3 Dec 2025 23:10:21 +0700 Subject: [PATCH 9/9] clean up --- .github/workflows/build.yml | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b0b636c65..77f2d573f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -70,7 +70,7 @@ jobs: - 'aarch64-gcc' #- 'arm-clang' - 'arm-gcc' - # - 'esp-idf' + - 'esp-idf' - 'msp430-gcc' - 'riscv-gcc' with: @@ -279,6 +279,7 @@ jobs: - name: Download Artifacts uses: actions/download-artifact@v5 with: + pattern: binaries-* path: cmake-build merge-multiple: true