diff --git a/.circleci/config2.yml b/.circleci/config2.yml
index ab0fd7ba1..869597289 100644
--- a/.circleci/config2.yml
+++ b/.circleci/config2.yml
@@ -119,7 +119,9 @@ commands:
TOOLCHAIN_OPTION="--toolchain gcc"
fi
- python tools/build.py -s << parameters.build-system >> $TOOLCHAIN_OPTION << parameters.family >>
+          # CircleCI docker reports $(nproc) as 36 cores; limit parallel jobs to 4 (resource-class = large)
+          # Required for IAR, and also prevents builds being crashed/killed by docker
+ python tools/build.py -s << parameters.build-system >> $TOOLCHAIN_OPTION -j 4 << parameters.family >>
fi
jobs:
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index f1b134b8a..77f2d573f 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -57,11 +57,10 @@ jobs:
echo "hil_matrix=$HIL_MATRIX_JSON" >> $GITHUB_OUTPUT
# ---------------------------------------
- # Build CMake: only build on push with one-per-family.
+  # Build CMake: only build one board per family.
  # Full build is done by CircleCI in PR
# ---------------------------------------
cmake:
- if: github.event_name == 'push'
needs: set-matrix
uses: ./.github/workflows/build_util.yml
strategy:
@@ -79,6 +78,65 @@ jobs:
toolchain: ${{ matrix.toolchain }}
build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain]) }}
one-per-family: true
+ upload-metrics: true
+
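+  # Aggregate per-board metrics.json artifacts from the cmake job; on pull requests,
+  # compare against the base branch metrics and post the result as a sticky PR comment.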
+ code-metrics:
+ needs: cmake
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ steps:
+ - name: Checkout TinyUSB
+ uses: actions/checkout@v4
+
+ - name: Download Artifacts
+ uses: actions/download-artifact@v5
+ with:
+ pattern: metrics-*
+ path: cmake-build
+ merge-multiple: true
+
+ - name: Aggregate Code Metrics
+ run: |
+ python tools/get_deps.py
+ pip install tools/linkermap/
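+          # -j writes metrics.json and -m writes metrics.md; -f keeps only objects under tinyusb/src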
+ python tools/metrics.py combine -j -m -f tinyusb/src cmake-build/*/metrics.json
+
+ - name: Upload Metrics Artifact
+ if: github.event_name == 'push'
+ uses: actions/upload-artifact@v5
+ with:
+ name: metrics-tinyusb
+ path: metrics.json
+
+ - name: Download Base Branch Metrics
+ if: github.event_name == 'pull_request'
+ uses: dawidd6/action-download-artifact@v11
+ with:
+ workflow: build.yml
+ branch: ${{ github.base_ref }}
+ name: metrics-tinyusb
+ path: base-metrics
+ continue-on-error: true
+
+ - name: Compare with Base Branch
+ if: github.event_name == 'pull_request'
+ run: |
+ if [ -f base-metrics/metrics.json ]; then
+ python tools/metrics.py compare -f tinyusb/src base-metrics/metrics.json metrics.json
+ cat metrics_compare.md
+ else
+ echo "No base metrics found, skipping comparison"
+ cp metrics.md metrics_compare.md
+ fi
+
+ - name: Post Code Metrics as PR Comment
+ if: github.event_name == 'pull_request'
+ uses: marocchino/sticky-pull-request-comment@v2
+ with:
+ header: code-metrics
+ path: metrics_compare.md
+
# ---------------------------------------
# Build Make: only build on push with one-per-family
@@ -134,7 +192,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- os: [windows-latest, macos-latest]
+ os: [ windows-latest, macos-latest ]
build-system: [ 'make', 'cmake' ]
with:
os: ${{ matrix.os }}
@@ -196,7 +254,7 @@ jobs:
github.repository_owner == 'hathach' &&
(github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch')
needs: hil-build
- runs-on: [self-hosted, X64, hathach, hardware-in-the-loop]
+ runs-on: [ self-hosted, X64, hathach, hardware-in-the-loop ]
steps:
- name: Get Skip Boards from previous run
if: github.run_attempt != '1'
@@ -221,6 +279,7 @@ jobs:
- name: Download Artifacts
uses: actions/download-artifact@v5
with:
+ pattern: binaries-*
path: cmake-build
merge-multiple: true
@@ -238,7 +297,7 @@ jobs:
github.repository_owner == 'hathach' &&
github.event.pull_request.head.repo.fork == false &&
(github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch')
- runs-on: [self-hosted, Linux, X64, hifiphile]
+ runs-on: [ self-hosted, Linux, X64, hifiphile ]
env:
IAR_LMS_BEARER_TOKEN: ${{ secrets.IAR_LMS_BEARER_TOKEN }}
steps:
diff --git a/.github/workflows/build_util.yml b/.github/workflows/build_util.yml
index 55901b838..36043a1d5 100644
--- a/.github/workflows/build_util.yml
+++ b/.github/workflows/build_util.yml
@@ -20,6 +20,10 @@ on:
required: false
default: false
type: boolean
+ upload-metrics:
+ required: false
+ default: false
+ type: boolean
os:
required: false
type: string
@@ -69,11 +73,18 @@ jobs:
fi
shell: bash
+ - name: Upload Artifacts for Metrics
+ if: ${{ inputs.upload-metrics }}
+ uses: actions/upload-artifact@v5
+ with:
+ name: metrics-${{ matrix.arg }}
+ path: cmake-build/cmake-build-*/metrics.json
+
- name: Upload Artifacts for Hardware Testing
if: ${{ inputs.upload-artifacts }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v5
with:
- name: ${{ matrix.arg }}
+ name: binaries-${{ matrix.arg }}
path: |
cmake-build/cmake-build-*/*/*/*.elf
cmake-build/cmake-build-*/*/*/*.bin
diff --git a/.github/workflows/ci_set_matrix.py b/.github/workflows/ci_set_matrix.py
index 9d0e42c2e..933a8375f 100755
--- a/.github/workflows/ci_set_matrix.py
+++ b/.github/workflows/ci_set_matrix.py
@@ -15,39 +15,29 @@ toolchain_list = [
# family: [supported toolchain]
family_list = {
- "at32f402_405 at32f403a_407 at32f413 at32f415 at32f423 at32f425 at32f435_437": ["arm-gcc"],
- "broadcom_32bit": ["arm-gcc"],
+ "at32f402_405 at32f403a_407 at32f413 at32f415 at32f423 at32f425 at32f435_437 broadcom_32bit da1469x": ["arm-gcc"],
"broadcom_64bit": ["aarch64-gcc"],
"ch32v10x ch32v20x ch32v30x fomu gd32vf103": ["riscv-gcc"],
- "da1469x": ["arm-gcc"],
"imxrt": ["arm-gcc", "arm-clang"],
"kinetis_k kinetis_kl kinetis_k32l2": ["arm-gcc", "arm-clang"],
- "lpc11 lpc13 lpc15": ["arm-gcc", "arm-clang"],
- "lpc17 lpc18 lpc40 lpc43": ["arm-gcc", "arm-clang"],
+ "lpc11 lpc13 lpc15 lpc17 lpc18 lpc40 lpc43": ["arm-gcc", "arm-clang"],
"lpc51 lpc54 lpc55": ["arm-gcc", "arm-clang"],
- "maxim": ["arm-gcc"],
- "mcx": ["arm-gcc"],
- "mm32": ["arm-gcc"],
+ "maxim mcx mm32 msp432e4 tm4c": ["arm-gcc"],
"msp430": ["msp430-gcc"],
- "msp432e4 tm4c": ["arm-gcc"],
"nrf": ["arm-gcc", "arm-clang"],
- "nuc100_120 nuc121_125 nuc126 nuc505": ["arm-gcc"],
+ "nuc100_120 nuc121_125 nuc126 nuc505 xmc4000": ["arm-gcc"],
"ra": ["arm-gcc"],
"rp2040": ["arm-gcc"],
"rx": ["rx-gcc"],
- "samd11 samd2x_l2x": ["arm-gcc", "arm-clang"],
- "samd5x_e5x samg": ["arm-gcc", "arm-clang"],
+ "samd11 samd2x_l2x samd5x_e5x samg": ["arm-gcc", "arm-clang"],
"stm32c0 stm32f0 stm32f1 stm32f2 stm32f3": ["arm-gcc", "arm-clang", "arm-iar"],
"stm32f4": ["arm-gcc", "arm-clang", "arm-iar"],
"stm32f7": ["arm-gcc", "arm-clang", "arm-iar"],
"stm32g0 stm32g4 stm32h5": ["arm-gcc", "arm-clang", "arm-iar"],
"stm32h7": ["arm-gcc", "arm-clang", "arm-iar"],
- "stm32h7rs": ["arm-gcc", "arm-clang", "arm-iar"],
- "stm32l0 stm32l4": ["arm-gcc", "arm-clang", "arm-iar"],
+ "stm32h7rs stm32l0 stm32l4": ["arm-gcc", "arm-clang", "arm-iar"],
"stm32n6": ["arm-gcc"],
- "stm32u0 stm32u5 stm32wb": ["arm-gcc", "arm-clang", "arm-iar"],
- "stm32wba": ["arm-gcc", "arm-clang"],
- "xmc4000": ["arm-gcc"],
+ "stm32u0 stm32u5 stm32wb stm32wba": ["arm-gcc", "arm-clang", "arm-iar"],
"-bespressif_s2_devkitc": ["esp-idf"],
# S3, P4 will be built by hil test
# "-bespressif_s3_devkitm": ["esp-idf"],
diff --git a/.gitignore b/.gitignore
index 977911dff..93d13503f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -42,6 +42,7 @@ cov-int
*-build-dir
/_bin/
__pycache__
+cmake-build/
cmake-build-*
sdkconfig
.PVS-Studio
diff --git a/.idea/cmake.xml b/.idea/cmake.xml
index 677aaa662..0754253ad 100644
--- a/.idea/cmake.xml
+++ b/.idea/cmake.xml
@@ -124,6 +124,7 @@
+
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index d34c6ed5d..b34131c2b 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -5,7 +5,25 @@ include(${CMAKE_CURRENT_SOURCE_DIR}/../hw/bsp/family_support.cmake)
project(tinyusb_examples C CXX ASM)
-add_subdirectory(device)
-add_subdirectory(dual)
-add_subdirectory(host)
-add_subdirectory(typec)
+set(EXAMPLES_LIST
+ device
+ dual
+ host
+ typec
+ )
+set(MAPJSON_PATTERNS "")
+
+foreach (example ${EXAMPLES_LIST})
+ add_subdirectory(${example})
+ list(APPEND MAPJSON_PATTERNS "${CMAKE_BINARY_DIR}/${example}/*/*.map.json")
+endforeach ()
+
+# Post-build: run metrics.py on all map.json files
+find_package(Python3 REQUIRED COMPONENTS Interpreter)
+add_custom_target(tinyusb_metrics
+ COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/../tools/metrics.py
+ combine -f tinyusb/src -j -o ${CMAKE_BINARY_DIR}/metrics
+ ${MAPJSON_PATTERNS}
+ COMMENT "Generating average code size metrics"
+ VERBATIM
+ )
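+# Build it manually with: cmake --build <build-dir> --target tinyusb_metrics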
diff --git a/examples/build_system/cmake/toolchain/arm_iar.cmake b/examples/build_system/cmake/toolchain/arm_iar.cmake
index 0b7e0b585..67d100bbc 100644
--- a/examples/build_system/cmake/toolchain/arm_iar.cmake
+++ b/examples/build_system/cmake/toolchain/arm_iar.cmake
@@ -24,7 +24,8 @@ set(CMAKE_C_ICSTAT ${CMAKE_IAR_CSTAT}
--checks=${CMAKE_CURRENT_LIST_DIR}/cstat_sel_checks.txt
--db=${CMAKE_BINARY_DIR}/cstat.db
--sarif_dir=${CMAKE_BINARY_DIR}/cstat_sarif
- --exclude ${TOP}/hw/mcu --exclude ${TOP}/lib
+ --exclude=${TOP}/hw/mcu
+ --exclude=${TOP}/lib
)
endif ()
diff --git a/examples/device/CMakeLists.txt b/examples/device/CMakeLists.txt
index eb625ea51..660df67cb 100644
--- a/examples/device/CMakeLists.txt
+++ b/examples/device/CMakeLists.txt
@@ -6,31 +6,38 @@ project(tinyusb_device_examples C CXX ASM)
family_initialize_project(tinyusb_device_examples ${CMAKE_CURRENT_LIST_DIR})
# family_add_subdirectory will filter what to actually add based on selected FAMILY
-family_add_subdirectory(audio_4_channel_mic)
-family_add_subdirectory(audio_test)
-family_add_subdirectory(audio_4_channel_mic_freertos)
-family_add_subdirectory(audio_test_freertos)
-family_add_subdirectory(audio_test_multi_rate)
-family_add_subdirectory(board_test)
-family_add_subdirectory(cdc_dual_ports)
-family_add_subdirectory(cdc_msc)
-family_add_subdirectory(cdc_msc_freertos)
-family_add_subdirectory(cdc_uac2)
-family_add_subdirectory(dfu)
-family_add_subdirectory(dfu_runtime)
-family_add_subdirectory(dynamic_configuration)
-family_add_subdirectory(hid_boot_interface)
-family_add_subdirectory(hid_composite)
-family_add_subdirectory(hid_composite_freertos)
-family_add_subdirectory(hid_generic_inout)
-family_add_subdirectory(hid_multiple_interface)
-family_add_subdirectory(midi_test)
-family_add_subdirectory(msc_dual_lun)
-family_add_subdirectory(mtp)
-family_add_subdirectory(net_lwip_webserver)
-family_add_subdirectory(uac2_headset)
-family_add_subdirectory(uac2_speaker_fb)
-family_add_subdirectory(usbtmc)
-family_add_subdirectory(video_capture)
-family_add_subdirectory(video_capture_2ch)
-family_add_subdirectory(webusb_serial)
+set(EXAMPLE_LIST
+ audio_4_channel_mic
+ audio_4_channel_mic_freertos
+ audio_test
+ audio_test_freertos
+ audio_test_multi_rate
+ board_test
+ cdc_dual_ports
+ cdc_msc
+ cdc_msc_freertos
+ cdc_uac2
+ dfu
+ dfu_runtime
+ dynamic_configuration
+ hid_boot_interface
+ hid_composite
+ hid_composite_freertos
+ hid_generic_inout
+ hid_multiple_interface
+ midi_test
+ midi_test_freertos
+ msc_dual_lun
+ mtp
+ net_lwip_webserver
+ uac2_headset
+ uac2_speaker_fb
+ usbtmc
+ video_capture
+ video_capture_2ch
+ webusb_serial
+ )
+
+foreach (example ${EXAMPLE_LIST})
+ family_add_subdirectory(${example})
+endforeach ()
diff --git a/examples/dual/CMakeLists.txt b/examples/dual/CMakeLists.txt
index c5e3ffce4..4978f1fab 100644
--- a/examples/dual/CMakeLists.txt
+++ b/examples/dual/CMakeLists.txt
@@ -9,6 +9,12 @@ if (FAMILY STREQUAL "rp2040" AND NOT TARGET tinyusb_pico_pio_usb)
message("Skipping dual host/device mode examples as Pico-PIO-USB is not available")
else ()
# family_add_subdirectory will filter what to actually add based on selected FAMILY
- family_add_subdirectory(host_hid_to_device_cdc)
- family_add_subdirectory(host_info_to_device_cdc)
+ set(EXAMPLE_LIST
+ host_hid_to_device_cdc
+ host_info_to_device_cdc
+ )
+
+ foreach (example ${EXAMPLE_LIST})
+ family_add_subdirectory(${example})
+ endforeach ()
endif ()
diff --git a/examples/host/CMakeLists.txt b/examples/host/CMakeLists.txt
index 2783dd84e..f8e0ce692 100644
--- a/examples/host/CMakeLists.txt
+++ b/examples/host/CMakeLists.txt
@@ -6,10 +6,16 @@ project(tinyusb_host_examples C CXX ASM)
family_initialize_project(tinyusb_host_examples ${CMAKE_CURRENT_LIST_DIR})
# family_add_subdirectory will filter what to actually add based on selected FAMILY
-family_add_subdirectory(bare_api)
-family_add_subdirectory(cdc_msc_hid)
-family_add_subdirectory(cdc_msc_hid_freertos)
-family_add_subdirectory(device_info)
-family_add_subdirectory(hid_controller)
-family_add_subdirectory(midi_rx)
-family_add_subdirectory(msc_file_explorer)
+set(EXAMPLE_LIST
+ bare_api
+ cdc_msc_hid
+ cdc_msc_hid_freertos
+ device_info
+ hid_controller
+ midi_rx
+ msc_file_explorer
+ )
+
+foreach (example ${EXAMPLE_LIST})
+ family_add_subdirectory(${example})
+endforeach ()
diff --git a/hw/bsp/family_support.cmake b/hw/bsp/family_support.cmake
index 2b9612186..15d9f1eae 100644
--- a/hw/bsp/family_support.cmake
+++ b/hw/bsp/family_support.cmake
@@ -9,6 +9,7 @@ set(TOP "${CMAKE_CURRENT_LIST_DIR}/../..")
get_filename_component(TOP ${TOP} ABSOLUTE)
set(UF2CONV_PY ${TOP}/tools/uf2/utils/uf2conv.py)
+set(LINKERMAP_PY ${TOP}/tools/linkermap/linkermap.py)
function(family_resolve_board BOARD_NAME BOARD_PATH_OUT)
if ("${BOARD_NAME}" STREQUAL "")
@@ -223,6 +224,24 @@ function(family_initialize_project PROJECT DIR)
endif()
endfunction()
+# Add linkermap target (https://github.com/hathach/linkermap)
+function(family_add_linkermap TARGET)
+ set(LINKERMAP_OPTION_LIST)
+ if (DEFINED LINKERMAP_OPTION)
+ separate_arguments(LINKERMAP_OPTION_LIST UNIX_COMMAND ${LINKERMAP_OPTION})
+ endif ()
+
+ add_custom_target(${TARGET}-linkermap
+    COMMAND python ${LINKERMAP_PY} -j ${LINKERMAP_OPTION_LIST} $<TARGET_FILE:${TARGET}>.map
+ VERBATIM
+ )
+
+ # post build
+ add_custom_command(TARGET ${TARGET} POST_BUILD
+    COMMAND python ${LINKERMAP_PY} -j ${LINKERMAP_OPTION_LIST} $<TARGET_FILE:${TARGET}>.map
+ VERBATIM)
+endfunction()
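+# Example (matching how tools/build.py configures boards): pass
+#   -DLINKERMAP_OPTION="-q -f tinyusb/src"
+# at configure time to forward extra flags to linkermap.py.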
+
#-------------------------------------------------------------
# Common Target Configure
# Most families use these settings except rp2040 and espressif
@@ -332,6 +351,11 @@ function(family_configure_common TARGET RTOS)
endif ()
endif ()
+ if (NOT RTOS STREQUAL zephyr)
+ # Generate linkermap target and post build. LINKERMAP_OPTION can be set with -D to change default options
+ family_add_linkermap(${TARGET})
+ endif ()
+
# run size after build
# find_program(SIZE_EXE ${CMAKE_SIZE})
# if(NOT ${SIZE_EXE} STREQUAL SIZE_EXE-NOTFOUND)
diff --git a/hw/bsp/rp2040/family.cmake b/hw/bsp/rp2040/family.cmake
index 5d6d8b40e..390d6072c 100644
--- a/hw/bsp/rp2040/family.cmake
+++ b/hw/bsp/rp2040/family.cmake
@@ -222,6 +222,8 @@ function(family_add_default_example_warnings TARGET)
endif()
endfunction()
+
+# TODO merge with family_configure_common from family_support.cmake
function(family_configure_target TARGET RTOS)
if (RTOS STREQUAL noos OR RTOS STREQUAL "")
set(RTOS_SUFFIX "")
@@ -239,10 +241,15 @@ function(family_configure_target TARGET RTOS)
pico_add_extra_outputs(${TARGET})
pico_enable_stdio_uart(${TARGET} 1)
+
+  target_link_options(${TARGET} PUBLIC "LINKER:-Map=$<TARGET_FILE:${TARGET}>.map")
target_link_libraries(${TARGET} PUBLIC pico_stdlib tinyusb_board${RTOS_SUFFIX} tinyusb_additions)
family_flash_openocd(${TARGET})
family_flash_jlink(${TARGET})
+
+ # Generate linkermap target and post build. LINKERMAP_OPTION can be set with -D to change default options
+ family_add_linkermap(${TARGET})
endfunction()
diff --git a/src/portable/synopsys/dwc2/dwc2_info.py b/src/portable/synopsys/dwc2/dwc2_info.py
index f6bd2785a..8fbbc00a0 100755
--- a/src/portable/synopsys/dwc2/dwc2_info.py
+++ b/src/portable/synopsys/dwc2/dwc2_info.py
@@ -2,7 +2,6 @@
import ctypes
import argparse
-import click
import pandas as pd
# hex value for register: guid, gsnpsid, ghwcfg1, ghwcfg2, ghwcfg3, ghwcfg4
diff --git a/test/hil/hil_test.py b/test/hil/hil_test.py
index ba0826bd3..b2e883119 100755
--- a/test/hil/hil_test.py
+++ b/test/hil/hil_test.py
@@ -662,7 +662,7 @@ def test_example(board, f1, example):
print(f'Flashing {fw_name}.elf')
# flash firmware. It may fail randomly, retry a few times
- max_rety = 1
+ max_rety = 3
start_s = time.time()
for i in range(max_rety):
ret = globals()[f'flash_{board["flasher"]["name"].lower()}'](board, fw_name)
diff --git a/tools/build.py b/tools/build.py
index ce4d0ef1a..e4909f45f 100755
--- a/tools/build.py
+++ b/tools/build.py
@@ -5,6 +5,7 @@ import os
import sys
import time
import subprocess
+import shlex
from pathlib import Path
from multiprocessing import Pool
@@ -23,15 +24,19 @@ build_separator = '-' * 95
build_status = [STATUS_OK, STATUS_FAILED, STATUS_SKIPPED]
verbose = False
+clean_build = False
parallel_jobs = os.cpu_count()
# -----------------------------
# Helper
# -----------------------------
def run_cmd(cmd):
- #print(cmd)
- r = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- title = f'Command Error: {cmd}'
+ if isinstance(cmd, str):
+ raise TypeError("run_cmd expects a list/tuple of args, not a string")
+ args = cmd
+ cmd_display = " ".join(args)
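+    # run without shell=True so arguments are passed verbatim (no shell re-parsing)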
+ r = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ title = f'Command Error: {cmd_display}'
if r.returncode != 0:
# print build output if failed
if os.getenv('GITHUB_ACTIONS'):
@@ -42,7 +47,7 @@ def run_cmd(cmd):
print(title)
print(r.stdout.decode("utf-8"))
elif verbose:
- print(cmd)
+ print(cmd_display)
print(r.stdout.decode("utf-8"))
return r
@@ -87,10 +92,10 @@ def cmake_board(board, build_args, build_flags_on):
start_time = time.monotonic()
build_dir = f'cmake-build/cmake-build-{board}'
- build_flags = ''
+ build_flags = []
if len(build_flags_on) > 0:
- build_flags = ' '.join(f'-D{flag}=1' for flag in build_flags_on)
- build_flags = f'-DCFLAGS_CLI="{build_flags}"'
+ cli_flags = ' '.join(f'-D{flag}=1' for flag in build_flags_on)
+ build_flags.append(f'-DCFLAGS_CLI={cli_flags}')
build_dir += '-f1_' + '_'.join(build_flags_on)
family = find_family(board)
@@ -101,27 +106,26 @@ def cmake_board(board, build_args, build_flags_on):
if build_utils.skip_example(example, board):
ret[2] += 1
else:
- rcmd = run_cmd(f'idf.py -C examples/{example} -B {build_dir}/{example} -G Ninja '
- f'-DBOARD={board} {build_flags} build')
+ rcmd = run_cmd([
+ 'idf.py', '-C', f'examples/{example}', '-B', f'{build_dir}/{example}', '-GNinja',
+ f'-DBOARD={board}', *build_flags, 'build'
+ ])
ret[0 if rcmd.returncode == 0 else 1] += 1
else:
- rcmd = run_cmd(f'cmake examples -B {build_dir} -G Ninja -DBOARD={board} -DCMAKE_BUILD_TYPE=MinSizeRel '
- f'{build_args} {build_flags}')
+ rcmd = run_cmd(['cmake', 'examples', '-B', build_dir, '-GNinja',
+ f'-DBOARD={board}', '-DCMAKE_BUILD_TYPE=MinSizeRel', '-DLINKERMAP_OPTION=-q -f tinyusb/src',
+ *build_args, *build_flags])
if rcmd.returncode == 0:
- cmd = f"cmake --build {build_dir}"
- njobs = parallel_jobs
-
- # circleci docker return $nproc as 36 core, limit parallel according to resource class.
- # Required for IAR, also prevent crashed/killed by docker
- if os.getenv('CIRCLECI'):
- resource_class = { 'small': 1, 'medium': 2, 'medium+': 3, 'large': 4 }
- for rc in resource_class:
- if rc in os.getenv('CIRCLE_JOB'):
- njobs = resource_class[rc]
- break
- cmd += f' --parallel {njobs}'
+ if clean_build:
+ run_cmd(["cmake", "--build", build_dir, '--target', 'clean'])
+ cmd = ["cmake", "--build", build_dir, '--parallel', str(parallel_jobs)]
rcmd = run_cmd(cmd)
- ret[0 if rcmd.returncode == 0 else 1] += 1
+ if rcmd.returncode == 0:
+ ret[0] += 1
+ run_cmd(["cmake", "--build", build_dir, '--target', 'tinyusb_metrics'])
+ # print(rcmd.stdout.decode("utf-8"))
+ else:
+ ret[1] += 1
example = 'all'
print_build_result(board, example, 0 if ret[1] == 0 else 1, time.monotonic() - start_time)
@@ -141,9 +145,13 @@ def make_one_example(example, board, make_option):
# skip -j for circleci
if not os.getenv('CIRCLECI'):
make_option += ' -j'
- make_cmd = f"make -C examples/{example} BOARD={board} {make_option}"
- # run_cmd(f"{make_cmd} clean")
- build_result = run_cmd(f"{make_cmd} all")
+ make_args = ["make", "-C", f"examples/{example}", f"BOARD={board}"]
+ if make_option:
+ make_args += shlex.split(make_option)
+ make_args.append("all")
+ if clean_build:
+ run_cmd(make_args + ["clean"])
+ build_result = run_cmd(make_args)
r = 0 if build_result.returncode == 0 else 1
print_build_result(board, example, r, time.monotonic() - start_time)
@@ -180,7 +188,7 @@ def build_boards_list(boards, build_defines, build_system, build_flags_on):
for b in boards:
r = [0, 0, 0]
if build_system == 'cmake':
- build_args = ' '.join(f'-D{d}' for d in build_defines)
+ build_args = [f'-D{d}' for d in build_defines]
r = cmake_board(b, build_args, build_flags_on)
elif build_system == 'make':
build_args = ' '.join(f'{d}' for d in build_defines)
@@ -191,8 +199,18 @@ def build_boards_list(boards, build_defines, build_system, build_flags_on):
return ret
-def build_family(family, build_defines, build_system, build_flags_on, one_per_family, boards):
- skip_ci = ['pico_sdk']
+def get_family_boards(family, one_per_family, boards):
+ """Get list of boards for a family.
+
+ Args:
+ family: Family name
+ one_per_family: If True, return only one random board
+ boards: List of boards already specified via -b flag
+
+ Returns:
+ List of board names
+ """
+ skip_ci = []
if os.getenv('GITHUB_ACTIONS') or os.getenv('CIRCLECI'):
skip_ci_file = Path(f"hw/bsp/{family}/skip_ci.txt")
if skip_ci_file.exists():
@@ -203,17 +221,15 @@ def build_family(family, build_defines, build_system, build_flags_on, one_per_fa
all_boards.append(entry.name)
all_boards.sort()
- ret = [0, 0, 0]
# If only-one flag is set, select one random board
if one_per_family:
for b in boards:
# skip if -b already specify one in this family
if find_family(b) == family:
- return ret
+ return []
all_boards = [random.choice(all_boards)]
- ret = build_boards_list(all_boards, build_defines, build_system, build_flags_on)
- return ret
+ return all_boards
# -----------------------------
@@ -221,11 +237,13 @@ def build_family(family, build_defines, build_system, build_flags_on, one_per_fa
# -----------------------------
def main():
global verbose
+ global clean_build
global parallel_jobs
parser = argparse.ArgumentParser()
parser.add_argument('families', nargs='*', default=[], help='Families to build')
parser.add_argument('-b', '--board', action='append', default=[], help='Boards to build')
+ parser.add_argument('-c', '--clean', action='store_true', default=False, help='Clean before build')
parser.add_argument('-t', '--toolchain', default='gcc', help='Toolchain to use, default is gcc')
parser.add_argument('-s', '--build-system', default='cmake', help='Build system to use, default is cmake')
parser.add_argument('-D', '--define-symbol', action='append', default=[], help='Define to pass to build system')
@@ -243,6 +261,7 @@ def main():
build_flags_on = args.build_flags_on
one_per_family = args.one_per_family
verbose = args.verbose
+ clean_build = args.clean
parallel_jobs = args.jobs
build_defines.append(f'TOOLCHAIN={toolchain}')
@@ -254,9 +273,8 @@ def main():
print(build_separator)
print(build_format.format('Board', 'Example', '\033[39mResult\033[0m', 'Time'))
total_time = time.monotonic()
- result = [0, 0, 0]
- # build families
+ # get all families
all_families = []
if 'all' in families:
for entry in os.scandir("hw/bsp"):
@@ -266,23 +284,19 @@ def main():
all_families = list(families)
all_families.sort()
- # succeeded, failed, skipped
+ # get boards from families and append to boards list
+ all_boards = list(boards)
for f in all_families:
- r = build_family(f, build_defines, build_system, build_flags_on, one_per_family, boards)
- result[0] += r[0]
- result[1] += r[1]
- result[2] += r[2]
+ all_boards.extend(get_family_boards(f, one_per_family, boards))
- # build boards
- r = build_boards_list(boards, build_defines, build_system, build_flags_on)
- result[0] += r[0]
- result[1] += r[1]
- result[2] += r[2]
+ # build all boards
+ result = build_boards_list(all_boards, build_defines, build_system, build_flags_on)
total_time = time.monotonic() - total_time
print(build_separator)
print(f"Build Summary: {result[0]} {STATUS_OK}, {result[1]} {STATUS_FAILED} and took {total_time:.2f}s")
print(build_separator)
+
return result[1]
diff --git a/tools/get_deps.py b/tools/get_deps.py
index d749e4c84..99e406ce7 100755
--- a/tools/get_deps.py
+++ b/tools/get_deps.py
@@ -14,6 +14,9 @@ deps_mandatory = {
'lib/lwip': ['https://github.com/lwip-tcpip/lwip.git',
'159e31b689577dbf69cf0683bbaffbd71fa5ee10',
'all'],
+ 'tools/linkermap': ['https://github.com/hathach/linkermap.git',
+ '8a8206c39d0dfd7abfa615a676b3291165fcd65c',
+ 'all'],
'tools/uf2': ['https://github.com/microsoft/uf2.git',
'c594542b2faa01cc33a2b97c9fbebc38549df80a',
'all'],
diff --git a/tools/metrics.py b/tools/metrics.py
new file mode 100644
index 000000000..bb84f803e
--- /dev/null
+++ b/tools/metrics.py
@@ -0,0 +1,379 @@
+#!/usr/bin/env python3
+"""Calculate average size from multiple linker map files."""
+
+import argparse
+import glob
+import json
+import sys
+import os
+
+# Add linkermap module to path
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'linkermap'))
+import linkermap
+
+
+def expand_files(file_patterns):
+ """Expand file patterns (globs) to list of files.
+
+ Args:
+ file_patterns: List of file paths or glob patterns
+
+ Returns:
+ List of expanded file paths
+ """
+ expanded = []
+ for pattern in file_patterns:
+ if '*' in pattern or '?' in pattern:
+ expanded.extend(glob.glob(pattern))
+ else:
+ expanded.append(pattern)
+ return expanded
+
+
+def combine_maps(map_files, filters=None):
+ """Combine multiple map files into a list of json_data.
+
+ Args:
+ map_files: List of paths to linker map files or JSON files
+ filters: List of path substrings to filter object files (default: [])
+
+ Returns:
+ all_json_data: Dictionary with mapfiles list and data from each map file
+ """
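+    # Inputs may be raw .map files (parsed via linkermap.analyze_map) or .json files
+    # previously produced by linkermap -j, which are re-filtered here.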
+ filters = filters or []
+ all_json_data = {"mapfiles": [], "data": []}
+
+ for map_file in map_files:
+ if not os.path.exists(map_file):
+ print(f"Warning: {map_file} not found, skipping", file=sys.stderr)
+ continue
+
+ try:
+ if map_file.endswith('.json'):
+ with open(map_file, 'r', encoding='utf-8') as f:
+ json_data = json.load(f)
+ # Apply path filters to JSON data
+ if filters:
+ filtered_files = [
+ f for f in json_data.get("files", [])
+ if f.get("path") and any(filt in f["path"] for filt in filters)
+ ]
+ json_data["files"] = filtered_files
+ else:
+ json_data = linkermap.analyze_map(map_file, filters=filters)
+ all_json_data["mapfiles"].append(map_file)
+ all_json_data["data"].append(json_data)
+ except Exception as e:
+ print(f"Warning: Failed to analyze {map_file}: {e}", file=sys.stderr)
+ continue
+
+ return all_json_data
+
+
+def compute_avg(all_json_data):
+ """Compute average sizes from combined json_data.
+
+ Args:
+ all_json_data: Dictionary with mapfiles and data from combine_maps()
+
+ Returns:
+ json_average: Dictionary with averaged size data
+ """
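+    # e.g. a file sized 100 and 120 bytes in two maps averages to 110; a file absent
+    # from a map is averaged only over the maps it appears in.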
+ if not all_json_data["data"]:
+ return None
+
+ # Collect all sections preserving order
+ all_sections = []
+ for json_data in all_json_data["data"]:
+ for s in json_data["sections"]:
+ if s not in all_sections:
+ all_sections.append(s)
+
+ # Merge files with the same 'file' value and compute averages
+ file_accumulator = {} # key: file name, value: {"sections": {section: [sizes]}, "totals": [totals]}
+
+ for json_data in all_json_data["data"]:
+ for f in json_data["files"]:
+ fname = f["file"]
+ if fname not in file_accumulator:
+ file_accumulator[fname] = {"sections": {}, "totals": [], "path": f.get("path")}
+ file_accumulator[fname]["totals"].append(f["total"])
+ for section, size in f["sections"].items():
+ if section in file_accumulator[fname]["sections"]:
+ file_accumulator[fname]["sections"][section].append(size)
+ else:
+ file_accumulator[fname]["sections"][section] = [size]
+
+ # Build json_average with averaged values
+ files_average = []
+ for fname, data in file_accumulator.items():
+ avg_total = round(sum(data["totals"]) / len(data["totals"]))
+ avg_sections = {}
+ for section, sizes in data["sections"].items():
+ avg_sections[section] = round(sum(sizes) / len(sizes))
+ files_average.append({
+ "file": fname,
+ "path": data["path"],
+ "sections": avg_sections,
+ "total": avg_total
+ })
+
+ json_average = {
+ "mapfiles": all_json_data["mapfiles"],
+ "sections": all_sections,
+ "files": files_average
+ }
+
+ return json_average
+
+
+def compare_maps(base_file, new_file, filters=None):
+ """Compare two map/json files and generate difference report.
+
+ Args:
+ base_file: Path to base map/json file
+ new_file: Path to new map/json file
+ filters: List of path substrings to filter object files
+
+ Returns:
+ Dictionary with comparison data
+ """
+ filters = filters or []
+
+ # Load both files
+ base_data = combine_maps([base_file], filters)
+ new_data = combine_maps([new_file], filters)
+
+ if not base_data["data"] or not new_data["data"]:
+ return None
+
+ base_avg = compute_avg(base_data)
+ new_avg = compute_avg(new_data)
+
+ if not base_avg or not new_avg:
+ return None
+
+ # Collect all sections from both
+ all_sections = list(base_avg["sections"])
+ for s in new_avg["sections"]:
+ if s not in all_sections:
+ all_sections.append(s)
+
+ # Build file lookup
+ base_files = {f["file"]: f for f in base_avg["files"]}
+ new_files = {f["file"]: f for f in new_avg["files"]}
+
+ # Get all file names
+ all_file_names = set(base_files.keys()) | set(new_files.keys())
+
+ # Build comparison data
+ comparison = []
+ for fname in sorted(all_file_names):
+ base_f = base_files.get(fname)
+ new_f = new_files.get(fname)
+
+ row = {"file": fname, "sections": {}, "total": {}}
+
+ for section in all_sections:
+ base_val = base_f["sections"].get(section, 0) if base_f else 0
+ new_val = new_f["sections"].get(section, 0) if new_f else 0
+ row["sections"][section] = {"base": base_val, "new": new_val, "diff": new_val - base_val}
+
+ base_total = base_f["total"] if base_f else 0
+ new_total = new_f["total"] if new_f else 0
+ row["total"] = {"base": base_total, "new": new_total, "diff": new_total - base_total}
+
+ comparison.append(row)
+
+ return {
+ "base_file": base_file,
+ "new_file": new_file,
+ "sections": all_sections,
+ "files": comparison
+ }
+
+
+def format_diff(base, new, diff):
+ """Format a diff value with percentage."""
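+    # e.g. format_diff(100, 110, 10) -> "100 ➡ 110 (+10, +10.0%)"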
+ if base == 0 and new == 0:
+ return "0"
+ if base == 0:
+ return f"{new} (new)"
+ if new == 0:
+ return f"{base} ➡ 0"
+ if diff == 0:
+ return f"{base} ➡ {new}"
+ pct = (diff / base) * 100
+ sign = "+" if diff > 0 else ""
+ return f"{base} ➡ {new} ({sign}{diff}, {sign}{pct:.1f}%)"
+
+
+def get_sort_key(sort_order):
+ """Get sort key function based on sort order.
+
+ Args:
+ sort_order: One of 'size-', 'size+', 'name-', 'name+'
+
+ Returns:
+ Tuple of (key_func, reverse)
+ """
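+    # e.g. get_sort_key('size-') -> (key, True), so sorted(files, key=key, reverse=True)
+    # orders largest-total first; the keys handle both plain int totals and compare rows.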
+ if sort_order == 'size-':
+ return lambda x: x.get('total', 0) if isinstance(x.get('total'), int) else x['total']['new'], True
+ elif sort_order == 'size+':
+ return lambda x: x.get('total', 0) if isinstance(x.get('total'), int) else x['total']['new'], False
+ elif sort_order == 'name-':
+ return lambda x: x.get('file', ''), True
+ else: # name+
+ return lambda x: x.get('file', ''), False
+
+
+def write_compare_markdown(comparison, path, sort_order='size'):
+ """Write comparison data to markdown file."""
+ sections = comparison["sections"]
+
+ md_lines = [
+        "# TinyUSB Code Size Difference Report",
+ "",
+ f"**Base:** `{comparison['base_file']}`",
+ f"**New:** `{comparison['new_file']}`",
+ "",
+ ]
+
+ # Build header
+ header = "| File |"
+ separator = "|:-----|"
+ for s in sections:
+ header += f" {s} |"
+ separator += "-----:|"
+ header += " Total |"
+ separator += "------:|"
+
+ md_lines.append(header)
+ md_lines.append(separator)
+
+ # Sort files based on sort_order
+ if sort_order == 'size-':
+ key_func = lambda x: abs(x["total"]["diff"])
+ reverse = True
+ elif sort_order in ('size', 'size+'):
+ key_func = lambda x: abs(x["total"]["diff"])
+ reverse = False
+ elif sort_order == 'name-':
+ key_func = lambda x: x['file']
+ reverse = True
+ else: # name or name+
+ key_func = lambda x: x['file']
+ reverse = False
+ sorted_files = sorted(comparison["files"], key=key_func, reverse=reverse)
+
+ sum_base = {s: 0 for s in sections}
+ sum_base["total"] = 0
+ sum_new = {s: 0 for s in sections}
+ sum_new["total"] = 0
+
+ for f in sorted_files:
+        # Skip unchanged files (so the SUM row below covers changed files only)
+ if f["total"]["diff"] == 0 and all(f["sections"][s]["diff"] == 0 for s in sections):
+ continue
+
+ row = f"| {f['file']} |"
+ for s in sections:
+ sd = f["sections"][s]
+ sum_base[s] += sd["base"]
+ sum_new[s] += sd["new"]
+ row += f" {format_diff(sd['base'], sd['new'], sd['diff'])} |"
+
+ td = f["total"]
+ sum_base["total"] += td["base"]
+ sum_new["total"] += td["new"]
+ row += f" {format_diff(td['base'], td['new'], td['diff'])} |"
+
+ md_lines.append(row)
+
+ # Add sum row
+ sum_row = "| **SUM** |"
+ for s in sections:
+ diff = sum_new[s] - sum_base[s]
+ sum_row += f" {format_diff(sum_base[s], sum_new[s], diff)} |"
+ total_diff = sum_new["total"] - sum_base["total"]
+ sum_row += f" {format_diff(sum_base['total'], sum_new['total'], total_diff)} |"
+ md_lines.append(sum_row)
+
+ with open(path, "w", encoding="utf-8") as f:
+ f.write("\n".join(md_lines))
+
+
+def cmd_combine(args):
+ """Handle combine subcommand."""
+ map_files = expand_files(args.files)
+ all_json_data = combine_maps(map_files, args.filters)
+ json_average = compute_avg(all_json_data)
+
+ if json_average is None:
+ print("No valid map files found", file=sys.stderr)
+ sys.exit(1)
+
+ if not args.quiet:
+ linkermap.print_summary(json_average, False, args.sort)
+ if args.json_out:
+ linkermap.write_json(json_average, args.out + '.json')
+ if args.markdown_out:
+ linkermap.write_markdown(json_average, args.out + '.md', sort_opt=args.sort,
+ title="TinyUSB Average Code Size Metrics")
+
+
+def cmd_compare(args):
+ """Handle compare subcommand."""
+ comparison = compare_maps(args.base, args.new, args.filters)
+
+ if comparison is None:
+ print("Failed to compare files", file=sys.stderr)
+ sys.exit(1)
+
+ write_compare_markdown(comparison, args.out + '.md', args.sort)
+ print(f"Comparison written to {args.out}.md")
+
+
+def main(argv=None):
+ parser = argparse.ArgumentParser(description='Code size metrics tool')
+ subparsers = parser.add_subparsers(dest='command', required=True, help='Available commands')
+
+ # Combine subcommand
+ combine_parser = subparsers.add_parser('combine', help='Combine and average multiple map files')
+ combine_parser.add_argument('files', nargs='+', help='Path to map file(s) or glob pattern(s)')
+ combine_parser.add_argument('-f', '--filter', dest='filters', action='append', default=[],
+ help='Only include object files whose path contains this substring (can be repeated)')
+ combine_parser.add_argument('-o', '--out', dest='out', default='metrics',
+ help='Output path basename for JSON and Markdown files (default: metrics)')
+ combine_parser.add_argument('-j', '--json', dest='json_out', action='store_true',
+ help='Write JSON output file')
+ combine_parser.add_argument('-m', '--markdown', dest='markdown_out', action='store_true',
+ help='Write Markdown output file')
+ combine_parser.add_argument('-q', '--quiet', dest='quiet', action='store_true',
+ help='Suppress summary output')
+ combine_parser.add_argument('-S', '--sort', dest='sort', default='name+',
+ choices=['size', 'size-', 'size+', 'name', 'name-', 'name+'],
+ help='Sort order: size/size- (descending), size+ (ascending), name/name+ (ascending), name- (descending). Default: name+')
+
+ # Compare subcommand
+ compare_parser = subparsers.add_parser('compare', help='Compare two map files')
+ compare_parser.add_argument('base', help='Base map/json file')
+ compare_parser.add_argument('new', help='New map/json file')
+ compare_parser.add_argument('-f', '--filter', dest='filters', action='append', default=[],
+ help='Only include object files whose path contains this substring (can be repeated)')
+ compare_parser.add_argument('-o', '--out', dest='out', default='metrics_compare',
+ help='Output path basename for Markdown file (default: metrics_compare)')
+ compare_parser.add_argument('-S', '--sort', dest='sort', default='name+',
+ choices=['size', 'size-', 'size+', 'name', 'name-', 'name+'],
+ help='Sort order: size/size- (descending), size+ (ascending), name/name+ (ascending), name- (descending). Default: name+')
+
+ args = parser.parse_args(argv)
+
+ if args.command == 'combine':
+ cmd_combine(args)
+ elif args.command == 'compare':
+ cmd_compare(args)
+
+
+if __name__ == '__main__':
+ main()