diff --git a/.circleci/config.yml b/.circleci/config.yml
index 42b790c83..c084fc226 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -9,7 +9,7 @@ jobs:
     executor: continuation/default
     docker:
       - image: cimg/base:current
-    resource_class: small
+    resource_class: large
     steps:
       - checkout
       - run:
@@ -52,8 +52,8 @@ jobs:
              else
                echo "      - build:" >> .circleci/config2.yml
              fi
-             echo "          matrix:" >> .circleci/config2.yml
+             echo "          alias: build-${build_system}-${toolchain}" >> .circleci/config2.yml
              echo "            parameters:" >> .circleci/config2.yml
              echo "              build-system: ['$build_system']" >> .circleci/config2.yml
              echo "              toolchain: ['$toolchain']" >> .circleci/config2.yml
@@ -62,6 +62,9 @@ jobs:
              echo "              build-args: ['$build_args']" >> .circleci/config2.yml
            }

+           # Collect all build aliases for code-metrics requires (cmake only, excluding esp-idf and arm-iar)
+           BUILD_ALIASES=()
+
            for build_system in "${BUILDSYSTEM_LIST[@]}"; do
              for toolchain in "${TOOLCHAIN_LIST[@]}"; do
                # make does not support these toolchains
@@ -72,9 +75,21 @@ jobs:
                FAMILY=$(echo $MATRIX_JSON | jq -r ".\"$toolchain\"")
                echo "FAMILY_${toolchain}=$FAMILY"
                gen_build_entry "$build_system" "$toolchain" "$FAMILY"
+
+               # Only add cmake builds (excluding esp-idf and arm-iar) to the code-metrics requirements
+               if [ "$build_system" == "cmake" ] && [ "$toolchain" != "esp-idf" ] && [ "$toolchain" != "arm-iar" ]; then
+                 BUILD_ALIASES+=("build-${build_system}-${toolchain}")
+               fi
              done
            done

+           # Add code-metrics job that requires all build jobs
+           echo "      - code-metrics:" >> .circleci/config2.yml
+           echo "          requires:" >> .circleci/config2.yml
+           for alias in "${BUILD_ALIASES[@]}"; do
+             echo "            - $alias" >> .circleci/config2.yml
+           done
+
       - continuation/continue:
           configuration_path: .circleci/config2.yml

diff --git a/.circleci/config2.yml b/.circleci/config2.yml
index 77bc4f790..a39682067 100644
--- a/.circleci/config2.yml
+++ b/.circleci/config2.yml
@@ -127,6 +127,35 @@ commands:
             python tools/build.py -s << parameters.build-system >> $TOOLCHAIN_OPTION -j 4 << parameters.build-args >> << parameters.family >>
           fi

+      # Only collect and persist metrics for cmake builds (excluding esp-idf and arm-iar)
+      - when:
+          condition:
+            and:
+              - equal: [ cmake, << parameters.build-system >> ]
+              - not:
+                  equal: [ esp-idf, << parameters.toolchain >> ]
+              - not:
+                  equal: [ arm-iar, << parameters.toolchain >> ]
+          steps:
+            - run:
+                name: Collect Metrics
+                command: |
+                  # Create unique directory per toolchain to avoid workspace conflicts
+                  METRICS_DIR="/tmp/metrics/<< parameters.toolchain >>"
+                  mkdir -p "${METRICS_DIR}"
+                  # Copy all metrics.json files
+                  for f in cmake-build/cmake-build-*/metrics.json; do
+                    if [ -f "$f" ]; then
+                      BOARD_DIR=$(dirname "$f" | xargs basename)
+                      cp "$f" "${METRICS_DIR}/${BOARD_DIR}.json"
+                    fi
+                  done
+
+            - persist_to_workspace:
+                root: /tmp
+                paths:
+                  - metrics/<< parameters.toolchain >>
+
 jobs:
   # Build using docker
   build:
@@ -146,6 +175,7 @@ jobs:
     docker:
       - image: cimg/base:current
+    working_directory: ~/project/tinyusb
     resource_class: << parameters.resource_class >>

     steps:
@@ -173,6 +203,7 @@ jobs:
     machine:
       image: ubuntu-2404:current
+    working_directory: ~/project/tinyusb
    resource_class: << parameters.resource_class >>

     steps:
@@ -182,20 +213,79 @@ jobs:
           family: << parameters.family >>
           build-args: << parameters.build-args >>

+  # Aggregate code metrics from all builds
+  code-metrics:
+    docker:
+      - image: cimg/python:3.12
+    resource_class: large
+    steps:
+      - checkout
+      - attach_workspace:
+          at: /tmp
+
+      - run:
+          name: Aggregate Code Metrics
+          command: |
+            python tools/get_deps.py
+            pip install tools/linkermap/
+            # Combine all metrics files from all toolchain subdirectories
+            ls -R /tmp/metrics
+            if ls /tmp/metrics/*/*.json 1> /dev/null 2>&1; then
+              python tools/metrics.py combine -j -m -f tinyusb/src /tmp/metrics/*/*.json
+            else
+              echo "No metrics files found"
+              exit 1
+            fi
+
+      - store_artifacts:
+          path: metrics.json
+          destination: metrics.json
+
+      # Compare with base master metrics on PR branches
+      - when:
+          condition:
+            not:
+              equal: [ master, << pipeline.git.branch >> ]
+          steps:
+            - run:
+                name: Download Base Branch Metrics
+                command: |
+                  # Download metrics.json artifact from the latest successful build on master branch
+                  mkdir -p base-metrics
+                  # Use CircleCI API to get the latest artifact
+                  curl -s -L "https://dl.circleci.com/api/v2/project/gh/${CIRCLE_PROJECT_USERNAME}/${CIRCLE_PROJECT_REPONAME}/latest/artifacts?branch=master&filter=successful" \
+                    -H "Circle-Token: ${CIRCLE_TOKEN:-}" | \
+                    jq -r '.items[] | select(.path == "metrics.json") | .url' | \
+                    head -1 | xargs -I {} curl -s -L -o base-metrics/metrics.json {} || true
+
+            - run:
+                name: Compare with Base Branch
+                command: |
+                  if [ -f base-metrics/metrics.json ]; then
+                    python tools/metrics.py compare -f tinyusb/src base-metrics/metrics.json metrics.json
+                    cat metrics_compare.md
+                  else
+                    echo "No base metrics found, skipping comparison"
+                    cp metrics.md metrics_compare.md
+                  fi
+
+            - store_artifacts:
+                path: metrics_compare.md
+                destination: metrics_compare.md
+
 workflows:
   build:
     jobs:
+# The jobs below are populated dynamically by config.yml set-matrix job
+# Example entries that will be generated:
#      - build:
#          matrix:
+#            alias: build-cmake-arm-gcc
#            parameters:
#              toolchain: [ 'arm-gcc' ]
#              build-system: [ 'cmake' ]
#              family: [ 'nrf' ]
#              resource_class: ['large']
-#      - build-vm:
-#          matrix:
-#            parameters:
-#              toolchain: ['esp-idf']
-#              build-system: ['cmake']
-#              family: ['-bespressif_kaluga_1']
-#              resource_class: ['large']
+#      - code-metrics:
+#          requires:
+#            - build-cmake-arm-gcc

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index bc2fdac77..5017cb3cd 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -111,7 +111,7 @@ jobs:
           path: metrics.json

       - name: Download Base Branch Metrics
-        if: github.event_name == 'pull_request'
+        if: github.event_name != 'push'
         uses: dawidd6/action-download-artifact@v11
         with:
           workflow: build.yml
@@ -121,7 +121,7 @@ jobs:
         continue-on-error: true

       - name: Compare with Base Branch
-        if: github.event_name == 'pull_request'
+        if: github.event_name != 'push'
         run: |
           if [ -f base-metrics/metrics.json ]; then
             python tools/metrics.py compare -f tinyusb/src base-metrics/metrics.json metrics.json
@@ -132,7 +132,7 @@ jobs:
           fi

       - name: Post Code Metrics as PR Comment
-        if: github.event_name == 'pull_request'
+        if: github.event_name != 'push'
         uses: marocchino/sticky-pull-request-comment@v2
         with:
           header: code-metrics
@@ -203,9 +203,7 @@ jobs:
   # self-hosted on local VM, for attached hardware checkout HIL_JSON
   # ---------------------------------------
   hil-tinyusb:
-    if: |
-      github.repository_owner == 'hathach' &&
-      (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch')
+    if: github.repository_owner == 'hathach' && github.event_name != 'push'
     needs: hil-build
     runs-on: [ self-hosted, X64, hathach, hardware-in-the-loop ]
     steps:
@@ -249,7 +247,7 @@ jobs:
     if: |
       github.repository_owner == 'hathach' &&
       github.event.pull_request.head.repo.fork == false &&
-      (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch')
+      github.event_name != 'push'
     runs-on: [ self-hosted, Linux, X64, hifiphile ]
     env:
       IAR_LMS_BEARER_TOKEN: ${{ secrets.IAR_LMS_BEARER_TOKEN }}

diff --git a/tools/metrics.py b/tools/metrics.py
index bdc64fccc..2794c7a2a 100644
--- a/tools/metrics.py
+++ b/tools/metrics.py
@@ -275,11 +275,13 @@ def write_compare_markdown(comparison, path, sort_order='size'):

     significant = []
     minor = []
+    unchanged = []
     for f in sorted_files:
-        # Skip files with no changes
-        if f["total"]["diff"] == 0 and all(f["sections"][s]["diff"] == 0 for s in sections):
-            continue
-        (significant if is_significant(f) else minor).append(f)
+        no_change = f["total"]["diff"] == 0 and all(f["sections"][s]["diff"] == 0 for s in sections)
+        if no_change:
+            unchanged.append(f)
+        else:
+            (significant if is_significant(f) else minor).append(f)

     def render_table(title, rows):
         md_lines.append(f"## {title}")
@@ -323,6 +325,7 @@ def write_compare_markdown(comparison, path, sort_order='size'):

     render_table("Changes >1% in any section", significant)
     render_table("Changes <1% in all sections", minor)
+    render_table("No changes", unchanged)

     with open(path, "w", encoding="utf-8") as f:
         f.write("\n".join(md_lines))
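
For reference, a minimal, self-contained sketch of the significant / minor / unchanged bucketing that the tools/metrics.py hunk above introduces. Only the "total" and per-section "diff" keys come from the diff itself; the section names, the "percent" field, the 1% rule inside is_significant(), and the toy file names are assumptions used purely for illustration, not the actual tools/metrics.py schema.

# Sketch (assumptions noted in comments) of the bucketing added to write_compare_markdown().
SECTIONS = ("text", "data", "bss")  # assumed section names

def is_significant(entry, threshold=1.0):
    # Assumed rule, inferred from the table title "Changes >1% in any section":
    # significant if any section changed by more than `threshold` percent.
    return any(abs(entry["sections"][s].get("percent", 0.0)) > threshold for s in SECTIONS)

def bucket_files(files):
    significant, minor, unchanged = [], [], []
    for f in files:
        # Mirrors the new logic: a file is "unchanged" only if the total diff
        # and every per-section diff are zero; otherwise it is split by is_significant().
        no_change = (f["total"]["diff"] == 0 and
                     all(f["sections"][s]["diff"] == 0 for s in SECTIONS))
        if no_change:
            unchanged.append(f)
        else:
            (significant if is_significant(f) else minor).append(f)
    return significant, minor, unchanged

# Toy input with hypothetical file names and values.
files = [
    {"name": "dcd_example.c",
     "total": {"diff": 128},
     "sections": {"text": {"diff": 128, "percent": 2.5},
                  "data": {"diff": 0, "percent": 0.0},
                  "bss": {"diff": 0, "percent": 0.0}}},
    {"name": "usbd.c",
     "total": {"diff": 0},
     "sections": {s: {"diff": 0, "percent": 0.0} for s in SECTIONS}},
]

significant, minor, unchanged = bucket_files(files)
print([f["name"] for f in significant])  # ['dcd_example.c']
print([f["name"] for f in minor])        # []
print([f["name"] for f in unchanged])    # ['usbd.c'] -> rendered under the new "No changes" table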