Mirror of https://github.com/hathach/tinyusb.git (synced 2026-02-04 14:15:40 +00:00)
upload metrics.json and metrics compare to release page (#3409)
* upload metrics.json and metrics compare to release page
* Adjust workflow comment handling for forks
.github/workflows/build.yml (vendored, 43 changed lines)
@@ -27,6 +27,8 @@ on:
       - '.github/workflows/build.yml'
       - '.github/workflows/build_util.yml'
       - '.github/workflows/ci_set_matrix.py'
+  release:
+    types: [ published ]
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
@@ -42,7 +44,7 @@ jobs:
       hil_json: ${{ steps.set-matrix-json.outputs.hil_matrix }}
     steps:
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
 
       - name: Generate matrix json
         id: set-matrix-json
@@ -86,9 +88,12 @@ jobs:
     runs-on: ubuntu-latest
     permissions:
       pull-requests: write
+      contents: write
     steps:
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
+        with:
+          fetch-tags: ${{ github.event_name == 'release' }}
 
       - name: Download Artifacts
         uses: actions/download-artifact@v5
@@ -103,14 +108,14 @@ jobs:
           python tools/metrics.py combine -j -m -f tinyusb/src cmake-build/*/metrics.json
 
       - name: Upload Metrics Artifact
-        if: github.event_name == 'push'
+        if: github.event_name == 'push' || github.event_name == 'release'
         uses: actions/upload-artifact@v5
         with:
           name: metrics-tinyusb
           path: metrics.json
 
       - name: Download Base Branch Metrics
-        if: github.event_name != 'push'
+        if: github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch'
         uses: dawidd6/action-download-artifact@v11
         with:
           workflow: build.yml
@@ -119,6 +124,18 @@ jobs:
           path: base-metrics
         continue-on-error: true
 
+      - name: Download Previous Release Asset
+        if: github.event_name == 'release'
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          PREV_TAG=$(git tag --sort=-creatordate | head -n 2 | tail -n 1)
+          echo "Previous Release: $PREV_TAG"
+          echo "PREV_TAG=$PREV_TAG" >> $GITHUB_ENV
+
+          mkdir -p base-metrics
+          gh release download $PREV_TAG -p metrics.json -D base-metrics || echo "No metrics.json found in $PREV_TAG release"
+
       - name: Compare with Base Branch
         if: github.event_name != 'push'
         run: |
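The new step resolves the previous release by taking the second-newest tag by creation date. Below is a minimal standalone sketch of that lookup, assuming git is on PATH and the repository was cloned with its tags; the function name and fallback behavior are illustrative only, not part of the commit:

```python
import subprocess

def previous_release_tag() -> str:
    # Mirrors `git tag --sort=-creatordate | head -n 2 | tail -n 1`:
    # list tags newest-first and take the second entry.
    tags = subprocess.run(
        ["git", "tag", "--sort=-creatordate"],
        capture_output=True, text=True, check=True,
    ).stdout.splitlines()
    # With only one tag, head/tail would return that tag; do the same here.
    return tags[1] if len(tags) > 1 else tags[0]

if __name__ == "__main__":
    print("Previous release:", previous_release_tag())
```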
@@ -130,6 +147,16 @@ jobs:
             cp metrics.md metrics_compare.md
           fi
 
+      - name: Upload Release Assets
+        if: github.event_name == 'release'
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          CURR_TAG=${{ github.event.release.tag_name }}
+          COMPARE_FILE="metrics_compare_${CURR_TAG}-${PREV_TAG}.md"
+          mv metrics_compare.md $COMPARE_FILE
+          gh release upload $CURR_TAG metrics.json $COMPARE_FILE
+
       - name: Save PR number
         if: github.event_name == 'pull_request'
         run: echo ${{ github.event.number }} > pr_number.txt
@@ -144,7 +171,7 @@ jobs:
             pr_number.txt
 
       - name: Post Code Metrics as PR Comment
-        if: github.event_name != 'push'
+        if: (github.event_name == 'workflow_dispatch') || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == false)
         uses: marocchino/sticky-pull-request-comment@v2
         with:
           header: code-metrics
@@ -175,7 +202,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
 
       - name: Setup Zephyr project
         uses: zephyrproject-rtos/action-zephyr-setup@v1
@@ -237,7 +264,7 @@ jobs:
           mkdir -p "${{ github.workspace }}"
 
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
 
       - name: Download Artifacts
         uses: actions/download-artifact@v5
@@ -275,7 +302,7 @@ jobs:
           iccarm --version
 
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
 
       - name: Get build boards
         run: |
.github/workflows/build_util.yml (vendored, 2 changed lines)
@@ -38,7 +38,7 @@ jobs:
         arg: ${{ fromJSON(inputs.build-args) }}
     steps:
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
 
       - name: Setup Toolchain
         id: setup-toolchain
.github/workflows/metrics_comment.yml (vendored, 1 changed line)
@@ -13,6 +13,7 @@ jobs:
       github.event.workflow_run.event == 'pull_request' &&
       github.event.workflow_run.conclusion == 'success'
     permissions:
+      actions: read
       pull-requests: write
     steps:
       - name: Download Artifacts
.github/workflows/pre-commit.yml (vendored, 2 changed lines)
@@ -20,7 +20,7 @@ jobs:
           ruby-version: '3.0'
 
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
 
       - name: Get Dependencies
         run: |
.github/workflows/static_analysis.yml (vendored, 8 changed lines)
@@ -37,7 +37,7 @@ jobs:
           - 'metro_m4_express'
     steps:
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
 
       - name: Get Dependencies
         uses: ./.github/actions/get_deps
@@ -100,7 +100,7 @@ jobs:
           - 'raspberry_pi_pico'
     steps:
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
 
       - name: Get Dependencies
         uses: ./.github/actions/get_deps
@@ -154,7 +154,7 @@ jobs:
           - 'stm32h743eval'
     steps:
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
 
@@ -197,7 +197,7 @@ jobs:
           - 'b_g474e_dpow1'
     steps:
       - name: Checkout TinyUSB
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
 
       - name: Get Dependencies
         uses: ./.github/actions/get_deps
.github/workflows/trigger.yml (vendored, 2 changed lines)
@@ -23,7 +23,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - name: Checkout code
-       uses: actions/checkout@v4
+       uses: actions/checkout@v6
 
      - name: Push to tinyusb_src
        run: |
tools/metrics.py

@@ -74,7 +74,6 @@ def parse_bloaty_csv(csv_text, filters=None):
             "file": os.path.basename(unit_path) or unit_path,
             "path": unit_path,
             "size": total_size,
-            "total": total_size,
             "symbols": symbols,
             "sections": sections,
         }
@@ -146,7 +145,7 @@ def compute_avg(all_json_data):
         return None
 
     # Merge files with the same 'file' value and compute averages
-    file_accumulator = {}  # key: file name, value: {"sizes": [sizes], "totals": [totals], "symbols": {name: [sizes]}, "sections": {name: [sizes]}}
+    file_accumulator = {}  # key: file name, value: {"sizes": [sizes], "symbols": {name: [sizes]}, "sections": {name: [sizes]}}
 
     for json_data in all_json_data["data"]:
         for f in json_data.get("files", []):
@@ -154,14 +153,12 @@ def compute_avg(all_json_data):
             if fname not in file_accumulator:
                 file_accumulator[fname] = {
                     "sizes": [],
-                    "totals": [],
                     "path": f.get("path"),
                     "symbols": defaultdict(list),
                     "sections": defaultdict(list),
                 }
-            size_val = f.get("size", f.get("total", 0))
+            size_val = f.get("size", 0)
             file_accumulator[fname]["sizes"].append(size_val)
-            file_accumulator[fname]["totals"].append(f.get("total", size_val))
             for sym in f.get("symbols", []):
                 name = sym.get("name")
                 if name is None:
@@ -196,9 +193,7 @@ def compute_avg(all_json_data):
             }
         )
 
-    totals_list = [d.get("TOTAL") for d in all_json_data["data"] if isinstance(d.get("TOTAL"), (int, float))]
-    total_size = round(sum(totals_list) / len(totals_list)) if totals_list else (
-        sum(f["size"] for f in files_average) or 1)
+    total_size = sum(f["size"] for f in files_average) or 1
 
     for f in files_average:
         f["percent"] = (f["size"] / total_size) * 100 if total_size else 0
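With the per-build TOTAL field dropped, compute_avg now derives the overall size by summing the averaged per-file sizes. A minimal sketch of that behavior; the file names and sizes below are made up for illustration:

```python
files_average = [
    {"file": "dcd_dwc2.c", "size": 5120},  # hypothetical averaged size in bytes
    {"file": "usbd.c", "size": 3072},
]

total_size = sum(f["size"] for f in files_average) or 1  # "or 1" guards the division below
for f in files_average:
    f["percent"] = (f["size"] / total_size) * 100

print(total_size)                              # 8192
print(round(files_average[0]["percent"], 1))   # 62.5
```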
@@ -207,7 +202,6 @@ def compute_avg(all_json_data):
 
     json_average = {
         "file_list": all_json_data["file_list"],
-        "TOTAL": total_size,
         "files": files_average,
     }
 
@@ -262,10 +256,12 @@ def compare_files(base_file, new_file, filters=None):
             },
         })
 
+    base_total = sum(f["size"] for f in base_avg["files"])
+    new_total = sum(f["size"] for f in new_avg["files"])
     total = {
-        "base": base_avg.get("TOTAL", 0),
-        "new": new_avg.get("TOTAL", 0),
-        "diff": new_avg.get("TOTAL", 0) - base_avg.get("TOTAL", 0),
+        "base": base_total,
+        "new": new_total,
+        "diff": new_total - base_total,
     }
 
     return {
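compare_files likewise stops reading a stored TOTAL and sums the averaged file lists on both sides before diffing. A small sketch with invented numbers:

```python
base_avg = {"files": [{"file": "usbd.c", "size": 3000}, {"file": "dcd_dwc2.c", "size": 5000}]}
new_avg = {"files": [{"file": "usbd.c", "size": 3100}, {"file": "dcd_dwc2.c", "size": 4900}]}

base_total = sum(f["size"] for f in base_avg["files"])  # 8000
new_total = sum(f["size"] for f in new_avg["files"])    # 8000
total = {"base": base_total, "new": new_total, "diff": new_total - base_total}
print(total)  # {'base': 8000, 'new': 8000, 'diff': 0}
```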
@@ -287,10 +283,6 @@ def get_sort_key(sort_order):
     """
 
     def _size_val(entry):
-        if isinstance(entry.get('total'), int):
-            return entry.get('total', 0)
-        if isinstance(entry.get('total'), dict):
-            return entry['total'].get('new', 0)
         return entry.get('size', 0)
 
     if sort_order == 'size-':
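With the 'total' fallbacks removed, the sort key reduces to the 'size' field. A quick illustration, using hypothetical entries:

```python
entries = [{"file": "a.c", "size": 10}, {"file": "b.c", "size": 30}, {"file": "c.c", "size": 20}]
entries.sort(key=lambda e: e.get("size", 0), reverse=True)  # 'size-': largest first
print([e["file"] for e in entries])  # ['b.c', 'a.c', 'c.c']
```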