A couple of script changes after CI integration

- Renamed struct_.py -> structs.py again.

- Removed lfs.csv, instead preferring script-specific csv files.

- Added *-diff make rules for quick comparison against a previous
  result, results are now implicitly written on each run.

  For example, `make code` creates lfs.code.csv and prints the summary, which
  can be followed by `make code-diff` to compare changes against the saved
  lfs.code.csv without overwriting.

- Added nargs='?' support for -s and -S; sorting now uses a per-result
  _sort attribute to decide the sort order when fields are unspecified.
This commit is contained in:
Christopher Haster 2022-11-29 14:10:50 -06:00
parent 9990342440
commit c4b3e9d826
10 changed files with 380 additions and 208 deletions

View File

@ -117,21 +117,18 @@ jobs:
-DLFS_NO_DEBUG \
-DLFS_NO_WARN \
-DLFS_NO_ERROR" \
make lfs.code.csv lfs.data.csv lfs.stack.csv lfs.struct.csv
./scripts/summary.py lfs.struct.csv \
-bstruct \
-fsize=struct_size
make lfs.code.csv lfs.data.csv lfs.stack.csv lfs.structs.csv
./scripts/structs.py -u lfs.structs.csv
./scripts/summary.py lfs.code.csv lfs.data.csv lfs.stack.csv \
-bfunction \
-fcode=code_size \
-fdata=data_size \
-fstack=stack_limit \
--max=stack_limit
-fstack=stack_limit --max=stack_limit
mkdir -p sizes
cp lfs.code.csv sizes/${{matrix.arch}}.code.csv
cp lfs.data.csv sizes/${{matrix.arch}}.data.csv
cp lfs.stack.csv sizes/${{matrix.arch}}.stack.csv
cp lfs.struct.csv sizes/${{matrix.arch}}.struct.csv
cp lfs.structs.csv sizes/${{matrix.arch}}.structs.csv
- name: sizes-readonly
run: |
make clean
@ -141,21 +138,18 @@ jobs:
-DLFS_NO_WARN \
-DLFS_NO_ERROR \
-DLFS_READONLY" \
make lfs.code.csv lfs.data.csv lfs.stack.csv lfs.struct.csv
./scripts/summary.py lfs.struct.csv \
-bstruct \
-fsize=struct_size
make lfs.code.csv lfs.data.csv lfs.stack.csv lfs.structs.csv
./scripts/structs.py -u lfs.structs.csv
./scripts/summary.py lfs.code.csv lfs.data.csv lfs.stack.csv \
-bfunction \
-fcode=code_size \
-fdata=data_size \
-fstack=stack_limit \
--max=stack_limit
-fstack=stack_limit --max=stack_limit
mkdir -p sizes
cp lfs.code.csv sizes/${{matrix.arch}}-readonly.code.csv
cp lfs.data.csv sizes/${{matrix.arch}}-readonly.data.csv
cp lfs.stack.csv sizes/${{matrix.arch}}-readonly.stack.csv
cp lfs.struct.csv sizes/${{matrix.arch}}-readonly.struct.csv
cp lfs.structs.csv sizes/${{matrix.arch}}-readonly.structs.csv
- name: sizes-threadsafe
run: |
make clean
@ -165,21 +159,18 @@ jobs:
-DLFS_NO_WARN \
-DLFS_NO_ERROR \
-DLFS_THREADSAFE" \
make lfs.code.csv lfs.data.csv lfs.stack.csv lfs.struct.csv
./scripts/summary.py lfs.struct.csv \
-bstruct \
-fsize=struct_size
make lfs.code.csv lfs.data.csv lfs.stack.csv lfs.structs.csv
./scripts/structs.py -u lfs.structs.csv
./scripts/summary.py lfs.code.csv lfs.data.csv lfs.stack.csv \
-bfunction \
-fcode=code_size \
-fdata=data_size \
-fstack=stack_limit \
--max=stack_limit
-fstack=stack_limit --max=stack_limit
mkdir -p sizes
cp lfs.code.csv sizes/${{matrix.arch}}-threadsafe.code.csv
cp lfs.data.csv sizes/${{matrix.arch}}-threadsafe.data.csv
cp lfs.stack.csv sizes/${{matrix.arch}}-threadsafe.stack.csv
cp lfs.struct.csv sizes/${{matrix.arch}}-threadsafe.struct.csv
cp lfs.structs.csv sizes/${{matrix.arch}}-threadsafe.structs.csv
- name: sizes-migrate
run: |
make clean
@ -189,21 +180,18 @@ jobs:
-DLFS_NO_WARN \
-DLFS_NO_ERROR \
-DLFS_MIGRATE" \
make lfs.code.csv lfs.data.csv lfs.stack.csv lfs.struct.csv
./scripts/summary.py lfs.struct.csv \
-bstruct \
-fsize=struct_size
make lfs.code.csv lfs.data.csv lfs.stack.csv lfs.structs.csv
./scripts/structs.py -u lfs.structs.csv
./scripts/summary.py lfs.code.csv lfs.data.csv lfs.stack.csv \
-bfunction \
-fcode=code_size \
-fdata=data_size \
-fstack=stack_limit \
--max=stack_limit
-fstack=stack_limit --max=stack_limit
mkdir -p sizes
cp lfs.code.csv sizes/${{matrix.arch}}-migrate.code.csv
cp lfs.data.csv sizes/${{matrix.arch}}-migrate.data.csv
cp lfs.stack.csv sizes/${{matrix.arch}}-migrate.stack.csv
cp lfs.struct.csv sizes/${{matrix.arch}}-migrate.struct.csv
cp lfs.structs.csv sizes/${{matrix.arch}}-migrate.structs.csv
- name: sizes-error-asserts
run: |
make clean
@ -212,21 +200,18 @@ jobs:
-DLFS_NO_WARN \
-DLFS_NO_ERROR \
-D'LFS_ASSERT(test)=do {if(!(test)) {return -1;}} while(0)'" \
make lfs.code.csv lfs.data.csv lfs.stack.csv lfs.struct.csv
./scripts/summary.py lfs.struct.csv \
-bstruct \
-fsize=struct_size
make lfs.code.csv lfs.data.csv lfs.stack.csv lfs.structs.csv
./scripts/structs.py -u lfs.structs.csv
./scripts/summary.py lfs.code.csv lfs.data.csv lfs.stack.csv \
-bfunction \
-fcode=code_size \
-fdata=data_size \
-fstack=stack_limit \
--max=stack_limit
-fstack=stack_limit --max=stack_limit
mkdir -p sizes
cp lfs.code.csv sizes/${{matrix.arch}}-error-asserts.code.csv
cp lfs.data.csv sizes/${{matrix.arch}}-error-asserts.data.csv
cp lfs.stack.csv sizes/${{matrix.arch}}-error-asserts.stack.csv
cp lfs.struct.csv sizes/${{matrix.arch}}-error-asserts.struct.csv
cp lfs.structs.csv sizes/${{matrix.arch}}-error-asserts.structs.csv
# create size statuses
- name: upload-sizes
@ -671,7 +656,7 @@ jobs:
table[$i,$j]=$c_camel
((j+=1))
for s in code stack struct
for s in code stack structs
do
f=sizes/thumb${c:+-$c}.$s.csv
[ -e $f ] && table[$i,$j]=$( \

272
Makefile
View File

@ -91,13 +91,13 @@ CFLAGS += -fno-omit-frame-pointer
endif
ifdef VERBOSE
CODEFLAGS += -v
DATAFLAGS += -v
STACKFLAGS += -v
STRUCTFLAGS += -v
COVFLAGS += -v
PERFFLAGS += -v
PERFBDFLAGS += -v
CODEFLAGS += -v
DATAFLAGS += -v
STACKFLAGS += -v
STRUCTSFLAGS += -v
COVFLAGS += -v
PERFFLAGS += -v
PERFBDFLAGS += -v
endif
# forward -j flag
PERFFLAGS += $(filter -j%,$(MAKEFLAGS))
@ -107,11 +107,11 @@ CODEFLAGS += --nm-path="$(NM)"
DATAFLAGS += --nm-path="$(NM)"
endif
ifneq ($(OBJDUMP),objdump)
CODEFLAGS += --objdump-path="$(OBJDUMP)"
DATAFLAGS += --objdump-path="$(OBJDUMP)"
STRUCTFLAGS += --objdump-path="$(OBJDUMP)"
PERFFLAGS += --objdump-path="$(OBJDUMP)"
PERFBDFLAGS += --objdump-path="$(OBJDUMP)"
CODEFLAGS += --objdump-path="$(OBJDUMP)"
DATAFLAGS += --objdump-path="$(OBJDUMP)"
STRUCTSFLAGS += --objdump-path="$(OBJDUMP)"
PERFFLAGS += --objdump-path="$(OBJDUMP)"
PERFBDFLAGS += --objdump-path="$(OBJDUMP)"
endif
ifneq ($(PERF),perf)
PERFFLAGS += --perf-path="$(PERF)"
@ -123,14 +123,14 @@ BENCHFLAGS += -b
TESTFLAGS += $(filter -j%,$(MAKEFLAGS))
BENCHFLAGS += $(filter -j%,$(MAKEFLAGS))
ifdef YES_PERF
TESTFLAGS += -p$(TEST_PERF)
BENCHFLAGS += -p$(BENCH_PERF)
TESTFLAGS += -p $(TEST_PERF)
BENCHFLAGS += -p $(BENCH_PERF)
endif
ifdef YES_PERFBD
TESTFLAGS += -t$(TEST_TRACE) --trace-backtrace --trace-freq=100
TESTFLAGS += -t $(TEST_TRACE) --trace-backtrace --trace-freq=100
endif
ifndef NO_PERFBD
BENCHFLAGS += -t$(BENCH_TRACE) --trace-backtrace --trace-freq=100
BENCHFLAGS += -t $(BENCH_TRACE) --trace-backtrace --trace-freq=100
endif
ifdef VERBOSE
TESTFLAGS += -v
@ -187,6 +187,161 @@ help:
printf " "" %-25s %s\n", rule, $$0 \
}' $(MAKEFILE_LIST))
## Find the per-function code size
.PHONY: code
code: CODEFLAGS+=-S
code: $(OBJ) $(BUILDDIR)/lfs.code.csv
./scripts/code.py $(OBJ) $(CODEFLAGS)
## Compare per-function code size
.PHONY: code-diff
code-diff: $(OBJ)
./scripts/code.py $^ $(CODEFLAGS) -d $(BUILDDIR)/lfs.code.csv
## Find the per-function data size
.PHONY: data
data: DATAFLAGS+=-S
data: $(OBJ) $(BUILDDIR)/lfs.data.csv
./scripts/data.py $(OBJ) $(DATAFLAGS)
## Compare per-function data size
.PHONY: data-diff
data-diff: $(OBJ)
./scripts/data.py $^ $(DATAFLAGS) -d $(BUILDDIR)/lfs.data.csv
## Find the per-function stack usage
.PHONY: stack
stack: STACKFLAGS+=-S
stack: $(CI) $(BUILDDIR)/lfs.stack.csv
./scripts/stack.py $(CI) $(STACKFLAGS)
## Compare per-function stack usage
.PHONY: stack-diff
stack-diff: $(CI)
./scripts/stack.py $^ $(STACKFLAGS) -d $(BUILDDIR)/lfs.stack.csv
## Find function sizes
.PHONY: funcs
funcs: SUMMARYFLAGS+=-S
funcs: \
$(BUILDDIR)/lfs.code.csv \
$(BUILDDIR)/lfs.data.csv \
$(BUILDDIR)/lfs.stack.csv
$(strip ./scripts/summary.py $^ \
-bfunction \
-fcode=code_size \
-fdata=data_size \
-fstack=stack_limit --max=stack \
$(SUMMARYFLAGS))
## Compare function sizes
.PHONY: funcs-diff
funcs-diff: SHELL=/bin/bash
funcs-diff: $(OBJ) $(CI)
$(strip ./scripts/summary.py \
<(./scripts/code.py $(OBJ) -q $(CODEFLAGS) -o-) \
<(./scripts/data.py $(OBJ) -q $(DATAFLAGS) -o-) \
<(./scripts/stack.py $(CI) -q $(STACKFLAGS) -o-) \
-bfunction \
-fcode=code_size \
-fdata=data_size \
-fstack=stack_limit --max=stack \
$(SUMMARYFLAGS) -d <(./scripts/summary.py \
$(BUILDDIR)/lfs.code.csv \
$(BUILDDIR)/lfs.data.csv \
$(BUILDDIR)/lfs.stack.csv \
-q $(SUMMARYFLAGS) -o-))
## Find struct sizes
.PHONY: structs
structs: STRUCTSFLAGS+=-S
structs: $(OBJ) $(BUILDDIR)/lfs.structs.csv
./scripts/structs.py $(OBJ) $(STRUCTSFLAGS)
## Compare struct sizes
.PHONY: structs-diff
structs-diff: $(OBJ)
./scripts/structs.py $^ $(STRUCTSFLAGS) -d $(BUILDDIR)/lfs.structs.csv
## Find the line/branch coverage after a test run
.PHONY: cov
cov: COVFLAGS+=-s
cov: $(GCDA) $(BUILDDIR)/lfs.cov.csv
$(strip ./scripts/cov.py $(GCDA) \
$(patsubst %,-F%,$(SRC)) \
$(COVFLAGS))
## Compare line/branch coverage
.PHONY: cov-diff
cov-diff: $(GCDA)
$(strip ./scripts/cov.py $^ \
$(patsubst %,-F%,$(SRC)) \
$(COVFLAGS) -d $(BUILDDIR)/lfs.cov.csv)
## Find the perf results after bench run with YES_PERF
.PHONY: perf
perf: PERFFLAGS+=-S
perf: $(BENCH_PERF) $(BUILDDIR)/lfs.perf.csv
$(strip ./scripts/perf.py $(BENCH_PERF) \
$(patsubst %,-F%,$(SRC)) \
$(PERFFLAGS))
## Compare perf results
.PHONY: perf-diff
perf-diff: $(BENCH_PERF)
$(strip ./scripts/perf.py $^ \
$(patsubst %,-F%,$(SRC)) \
$(PERFFLAGS) -d $(BUILDDIR)/lfs.perf.csv)
## Find the perfbd results after a bench run
.PHONY: perfbd
perfbd: PERFBDFLAGS+=-S
perfbd: $(BENCH_TRACE) $(BUILDDIR)/lfs.perfbd.csv
$(strip ./scripts/perfbd.py $(BENCH_RUNNER) $(BENCH_TRACE) \
$(patsubst %,-F%,$(SRC)) \
$(PERFBDFLAGS))
## Compare perfbd results
.PHONY: perfbd-diff
perfbd-diff: $(BENCH_TRACE)
$(strip ./scripts/perfbd.py $(BENCH_RUNNER) $^ \
$(patsubst %,-F%,$(SRC)) \
$(PERFBDFLAGS) -d $(BUILDDIR)/lfs.perfbd.csv)
## Find a summary of compile-time sizes
.PHONY: summary sizes
summary sizes: \
$(BUILDDIR)/lfs.code.csv \
$(BUILDDIR)/lfs.data.csv \
$(BUILDDIR)/lfs.stack.csv \
$(BUILDDIR)/lfs.structs.csv
$(strip ./scripts/summary.py $^ \
-fcode=code_size \
-fdata=data_size \
-fstack=stack_limit --max=stack \
-fstructs=struct_size \
-Y $(SUMMARYFLAGS))
## Compare compile-time sizes
.PHONY: summary-diff sizes-diff
summary-diff sizes-diff: SHELL=/bin/bash
summary-diff sizes-diff: $(OBJ) $(CI)
$(strip ./scripts/summary.py \
<(./scripts/code.py $(OBJ) -q $(CODEFLAGS) -o-) \
<(./scripts/data.py $(OBJ) -q $(DATAFLAGS) -o-) \
<(./scripts/stack.py $(CI) -q $(STACKFLAGS) -o-) \
<(./scripts/structs.py $(OBJ) -q $(STRUCTSFLAGS) -o-) \
-fcode=code_size \
-fdata=data_size \
-fstack=stack_limit --max=stack \
-fstructs=struct_size \
-Y $(SUMMARYFLAGS) -d <(./scripts/summary.py \
$(BUILDDIR)/lfs.code.csv \
$(BUILDDIR)/lfs.data.csv \
$(BUILDDIR)/lfs.stack.csv \
$(BUILDDIR)/lfs.structs.csv \
-q $(SUMMARYFLAGS) -o-))
## Build the test-runner
.PHONY: test-runner build-test
ifndef NO_COV
@ -255,61 +410,6 @@ bench: bench-runner
bench-list: bench-runner
./scripts/bench.py $(BENCH_RUNNER) $(BENCHFLAGS) -l
## Find the per-function code size
.PHONY: code
code: $(OBJ)
./scripts/code.py $^ -Ssize $(CODEFLAGS)
## Find the per-function data size
.PHONY: data
data: $(OBJ)
./scripts/data.py $^ -Ssize $(DATAFLAGS)
## Find the per-function stack usage
.PHONY: stack
stack: $(CI)
./scripts/stack.py $^ -Slimit -Sframe $(STACKFLAGS)
## Find the struct sizes
.PHONY: struct
struct: $(OBJ)
./scripts/struct_.py $^ -Ssize $(STRUCTFLAGS)
## Find the line/branch coverage after a test run
.PHONY: cov
cov: $(GCDA)
$(strip ./scripts/cov.py \
$^ $(patsubst %,-F%,$(SRC)) \
-slines -sbranches \
$(COVFLAGS))
## Find the perf results after bench run with YES_PERF
.PHONY: perf
perf: $(BENCH_PERF)
$(strip ./scripts/perf.py \
$^ $(patsubst %,-F%,$(SRC)) \
-Scycles \
$(PERFFLAGS))
## Find the perfbd results after a bench run
.PHONY: perfbd
perfbd: $(BENCH_TRACE)
$(strip ./scripts/perfbd.py \
$(BENCH_RUNNER) $^ $(patsubst %,-F%,$(SRC)) \
-Serased -Sproged -Sreaded \
$(PERFBDFLAGS))
## Find a summary of compile-time sizes
.PHONY: summary sizes
summary sizes: $(BUILDDIR)/lfs.csv
$(strip ./scripts/summary.py -Y $^ \
-fcode=code_size \
-fdata=data_size \
-fstack=stack_limit \
-fstruct=struct_size \
--max=stack \
$(SUMMARYFLAGS))
# rules
-include $(DEP)
@ -327,32 +427,29 @@ $(BUILDDIR)/lfs.code.csv: $(OBJ)
./scripts/code.py $^ -q $(CODEFLAGS) -o $@
$(BUILDDIR)/lfs.data.csv: $(OBJ)
./scripts/data.py $^ -q $(CODEFLAGS) -o $@
./scripts/data.py $^ -q $(DATAFLAGS) -o $@
$(BUILDDIR)/lfs.stack.csv: $(CI)
./scripts/stack.py $^ -q $(CODEFLAGS) -o $@
./scripts/stack.py $^ -q $(STACKFLAGS) -o $@
$(BUILDDIR)/lfs.struct.csv: $(OBJ)
./scripts/struct_.py $^ -q $(CODEFLAGS) -o $@
$(BUILDDIR)/lfs.structs.csv: $(OBJ)
./scripts/structs.py $^ -q $(STRUCTSFLAGS) -o $@
$(BUILDDIR)/lfs.cov.csv: $(GCDA)
./scripts/cov.py $^ $(patsubst %,-F%,$(SRC)) -q $(COVFLAGS) -o $@
$(strip ./scripts/cov.py $^ \
$(patsubst %,-F%,$(SRC)) \
-q $(COVFLAGS) -o $@)
$(BUILDDIR)/lfs.perf.csv: $(BENCH_PERF)
./scripts/perf.py $^ $(patsubst %,-F%,$(SRC)) -q $(PERFFLAGS) -o $@
$(strip ./scripts/perf.py $^ \
$(patsubst %,-F%,$(SRC)) \
-q $(PERFFLAGS) -o $@)
$(BUILDDIR)/lfs.perfbd.csv: $(BENCH_TRACE)
$(strip ./scripts/perfbd.py \
$(BENCH_RUNNER) $^ $(patsubst %,-F%,$(SRC)) \
$(strip ./scripts/perfbd.py $(BENCH_RUNNER) $^ \
$(patsubst %,-F%,$(SRC)) \
-q $(PERFBDFLAGS) -o $@)
$(BUILDDIR)/lfs.csv: \
$(BUILDDIR)/lfs.code.csv \
$(BUILDDIR)/lfs.data.csv \
$(BUILDDIR)/lfs.stack.csv \
$(BUILDDIR)/lfs.struct.csv
./scripts/summary.py $^ -q $(SUMMARYFLAGS) -o $@
$(BUILDDIR)/runners/test_runner: $(TEST_OBJ)
$(CC) $(CFLAGS) $^ $(LFLAGS) -o $@
@ -391,11 +488,10 @@ clean:
rm -f $(BUILDDIR)/lfs
rm -f $(BUILDDIR)/liblfs.a
$(strip rm -f \
$(BUILDDIR)/lfs.csv \
$(BUILDDIR)/lfs.code.csv \
$(BUILDDIR)/lfs.data.csv \
$(BUILDDIR)/lfs.stack.csv \
$(BUILDDIR)/lfs.struct.csv \
$(BUILDDIR)/lfs.structs.csv \
$(BUILDDIR)/lfs.cov.csv \
$(BUILDDIR)/lfs.perf.csv \
$(BUILDDIR)/lfs.perfbd.csv)

View File

@ -112,6 +112,7 @@ class CodeResult(co.namedtuple('CodeResult', [
'size'])):
_by = ['file', 'function']
_fields = ['size']
_sort = ['size']
_types = {'size': Int}
__slots__ = ()
@ -385,8 +386,12 @@ def table(Result, results, diff_results=None, *,
reverse=True)
if sort:
for k, reverse in reversed(sort):
names.sort(key=lambda n: (getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else (),
names.sort(
key=lambda n: tuple(
(getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else ()
for k in ([k] if k else [
k for k in Result._sort if k in fields])),
reverse=reverse ^ (not k or k in Result._fields))
@ -544,8 +549,10 @@ def main(obj_paths, *,
results.sort()
if sort:
for k, reverse in reversed(sort):
results.sort(key=lambda r: (getattr(r, k),)
if getattr(r, k) is not None else (),
results.sort(
key=lambda r: tuple(
(getattr(r, k),) if getattr(r, k) is not None else ()
for k in ([k] if k else CodeResult._sort)),
reverse=reverse ^ (not k or k in CodeResult._fields))
# write results to CSV
@ -553,14 +560,15 @@ def main(obj_paths, *,
with openio(args['output'], 'w') as f:
writer = csv.DictWriter(f,
(by if by is not None else CodeResult._by)
+ ['code_'+k for k in CodeResult._fields])
+ ['code_'+k for k in (
fields if fields is not None else CodeResult._fields)])
writer.writeheader()
for r in results:
writer.writerow(
{k: getattr(r, k)
for k in (by if by is not None else CodeResult._by)}
| {'code_'+k: getattr(r, k)
for k in CodeResult._fields})
{k: getattr(r, k) for k in (
by if by is not None else CodeResult._by)}
| {'code_'+k: getattr(r, k) for k in (
fields if fields is not None else CodeResult._fields)})
# find previous results?
if args.get('diff'):
@ -655,12 +663,14 @@ if __name__ == "__main__":
namespace.sort.append((value, True if option == '-S' else False))
parser.add_argument(
'-s', '--sort',
nargs='?',
action=AppendSort,
help="Sort by this fields.")
help="Sort by this field.")
parser.add_argument(
'-S', '--reverse-sort',
nargs='?',
action=AppendSort,
help="Sort by this fields, but backwards.")
help="Sort by this field, but backwards.")
parser.add_argument(
'-Y', '--summary',
action='store_true',

View File

@ -180,6 +180,7 @@ class CovResult(co.namedtuple('CovResult', [
'calls', 'hits', 'funcs', 'lines', 'branches'])):
_by = ['file', 'function', 'line']
_fields = ['calls', 'hits', 'funcs', 'lines', 'branches']
_sort = ['funcs', 'lines', 'branches', 'hits', 'calls']
_types = {
'calls': Int, 'hits': Int,
'funcs': Frac, 'lines': Frac, 'branches': Frac}
@ -372,8 +373,12 @@ def table(Result, results, diff_results=None, *,
reverse=True)
if sort:
for k, reverse in reversed(sort):
names.sort(key=lambda n: (getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else (),
names.sort(
key=lambda n: tuple(
(getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else ()
for k in ([k] if k else [
k for k in Result._sort if k in fields])),
reverse=reverse ^ (not k or k in Result._fields))
@ -617,8 +622,10 @@ def main(gcda_paths, *,
results.sort()
if sort:
for k, reverse in reversed(sort):
results.sort(key=lambda r: (getattr(r, k),)
if getattr(r, k) is not None else (),
results.sort(
key=lambda r: tuple(
(getattr(r, k),) if getattr(r, k) is not None else ()
for k in ([k] if k else CovResult._sort)),
reverse=reverse ^ (not k or k in CovResult._fields))
# write results to CSV
@ -626,14 +633,15 @@ def main(gcda_paths, *,
with openio(args['output'], 'w') as f:
writer = csv.DictWriter(f,
(by if by is not None else CovResult._by)
+ ['cov_'+k for k in CovResult._fields])
+ ['cov_'+k for k in (
fields if fields is not None else CovResult._fields)])
writer.writeheader()
for r in results:
writer.writerow(
{k: getattr(r, k)
for k in (by if by is not None else CovResult._by)}
| {'cov_'+k: getattr(r, k)
for k in CovResult._fields})
{k: getattr(r, k) for k in (
by if by is not None else CovResult._by)}
| {'cov_'+k: getattr(r, k) for k in (
fields if fields is not None else CovResult._fields)})
# find previous results?
if args.get('diff'):
@ -748,10 +756,12 @@ if __name__ == "__main__":
namespace.sort.append((value, True if option == '-S' else False))
parser.add_argument(
'-s', '--sort',
nargs='?',
action=AppendSort,
help="Sort by this field.")
parser.add_argument(
'-S', '--reverse-sort',
nargs='?',
action=AppendSort,
help="Sort by this field, but backwards.")
parser.add_argument(

View File

@ -112,6 +112,7 @@ class DataResult(co.namedtuple('DataResult', [
'size'])):
_by = ['file', 'function']
_fields = ['size']
_sort = ['size']
_types = {'size': Int}
__slots__ = ()
@ -385,8 +386,12 @@ def table(Result, results, diff_results=None, *,
reverse=True)
if sort:
for k, reverse in reversed(sort):
names.sort(key=lambda n: (getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else (),
names.sort(
key=lambda n: tuple(
(getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else ()
for k in ([k] if k else [
k for k in Result._sort if k in fields])),
reverse=reverse ^ (not k or k in Result._fields))
@ -541,8 +546,10 @@ def main(obj_paths, *,
results.sort()
if sort:
for k, reverse in reversed(sort):
results.sort(key=lambda r: (getattr(r, k),)
if getattr(r, k) is not None else (),
results.sort(
key=lambda r: tuple(
(getattr(r, k),) if getattr(r, k) is not None else ()
for k in ([k] if k else DataResult._sort)),
reverse=reverse ^ (not k or k in DataResult._fields))
# write results to CSV
@ -550,14 +557,15 @@ def main(obj_paths, *,
with openio(args['output'], 'w') as f:
writer = csv.DictWriter(f,
(by if by is not None else DataResult._by)
+ ['data_'+k for k in DataResult._fields])
+ ['data_'+k for k in (
fields if fields is not None else DataResult._fields)])
writer.writeheader()
for r in results:
writer.writerow(
{k: getattr(r, k)
for k in (by if by is not None else DataResult._by)}
| {'data_'+k: getattr(r, k)
for k in DataResult._fields})
{k: getattr(r, k) for k in (
by if by is not None else DataResult._by)}
| {'data_'+k: getattr(r, k) for k in (
fields if fields is not None else DataResult._fields)})
# find previous results?
if args.get('diff'):
@ -652,12 +660,14 @@ if __name__ == "__main__":
namespace.sort.append((value, True if option == '-S' else False))
parser.add_argument(
'-s', '--sort',
nargs='?',
action=AppendSort,
help="Sort by this fields.")
help="Sort by this field.")
parser.add_argument(
'-S', '--reverse-sort',
nargs='?',
action=AppendSort,
help="Sort by this fields, but backwards.")
help="Sort by this field, but backwards.")
parser.add_argument(
'-Y', '--summary',
action='store_true',

View File

@ -122,6 +122,7 @@ class PerfResult(co.namedtuple('PerfResult', [
'children'])):
_by = ['file', 'function', 'line']
_fields = ['cycles', 'bmisses', 'branches', 'cmisses', 'caches']
_sort = ['cycles', 'bmisses', 'cmisses', 'branches', 'caches']
_types = {
'cycles': Int,
'bmisses': Int, 'branches': Int,
@ -712,10 +713,15 @@ def table(Result, results, diff_results=None, *,
reverse=True)
if sort:
for k, reverse in reversed(sort):
names.sort(key=lambda n: (getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else (),
names.sort(
key=lambda n: tuple(
(getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else ()
for k in ([k] if k else [
k for k in Result._sort if k in fields])),
reverse=reverse ^ (not k or k in Result._fields))
# build up our lines
lines = []
@ -853,8 +859,13 @@ def table(Result, results, diff_results=None, *,
names_.sort()
if sort:
for k, reverse in reversed(sort):
names_.sort(key=lambda n: (getattr(table_[n], k),)
if getattr(table_.get(n), k, None) is not None else (),
names_.sort(
key=lambda n: tuple(
(getattr(table_[n], k),)
if getattr(table_.get(n), k, None) is not None
else ()
for k in ([k] if k else [
k for k in Result._sort if k in fields])),
reverse=reverse ^ (not k or k in Result._fields))
for i, name in enumerate(names_):
@ -1051,8 +1062,10 @@ def report(perf_paths, *,
results.sort()
if sort:
for k, reverse in reversed(sort):
results.sort(key=lambda r: (getattr(r, k),)
if getattr(r, k) is not None else (),
results.sort(
key=lambda r: tuple(
(getattr(r, k),) if getattr(r, k) is not None else ()
for k in ([k] if k else PerfResult._sort)),
reverse=reverse ^ (not k or k in PerfResult._fields))
# write results to CSV
@ -1060,14 +1073,15 @@ def report(perf_paths, *,
with openio(args['output'], 'w') as f:
writer = csv.DictWriter(f,
(by if by is not None else PerfResult._by)
+ ['perf_'+k for k in PerfResult._fields])
+ ['perf_'+k for k in (
fields if fields is not None else PerfResult._fields)])
writer.writeheader()
for r in results:
writer.writerow(
{k: getattr(r, k)
for k in (by if by is not None else PerfResult._by)}
| {'perf_'+k: getattr(r, k)
for k in PerfResult._fields})
{k: getattr(r, k) for k in (
by if by is not None else PerfResult._by)}
| {'perf_'+k: getattr(r, k) for k in (
fields if fields is not None else PerfResult._fields)})
# find previous results?
if args.get('diff'):
@ -1192,12 +1206,14 @@ if __name__ == "__main__":
namespace.sort.append((value, True if option == '-S' else False))
parser.add_argument(
'-s', '--sort',
nargs='?',
action=AppendSort,
help="Sort by this fields.")
help="Sort by this field.")
parser.add_argument(
'-S', '--reverse-sort',
nargs='?',
action=AppendSort,
help="Sort by this fields, but backwards.")
help="Sort by this field, but backwards.")
parser.add_argument(
'-Y', '--summary',
action='store_true',

View File

@ -113,6 +113,7 @@ class PerfBdResult(co.namedtuple('PerfBdResult', [
'children'])):
_by = ['file', 'function', 'line']
_fields = ['readed', 'proged', 'erased']
_sort = ['erased', 'proged', 'readed']
_types = {'readed': Int, 'proged': Int, 'erased': Int}
__slots__ = ()
@ -678,10 +679,15 @@ def table(Result, results, diff_results=None, *,
reverse=True)
if sort:
for k, reverse in reversed(sort):
names.sort(key=lambda n: (getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else (),
names.sort(
key=lambda n: tuple(
(getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else ()
for k in ([k] if k else [
k for k in Result._sort if k in fields])),
reverse=reverse ^ (not k or k in Result._fields))
# build up our lines
lines = []
@ -819,8 +825,13 @@ def table(Result, results, diff_results=None, *,
names_.sort()
if sort:
for k, reverse in reversed(sort):
names_.sort(key=lambda n: (getattr(table_[n], k),)
if getattr(table_.get(n), k, None) is not None else (),
names_.sort(
key=lambda n: tuple(
(getattr(table_[n], k),)
if getattr(table_.get(n), k, None) is not None
else ()
for k in ([k] if k else [
k for k in Result._sort if k in fields])),
reverse=reverse ^ (not k or k in Result._fields))
for i, name in enumerate(names_):
@ -1031,8 +1042,10 @@ def report(obj_path='', trace_paths=[], *,
results.sort()
if sort:
for k, reverse in reversed(sort):
results.sort(key=lambda r: (getattr(r, k),)
if getattr(r, k) is not None else (),
results.sort(
key=lambda r: tuple(
(getattr(r, k),) if getattr(r, k) is not None else ()
for k in ([k] if k else PerfBdResult._sort)),
reverse=reverse ^ (not k or k in PerfBdResult._fields))
# write results to CSV
@ -1040,14 +1053,15 @@ def report(obj_path='', trace_paths=[], *,
with openio(args['output'], 'w') as f:
writer = csv.DictWriter(f,
(by if by is not None else PerfBdResult._by)
+ ['perfbd_'+k for k in PerfBdResult._fields])
+ ['perfbd_'+k for k in (
fields if fields is not None else PerfBdResult._fields)])
writer.writeheader()
for r in results:
writer.writerow(
{k: getattr(r, k)
for k in (by if by is not None else PerfBdResult._by)}
| {'perfbd_'+k: getattr(r, k)
for k in PerfBdResult._fields})
{k: getattr(r, k) for k in (
by if by is not None else PerfBdResult._by)}
| {'perfbd_'+k: getattr(r, k) for k in (
fields if fields is not None else PerfBdResult._fields)})
# find previous results?
if args.get('diff'):
@ -1164,12 +1178,14 @@ if __name__ == "__main__":
namespace.sort.append((value, True if option == '-S' else False))
parser.add_argument(
'-s', '--sort',
nargs='?',
action=AppendSort,
help="Sort by this fields.")
help="Sort by this field.")
parser.add_argument(
'-S', '--reverse-sort',
nargs='?',
action=AppendSort,
help="Sort by this fields, but backwards.")
help="Sort by this field, but backwards.")
parser.add_argument(
'-Y', '--summary',
action='store_true',

View File

@ -102,6 +102,7 @@ class StackResult(co.namedtuple('StackResult', [
'file', 'function', 'frame', 'limit', 'children'])):
_by = ['file', 'function']
_fields = ['frame', 'limit']
_sort = ['limit', 'frame']
_types = {'frame': Int, 'limit': Int}
__slots__ = ()
@ -350,8 +351,12 @@ def table(Result, results, diff_results=None, *,
reverse=True)
if sort:
for k, reverse in reversed(sort):
names.sort(key=lambda n: (getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else (),
names.sort(
key=lambda n: tuple(
(getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else ()
for k in ([k] if k else [
k for k in Result._sort if k in fields])),
reverse=reverse ^ (not k or k in Result._fields))
@ -569,8 +574,10 @@ def main(ci_paths,
results.sort()
if sort:
for k, reverse in reversed(sort):
results.sort(key=lambda r: (getattr(r, k),)
if getattr(r, k) is not None else (),
results.sort(
key=lambda r: tuple(
(getattr(r, k),) if getattr(r, k) is not None else ()
for k in ([k] if k else StackResult._sort)),
reverse=reverse ^ (not k or k in StackResult._fields))
# write results to CSV
@ -578,14 +585,15 @@ def main(ci_paths,
with openio(args['output'], 'w') as f:
writer = csv.DictWriter(f,
(by if by is not None else StackResult._by)
+ ['stack_'+k for k in StackResult._fields])
+ ['stack_'+k for k in (
fields if fields is not None else StackResult._fields)])
writer.writeheader()
for r in results:
writer.writerow(
{k: getattr(r, k)
for k in (by if by is not None else StackResult._by)}
| {'stack_'+k: getattr(r, k)
for k in StackResult._fields})
{k: getattr(r, k) for k in (
by if by is not None else StackResult._by)}
| {'stack_'+k: getattr(r, k) for k in (
fields if fields is not None else StackResult._fields)})
# find previous results?
if args.get('diff'):
@ -685,12 +693,14 @@ if __name__ == "__main__":
namespace.sort.append((value, True if option == '-S' else False))
parser.add_argument(
'-s', '--sort',
nargs='?',
action=AppendSort,
help="Sort by this fields.")
help="Sort by this field.")
parser.add_argument(
'-S', '--reverse-sort',
nargs='?',
action=AppendSort,
help="Sort by this fields, but backwards.")
help="Sort by this field, but backwards.")
parser.add_argument(
'-Y', '--summary',
action='store_true',

View File

@ -3,7 +3,7 @@
# Script to find struct sizes.
#
# Example:
# ./scripts/struct_.py lfs.o lfs_util.o -Ssize
# ./scripts/structs.py lfs.o lfs_util.o -Ssize
#
# Copyright (c) 2022, The littlefs authors.
# SPDX-License-Identifier: BSD-3-Clause
@ -106,6 +106,7 @@ class Int(co.namedtuple('Int', 'x')):
class StructResult(co.namedtuple('StructResult', ['file', 'struct', 'size'])):
_by = ['file', 'struct']
_fields = ['size']
_sort = ['size']
_types = {'size': Int}
__slots__ = ()
@ -333,8 +334,12 @@ def table(Result, results, diff_results=None, *,
reverse=True)
if sort:
for k, reverse in reversed(sort):
names.sort(key=lambda n: (getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else (),
names.sort(
key=lambda n: tuple(
(getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else ()
for k in ([k] if k else [
k for k in Result._sort if k in fields])),
reverse=reverse ^ (not k or k in Result._fields))
@ -494,8 +499,10 @@ def main(obj_paths, *,
results.sort()
if sort:
for k, reverse in reversed(sort):
results.sort(key=lambda r: (getattr(r, k),)
if getattr(r, k) is not None else (),
results.sort(
key=lambda r: tuple(
(getattr(r, k),) if getattr(r, k) is not None else ()
for k in ([k] if k else StructResult._sort)),
reverse=reverse ^ (not k or k in StructResult._fields))
# write results to CSV
@ -503,14 +510,15 @@ def main(obj_paths, *,
with openio(args['output'], 'w') as f:
writer = csv.DictWriter(f,
(by if by is not None else StructResult._by)
+ ['struct_'+k for k in StructResult._fields])
+ ['struct_'+k for k in (
fields if fields is not None else StructResult._fields)])
writer.writeheader()
for r in results:
writer.writerow(
{k: getattr(r, k)
for k in (by if by is not None else StructResult._by)}
| {'struct_'+k: getattr(r, k)
for k in StructResult._fields})
{k: getattr(r, k) for k in (
by if by is not None else StructResult._by)}
| {'struct_'+k: getattr(r, k) for k in (
fields if fields is not None else StructResult._fields)})
# find previous results?
if args.get('diff'):
@ -607,10 +615,12 @@ if __name__ == "__main__":
namespace.sort.append((value, True if option == '-S' else False))
parser.add_argument(
'-s', '--sort',
nargs='?',
action=AppendSort,
help="Sort by this field.")
parser.add_argument(
'-S', '--reverse-sort',
nargs='?',
action=AppendSort,
help="Sort by this field, but backwards.")
parser.add_argument(

View File

@ -339,6 +339,7 @@ def infer(results, *,
'__getattribute__': __getattribute__,
'_by': by,
'_fields': fields,
'_sort': fields,
'_types': types_,
})
@ -419,8 +420,12 @@ def table(Result, results, diff_results=None, *,
reverse=True)
if sort:
for k, reverse in reversed(sort):
names.sort(key=lambda n: (getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else (),
names.sort(
key=lambda n: tuple(
(getattr(table[n], k),)
if getattr(table.get(n), k, None) is not None else ()
for k in ([k] if k else [
k for k in Result._sort if k in fields])),
reverse=reverse ^ (not k or k in Result._fields))
@ -649,8 +654,10 @@ def main(csv_paths, *,
results.sort()
if sort:
for k, reverse in reversed(sort):
results.sort(key=lambda r: (getattr(r, k),)
if getattr(r, k) is not None else (),
results.sort(
key=lambda r: tuple(
(getattr(r, k),) if getattr(r, k) is not None else ()
for k in ([k] if k else Result._sort)),
reverse=reverse ^ (not k or k in Result._fields))
# write results to CSV
@ -761,12 +768,14 @@ if __name__ == "__main__":
namespace.sort.append((value, True if option == '-S' else False))
parser.add_argument(
'-s', '--sort',
nargs='?',
action=AppendSort,
help="Sort by this fields.")
help="Sort by this field.")
parser.add_argument(
'-S', '--reverse-sort',
nargs='?',
action=AppendSort,
help="Sort by this fields, but backwards.")
help="Sort by this field, but backwards.")
parser.add_argument(
'-Y', '--summary',
action='store_true',