Diffstat (limited to 'build-aux')
-rwxr-xr-x  build-aux/gcov-prune                      33
-rwxr-xr-x  build-aux/get-dscname                     36
-rwxr-xr-x  build-aux/lint-bin                        15
-rwxr-xr-x  build-aux/lint-generic                    60
-rwxr-xr-x  build-aux/lint-h                          27
-rwxr-xr-x  build-aux/lint-src                       160
-rwxr-xr-x  build-aux/lint-unknown                    24
-rwxr-xr-x  build-aux/linux-errno.txt.gen             17
-rw-r--r--  build-aux/measurestack/analyze.py        288
-rw-r--r--  build-aux/measurestack/app_main.py        17
-rw-r--r--  build-aux/measurestack/app_output.py      46
-rw-r--r--  build-aux/measurestack/app_plugins.py    517
-rw-r--r--  build-aux/measurestack/test_analyze.py    55
-rw-r--r--  build-aux/measurestack/testutil.py       134
-rw-r--r--  build-aux/measurestack/util.py            18
-rwxr-xr-x  build-aux/tent-graph                     180
-rwxr-xr-x  build-aux/valgrind                        16
17 files changed, 1082 insertions, 561 deletions
diff --git a/build-aux/gcov-prune b/build-aux/gcov-prune
new file mode 100755
index 0000000..dc190a9
--- /dev/null
+++ b/build-aux/gcov-prune
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+# build-aux/gcov-prune - Prune old GCC coverage files
+#
+# Copyright (C) 2025 Luke T. Shumaker <lukeshu@lukeshu.com>
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+set -e
+
+[[ $# == 1 ]]
+
+sourcedir="$(realpath -- .)"
+builddir="$(realpath -- "$1")"
+
+# `gcc` writes .gcno
+# Running the program writes .gcda (updates existing files, concurrent-safe)
+# GCC `gcov` post-processes .gcno+.gcda to .gcov
+# `gcovr` is a Python script that calls `gcov` and merges and post-processes the .gcov files to other formats
+
+# Prune orphaned .gcno files.
+find "$builddir" -name '*.gcno' -printf '%P\0' | while read -d '' -r gcno_file; do
+ rel_base="${gcno_file%/CMakeFiles/*}"
+ src_file="$gcno_file"
+ src_file="${src_file#*/CMakeFiles/*.dir/}"
+ src_file="${src_file%.gcno}"
+ src_file="${src_file//__/..}"
+ src_file="$rel_base/$src_file"
+ if [[ ! -e "$sourcedir/$src_file" || "$sourcedir/$src_file" -nt "$builddir/$gcno_file" ]]; then
+ rm -fv -- "$builddir/$gcno_file"
+ fi
+done
+
+# Prune all .gcda files.
+find "$builddir" -name '*.gcda' -delete
diff --git a/build-aux/get-dscname b/build-aux/get-dscname
deleted file mode 100755
index 34a1b08..0000000
--- a/build-aux/get-dscname
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env bash
-# build-aux/get-dscname - Get a file's self-described filename
-#
-# Copyright (C) 2024-2025 Luke T. Shumaker <lukeshu@lukeshu.com>
-# SPDX-License-Identifier: AGPL-3.0-or-later
-
-if [ $# -ne 1 ]; then
- echo "$0: expected exactly 1 argument"
- exit 2
-fi
-
-if [[ $1 == */Documentation/* ]] && [[ "$(sed 1q -- "$1")" == 'NAME' ]]; then
- sed -n '
- 2{
- s,[/.],_,g;
- s,^\s*_,Documentation/,;
- s,$,.txt,;
-
- p;
- q;
- }
- ' -- "$1"
-else
- sed -n '
- 1,3{
- /^\#!/d;
- /^<!--$/d;
- /-\*- .* -\*-/d;
- s,[/*\# ]*,,;
- s/ - .*//;
-
- p;
- q;
- }
- ' -- "$1"
-fi
diff --git a/build-aux/lint-bin b/build-aux/lint-bin
index 91f1612..3b9eb4b 100755
--- a/build-aux/lint-bin
+++ b/build-aux/lint-bin
@@ -18,7 +18,7 @@ shopt -s extglob
# Textual info:
# - ${elf%.elf}.dis : `objdump --section-headers ${elf}; objdump --disassemble ${elf}; picotool coprodis --quiet ${elf}`
# - ${elf}.map : `ld --print-map` info
-# - ${elf%.elf}_stack.c : `stack.c.gen`
+# - stack.c : `stack.c.gen`
RED=$(tput setaf 1)
RESET=$(tput sgr0)
@@ -97,13 +97,13 @@ lint_stack() {
while read -r line; do
func=${line#$'\t'}
if [[ $line == $'\t'* ]]; then
- err "$in_elffile" "function in binary but not _stack.c: ${func}"
+ err "$in_elffile" "function in binary but not stack.c: ${func}"
else
- err "$in_elffile" "function in _stack.c but not binary: ${func}"
+ err "$in_elffile" "function in stack.c but not binary: ${func}"
fi
done < <(
comm -3 \
- <(sed -En 's/^included: (.*:)?//p' "${in_elffile%.elf}_stack.c" | sort -u) \
+ <(sed -En 's/^included: (.*:)?//p' "${in_elffile%/*}/stack.c" | sort -u) \
<(readelf_funcs "$in_elffile" | sed -E -e 's/\.part\.[0-9]*$//' -e 's/^__(.*)_veneer$/\1/' | sort -u)
)
}
@@ -114,6 +114,7 @@ lint_func_blocklist() {
local blocklist=(
gpio_default_irq_handler
+ {,__wrap,weak_raw_,stdio_,_}{,v}{,sn}printf
)
while read -r func; do
@@ -131,6 +132,12 @@ main() {
{
echo 'Global variables:'
lint_globals "${elf}.map" | sed 's/^/ /'
+ echo
+ heap=$(grep -B1 'HeapLimit =' -- "${elf}.map" |
+ sed -E -e 's/^\s*(\.heap\s*)?0x/0x/' -e 's/\s.*//' |
+ sed -E -e '1{N;s/(.*)\n(.*)/\2-\1/;}' -e 's/.*/print(&)/' |
+ python)
+ printf "Left for heap: 0x%04x (%'d)\n" "$heap" "$heap"
} >"${elf%.elf}.lint.globals"
(lint_stack "$elf") &>"${elf%.elf}.lint.stack"
lint_func_blocklist "$elf"
diff --git a/build-aux/lint-generic b/build-aux/lint-generic
deleted file mode 100755
index 290988c..0000000
--- a/build-aux/lint-generic
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/sh
-# build-aux/lint-generic - Non-language-specific lint checks
-#
-# Copyright (C) 2024-2025 Luke T. Shumaker <lukeshu@lukeshu.com>
-# SPDX-License-Identifier: AGPL-3.0-or-later
-
-RED=$(tput setaf 1)
-RESET=$(tput sgr0)
-
-err() {
- printf "${RED}%s${RESET}: %s\n" "$1" "$2" >&2
- r=1
-}
-
-r=0
-for filename in "$@"; do
- if ! { [ -f "$filename" ] && ! [ -h "$filename" ]; }; then
- # Ignore non-files
- continue
- fi
-
- # File header ##########################################################
-
- shebang="$(sed -n '1{/^#!/{/^#!\/hint\//q; p;};}' "$filename")"
- if [ -x "$filename" ] && [ -z "$shebang" ]; then
- err "$filename" 'is executable but does not have a shebang'
- elif [ -n "$shebang" ] && ! [ -x "$filename" ]; then
- err "$filename" 'has a shebang but is executable'
- fi
-
- if ! grep -E -q 'Copyright \(C\) 202[4-9]((-|, )202[5-9])* Luke T. Shumaker' "$filename"; then
- err "$filename" 'is missing a copyright statement'
- fi
- if test -e .git && ! git diff --quiet milestone/2025-01-01 HEAD -- "$filename"; then
- if ! grep -E -q 'Copyright \(C\) .*2025 Luke T. Shumaker' "$filename"; then
- err "$filename" 'has an outdated copyright statement'
- fi
- fi
- if ! grep -q '\sSPDX-License-Identifier[:] ' "$filename"; then
- err "$filename" 'is missing an SPDX-License-Identifier'
- fi
-
- dscname_act=$(./build-aux/get-dscname "$filename")
- dscname_exp=$(echo "$filename" | sed \
- -e 's,.*/config/,,' \
- -e 's,.*/config\.h$,config.h,' \
- -e 's,.*include/,,' \
- -e 's,.*static/,,' \
- -e 's/\.wip$//')
- if [ "$dscname_act" != "$dscname_exp" ] && [ "cmd/$dscname_act" != "$dscname_exp" ]; then
- err "$filename" "self-identifies as $dscname_act (expected $dscname_exp)"
- fi
-
- # File body ############################################################
-
- if grep -n --color=auto "$(printf '\\S\t')" "$filename"; then
- err "$filename" 'uses tabs for alignment'
- fi
-done
-exit $r
diff --git a/build-aux/lint-h b/build-aux/lint-h
deleted file mode 100755
index 7459032..0000000
--- a/build-aux/lint-h
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env bash
-# build-aux/lint-h - Lint checks for C header files
-#
-# Copyright (C) 2024-2025 Luke T. Shumaker <lukeshu@lukeshu.com>
-# SPDX-License-Identifier: AGPL-3.0-or-later
-
-RED=$(tput setaf 1)
-RESET=$(tput sgr0)
-
-err() {
- printf "${RED}%s${RESET}: %s\n" "$1" "$2" >&2
- r=1
-}
-
-r=0
-for filename in "$@"; do
- dscname=$(./build-aux/get-dscname "$filename")
- guard=${dscname//'/'/'_'}
- guard=${guard//'.'/'_'}
- guard="_${guard^^}_"
- if ! { grep -Fxq "#ifndef ${guard}" "$filename" &&
- grep -Fxq "#define ${guard}" "$filename" &&
- grep -Fxq "#endif /* ${guard} */" "$filename"; }; then
- err "$filename" "does not have ${guard} guard"
- fi
-done
-exit $r
diff --git a/build-aux/lint-src b/build-aux/lint-src
new file mode 100755
index 0000000..d536631
--- /dev/null
+++ b/build-aux/lint-src
@@ -0,0 +1,160 @@
+#!/usr/bin/env bash
+# build-aux/lint-src - Lint checks for source files
+#
+# Copyright (C) 2024-2025 Luke T. Shumaker <lukeshu@lukeshu.com>
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+RED=$(tput setaf 1)
+RESET=$(tput sgr0)
+
+err() {
+ printf "${RED}%s${RESET}: %s\n" "$1" "$2" >&2
+ r=1
+}
+
+# `get-dscname FILENAME` reads FILENAME and prints the name that the
+# comment at the top of the file self-identifies the file as.
+get-dscname() {
+ if [[ $1 == */Documentation/* && "$(sed 1q -- "$1")" == 'NAME' ]]; then
+ sed -n '
+ 2{
+ s,/,_,g;
+ s,^\s*_,Documentation/,;
+ s,$,.txt,;
+
+ p;
+ q;
+ }
+ ' -- "$1"
+ else
+ sed -n '
+ 1,3{
+ /^\#!/d;
+ /^<!--$/d;
+ /-\*- .* -\*-/d;
+ s,[/*\# ]*,,;
+ s/ - .*//;
+
+ p;
+ q;
+ }
+ ' -- "$1"
+ fi
+}
+
+{
+ filetype=$1
+ filenames=("${@:2}")
+
+ r=0
+ for filename in "${filenames[@]}"; do
+ # File header ##########################################################
+
+ shebang="$(sed -n '1{/^#!/p;}' "$filename")"
+ if [[ -x $filename && (-z $shebang || $shebang == '#!/hint/'*) ]]; then
+ err "$filename" 'is executable but does not have a shebang'
+ elif [[ (-n $shebang && $shebang != '#!/hint/'*) && ! -x $filename ]]; then
+ err "$filename" 'has a shebang but is not executable'
+ fi
+ case "$shebang" in
+ '') : ;;
+ '#!/bin/sh') : ;;
+ '#!/usr/bin/env bash') : ;;
+ '#!/usr/bin/env python3') : ;;
+ *) err "$filename" 'has an unrecognized shebang' ;;
+ esac
+ if [[ -n $shebang && $shebang != */"$filetype" && $shebang != *' '"$filetype" ]]; then
+ err "$filename" "wrong shebang for $filetype"
+ fi
+
+ if ! grep -E -q 'Copyright \(C\) 202[4-9]((-|, )202[5-9])* Luke T. Shumaker' "$filename"; then
+ err "$filename" 'is missing a copyright statement'
+ fi
+ if test -e .git && ! git diff --quiet milestone/2025-01-01 HEAD -- "$filename"; then
+ if ! grep -E -q 'Copyright \(C\) .*2025 Luke T. Shumaker' "$filename"; then
+ err "$filename" 'has an outdated copyright statement'
+ fi
+ fi
+ if ! grep -q '\sSPDX-License-Identifier[:] ' "$filename"; then
+ err "$filename" 'is missing an SPDX-License-Identifier'
+ fi
+
+ dscname_act=$(get-dscname "$filename")
+ dscname_exp=$(echo "$filename" | sed \
+ -e 's,.*include/,,' \
+ -e 's,.*static/,,' \
+ -e 's/\.wip$//')
+ if [[ $dscname_act != "$dscname_exp" ]]; then
+ err "$filename" "self-identifies as $dscname_act (expected $dscname_exp)"
+ fi
+
+ # File body ############################################################
+
+ if grep -n --color=auto $'\\S\t' "$filename"; then
+ err "$filename" 'uses tabs for alignment'
+ fi
+ done
+ case "$filetype" in
+ unknown)
+ for filename in "${filenames[@]}"; do
+ err "$filename" 'cannot lint unknown file type'
+ done
+ ;;
+ c)
+ for filename in "${filenames[@]}"; do
+ if [[ $filename == *.h ]]; then
+ dscname=$(get-dscname "$filename")
+ guard=$dscname
+ guard=${guard#*/config/}
+ if [[ $guard == */config.h ]]; then
+ guard=config.h
+ fi
+ guard=${guard//'/'/'_'}
+ guard=${guard//'.'/'_'}
+ guard="_${guard^^}_"
+ if ! { grep -Fxq "#ifndef ${guard}" "$filename" &&
+ grep -Fxq "#define ${guard}" "$filename" &&
+ grep -Fxq "#endif /* ${guard} */" "$filename"; }; then
+ err "$filename" "does not have ${guard} guard"
+ fi
+ if [[ $filename != libmisc/include/libmisc/obj.h ]] &&
+ grep -Fn --color=auto -e LO_IMPLEMENTATION_C -e LO_IMPLEMENTATION_STATIC "$filename"; then
+ err "$filename" "contains LO_IMPLEMENTATION_C and/or LO_IMPLEMENTATION_STATIC"
+ fi
+ fi
+ if [[ $filename == *.c ]]; then
+ if [[ $filename != libmisc/tests/test_obj.c ]] &&
+				grep -Fn --color=auto LO_IMPLEMENTATION_H "$filename"; then
+ err "$filename" "contains LO_IMPLEMENTATION_H"
+ fi
+ fi
+ done
+ ;;
+ sh | bash)
+ shellcheck "${filenames[@]}" || exit $?
+ shfmt --diff --case-indent --simplify "${filenames[@]}" || exit $?
+ ;;
+ python3)
+ ./build-aux/venv/bin/mypy --strict --scripts-are-modules "${filenames[@]}" || exit $?
+ ./build-aux/venv/bin/black --check "${filenames[@]}" || exit $?
+ ./build-aux/venv/bin/isort --check "${filenames[@]}" || exit $?
+ ./build-aux/venv/bin/pylint "${filenames[@]}" || exit $?
+ if grep -nh 'SPECIAL$$' -- lib9p/core.gen lib9p/core_gen/*.py; then exit 1; fi
+ testfiles=()
+ for filename in "${filenames[@]}"; do
+ if [[ ${filename##*/} == test_*.py ]]; then
+ testfiles+=("$filename")
+ fi
+ done
+ ./build-aux/venv/bin/pytest "${testfiles[@]}" || exit $?
+ ;;
+ make | cmake | gitignore | ini | 9p-idl | 9p-log | markdown | pip | man-cat)
+ # TODO: Write/adopt linters for these file types
+ :
+ ;;
+ *)
+ err "$0" "unknown filetype: ${filetype}"
+ ;;
+ esac
+ exit $r
+}
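
The `c)` branch derives the expected include guard from the file's self-described name. Sketched in Python (an approximation of the parameter expansions above; the example header is the one named in the LO_IMPLEMENTATION check):

    def guard_for(dscname: str) -> str:
        g = dscname
        if "/config/" in g:                # ${guard#*/config/}
            g = g.split("/config/", 1)[1]
        if g.endswith("/config.h"):        # any */config.h -> config.h
            g = "config.h"
        g = g.replace("/", "_").replace(".", "_")
        return f"_{g.upper()}_"

    # libmisc/include/libmisc/obj.h self-identifies as "libmisc/obj.h":
    print(guard_for("libmisc/obj.h"))      # -> _LIBMISC_OBJ_H_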
diff --git a/build-aux/lint-unknown b/build-aux/lint-unknown
deleted file mode 100755
index dda9541..0000000
--- a/build-aux/lint-unknown
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/sh
-# build-aux/lint-unknown - Lint checks for unknown files
-#
-# Copyright (C) 2024-2025 Luke T. Shumaker <lukeshu@lukeshu.com>
-# SPDX-License-Identifier: AGPL-3.0-or-later
-
-RED=$(tput setaf 1)
-RESET=$(tput sgr0)
-
-err() {
- printf "${RED}%s${RESET}: %s\n" "$1" "$2" >&2
- r=1
-}
-
-r=0
-for filename in "$@"; do
- if ! { [ -f "$filename" ] && ! [ -h "$filename" ]; }; then
- # Ignore non-files
- continue
- fi
-
- err "$filename" 'cannot lint unknown file type'
-done
-exit $r
diff --git a/build-aux/linux-errno.txt.gen b/build-aux/linux-errno.txt.gen
deleted file mode 100755
index f94178f..0000000
--- a/build-aux/linux-errno.txt.gen
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env bash
-# build-aux/linux-errno.txt.gen - Generate a listing of Linux kernel errnos
-#
-# Copyright (C) 2024 Luke T. Shumaker <lukeshu@lukeshu.com>
-# SPDX-License-Identifier: AGPL-3.0-or-later
-
-set -e
-linux_git=${1:?}
-outfile=${2:?}
-
-(
- cd "${linux_git}"
- echo "# ${outfile} - Generated from $0 and linux.git $(git describe). DO NOT EDIT!"
- git ls-files include/uapi/ | grep errno |
- xargs sed -nE 's,#\s*define\s+(E[A-Z0-9]+)\s+([0-9]+)\s+/\* (.*) \*/,\2 \1 \3,p' |
- sort --numeric-sort
-) >"${outfile}"
diff --git a/build-aux/measurestack/analyze.py b/build-aux/measurestack/analyze.py
index a93874f..f151642 100644
--- a/build-aux/measurestack/analyze.py
+++ b/build-aux/measurestack/analyze.py
@@ -3,24 +3,103 @@
# Copyright (C) 2024-2025 Luke T. Shumaker <lukeshu@lukeshu.com>
# SPDX-License-Identifier: AGPL-3.0-or-later
+import random
import re
import sys
import typing
from . import vcg
+# Whether to print "//dbg-cache:" on cache writes
+dbg_cache = False
+# Whether to print the graph in a /* comment */ before processing it
+dbg_dumpgraph = False
+# Whether to print "//dbg-nstatic:" lines that trace nstatic() execution
+dbg_nstatic = False
+# Whether to disable nstatic() caching (but does NOT disable any cache-related debug logging)
+dbg_nocache = False
+# Whether to sort things for consistently-ordered execution, or shuffle things to detect bugs
+dbg_sort: typing.Literal["unsorted", "sorted", "shuffled"] = "unsorted"
+
# pylint: disable=unused-variable
__all__ = [
"BaseName",
"QName",
"UsageKind",
"Node",
+ "maybe_sorted",
"AnalyzeResultVal",
"AnalyzeResultGroup",
"AnalyzeResult",
"analyze",
]
+
+def dumps(x: typing.Any, depth: int = 0, compact: bool = False) -> str:
+ match x:
+ case int() | str() | None:
+ return repr(x)
+ case dict():
+ if len(x) == 0:
+ return "{}"
+ ret = "{"
+ if not compact:
+ ret += "\n"
+ for k, v in x.items():
+ if not compact:
+ ret += "\t" * (depth + 1)
+ ret += dumps(k, depth + 1, True)
+ ret += ":"
+ if not compact:
+ ret += " "
+ ret += dumps(v, depth + 1, compact)
+ ret += ","
+ if not compact:
+ ret += "\n"
+ if not compact:
+ ret += "\t" * depth
+ ret += "}"
+ return ret
+ case list():
+ if len(x) == 0:
+ return "[]"
+ ret = "["
+ if not compact:
+ ret += "\n"
+ for v in x:
+ if not compact:
+ ret += "\t" * (depth + 1)
+ ret += dumps(v, depth + 1, compact)
+ ret += ","
+ if not compact:
+ ret += "\n"
+ if not compact:
+ ret += "\t" * depth
+ ret += "]"
+ return ret
+ case set():
+ if len(x) == 0:
+ return "set()"
+ ret = "{"
+ if not compact:
+ ret += "\n"
+ for v in x:
+ if not compact:
+ ret += "\t" * (depth + 1)
+ ret += dumps(v, depth + 1, compact)
+ ret += ","
+ if not compact:
+ ret += "\n"
+ if not compact:
+ ret += "\t" * depth
+ ret += "}"
+ return ret
+ case _:
+ if hasattr(x, "__dict__"):
+ return f"{x.__class__.__name__}(*{dumps(x.__dict__, depth, compact)})"
+ return f"TODO({x.__class__.__name__})"
+
+
# types ########################################################################
@@ -152,32 +231,44 @@ class AnalyzeResult(typing.NamedTuple):
class SkipModel(typing.NamedTuple):
"""Running the skipmodel calls `.fn(chain, ...)` with the chain
- consisting of the last `.nchain` items (if .nchain is an int), or
- the chain starting with the *last* occurance of `.nchain` (if
- .nchain is a collection). If the chain is not that long or does
- not contain a member of the collection, then .fn is not called and
- the call is *not* skipped.
+ consisting of the last few items of the input chain.
+
+ If `.nchain` is an int:
+
+ - the chain is the last `.nchain` items of the input chain. If
+ the input chain is not that long, then `.fn` is not called and
+ the call is *not* skipped.
+ If `.nchain` is a collection:
+
+ - the chain starts with the *last* occurrence of `.nchain` in the
+ input chain. If the input chain does not contain a member of
+ the collection, then .fn is called with an empty chain.
"""
nchain: int | typing.Collection[BaseName]
- fn: typing.Callable[[typing.Sequence[QName], QName], bool]
-
- def __call__(self, chain: typing.Sequence[QName], call: QName) -> tuple[bool, int]:
- if isinstance(self.nchain, int):
- if len(chain) >= self.nchain:
- _chain = chain[-self.nchain :]
- return self.fn(_chain, call), len(_chain)
- else:
- for i in reversed(range(len(chain))):
- if chain[i].base() in self.nchain:
- _chain = chain[i - 1 :]
- return self.fn(_chain, call), len(_chain)
- return False, 0
+ fn: typing.Callable[[typing.Sequence[QName], Node, QName], bool]
+
+ def __call__(
+ self, chain: typing.Sequence[QName], node: Node, call: QName
+ ) -> tuple[bool, int]:
+ match self.nchain:
+ case int():
+ if len(chain) >= self.nchain:
+ _chain = chain[-self.nchain :]
+ return self.fn(_chain, node, call), len(_chain) + 1
+ return False, 0
+ case _:
+ for i in reversed(range(len(chain))):
+ if chain[i].base() in self.nchain:
+ _chain = chain[i:]
+ return self.fn(_chain, node, call), len(_chain) + 1
+ return self.fn([], node, call), 1
class Application(typing.Protocol):
def extra_nodes(self) -> typing.Collection[Node]: ...
+ def mutate_node(self, node: Node) -> None: ...
def indirect_callees(
self, elem: vcg.VCGElem
) -> tuple[typing.Collection[QName], bool]: ...
@@ -186,7 +277,7 @@ class Application(typing.Protocol):
# code #########################################################################
-re_node_label = re.compile(
+re_node_normal_label = re.compile(
r"(?P<funcname>[^\n]+)\n"
+ r"(?P<location>[^\n]+:[0-9]+:[0-9]+)\n"
+ r"(?P<nstatic>[0-9]+) bytes \((?P<usage_kind>static|dynamic|dynamic,bounded)\)\n"
@@ -194,6 +285,10 @@ re_node_label = re.compile(
+ r"(?:\n.*)*",
flags=re.MULTILINE,
)
+re_node_alias_label = re.compile(
+ r"(?P<funcname>[^\n]+)\n" + r"(?P<location>[^\n]+:[0-9]+:[0-9]+)",
+ flags=re.MULTILINE,
+)
class _Graph:
@@ -235,6 +330,39 @@ class _Graph:
return self._resolve_cache[funcname]
+if typing.TYPE_CHECKING:
+ from _typeshed import SupportsRichComparisonT as _T_sortable
+
+_T = typing.TypeVar("_T")
+
+
+@typing.overload
+def maybe_sorted(
+ unsorted: typing.Iterable["_T_sortable"], /, *, key: None = None
+) -> typing.Iterable["_T_sortable"]: ...
+@typing.overload
+def maybe_sorted(
+ unsorted: typing.Iterable[_T], /, *, key: typing.Callable[[_T], "_T_sortable"]
+) -> typing.Iterable[_T]: ...
+
+
+def maybe_sorted(
+ unsorted: typing.Iterable[_T],
+ /,
+ *,
+ key: typing.Callable[[_T], "_T_sortable"] | None = None,
+) -> typing.Iterable[_T]:
+ match dbg_sort:
+ case "unsorted":
+ return unsorted
+ case "sorted":
+ return sorted(unsorted, key=key) # type: ignore
+ case "shuffled":
+ ret = [*unsorted]
+ random.shuffle(ret)
+ return ret
+
+
def _make_graph(
ci_fnames: typing.Collection[str],
app: Application,
@@ -253,20 +381,44 @@ def _make_graph(
case "title":
node.funcname = QName(v)
case "label":
- if elem.attrs.get("shape", "") != "ellipse":
- m = re_node_label.fullmatch(v)
- if not m:
- raise ValueError(f"unexpected label value {v!r}")
- node.location = m.group("location")
- node.usage_kind = typing.cast(
- UsageKind, m.group("usage_kind")
- )
- node.nstatic = int(m.group("nstatic"))
- node.ndynamic = int(m.group("ndynamic"))
+ shape: str | None = elem.attrs.get("shape", None)
+ match shape:
+ case "ellipse": # external
+ pass
+ case "triangle": # alias (since GCC 15)
+ m = re_node_alias_label.fullmatch(v)
+ if not m:
+ raise ValueError(
+ f"unexpected label value {v!r}"
+ )
+ node.location = m.group("location")
+ node.usage_kind = "static"
+ node.nstatic = 0
+ node.ndynamic = 0
+ case None: # normal
+ m = re_node_normal_label.fullmatch(v)
+ if not m:
+ raise ValueError(
+ f"unexpected label value {v!r}"
+ )
+ node.location = m.group("location")
+ node.usage_kind = typing.cast(
+ UsageKind, m.group("usage_kind")
+ )
+ node.nstatic = int(m.group("nstatic"))
+ node.ndynamic = int(m.group("ndynamic"))
+ case _:
+ raise ValueError(
+ f"unexpected shape value {shape!r}"
+ )
case "shape":
- if v != "ellipse":
- raise ValueError(f"unexpected shape value {v!r}")
- skip = True
+ match v:
+ case "ellipse": # external
+ skip = True
+ case "triangle": # alias (since GCC 15)
+ pass
+ case _:
+ raise ValueError(f"unexpected shape value {v!r}")
case _:
raise ValueError(f"unknown edge key {k!r}")
if not skip:
@@ -297,7 +449,10 @@ def _make_graph(
raise ValueError(f"unknown caller: {caller}")
if callee == QName("__indirect_call"):
callees, missing_ok = app.indirect_callees(elem)
- for callee in callees:
+ assert (
+ len(callees) > 0
+ ), f"app returning 0 callees for {elem.attrs.get('label')} indicates the code would crash"
+ for callee in maybe_sorted(callees):
if callee not in graph[caller].calls:
graph[caller].calls[callee] = missing_ok
else:
@@ -305,16 +460,22 @@ def _make_graph(
case _:
raise ValueError(f"unknown elem type {elem.typ!r}")
- for ci_fname in ci_fnames:
+ for ci_fname in maybe_sorted(ci_fnames):
with open(ci_fname, "r", encoding="utf-8") as fh:
for elem in vcg.parse_vcg(fh):
handle_elem(elem)
- for node in app.extra_nodes():
+ def sort_key(node: Node) -> QName:
+ return node.funcname
+
+ for node in maybe_sorted(app.extra_nodes(), key=sort_key):
if node.funcname in graph:
raise ValueError(f"duplicate node {node.funcname}")
graph[node.funcname] = node
+ for node in graph.values():
+ app.mutate_node(node)
+
ret = _Graph()
ret.graph = graph
ret.qualified = {}
@@ -332,33 +493,33 @@ def analyze(
cfg_max_call_depth: int,
) -> AnalyzeResult:
graphdata = _make_graph(ci_fnames, app)
+ if dbg_dumpgraph:
+ print(f"/* {dumps(graphdata)} */")
missing: set[QName] = set()
dynamic: set[QName] = set()
included_funcs: set[QName] = set()
- dbg = False
-
track_inclusion: bool = True
skipmodels = app.skipmodels()
for name, model in skipmodels.items():
- if isinstance(model.nchain, int):
- assert model.nchain > 1
- else:
+ if not isinstance(model.nchain, int):
assert len(model.nchain) > 0
_nstatic_cache: dict[QName, int] = {}
def _nstatic(chain: list[QName], funcname: QName) -> tuple[int, int]:
- nonlocal dbg
nonlocal track_inclusion
assert funcname in graphdata.graph
+ def putdbg(msg: str) -> None:
+ print(f"//dbg-nstatic: {'- '*len(chain)}{msg}")
+
node = graphdata.graph[funcname]
- if dbg:
- print(f"//dbg: {'- '*len(chain)}{funcname}\t{node.nstatic}")
+ if dbg_nstatic:
+ putdbg(f"{funcname}\t{node.nstatic}")
if node.usage_kind == "dynamic" or node.ndynamic > 0:
dynamic.add(funcname)
if track_inclusion:
@@ -378,37 +539,52 @@ def analyze(
call_qname = graphdata.resolve_funcname(call_orig_qname)
if not call_qname:
if skipmodel:
- skip, _ = skipmodel(chain, call_orig_qname)
+ skip, _ = skipmodel(chain[:-1], node, call_orig_qname)
if skip:
- if dbg:
- print(
- f"//dbg: {'- '*len(chain)}{call_orig_qname}\tskip missing"
- )
+ if dbg_nstatic:
+ putdbg(f"{call_orig_qname}\tskip missing")
continue
if not call_missing_ok:
missing.add(call_orig_qname)
- if dbg:
- print(f"//dbg: {'- '*len(chain)}{call_orig_qname}\tmissing")
+ if dbg_nstatic:
+ putdbg(f"{call_orig_qname}\tmissing")
continue
# 2. Skip
if skipmodel:
- skip, skip_nchain = skipmodel(chain, call_qname)
+ skip, skip_nchain = skipmodel(chain[:-1], node, call_qname)
max_call_nchain = max(max_call_nchain, skip_nchain)
if skip:
- if dbg:
- print(f"//dbg: {'- '*len(chain)}{call_qname}\tskip")
+ if dbg_nstatic:
+ putdbg(f"{call_qname}\tskip")
continue
# 3. Call
- if skip_nchain == 0 and call_qname in _nstatic_cache:
- max_call_nstatic = max(max_call_nstatic, _nstatic_cache[call_qname])
+ if (
+ (not dbg_nocache)
+ and skip_nchain == 0
+ and call_qname in _nstatic_cache
+ ):
+ call_nstatic = _nstatic_cache[call_qname]
+ if dbg_nstatic:
+ putdbg(f"{call_qname}\ttotal={call_nstatic} (cache-read)")
+ max_call_nstatic = max(max_call_nstatic, call_nstatic)
else:
call_nstatic, call_nchain = _nstatic(chain, call_qname)
max_call_nstatic = max(max_call_nstatic, call_nstatic)
max_call_nchain = max(max_call_nchain, call_nchain)
if skip_nchain == 0 and call_nchain == 0:
- _nstatic_cache[call_qname] = call_nstatic
+ if dbg_nstatic:
+ putdbg(f"{call_qname}\ttotal={call_nstatic} (cache-write)")
+ if call_qname not in _nstatic_cache:
+ if dbg_cache:
+ print(f"//dbg-cache: {call_qname} = {call_nstatic}")
+ _nstatic_cache[call_qname] = call_nstatic
+ else:
+ assert dbg_nocache
+ assert _nstatic_cache[call_qname] == call_nstatic
+ elif dbg_nstatic:
+ putdbg(f"{call_qname}\ttotal={call_nstatic} (do-not-cache)")
chain.pop()
return node.nstatic + max_call_nstatic, max(0, max_call_nchain - 1)
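
SkipModel's chain-slicing now differs by nchain type: an int takes the last nchain items (or bails), a collection rewinds to the last occurrence of one of its members (or hands .fn an empty chain). A toy sketch of just the slicing, with plain strings standing in for QName/Node:

    def select_subchain(nchain, chain):
        if isinstance(nchain, int):
            # last `nchain` items, or None when the chain is too short
            return chain[-nchain:] if len(chain) >= nchain else None
        for i in reversed(range(len(chain))):
            if chain[i] in nchain:         # real code compares chain[i].base()
                return chain[i:]
        return []                          # no member present: .fn gets an empty chain

    chain = ["a", "b", "c", "d"]
    print(select_subchain(2, chain))       # ['c', 'd']
    print(select_subchain(9, chain))       # None  (call is *not* skipped)
    print(select_subchain({"b"}, chain))   # ['b', 'c', 'd']
    print(select_subchain({"z"}, chain))   # []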
diff --git a/build-aux/measurestack/app_main.py b/build-aux/measurestack/app_main.py
index 7573146..884aeee 100644
--- a/build-aux/measurestack/app_main.py
+++ b/build-aux/measurestack/app_main.py
@@ -27,8 +27,8 @@ def main(
# sbc-harness ####################################################
- libobj_plugin = app_plugins.LibObjPlugin(arg_c_fnames)
- lib9p_plugin = app_plugins.Lib9PPlugin(arg_base_dir, arg_c_fnames, libobj_plugin)
+ libmisc_plugin = app_plugins.LibMiscPlugin(arg_c_fnames)
+ lib9p_plugin = app_plugins.Lib9PPlugin(arg_base_dir, arg_c_fnames)
def sbc_is_thread(name: QName) -> int:
if str(name).endswith("_cr") and name.base() != BaseName("lib9p_srv_read_cr"):
@@ -47,13 +47,11 @@ def main(
plugins += [
app_plugins.CmdPlugin(),
- libobj_plugin,
- app_plugins.PicoFmtPlugin(arg_pico_platform),
- app_plugins.LibHWPlugin(arg_pico_platform, libobj_plugin),
+ libmisc_plugin,
+ app_plugins.LibHWPlugin(arg_pico_platform, libmisc_plugin),
app_plugins.LibCRPlugin(),
app_plugins.LibCRIPCPlugin(),
lib9p_plugin,
- app_plugins.LibMiscPlugin(),
]
# pico-sdk #######################################################
@@ -69,6 +67,7 @@ def main(
plugins += [
app_plugins.PicoSDKPlugin(
get_init_array=get_init_array,
+ PICO_PANIC_FUNCTION="assert_panic",
),
app_plugins.TinyUSBDevicePlugin(arg_c_fnames),
app_plugins.NewlibPlugin(),
@@ -89,12 +88,10 @@ def main(
def misc_filter(name: QName) -> tuple[int, bool]:
if name in [
QName("__assert_msg_fail"),
- QName("__lm_printf"),
- QName("__lm_light_printf"),
- QName("fmt_vfctprintf"),
- QName("fmt_vsnprintf"),
]:
return 1, False
+ if str(name.base()).endswith("_putb"):
+ return 1, False
return 0, False
extra_includes: list[BaseName] = []
diff --git a/build-aux/measurestack/app_output.py b/build-aux/measurestack/app_output.py
index 5336b85..5cf7d17 100644
--- a/build-aux/measurestack/app_output.py
+++ b/build-aux/measurestack/app_output.py
@@ -1,4 +1,4 @@
-# build-aux/measurestack/app_output.py - Generate `*_stack.c` files
+# build-aux/measurestack/app_output.py - Generate `stack.c` files
#
# Copyright (C) 2024-2025 Luke T. Shumaker <lukeshu@lukeshu.com>
# SPDX-License-Identifier: AGPL-3.0-or-later
@@ -51,14 +51,14 @@ def print_group(
print(sep1)
-def next_power_of_2(x: int) -> int:
- return 1 << (x.bit_length())
+def lm_round_up(n: int, d: int) -> int:
+ return ((n + d - 1) // d) * d
def print_c(
result: analyze.AnalyzeResult, location_xform: typing.Callable[[QName], str]
) -> None:
- print("#include <stddef.h> /* for size_t */")
+ print('#include "config.h" /* for COROUTINE_STACK_* extern declarations */')
print()
print("/*")
print_group(result, location_xform, "Threads")
@@ -75,6 +75,9 @@ def print_c(
base: int
size: int
+ print("[[gnu::aligned]] void _bogus_aligned_fn(void) {};")
+ print("#define STACK_ALIGNED [[gnu::aligned(__alignof__(_bogus_aligned_fn))]]")
+
rows: list[CrRow] = []
mainrow: CrRow | None = None
for funcname, val in result.groups["Threads"].rows.items():
@@ -84,20 +87,20 @@ def print_c(
if name in ["main", "_entry_point"]:
mainrow = CrRow(name=name, cnt=1, base=base, size=size)
else:
- size = next_power_of_2(size + stack_guard_size) - stack_guard_size
+ size = lm_round_up(size + stack_guard_size, 512)
rows.append(CrRow(name=name, cnt=val.cnt, base=base, size=size))
- namelen = max(len(r.name) for r in rows)
+ namelen = max(len(f"{r.name}{r.cnt}" if r.cnt > 1 else r.name) for r in rows)
baselen = max(len(str(r.base)) for r in rows)
sizesum = sum(r.cnt * (r.size + stack_guard_size) for r in rows)
sizelen = len(str(max(sizesum, mainrow.size if mainrow else 0)))
def print_row(comment: bool, name: str, size: int, eqn: str | None = None) -> None:
- prefix = "const size_t CONFIG_COROUTINE_STACK_SIZE_"
+ prefix = "STACK_ALIGNED char COROUTINE_STACK_"
if comment:
print(f"/* {name}".ljust(len(prefix) + namelen), end="")
else:
print(f"{prefix}{name:<{namelen}}", end="")
- print(f" = {size:>{sizelen}};", end="")
+ print(f"[{size:>{sizelen}}];", end="")
if comment:
print(" */", end="")
elif eqn:
@@ -107,13 +110,15 @@ def print_c(
print()
for row in sorted(rows):
- print_row(
- False,
- row.name,
- row.size,
- f"LM_NEXT_POWER_OF_2({row.base:>{baselen}}+{intrstack}+{stack_guard_size})-{stack_guard_size}",
+ comment = (
+ f"LM_ROUND_UP({row.base:>{baselen}}+{intrstack}+{stack_guard_size}, 512)"
)
- print_row(True, "TOTAL (inc. stack guard)", sizesum)
+ if row.cnt > 1:
+ for i in range(row.cnt):
+ print_row(False, f"{row.name}{i}", row.size, comment)
+ else:
+ print_row(False, row.name, row.size, comment)
+ print_row(True, "TOTAL", sizesum)
if mainrow:
print_row(
True,
@@ -122,6 +127,19 @@ def print_c(
f" {mainrow.base:>{baselen}}+{intrstack}",
)
print()
+ for row in sorted(rows):
+ name = row.name
+ if row.cnt > 1:
+ name += "0"
+ print(f"char *const COROUTINE_STACK_{row.name}[{row.cnt}] = {{")
+ for i in range(row.cnt):
+ print(f"\tCOROUTINE_STACK_{row.name}{i},")
+ print("};")
+ print(
+ f"const size_t COROUTINE_STACK_{row.name}_len = sizeof(COROUTINE_STACK_{name});"
+ )
+
+ print()
print("/*")
print_group(result, location_xform, "Misc")
diff --git a/build-aux/measurestack/app_plugins.py b/build-aux/measurestack/app_plugins.py
index 6eeb35b..a921407 100644
--- a/build-aux/measurestack/app_plugins.py
+++ b/build-aux/measurestack/app_plugins.py
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
import re
+import subprocess
import typing
from . import analyze, util
@@ -13,13 +14,11 @@ from .util import synthetic_node
# pylint: disable=unused-variable
__all__ = [
"CmdPlugin",
- "LibObjPlugin",
"LibHWPlugin",
"LibCRPlugin",
"LibCRIPCPlugin",
"Lib9PPlugin",
"LibMiscPlugin",
- "PicoFmtPlugin",
"PicoSDKPlugin",
"TinyUSBDevicePlugin",
"NewlibPlugin",
@@ -40,32 +39,54 @@ class CmdPlugin:
def extra_nodes(self) -> typing.Collection[Node]:
return []
+ def mutate_node(self, node: Node) -> None:
+ pass
+
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None:
if "/3rd-party/" in loc:
return None
if "srv->auth" in line:
- return [], False
+ return [QName("__indirect_call_with_null_check:srv->auth")], False
if "srv->rootdir" in line:
return [QName("get_root")], False
+ if "/ihex.c" in loc:
+ if "self->handle_data" in line:
+ return [QName("flash_handle_ihex_data")], False
+ if "self->handle_eof" in line:
+ return [QName("flash_handle_ihex_eof")], False
+ if "self->handle_set_exec_start_lin" in line:
+ return [
+ QName(
+ "__indirect_call_with_null_check:self->handle_set_exec_start_lin"
+ )
+ ], False
+ if "self->handle_set_exec_start_seg" in line:
+ return [
+ QName(
+ "__indirect_call_with_null_check:self->handle_set_exec_start_seg"
+ )
+ ], False
return None
def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
return {}
-re_comment = re.compile(r"/\*.*?\*/")
-re_ws = re.compile(r"\s+")
-re_lo_iface = re.compile(r"^\s*#\s*define\s+(?P<name>\S+)_LO_IFACE")
-re_lo_func = re.compile(r"LO_FUNC *\([^,]*, *(?P<name>[^,) ]+) *[,)]")
-re_lo_implementation = re.compile(
- r"^LO_IMPLEMENTATION_[HC]\s*\(\s*(?P<iface>[^, ]+)\s*,\s*(?P<impl_typ>[^,]+)\s*,\s*(?P<impl_name>[^, ]+)\s*[,)].*"
-)
-re_call_objcall = re.compile(r"LO_CALL\((?P<obj>[^,]+), (?P<meth>[^,)]+)[,)].*")
-
+class LibMiscPlugin:
+ re_comment = re.compile(r"/\*.*?\*/")
+ re_ws = re.compile(r"\s+")
+ re_lo_iface = re.compile(r"^\s*#\s*define\s+(?P<name>\S+)_LO_IFACE")
+ re_lo_func = re.compile(r"LO_FUNC *\([^,]*, *(?P<name>[^,) ]+) *[,)]")
+ re_lo_implementation = re.compile(
+ r"^LO_IMPLEMENTATION_(?P<vis>H|C|STATIC)\s*\("
+ r"\s*(?P<iface>[^, ]+)\s*,"
+ r"\s*(?P<impl_typ>[^,]+)\s*,"
+ r"\s*(?P<impl_name>[^, ]+)\s*\)"
+ )
+ re_lo_call = re.compile(r".*\bLO_CALL\((?P<obj>[^,]+), (?P<meth>[^,)]+)[,)].*")
-class LibObjPlugin:
objcalls: dict[str, set[QName]] # method_name => {method_impls}
def __init__(self, arg_c_fnames: typing.Collection[str]) -> None:
@@ -73,16 +94,16 @@ class LibObjPlugin:
for fname in arg_c_fnames:
with open(fname, "r", encoding="utf-8") as fh:
while line := fh.readline():
- if m := re_lo_iface.match(line):
+ if m := self.re_lo_iface.match(line):
iface_name = m.group("name")
if iface_name not in ifaces:
ifaces[iface_name] = set()
while line.endswith("\\\n"):
line += fh.readline()
line = line.replace("\\\n", " ")
- line = re_comment.sub(" ", line)
- line = re_ws.sub(" ", line)
- for m2 in re_lo_func.finditer(line):
+ line = self.re_comment.sub(" ", line)
+ line = self.re_ws.sub(" ", line)
+ for m2 in self.re_lo_func.finditer(line):
ifaces[iface_name].add(m2.group("name"))
implementations: dict[str, set[str]] = {} # iface_name => {impl_names}
@@ -92,7 +113,7 @@ class LibObjPlugin:
with open(fname, "r", encoding="utf-8") as fh:
for line in fh:
line = line.strip()
- if m := re_lo_implementation.match(line):
+ if m := self.re_lo_implementation.match(line):
implementations[m.group("iface")].add(m.group("impl_name"))
objcalls: dict[str, set[QName]] = {} # method_name => {method_impls}
@@ -116,15 +137,25 @@ class LibObjPlugin:
def extra_nodes(self) -> typing.Collection[Node]:
return []
+ def mutate_node(self, node: Node) -> None:
+ pass
+
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None:
-
if "/3rd-party/" in loc:
return None
- if m := re_call_objcall.fullmatch(line):
- if m.group("meth") in self.objcalls:
- return self.objcalls[m.group("meth")], False
+ if m := self.re_lo_call.fullmatch(line):
+ meth = m.group("meth")
+ if meth in self.objcalls:
+ callees: typing.Collection[QName] = self.objcalls[meth]
+ if len(callees) == 0:
+ raise ValueError(f"{loc}: no implementors of {meth}")
+ if meth == "writev" and "lib9p/srv.c" in loc: # KLUDGE
+ callees = [
+ c for c in callees if c.base() != BaseName("rread_writev")
+ ]
+ return callees, False
return [
QName(f"__indirect_call:{m.group('obj')}.vtable->{m.group('meth')}")
], False
@@ -136,11 +167,11 @@ class LibObjPlugin:
class LibHWPlugin:
pico_platform: str
- libobj: LibObjPlugin
+ libmisc: LibMiscPlugin
- def __init__(self, arg_pico_platform: str, libobj: LibObjPlugin) -> None:
+ def __init__(self, arg_pico_platform: str, libmisc: LibMiscPlugin) -> None:
self.pico_platform = arg_pico_platform
- self.libobj = libobj
+ self.libmisc = libmisc
def is_intrhandler(self, name: QName) -> bool:
return name.base() in [
@@ -160,6 +191,9 @@ class LibHWPlugin:
def extra_nodes(self) -> typing.Collection[Node]:
return []
+ def mutate_node(self, node: Node) -> None:
+ pass
+
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None:
@@ -174,11 +208,14 @@ class LibHWPlugin:
"io_readwritev",
]:
if f"{fn}(" in line:
- return self.libobj.indirect_callees(loc, f"LO_CALL(x, {fn[3:]})")
- if "io_read(" in line:
- return self.libobj.indirect_callees(loc, "LO_CALL(x, readv)")
- if "io_writev(" in line:
- return self.libobj.indirect_callees(loc, "LO_CALL(x, writev)")
+ return self.libmisc.indirect_callees(loc, f"LO_CALL(x, {fn[3:]})")
+ for fn in [
+ "io_read",
+ "io_write",
+ ]:
+ if f"{fn}(" in line:
+ # Like above, but add a "v" to the end.
+ return self.libmisc.indirect_callees(loc, f"LO_CALL(x, {fn[3:]}v)")
if "trigger->cb(trigger->cb_arg)" in line:
ret = [
QName("alarmclock_sleep_intrhandler"),
@@ -218,6 +255,9 @@ class LibCRPlugin:
def extra_nodes(self) -> typing.Collection[Node]:
return []
+ def mutate_node(self, node: Node) -> None:
+ pass
+
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None:
@@ -240,6 +280,9 @@ class LibCRIPCPlugin:
def extra_nodes(self) -> typing.Collection[Node]:
return []
+ def mutate_node(self, node: Node) -> None:
+ pass
+
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None:
@@ -256,38 +299,18 @@ class LibCRIPCPlugin:
return {}
-re_tmessage_handler = re.compile(
- r"^\s*\[LIB9P_TYP_T[^]]+\]\s*=\s*\(tmessage_handler\)\s*(?P<handler>\S+),\s*$"
-)
-re_lib9p_msg_entry = re.compile(r"^\s*_MSG_(?:[A-Z]+)\((?P<typ>\S+)\),$")
-re_lib9p_caller = re.compile(
- r"^lib9p_(?P<grp>[TR])msg_(?P<meth>validate|unmarshal|marshal)$"
-)
-re_lib9p_callee = re.compile(
- r"^(?P<meth>validate|unmarshal|marshal)_(?P<msg>(?P<grp>[TR]).*)$"
-)
-
-
class Lib9PPlugin:
- tmessage_handlers: set[QName] | None
+ re_lib9p_msg_entry = re.compile(r"^\s*_MSG\((?P<typ>\S+)\),$")
+
lib9p_msgs: set[str]
- _CONFIG_9P_NUM_SOCKS: int | None
- CONFIG_9P_SRV_MAX_REQS: int | None
- CONFIG_9P_SRV_MAX_DEPTH: int | None
- formatters: typing.Collection[BaseName]
+ _CONFIG_9P_MAX_CONNS: int | None
+ _CONFIG_9P_MAX_REQS: int | None
def __init__(
self,
arg_base_dir: str,
arg_c_fnames: typing.Collection[str],
- libobj_plugin: LibObjPlugin,
) -> None:
- self.formatters = {
- x.base()
- for x in libobj_plugin.objcalls["format"]
- if str(x.base()).startswith("lib9p_")
- }
-
# Find filenames #######################################################
def _is_config_h(fname: str) -> bool:
@@ -305,55 +328,45 @@ class Lib9PPlugin:
)
lib9p_generated_c_fname = util.get_zero_or_one(
- lambda fname: fname.endswith("lib9p/9p.generated.c"), arg_c_fnames
+ lambda fname: fname.endswith("lib9p/core_generated.c"), arg_c_fnames
)
# Read config ##########################################################
def config_h_get(varname: str) -> int | None:
if config_h_fname:
- with open(config_h_fname, "r", encoding="utf-8") as fh:
- for line in fh:
- line = line.rstrip()
- if line.startswith("#define"):
- parts = line.split()
- if parts[1] == varname:
- return int(parts[2])
+ line = subprocess.run(
+ ["cpp"],
+ input=f'#include "{config_h_fname}"\n{varname}\n',
+ check=True,
+ capture_output=True,
+ encoding="utf-8",
+ ).stdout.split("\n")[-2]
+ return int(eval(line)) # pylint: disable=eval-used
return None
- self._CONFIG_9P_NUM_SOCKS = config_h_get("_CONFIG_9P_NUM_SOCKS")
- self.CONFIG_9P_SRV_MAX_REQS = config_h_get("CONFIG_9P_SRV_MAX_REQS")
- self.CONFIG_9P_SRV_MAX_DEPTH = config_h_get("CONFIG_9P_SRV_MAX_DEPTH")
+ self._CONFIG_9P_MAX_CONNS = config_h_get("_CONFIG_9P_MAX_CONNS")
+ self._CONFIG_9P_MAX_REQS = config_h_get("_CONFIG_9P_MAX_REQS")
# Read sources #########################################################
- tmessage_handlers: set[QName] | None = None
- if lib9p_srv_c_fname:
- tmessage_handlers = set()
- with open(lib9p_srv_c_fname, "r", encoding="utf-8") as fh:
- for line in fh:
- line = line.rstrip()
- if m := re_tmessage_handler.fullmatch(line):
- tmessage_handlers.add(QName(m.group("handler")))
- self.tmessage_handlers = tmessage_handlers
-
lib9p_msgs: set[str] = set()
if lib9p_generated_c_fname:
with open(lib9p_generated_c_fname, "r", encoding="utf-8") as fh:
for line in fh:
line = line.rstrip()
- if m := re_lib9p_msg_entry.fullmatch(line):
+ if m := self.re_lib9p_msg_entry.fullmatch(line):
typ = m.group("typ")
lib9p_msgs.add(typ)
self.lib9p_msgs = lib9p_msgs
def thread_count(self, name: QName) -> int:
- assert self._CONFIG_9P_NUM_SOCKS
- assert self.CONFIG_9P_SRV_MAX_REQS
+ assert self._CONFIG_9P_MAX_CONNS
+ assert self._CONFIG_9P_MAX_REQS
if "read" in str(name.base()):
- return self._CONFIG_9P_NUM_SOCKS
+ return self._CONFIG_9P_MAX_CONNS
if "write" in str(name.base()):
- return self._CONFIG_9P_NUM_SOCKS * self.CONFIG_9P_SRV_MAX_REQS
+ return self._CONFIG_9P_MAX_REQS
return 1
def is_intrhandler(self, name: QName) -> bool:
@@ -368,211 +381,53 @@ class Lib9PPlugin:
def extra_nodes(self) -> typing.Collection[Node]:
return []
- def indirect_callees(
- self, loc: str, line: str
- ) -> tuple[typing.Collection[QName], bool] | None:
- if "/3rd-party/" in loc:
- return None
- if (
- self.tmessage_handlers
- and "/srv.c:" in loc
- and "tmessage_handlers[typ](" in line
- ):
- # Functions for disabled protocol extensions will be missing.
- return self.tmessage_handlers, True
- if self.lib9p_msgs and "/9p.c:" in loc:
- for meth in ["validate", "unmarshal", "marshal"]:
- if line.startswith(f"tentry.{meth}("):
- # Functions for disabled protocol extensions will be missing.
- return [QName(f"{meth}_{msg}") for msg in self.lib9p_msgs], True
- return None
-
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
- ret: dict[BaseName, analyze.SkipModel] = {
- BaseName("_lib9p_validate"): analyze.SkipModel(
- 2,
- self._skipmodel__lib9p_validate_unmarshal_marshal,
- ),
- BaseName("_lib9p_unmarshal"): analyze.SkipModel(
- 2,
- self._skipmodel__lib9p_validate_unmarshal_marshal,
- ),
- BaseName("_lib9p_marshal"): analyze.SkipModel(
- 2,
- self._skipmodel__lib9p_validate_unmarshal_marshal,
- ),
- BaseName("_vfctprintf"): analyze.SkipModel(
- self.formatters, self._skipmodel__vfctprintf
- ),
- }
- if isinstance(self.CONFIG_9P_SRV_MAX_DEPTH, int):
- ret[BaseName("srv_util_pathfree")] = analyze.SkipModel(
- self.CONFIG_9P_SRV_MAX_DEPTH,
- self._skipmodel_srv_util_pathfree,
- )
- return ret
-
- def _skipmodel__lib9p_validate_unmarshal_marshal(
- self, chain: typing.Sequence[QName], call: QName
- ) -> bool:
- m_caller = re_lib9p_caller.fullmatch(str(chain[-2].base()))
- assert m_caller
-
- m_callee = re_lib9p_callee.fullmatch(str(call.base()))
- if not m_callee:
- return False
- return m_caller.group("grp") != m_callee.group("grp")
-
- def _skipmodel_srv_util_pathfree(
- self, chain: typing.Sequence[QName], call: QName
- ) -> bool:
- assert isinstance(self.CONFIG_9P_SRV_MAX_DEPTH, int)
- if call.base() == BaseName("srv_util_pathfree"):
- return len(chain) >= self.CONFIG_9P_SRV_MAX_DEPTH and all(
- c.base() == BaseName("srv_util_pathfree")
- for c in chain[-self.CONFIG_9P_SRV_MAX_DEPTH :]
- )
- return False
-
- def _skipmodel__vfctprintf(
- self, chain: typing.Sequence[QName], call: QName
- ) -> bool:
- if call.base() == BaseName("libfmt_conv_formatter"):
- return any(c.base() in self.formatters for c in chain)
- return False
-
-
-class LibMiscPlugin:
- def is_intrhandler(self, name: QName) -> bool:
- return False
-
- def init_array(self) -> typing.Collection[QName]:
- return []
-
- def extra_includes(self) -> typing.Collection[BaseName]:
- return []
-
- def extra_nodes(self) -> typing.Collection[Node]:
- return []
-
- def indirect_callees(
- self, loc: str, line: str
- ) -> tuple[typing.Collection[QName], bool] | None:
- return None
-
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
- return {
- BaseName("__assert_msg_fail"): analyze.SkipModel(
- {BaseName("__assert_msg_fail")}, self._skipmodel___assert_msg_fail
- ),
- }
-
- def _skipmodel___assert_msg_fail(
- self, chain: typing.Sequence[QName], call: QName
- ) -> bool:
- if call.base() in [BaseName("__lm_printf"), BaseName("__lm_light_printf")]:
- return any(
- c.base() == BaseName("__assert_msg_fail") for c in reversed(chain[:-1])
- )
- return False
-
-
-class PicoFmtPlugin:
- known_fct: dict[BaseName, BaseName]
-
- def __init__(self, arg_pico_platform: str) -> None:
- self.known_fct = {
- # pico_fmt
- BaseName("fmt_vsnprintf"): BaseName("_out_buffer"),
- }
- match arg_pico_platform:
- case "rp2040":
- self.known_fct.update(
- {
- # pico_stdio
- BaseName("__wrap_vprintf"): BaseName("stdio_buffered_printer"),
- BaseName("stdio_vprintf"): BaseName("stdio_buffered_printer"),
- # libfmt
- BaseName("__lm_light_printf"): BaseName("libfmt_light_fct"),
- }
- )
- case "host":
- self.known_fct.update(
- {
- # libfmt
- BaseName("__lm_printf"): BaseName("libfmt_libc_fct"),
- BaseName("__lm_light_printf"): BaseName("libfmt_libc_fct"),
- }
- )
-
- def is_intrhandler(self, name: QName) -> bool:
- return False
-
- def init_array(self) -> typing.Collection[QName]:
- return []
-
- def extra_includes(self) -> typing.Collection[BaseName]:
- return []
+ def mutate_node(self, node: Node) -> None:
+ pass
- def extra_nodes(self) -> typing.Collection[Node]:
- return []
+ re_table_call = re.compile(
+ r"\s*_lib9p_(?P<meth>validate|unmarshal|marshal)\(.*(?P<grp>[RT])msg.*\);\s*"
+ )
+ re_print_call = re.compile(r".*lib9p_table_msg.*\.print\(.*")
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None:
- if "/3rd-party/pico-fmt/" not in loc:
+ if "/3rd-party/" in loc:
return None
- if "/printf.c:" in loc:
- m = util.re_call_other.fullmatch(line)
- call: str | None = m.group("func") if m else None
- if "->fct" in line:
- return [x.as_qname() for x in self.known_fct.values()], False
- if "specifier_table" in line:
+ if self.lib9p_msgs and "lib9p/core.c:" in loc:
+ if m := self.re_table_call.fullmatch(line):
+ meth = m.group("meth")
+ grp = m.group("grp")
+ # Functions for disabled protocol extensions will be missing.
return [
- # pico-fmt
- QName("conv_sint"),
- QName("conv_uint"),
- # QName("conv_double"),
- QName("conv_char"),
- QName("conv_str"),
- QName("conv_ptr"),
- QName("conv_pct"),
- # libfmt
- QName("libfmt_conv_formatter"),
- QName("libfmt_conv_quote"),
- ], False
+ QName(f"{meth}_{msg}")
+ for msg in self.lib9p_msgs
+ if msg.startswith(grp)
+ ], True
+ if self.re_print_call.fullmatch(line):
+ # Functions for disabled protocol extensions will be missing.
+ return [QName(f"fmt_print_{msg}") for msg in self.lib9p_msgs], True
+ if "lib9p/srv.c:" in loc:
+ if "srv->msglog(" in line:
+ # Actual ROMs shouldn't set this, and so will be missing on rp2040 builds.
+ return [QName("log_msg")], True
return None
def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
- ret: dict[BaseName, analyze.SkipModel] = {
- BaseName("fmt_state_putchar"): analyze.SkipModel(
- self.known_fct.keys(), self._skipmodel_fmt_state_putchar
- ),
- }
- return ret
-
- def _skipmodel_fmt_state_putchar(
- self, chain: typing.Sequence[QName], call: QName
- ) -> bool:
- if call.base() in self.known_fct.values():
- fct: BaseName | None = None
- for pcall in reversed(chain):
- if pcall.base() in self.known_fct:
- fct = self.known_fct[pcall.base()]
- return call.base() != fct
- return True
- return False
+ return {}
class PicoSDKPlugin:
get_init_array: typing.Callable[[], typing.Collection[QName]]
app_init_array: typing.Collection[QName] | None
app_preinit_array: typing.Collection[QName]
+ _PICO_PANIC_FUNCTION: str | None
def __init__(
self,
*,
get_init_array: typing.Callable[[], typing.Collection[QName]],
+ PICO_PANIC_FUNCTION: str | None,
) -> None:
# grep for '__attribute__((constructor))' / '[[gnu::constructor]]'.
self.get_init_array = get_init_array
@@ -606,6 +461,8 @@ class PicoSDKPlugin:
QName("runtime_init_install_ram_vector_table"),
]
+ self._PICO_PANIC_FUNCTION = PICO_PANIC_FUNCTION
+
def is_intrhandler(self, name: QName) -> bool:
return name.base() in [
BaseName("isr_invalid"),
@@ -642,6 +499,8 @@ class PicoSDKPlugin:
return [QName("rom_func_lookup(ROM_FUNC_FLASH_RANGE_ERASE)")], False
case "flash_flush_cache_func":
return [QName("rom_func_lookup(ROM_FUNC_FLASH_FLUSH_CACHE)")], False
+ case "flash_range_program_func":
+ return [QName("rom_func_lookup(ROM_FUNC_FLASH_RANGE_PROGRAM)")], False
case "rom_table_lookup":
return [QName("rom_hword_as_ptr(BOOTROM_TABLE_LOOKUP_OFFSET)")], False
if "/flash.c:" in loc and "boot2_copyout" in line:
@@ -662,7 +521,7 @@ class PicoSDKPlugin:
case "in_chars":
return [QName("stdio_uart_in_chars")], False
if "/newlib_interface.c:" in loc:
- if line == "*p)();":
+ if line == "(*p)();":
if self.app_init_array is None:
self.app_init_array = self.get_init_array()
return self.app_init_array, False
@@ -676,11 +535,6 @@ class PicoSDKPlugin:
def extra_nodes(self) -> typing.Collection[Node]:
ret = []
- # src/rp2_common/hardware_divider/include/hardware/divider_helper.S
- save_div_state_and_lr = 5 * 4
- # src/rp2_common/pico_divider/divider_hardware.S
- save_div_state_and_lr_64 = 5 * 4
-
# src/src/rp2_common/pico_crt0/crt0.S
for n in range(32):
ret += [synthetic_node(f"isr_irq{n}", 0, {"__unhandled_user_irq"})]
@@ -696,10 +550,16 @@ class PicoSDKPlugin:
synthetic_node("_reset_handler", 0, {"runtime_init", "main", "exit"}),
]
+ # src/rp2_common/pico_int64_ops/pico_int64_ops_aeabi.S
ret += [
- # src/rp2_common/pico_int64_ops/pico_int64_ops_aeabi.S
synthetic_node("__wrap___aeabi_lmul", 4),
- # src/rp2_common/pico_divider/divider_hardware.S
+ ]
+
+ # src/rp2_common/hardware_divider/include/hardware/divider_helper.S
+ save_div_state_and_lr = 5 * 4
+ # src/rp2_common/pico_divider/divider_hardware.S
+ save_div_state_and_lr_64 = 5 * 4
+ ret += [
# s32 aliases
synthetic_node("div_s32s32", 0, {"divmod_s32s32"}),
synthetic_node("__wrap___aeabi_idiv", 0, {"divmod_s32s32"}),
@@ -754,7 +614,10 @@ class PicoSDKPlugin:
# *_rem
synthetic_node("divod_s64s64_rem", 2 * 4, {"divmod_s64s64"}),
synthetic_node("divod_u64u64_rem", 2 * 4, {"divmod_u64u64"}),
- # src/rp2_common/pico_mem_ops/mem_ops_aeabi.S
+ ]
+
+ # src/rp2_common/pico_mem_ops/mem_ops_aeabi.S
+ ret += [
synthetic_node("__aeabi_mem_init", 0, {"rom_funcs_lookup"}),
synthetic_node(
"__wrap___aeabi_memset", 0, {"rom_func_lookup(ROM_FUNC_MEMSET)"}
@@ -770,7 +633,10 @@ class PicoSDKPlugin:
synthetic_node("__wrap_memset", 0, {"rom_func_lookup(ROM_FUNC_MEMSET)"}),
synthetic_node("__wrap___aeabi_memcpy", 0, {"__wrap_memcpy"}),
synthetic_node("__wrap_memcpy", 0, {"rom_func_lookup(ROM_FUNC_MEMCPY)"}),
- # src/rp2_common/pico_bit_ops/bit_ops_aeabi.S
+ ]
+
+ # src/rp2_common/pico_bit_ops/bit_ops_aeabi.S
+ ret += [
synthetic_node("__aeabi_bits_init", 0, {"rom_funcs_lookup"}),
synthetic_node("__wrap___clz", 0, {"__wrap___clzsi2"}),
synthetic_node("__wrap___clzl", 0, {"__wrap___clzsi2"}),
@@ -790,29 +656,46 @@ class PicoSDKPlugin:
synthetic_node("reverse32", 0, {"rom_func_lookup(ROM_FUNC_REVERSE32)"}),
synthetic_node("__revll", 0, {"reverse64"}),
synthetic_node("reverse64", 3 * 4, {"rom_func_lookup(ROM_FUNC_REVERSE32)"}),
- # src/rp2040/boot_stage2/boot2_${name,,}.S for name=W25Q080,
- # controlled by `#define PICO_BOOT_STAGE2_{name} 1` in
- # src/boards/include/boards/pico.h
+ ]
+
+ # src/rp2040/boot_stage2/boot2_${name,,}.S for name=W25Q080,
+ # controlled by `#define PICO_BOOT_STAGE2_{name} 1` in
+ # src/boards/include/boards/pico.h
+ ret += [
# synthetic_node("_stage2_boot", 0), # TODO
- # https://github.com/raspberrypi/pico-bootrom-rp2040
+ ]
+
+ # https://github.com/raspberrypi/pico-bootrom-rp2040
+ ret += [
# synthetic_node("rom_func_lookup(ROM_FUNC_CONNECT_INTERNAL_FLASH)", 0), # TODO
# synthetic_node("rom_func_lookup(ROM_FUNC_FLASH_EXIT_XIP)", 0), # TODO
# synthetic_node("rom_func_lookup(ROM_FUNC_FLASH_FLUSH_CACHE)", 0), # TODO
# synthetic_node("rom_hword_as_ptr(BOOTROM_TABLE_LOOKUP_OFFSET)", 0), # TODO
]
- return ret
+ return ret
-re_tud_class = re.compile(
- r"^\s*#\s*define\s+(?P<k>CFG_TUD_(?:\S{3}|AUDIO|VIDEO|MIDI|VENDOR|USBTMC|DFU_RUNTIME|ECM_RNDIS))\s+(?P<v>\S+).*"
-)
-re_tud_entry = re.compile(r"^\s+\.(?P<meth>\S+)\s*=\s*(?P<impl>[a-zA-Z0-9_]+)(?:,.*)?")
-re_tud_if1 = re.compile(r"^\s*#\s*if (\S+)\s*")
-re_tud_if2 = re.compile(r"^\s*#\s*if (\S+)\s*\|\|\s*(\S+)\s*")
-re_tud_endif = re.compile(r"^\s*#\s*endif\s*")
+ def mutate_node(self, node: Node) -> None:
+ if self._PICO_PANIC_FUNCTION and node.funcname.base() == BaseName("panic"):
+ # inline assembly from src/rp2_common/pico_platform_panic/panic.c
+ assert node.nstatic == 0
+ assert node.ndynamic == 0
+ assert len(node.calls) == 0
+ node.nstatic += 4
+ node.calls[QName(self._PICO_PANIC_FUNCTION)] = False
class TinyUSBDevicePlugin:
+ re_tud_class = re.compile(
+ r"^\s*#\s*define\s+(?P<k>CFG_TUD_(?:\S{3}|AUDIO|VIDEO|MIDI|VENDOR|USBTMC|DFU_RUNTIME|ECM_RNDIS))\s+(?P<v>\S+).*"
+ )
+ re_tud_entry = re.compile(
+ r"^\s+\.(?P<meth>\S+)\s*=\s*(?P<impl>[a-zA-Z0-9_]+)(?:,.*)?"
+ )
+ re_tud_if1 = re.compile(r"^\s*#\s*if (\S+)\s*")
+ re_tud_if2 = re.compile(r"^\s*#\s*if (\S+)\s*\|\|\s*(\S+)\s*")
+ re_tud_endif = re.compile(r"^\s*#\s*endif\s*")
+
tud_drivers: dict[str, set[QName]] # method_name => {method_impls}
def __init__(self, arg_c_fnames: typing.Collection[str]) -> None:
@@ -834,7 +717,7 @@ class TinyUSBDevicePlugin:
in_table = False
for line in fh:
line = line.rstrip()
- if m := re_tud_class.fullmatch(line):
+ if m := self.re_tud_class.fullmatch(line):
k = m.group("k")
v = m.group("v")
tusb_config[k] = bool(int(v))
@@ -846,13 +729,13 @@ class TinyUSBDevicePlugin:
for line in fh:
line = line.rstrip()
if in_table:
- if m := re_tud_if1.fullmatch(line):
+ if m := self.re_tud_if1.fullmatch(line):
enabled = tusb_config[m.group(1)]
- elif m := re_tud_if2.fullmatch(line):
+ elif m := self.re_tud_if2.fullmatch(line):
enabled = tusb_config[m.group(1)] or tusb_config[m.group(2)]
- elif re_tud_endif.fullmatch(line):
+ elif self.re_tud_endif.fullmatch(line):
enabled = True
- if m := re_tud_entry.fullmatch(line):
+ if m := self.re_tud_entry.fullmatch(line):
meth = m.group("meth")
impl = m.group("impl")
if meth == "name" or not enabled:
@@ -879,6 +762,9 @@ class TinyUSBDevicePlugin:
def extra_nodes(self) -> typing.Collection[Node]:
return []
+ def mutate_node(self, node: Node) -> None:
+ pass
+
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None:
@@ -895,10 +781,18 @@ class TinyUSBDevicePlugin:
ret.update(self.tud_drivers["control_xfer_cb"])
return ret, False
if call.startswith("driver->"):
- return self.tud_drivers[call[len("driver->") :]], False
+ meth = call[len("driver->") :]
+ callees = self.tud_drivers[meth]
+ if len(callees) == 0:
+ if meth == "sof":
+ return [QName(f"__indirect_call_with_null_check:{call}")], False
+ raise ValueError(f"{loc}: no implementors of {meth}")
+ return callees, False
if call == "event.func_call.func":
# callback from usb_defer_func()
- return [], False
+ return [
+ QName("__indirect_call_with_null_check:event.func_call.func")
+ ], False
return None
@@ -920,11 +814,14 @@ class NewlibPlugin:
]
def extra_nodes(self) -> typing.Collection[Node]:
+ ret = []
+
# This is accurate to
# /usr/arm-none-eabi/lib/thumb/v6-m/nofp/libg.a as of
# Parabola's arm-none-eabi-newlib 4.5.0.20241231-1.
- return [
- # malloc
+
+ # malloc
+ ret += [
synthetic_node("free", 8, {"_free_r"}),
synthetic_node("malloc", 8, {"_malloc_r"}),
synthetic_node("realloc", 8, {"_realloc_r"}),
@@ -934,20 +831,27 @@ class NewlibPlugin:
# synthetic_node("_malloc_r", 0), # TODO
# synthetic_node("_realloc_r", 0), # TODO
# synthetic_node("_memalign_r", 0), # TODO
- # execution
+ ]
+
+ # execution
+ ret += [
synthetic_node("raise", 16, {"_getpid_r"}),
synthetic_node("abort", 8, {"raise", "_exit"}),
synthetic_node("longjmp", 0),
synthetic_node("setjmp", 0),
- # <strings.h>
+ ]
+
+ # <strings.h>
+ ret += [
synthetic_node("memcmp", 12),
- synthetic_node("memcpy", 28),
- synthetic_node("memset", 20),
synthetic_node("strcmp", 16),
synthetic_node("strlen", 8),
synthetic_node("strncpy", 16),
synthetic_node("strnlen", 8),
- # other
+ ]
+
+ # other
+ ret += [
synthetic_node("__errno", 0),
synthetic_node("_getpid_r", 8, {"_getpid"}),
synthetic_node("random", 8),
@@ -964,6 +868,11 @@ class NewlibPlugin:
synthetic_node("__libc_fini_array", 16, {"_fini"}),
]
+ return ret
+
+ def mutate_node(self, node: Node) -> None:
+ pass
+
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None:
@@ -978,10 +887,7 @@ class LibGCCPlugin:
return False
def init_array(self) -> typing.Collection[QName]:
- return [
- QName("libfmt_install_formatter"),
- QName("libfmt_install_quote"),
- ]
+ return []
def extra_includes(self) -> typing.Collection[BaseName]:
return []
@@ -997,6 +903,9 @@ class LibGCCPlugin:
synthetic_node("_fini", 24),
]
+ def mutate_node(self, node: Node) -> None:
+ pass
+
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None:
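
Lib9PPlugin now reads config values by running the C preprocessor instead of grepping for #define, so values written as expressions or via other macros still resolve. A standalone sketch (the path and macro name in the usage comment are hypothetical):

    import subprocess

    def config_h_get(config_h_fname: str, varname: str) -> int:
        out = subprocess.run(
            ["cpp"],
            input=f'#include "{config_h_fname}"\n{varname}\n',
            check=True,
            capture_output=True,
            encoding="utf-8",
        ).stdout
        # cpp echoes the expanded macro as the last output line; it may be an
        # expression such as "(8+1)", hence the eval() in the real code.
        return int(eval(out.split("\n")[-2]))  # eval of build-controlled input

    # e.g.: config_h_get("cmd/srv9p/config/config.h", "_CONFIG_9P_MAX_REQS")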
diff --git a/build-aux/measurestack/test_analyze.py b/build-aux/measurestack/test_analyze.py
index ff1732d..df205e8 100644
--- a/build-aux/measurestack/test_analyze.py
+++ b/build-aux/measurestack/test_analyze.py
@@ -5,17 +5,20 @@
# pylint: disable=unused-variable
+import re
+import typing
+
import pytest
-from .analyze import BaseName, QName
+from . import analyze, testutil, util
def test_name_base() -> None:
- assert QName("foo.c:bar.1").base() == BaseName("bar")
+ assert analyze.QName("foo.c:bar.1").base() == analyze.BaseName("bar")
def test_name_pretty() -> None:
- name = QName("foo.c:bar.1")
+ name = analyze.QName("foo.c:bar.1")
assert f"{name}" == "QName('foo.c:bar.1')"
assert f"{name.base()}" == "BaseName('bar')"
assert f"{[name]}" == "[QName('foo.c:bar.1')]"
@@ -23,7 +26,7 @@ def test_name_pretty() -> None:
def test_name_eq() -> None:
- name = QName("foo.c:bar.1")
+ name = analyze.QName("foo.c:bar.1")
with pytest.raises(AssertionError) as e:
if name == "foo":
pass
@@ -32,3 +35,47 @@ def test_name_eq() -> None:
if name.base() == "foo":
pass
assert "comparing BaseName with str" in str(e)
+
+
+def test_max_call_depth() -> None:
+ graph: typing.Sequence[tuple[str, typing.Collection[str]]] = [
+ ("a", {"b"}), # 1
+ ("b", {"c"}), # 2
+ ("c", {"d"}), # 3
+ ("d", {"e"}), # 4
+ ("e", {}), # 5
+ ]
+
+ testcases: dict[int, bool] = {
+ 1: True,
+ 2: True,
+ 3: True,
+ 4: True,
+ 5: False,
+ 6: False,
+ 7: False,
+ }
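+    # The chain a->b->c->d->e is 5 functions deep, so analyze() should raise
+    # "max call depth exceeded" for any cfg_max_call_depth below 5.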
+
+ def test_filter(name: analyze.QName) -> tuple[int, bool]:
+ if str(name.base()) in ["a"]:
+ return 1, True
+ return 0, False
+
+ def doit(depth: int, graph_plugin: util.Plugin) -> None:
+ analyze.analyze(
+ ci_fnames=[],
+ app_func_filters={"Main": test_filter},
+ app=util.PluginApplication(testutil.nop_location_xform, [graph_plugin]),
+ cfg_max_call_depth=depth,
+ )
+
+ pat = re.compile("^max call depth exceeded: ")
+
+ for depth, should_fail in testcases.items():
+ graph_plugin = testutil.GraphProviderPlugin(depth, graph)
+
+ if should_fail:
+ with pytest.raises(ValueError, match=pat):
+ doit(depth, graph_plugin)
+ else:
+ doit(depth, graph_plugin)
diff --git a/build-aux/measurestack/testutil.py b/build-aux/measurestack/testutil.py
new file mode 100644
index 0000000..3c32134
--- /dev/null
+++ b/build-aux/measurestack/testutil.py
@@ -0,0 +1,134 @@
+# build-aux/measurestack/testutil.py - Utilities for writing tests
+#
+# Copyright (C) 2025 Luke T. Shumaker <lukeshu@lukeshu.com>
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+import typing
+
+from . import analyze, util
+
+# pylint: disable=unused-variable
+__all__ = [
+ "aprime_gen",
+ "aprime_decompose",
+ "NopPlugin",
+ "GraphProviderPlugin",
+ "nop_location_xform",
+]
+
+
+def aprime_gen(l: int, n: int) -> typing.Sequence[int]:
+ """Return an `l`-length sequence of nonnegative
+ integers such that any `n`-length-or-shorter combination of
+ members with repeats allowed can be uniquely identified by its
+ sum.
+
+ (If that were "product" instead of "sum", the obvious solution
+ would be the first `l` primes.)
+
+ """
+ seq = [1]
+ while len(seq) < l:
+ x = seq[-1] * n + 1
+ seq.append(x)
+ return seq
+
+
+def aprime_decompose(
+ aprimes: typing.Sequence[int], tot: int
+) -> tuple[typing.Collection[int], typing.Collection[int]]:
+ ret_idx = []
+ ret_val = []
+ while tot:
+ idx = max(i for i in range(len(aprimes)) if aprimes[i] <= tot)
+ val = aprimes[idx]
+ ret_idx.append(idx)
+ ret_val.append(val)
+ tot -= val
+ return ret_idx, ret_val
+
+
+class NopPlugin:
+ def is_intrhandler(self, name: analyze.QName) -> bool:
+ return False
+
+ def init_array(self) -> typing.Collection[analyze.QName]:
+ return []
+
+ def extra_includes(self) -> typing.Collection[analyze.BaseName]:
+ return []
+
+ def indirect_callees(
+ self, loc: str, line: str
+ ) -> tuple[typing.Collection[analyze.QName], bool] | None:
+ return None
+
+ def skipmodels(self) -> dict[analyze.BaseName, analyze.SkipModel]:
+ return {}
+
+ def extra_nodes(self) -> typing.Collection[analyze.Node]:
+ return []
+
+ def mutate_node(self, node: analyze.Node) -> None:
+ pass
+
+
+class GraphProviderPlugin(NopPlugin):
+ _nodes: typing.Sequence[analyze.Node]
+
+ def __init__(
+ self,
+ max_call_depth: int,
+ graph: typing.Sequence[tuple[str, typing.Collection[str]]],
+ ) -> None:
+ seq = aprime_gen(len(graph), max_call_depth)
+ nodes: list[analyze.Node] = []
+ for i, (name, calls) in enumerate(graph):
+ nodes.append(util.synthetic_node(name, seq[i], calls))
+ assert (
+ len(graph)
+ == len(nodes)
+ == len(set(n.nstatic for n in nodes))
+ == len(set(str(n.funcname.base()) for n in nodes))
+ )
+ self._nodes = nodes
+
+ def extra_nodes(self) -> typing.Collection[analyze.Node]:
+ return self._nodes
+
+ def decode_nstatic(self, tot: int) -> typing.Collection[str]:
+ idxs, _ = aprime_decompose([n.nstatic for n in self._nodes], tot)
+ return [str(self._nodes[i].funcname.base()) for i in idxs]
+
+ def encode_nstatic(self, calls: typing.Collection[str]) -> int:
+ tot = 0
+ d: dict[str, int] = {}
+ for node in self._nodes:
+ d[str(node.funcname.base())] = node.nstatic
+ print(d)
+ for call in calls:
+ tot += d[call]
+ return tot
+
+ def sorted_calls(self, calls: typing.Collection[str]) -> typing.Sequence[str]:
+ d: dict[str, int] = {}
+ for node in self._nodes:
+ d[str(node.funcname.base())] = node.nstatic
+
+ def k(call: str) -> int:
+ return d[call]
+
+ return sorted(calls, key=k)
+
+ def assert_nstatic(self, act_tot: int, exp_calls: typing.Collection[str]) -> None:
+ exp_tot = self.encode_nstatic(exp_calls)
+ if act_tot != exp_tot:
+ act_str = f"{act_tot}: {self.sorted_calls(self.decode_nstatic(act_tot))}"
+ exp_str = f"{exp_tot}: {self.sorted_calls(exp_calls)}"
+ assert (
+ False
+ ), f"act:{act_tot} != exp:{exp_tot}\n\t-exp = {exp_str}\n\t+act = {act_str}"
+
+
+def nop_location_xform(loc: str) -> str:
+ return loc
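The aprime_gen/aprime_decompose pair above is what lets GraphProviderPlugin turn a single nstatic total back into the set of functions that produced it. A minimal sketch of that round trip, assuming build-aux/ is on sys.path so that the measurestack package imports cleanly; the two-node graph and the totals are made up for illustration:

    from measurestack import testutil

    # aprime_gen(l, n): l sizes whose sums over at most n picks are unique.
    sizes = testutil.aprime_gen(3, 2)                  # [1, 3, 7]
    idxs, vals = testutil.aprime_decompose(sizes, 10)  # 10 can only be 7 + 3
    assert sorted(vals) == [3, 7]

    # GraphProviderPlugin relies on that: each synthetic node gets a unique
    # "aprime" nstatic, so an nstatic total decodes back to the functions
    # on the path.
    plugin = testutil.GraphProviderPlugin(2, [("a", {"b"}), ("b", set())])
    total = plugin.encode_nstatic(["a", "b"])
    assert sorted(plugin.decode_nstatic(total)) == ["a", "b"]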
diff --git a/build-aux/measurestack/util.py b/build-aux/measurestack/util.py
index 47b2617..c94ce07 100644
--- a/build-aux/measurestack/util.py
+++ b/build-aux/measurestack/util.py
@@ -7,7 +7,7 @@ import re
import typing
from . import analyze, vcg
-from .analyze import BaseName, Node, QName
+from .analyze import BaseName, Node, QName, maybe_sorted
# pylint: disable=unused-variable
__all__ = [
@@ -32,7 +32,7 @@ def synthetic_node(
n.nstatic = nstatic
n.ndynamic = 0
- n.calls = dict((QName(c), False) for c in calls)
+ n.calls = dict((QName(c), False) for c in maybe_sorted(calls))
return n
@@ -46,9 +46,9 @@ def read_source(location: str) -> str:
raise ValueError(f"unexpected label value {location!r}")
filename = m.group("filename")
row = int(m.group("row")) - 1
- col = int(m.group("col")) - 1
+ # col = int(m.group("col")) - 1
with open(filename, "r", encoding="utf-8") as fh:
- return fh.readlines()[row][col:].rstrip()
+ return fh.readlines()[row].strip()
def get_zero_or_one(
@@ -61,7 +61,7 @@ def get_zero_or_one(
return None
-re_call_other = re.compile(r"(?P<func>[^(]+)\(.*")
+re_call_other = re.compile(r".*?\b(?P<func>(?!if\b)[->.a-zA-Z0-9_]+)\(.*")
class Plugin(typing.Protocol):
@@ -79,6 +79,7 @@ class Plugin(typing.Protocol):
def extra_includes(self) -> typing.Collection[BaseName]: ...
def extra_nodes(self) -> typing.Collection[Node]: ...
+ def mutate_node(self, node: Node) -> None: ...
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None: ...
@@ -101,6 +102,10 @@ class PluginApplication:
ret.extend(plugin.extra_nodes())
return ret
+ def mutate_node(self, node: Node) -> None:
+ for plugin in self._plugins:
+ plugin.mutate_node(node)
+
def indirect_callees(
self, elem: vcg.VCGElem
) -> tuple[typing.Collection[QName], bool]:
@@ -110,6 +115,9 @@ class PluginApplication:
for plugin in self._plugins:
ret = plugin.indirect_callees(loc, line)
if ret is not None:
+ assert (
+ len(ret[0]) > 0
+ ), f"{plugin.__class__.__name__} returning 0 calles for {loc} indicates the code would crash"
return ret
placeholder = "__indirect_call"
diff --git a/build-aux/tent-graph b/build-aux/tent-graph
new file mode 100755
index 0000000..25c58c5
--- /dev/null
+++ b/build-aux/tent-graph
@@ -0,0 +1,180 @@
+#!/usr/bin/env python3
+# build-aux/tent-graph - Read a dbg_noncache=True dbg_nstatic=True stack.c on stdin, and produce a tent graph SVG on stdout
+#
+# Copyright (C) 2025 Luke T. Shumaker <lukeshu@lukeshu.com>
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+import ast
+import re
+import sys
+
+
+class Block:
+ title: str
+ parent: "Block|None"
+ children: list["Block"]
+ nbytes: int
+
+ def __init__(self, *, title: str, nbytes: int, parent: "Block|None") -> None:
+ self.title = title
+ self.parent = parent
+ self.children = []
+ self.nbytes = nbytes
+
+ @property
+ def rows(self) -> int:
+ if not self.children:
+ return 1
+ return sum(c.rows for c in self.children)
+
+ @property
+ def sum_nbytes(self) -> int:
+ if not self.children:
+ return self.nbytes
+ return self.nbytes + max(c.sum_nbytes for c in self.children)
+
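+    # Keep only the children whose subtrees account for sum_nbytes, i.e. the
+    # worst-case stack path(s); ties are all kept.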
+ def prune(self) -> None:
+ tgt = self.sum_nbytes - self.nbytes
+ self.children = [c for c in self.children if c.sum_nbytes == tgt]
+
+
+re_line = re.compile(
+ r"^//dbg-nstatic:(?P<indent>(?: -)*) QName\((?P<func>.*)\)\t(?P<size>[0-9]+)$"
+)
+
+
+def parse() -> list[Block]:
+ roots: list[Block] = []
+
+ stack: list[Block] = []
+ for line in sys.stdin:
+ m = re_line.fullmatch(line.strip())
+ if not m:
+ continue
+
+ depth = len(m.group("indent")) // 2
+ func = ast.literal_eval(m.group("func"))
+ size = int(m.group("size"), 10)
+
+ stack = stack[:depth]
+
+ block = Block(
+ title=func,
+ nbytes=size,
+ parent=stack[-1] if stack else None,
+ )
+ if block.parent:
+ block.parent.children.append(block)
+ else:
+ roots.append(block)
+ stack.append(block)
+
+ return roots
+
+
+def render(roots: list[Block]) -> None:
+ total_nbytes = max(r.sum_nbytes for r in roots)
+ total_rows = sum(r.rows for r in roots)
+
+ img_w = 1920
+ img_h = 948
+
+ details_h = 16
+ text_yoff = 12
+ text_xoff = 3
+
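+    # Geometry: stack bytes map to the vertical axis, leaf call chains
+    # ("rows") map to the horizontal axis.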
+ main_h = img_h - details_h
+ nbyte_h = main_h / total_nbytes
+ row_w = img_w / total_rows
+
+ print(
+ f"""<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg version="1.1" width="{img_w}" height="{img_h}" onload="init(evt)" viewBox="0 0 {img_w} {img_h}" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<style type="text/css">
+ .func_g:hover {{ stroke:black; stroke-width:0.5; }}
+ .func_g rect {{ rx: 2px; ry: 2px; }}
+ rect#background {{ fill: #EEEEEE; }}
+ text {{ font-size: 12px; font-family: Verdana; fill: rgb(0,0,0); }}
+</style>
+<script type="text/ecmascript">
+<![CDATA[
+ var details;
+ function init(evt) {{ details = document.getElementById("details").firstChild; }}
+ function s(info) {{ details.nodeValue = "Function: " + info; }}
+ function c() {{ details.nodeValue = ' '; }}
+]]>
+</script>
+<rect id="background" x="0" y="0" width="{img_w}" height="{img_h}" />
+<text text-anchor="" x="{text_xoff}" y="{img_h-details_h+text_yoff}" id="details"> </text>"""
+ )
+
+ min_nbytes = roots[0].nbytes
+ max_nbytes = 0
+
+ def visit(b: Block) -> None:
+ nonlocal min_nbytes
+ nonlocal max_nbytes
+ min_nbytes = min(min_nbytes, b.nbytes)
+ max_nbytes = max(max_nbytes, b.nbytes)
+ for c in b.children:
+ visit(c)
+
+ for r in roots:
+ visit(r)
+
+ def print_block(block: Block, nbyte: int, row: int) -> None:
+ nonlocal min_nbytes
+ nonlocal max_nbytes
+
+ if block.nbytes:
+ hue = 100 - int(
+ ((block.nbytes - min_nbytes) / (max_nbytes - min_nbytes)) * 100
+ )
+
+ x = row * row_w
+ y = nbyte * nbyte_h
+ w = max(1, block.rows * row_w - 1)
+ h = block.nbytes * nbyte_h
+ title = f"{block.title} = {block.nbytes} / {block.sum_nbytes} bytes"
+
+ nonlocal main_h
+ print(f'<g class="func_g" onmouseover="s(\'{title}\')" onmouseout="c()">')
+ print(f"\t<title>{title}</title>")
+ print(
+ f'\t<rect x="{x}" y="{main_h-y-h}" width="{w}" height="{h}" fill="hsl({hue} 60% 60%)" />'
+ )
+
+ short_title = title.rsplit(":", 1)[-1]
+ if h > details_h and w > len(short_title) * 10:
+ print(
+ f'\t<text x="{x+text_xoff}" y="{main_h-y-h+text_yoff}">{short_title}</text>'
+ )
+ print("</g>")
+
+ def sort_key(c: Block) -> int:
+ return c.sum_nbytes
+
+ for c in sorted(block.children, key=sort_key, reverse=True):
+ print_block(c, nbyte + block.nbytes, row)
+ row += c.rows
+
+ row = 0
+ for r in roots:
+ print_block(r, 0, row)
+ row += r.rows
+
+ print("</svg>")
+
+
+def main() -> None:
+ roots = parse()
+
+ # tgt = max(r.sum_nbytes for r in roots)
+ # roots = [r for r in roots if r.sum_nbytes == tgt]
+
+ render(roots)
+
+
+if __name__ == "__main__":
+ main()
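For reference, a sketch of the //dbg-nstatic: lines this script consumes. The sample line below is constructed to satisfy re_line above; it is not copied from a real stack.c, so treat the exact function name and size as hypothetical:

    import ast
    import re

    re_line = re.compile(
        r"^//dbg-nstatic:(?P<indent>(?: -)*) QName\((?P<func>.*)\)\t(?P<size>[0-9]+)$"
    )

    # Two " -" indent steps means the function is two calls deep.
    line = "//dbg-nstatic: - - QName('foo.c:baz')\t8"
    m = re_line.fullmatch(line)
    assert m is not None
    assert len(m.group("indent")) // 2 == 2             # call depth
    assert ast.literal_eval(m.group("func")) == "foo.c:baz"
    assert int(m.group("size"), 10) == 8                 # bytes of static stack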
diff --git a/build-aux/valgrind b/build-aux/valgrind
new file mode 100755
index 0000000..0700e4d
--- /dev/null
+++ b/build-aux/valgrind
@@ -0,0 +1,16 @@
+#!/bin/sh
+# build-aux/valgrind - Wrapper around valgrind to keep flags consistent
+#
+# Copyright (C) 2025 Luke T. Shumaker <lukeshu@lukeshu.com>
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+exec \
+ valgrind \
+ --fair-sched=yes \
+ --error-exitcode=2 \
+ --leak-check=full \
+ --show-leak-kinds=all \
+ --errors-for-leak-kinds=all \
+ --show-error-list=all \
+ -- \
+ "$@"