Diffstat (limited to 'build-aux')
-rwxr-xr-x  build-aux/lint-bin          22
-rwxr-xr-x  build-aux/lint-unknown      16
-rw-r--r--  build-aux/requirements.txt   3
-rwxr-xr-x  build-aux/stack.c.gen      904
4 files changed, 654 insertions, 291 deletions
diff --git a/build-aux/lint-bin b/build-aux/lint-bin
index 0b955de..78ed19f 100755
--- a/build-aux/lint-bin
+++ b/build-aux/lint-bin
@@ -25,7 +25,7 @@ RESET=$(tput sgr0)
 
 err() {
 	printf "${RED}%s${RESET}: %s\n" "$1" "$2" >&2
-	#r=1
+	r=1
 }
 
 # Input is `ld --print-map` format.
@@ -104,7 +104,22 @@ lint_stack() {
 	done < <(
 		comm -3 \
 			<(sed -En 's/^included: (.*:)?//p' "${in_elffile%.elf}_stack.c" | sort -u) \
-			<(readelf_funcs "$in_elffile" | sed 's/\.part\.[0-9]*$//' | sort -u))
+			<(readelf_funcs "$in_elffile" | sed -E -e 's/\.part\.[0-9]*$//' -e 's/^__(.*)_veneer$/\1/' | sort -u))
+}
+
+lint_func_blocklist() {
+	local in_elffile
+	in_elffile=$1
+
+	local blocklist=(
+		gpio_default_irq_handler
+	)
+
+	while read -r func; do
+		err "$in_elffile" "Contains blocklisted function: ${func}"
+	done < <(readelf --syms --wide -- "$in_elffile" |
+		awk '$4 == "FUNC" { print $8 }' |
+		grep -Fx "${blocklist[@]/#/-e}")
 }
 
 main() {
@@ -116,7 +131,8 @@ main() {
 			echo 'Global variables:'
 			lint_globals "${elf}.map" | sed 's/^/ /'
 		} > "${elf%.elf}.lint.globals"
-		lint_stack "$elf" &> "${elf%.elf}.lint.stack"
+		(lint_stack "$elf") &> "${elf%.elf}.lint.stack"
+		lint_func_blocklist "$elf"
 	done
 
 	return $r
diff --git a/build-aux/lint-unknown b/build-aux/lint-unknown
index 3c2e91b..dda9541 100755
--- a/build-aux/lint-unknown
+++ b/build-aux/lint-unknown
@@ -7,4 +7,18 @@
 RED=$(tput setaf 1)
 RESET=$(tput sgr0)
 
-printf "${RED}%s${RESET}: cannot lint unknown file type\n" "$@" >&2
+err() {
+	printf "${RED}%s${RESET}: %s\n" "$1" "$2" >&2
+	r=1
+}
+
+r=0
+for filename in "$@"; do
+	if ! { [ -f "$filename" ] && ! [ -h "$filename" ]; }; then
+		# Ignore non-files
+		continue
+	fi
+
+	err "$filename" 'cannot lint unknown file type'
+done
+exit $r
diff --git a/build-aux/requirements.txt b/build-aux/requirements.txt
index 43a13be..fb76559 100644
--- a/build-aux/requirements.txt
+++ b/build-aux/requirements.txt
@@ -1,9 +1,10 @@
 # build-aux/requirements.txt - List of Python dev requirements
 #
-# Copyright (C) 2024 Luke T. Shumaker <lukeshu@lukeshu.com>
+# Copyright (C) 2024-2025 Luke T. Shumaker <lukeshu@lukeshu.com>
 # SPDX-License-Identifier: AGPL-3.0-or-later
 
 mypy
 types-gdb>=15.0.0.20241204 # https://github.com/python/typeshed/pull/13169
 black
 isort
+pylint
diff --git a/build-aux/stack.c.gen b/build-aux/stack.c.gen
index edc7bae..a8e2149 100755
--- a/build-aux/stack.c.gen
+++ b/build-aux/stack.c.gen
@@ -71,7 +71,7 @@ def parse_vcg(reader: typing.TextIO) -> typing.Iterator[VCGElem]:
             k = m.group(1)
             v = m.group(2)
             if k in elem.attrs:
-                _raise(f"duplicate key: {repr(k)}")
+                _raise(f"duplicate key: {k!r}")
             if v.startswith('"'):
 
                 def unesc(esc: re.Match[str]) -> str:
@@ -83,12 +83,16 @@ def parse_vcg(reader: typing.TextIO) -> typing.Iterator[VCGElem]:
                         case "\\":
                             return "\\"
                         case _:
-                            _raise(f"invalid escape code {repr(esc.group(0))}")
+                            _raise(f"invalid escape code {esc.group(0)!r}")
 
                 v = re_esc.sub(unesc, v[1:-1])
             elem.attrs[k] = v
             pos = m.end()
 
+        del _raise
+        del pos
+        del line
+        del lineno
         yield elem
@@ -98,11 +102,58 @@ def parse_vcg(reader: typing.TextIO) -> typing.Iterator[VCGElem]:
 UsageKind: typing.TypeAlias = typing.Literal["static", "dynamic", "dynamic,bounded"]
 
 
+class BaseName:
+    _content: str
+
+    def __init__(self, content: str) -> None:
+        if ":" in content:
+            raise ValueError(f"invalid non-qualified name: {content!r}")
+        self._content = content
+
+    def __str__(self) -> str:
+        return self._content
+
+    def __eq__(self, other: typing.Any) -> bool:
+        assert isinstance(other, BaseName)
+        return self._content == other._content
+
+    def __lt__(self, other: "BaseName") -> bool:
+        return self._content < other._content
+
+    def __hash__(self) -> int:
+        return hash(self._content)
+
+
+class QName:
+    _content: str
+
+    def __init__(self, content: str) -> None:
+        self._content = content
+
+    def __str__(self) -> str:
+        return self._content
+
+    def __eq__(self, other: typing.Any) -> bool:
+        assert isinstance(
+            other, QName
+        ), f"comparing QName with {other.__class__.__name__}"
+        return self._content == other._content
+
+    def __lt__(self, other: "QName") -> bool:
+        return self._content < other._content
+
+    def __hash__(self) -> int:
+        return hash(self._content)
+
+    def base(self) -> BaseName:
+        return BaseName(str(self).rsplit(":", 1)[-1].split(".", 1)[0])
+
+
 class Node:
     # from .title (`static` and `__weak` functions are prefixed with
     # the compilation unit .c file. For static functions that's fine,
     # but we'll have to handle it specially for __weak.).
-    funcname: str
+    funcname: QName
     # .label is "{funcname}\n{location}\n{nstatic} bytes (static}\n{ndynamic} dynamic objects"
     location: str
     usage_kind: UsageKind
@@ -111,51 +162,55 @@ class Node:
     # edges with .sourcename set to this node, val is if it's
    # OK/expected that the function be missing.
- calls: dict[str, bool] + calls: dict[QName, bool] def synthetic_node( - name: str, nstatic: int, calls: typing.Collection[str] = set() + name: str, nstatic: int, calls: typing.Collection[str] = frozenset() ) -> Node: n = Node() - n.funcname = name + n.funcname = QName(name) n.location = "<synthetic>" n.usage_kind = "static" n.nstatic = nstatic n.ndynamic = 0 - n.calls = dict((c, False) for c in calls) + n.calls = dict((QName(c), False) for c in calls) return n +class AnalyzeResultVal(typing.NamedTuple): + nstatic: int + cnt: int + + class AnalyzeResultGroup(typing.NamedTuple): - rows: dict[str, int] - nmax: int - nsum: int + rows: dict[QName, AnalyzeResultVal] class AnalyzeResult(typing.NamedTuple): groups: dict[str, AnalyzeResultGroup] - missing: set[str] - dynamic: set[str] + missing: set[QName] + dynamic: set[QName] - included_funcs: set[str] + included_funcs: set[QName] class Application(typing.Protocol): def extra_nodes(self) -> typing.Collection[Node]: ... - def location_xform(self, loc: str) -> str: ... - def indirect_callees(self, elem: VCGElem) -> tuple[list[str], bool]: ... - def skip_call(self, chain: list[str], funcname: str) -> bool: ... + def indirect_callees( + self, elem: VCGElem + ) -> tuple[typing.Collection[QName], bool]: ... + def skip_call(self, chain: typing.Sequence[QName], funcname: QName) -> bool: ... def analyze( *, ci_fnames: typing.Collection[str], - app_func_filters: dict[str, typing.Callable[[str], int]], + app_func_filters: dict[str, typing.Callable[[QName], tuple[int, bool]]], app: Application, cfg_max_call_depth: int, ) -> AnalyzeResult: @@ -168,8 +223,8 @@ def analyze( flags=re.MULTILINE, ) - graph: dict[str, Node] = dict() - qualified: dict[str, set[str]] = dict() + graph: dict[QName, Node] = {} + qualified: dict[BaseName, set[QName]] = {} def handle_elem(elem: VCGElem) -> None: match elem.typ: @@ -180,14 +235,12 @@ def analyze( for k, v in elem.attrs.items(): match k: case "title": - node.funcname = v + node.funcname = QName(v) case "label": if elem.attrs.get("shape", "") != "ellipse": m = re_node_label.fullmatch(v) if not m: - raise ValueError( - f"unexpected label value {repr(v)}" - ) + raise ValueError(f"unexpected label value {v!r}") node.location = m.group("location") node.usage_kind = typing.cast( UsageKind, m.group("usage_kind") @@ -196,37 +249,37 @@ def analyze( node.ndynamic = int(m.group("ndynamic")) case "shape": if v != "ellipse": - raise ValueError(f"unexpected shape value {repr(v)}") + raise ValueError(f"unexpected shape value {v!r}") skip = True case _: - raise ValueError(f"unknown edge key {repr(k)}") + raise ValueError(f"unknown edge key {k!r}") if not skip: if node.funcname in graph: - raise ValueError(f"duplicate node {repr(node.funcname)}") + raise ValueError(f"duplicate node {str(node.funcname)!r}") graph[node.funcname] = node - if ":" in node.funcname: - _, shortname = node.funcname.rsplit(":", 1) - if shortname not in qualified: - qualified[shortname] = set() - qualified[shortname].add(node.funcname) + if ":" in str(node.funcname): + basename = node.funcname.base() + if basename not in qualified: + qualified[basename] = set() + qualified[basename].add(node.funcname) case "edge": - caller: str | None = None - callee: str | None = None + caller: QName | None = None + callee: QName | None = None for k, v in elem.attrs.items(): match k: case "sourcename": - caller = v + caller = QName(v) case "targetname": - callee = v + callee = QName(v) case "label": pass case _: - raise ValueError(f"unknown edge key {repr(k)}") + raise 
ValueError(f"unknown edge key {k!r}") if caller is None or callee is None: - raise ValueError(f"incomplete edge: {repr(elem.attrs)}") + raise ValueError(f"incomplete edge: {elem.attrs!r}") if caller not in graph: raise ValueError(f"unknown caller: {caller}") - if callee == "__indirect_call": + if str(callee) == "__indirect_call": callees, missing_ok = app.indirect_callees(elem) for callee in callees: if callee not in graph[caller].calls: @@ -234,87 +287,100 @@ def analyze( else: graph[caller].calls[callee] = False case _: - raise ValueError(f"unknown elem type {repr(elem.typ)}") + raise ValueError(f"unknown elem type {elem.typ!r}") for ci_fname in ci_fnames: - with open(ci_fname, "r") as fh: + with open(ci_fname, "r", encoding="utf-8") as fh: for elem in parse_vcg(fh): handle_elem(elem) for node in app.extra_nodes(): if node.funcname in graph: - raise ValueError(f"duplicate node {repr(node.funcname)}") + raise ValueError(f"duplicate node {str(node.funcname)!r}") graph[node.funcname] = node - missing: set[str] = set() - dynamic: set[str] = set() - included_funcs: set[str] = set() + missing: set[QName] = set() + dynamic: set[QName] = set() + included_funcs: set[QName] = set() dbg = False - def resolve_funcname(funcname: str) -> str | None: + def resolve_funcname(funcname: QName) -> QName | None: # Handle `ld --wrap` functions - if f"__wrap_{funcname}" in graph: - return f"__wrap_{funcname}" - if funcname.startswith("__real_") and funcname[len("__real_") :] in graph: - funcname = funcname[len("__real_") :] + if QName(f"__wrap_{funcname}") in graph: + return QName(f"__wrap_{funcname}") + if ( + str(funcname).startswith("__real_") + and QName(str(funcname)[len("__real_") :]) in graph + ): + funcname = QName(str(funcname)[len("__real_") :]) # Usual case - if funcname in graph: - return funcname + if QName(str(funcname)) in graph: + return QName(str(funcname)) # Handle `__weak` functions - if funcname in qualified and len(qualified[funcname]) == 1: - return sorted(qualified[funcname])[0] + if ( + ":" not in str(funcname) + and len(qualified.get(BaseName(str(funcname)), set())) == 1 + ): + return sorted(qualified[BaseName(str(funcname))])[0] return None + track_inclusion: bool = True + def nstatic( - orig_funcname: str, chain: list[str] = [], missing_ok: bool = False + orig_funcname: QName, + chain: typing.Sequence[QName] = (), + missing_ok: bool = False, ) -> int: nonlocal dbg + nonlocal track_inclusion funcname = resolve_funcname(orig_funcname) if not funcname: - if app.skip_call(chain, orig_funcname): + if chain and app.skip_call(chain, QName(str(orig_funcname))): + if dbg: + print(f"//dbg: {'- '*len(chain)}{orig_funcname}\tskip missing") return 0 if not missing_ok: missing.add(orig_funcname) + if dbg: + print(f"//dbg: {'- '*len(chain)}{orig_funcname}\tmissing") return 0 - if app.skip_call(chain, funcname): + if chain and app.skip_call(chain, funcname): + if dbg: + print(f"//dbg: {'- '*len(chain)}{orig_funcname}\tskip") return 0 if len(chain) == cfg_max_call_depth: - raise ValueError(f"max call depth exceeded: {chain+[funcname]}") + raise ValueError(f"max call depth exceeded: {[*chain, funcname]}") node = graph[funcname] if dbg: - print(f"//dbg: {funcname}\t{node.nstatic}") + print(f"//dbg: {'- '*len(chain)}{funcname}\t{node.nstatic}") if node.usage_kind == "dynamic" or node.ndynamic > 0: - dynamic.add(app.location_xform(funcname)) - included_funcs.add(funcname) + dynamic.add(funcname) + if track_inclusion: + included_funcs.add(funcname) return node.nstatic + max( [ 0, *[ - nstatic(call, chain 
+ [funcname], missing_ok) + nstatic(call, [*chain, funcname], missing_ok) for call, missing_ok in node.calls.items() ], ] ) - groups: dict[str, AnalyzeResultGroup] = dict() + groups: dict[str, AnalyzeResultGroup] = {} for grp_name, grp_filter in app_func_filters.items(): - nmax = 0 - nsum = 0 - rows: dict[str, int] = {} + rows: dict[QName, AnalyzeResultVal] = {} for funcname in graph: - if cnt := grp_filter(funcname): - n = nstatic(funcname) - rows[app.location_xform(funcname)] = n - if n > nmax: - nmax = n - nsum += cnt * n - groups[grp_name] = AnalyzeResultGroup(rows=rows, nmax=nmax, nsum=nsum) + cnt, track_inclusion = grp_filter(funcname) + if cnt: + rows[funcname] = AnalyzeResultVal(nstatic=nstatic(funcname), cnt=cnt) + groups[grp_name] = AnalyzeResultGroup(rows=rows) return AnalyzeResult( groups=groups, missing=missing, dynamic=dynamic, included_funcs=included_funcs @@ -329,11 +395,11 @@ def read_source(location: str) -> str: re_location = re.compile(r"(?P<filename>.+):(?P<row>[0-9]+):(?P<col>[0-9]+)") m = re_location.fullmatch(location) if not m: - raise ValueError(f"unexpected label value {repr(location)}") + raise ValueError(f"unexpected label value {location!r}") filename = m.group("filename") row = int(m.group("row")) - 1 col = int(m.group("col")) - 1 - with open(m.group("filename"), "r") as fh: + with open(filename, "r", encoding="utf-8") as fh: return fh.readlines()[row][col:].rstrip() @@ -351,12 +417,24 @@ re_call_other = re.compile(r"(?P<func>[^(]+)\(.*") class Plugin(typing.Protocol): - def is_intrhandler(self, name: str) -> bool: ... + def is_intrhandler(self, name: QName) -> bool: ... + + # init_array returns a list of functions that are placed in the + # `.init_array.*` section; AKA functions marked with + # `__attribute__((constructor))`. + def init_array(self) -> typing.Collection[QName]: ... + + # extra_includes returns a list of functions that are never + # called, but are included in the binary anyway. This may because + # it is an unused method in a used vtable. This may be because it + # is an atexit() callback (we never exit). + def extra_includes(self) -> typing.Collection[str]: ... + def extra_nodes(self) -> typing.Collection[Node]: ... def indirect_callees( self, loc: str, line: str - ) -> tuple[list[str], bool] | None: ... - def skip_call(self, chain: list[str], call: str) -> bool: ... + ) -> tuple[typing.Collection[QName], bool] | None: ... + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: ... 
class PluginApplication: @@ -375,10 +453,7 @@ class PluginApplication: ret.extend(plugin.extra_nodes()) return ret - def location_xform(self, loc: str) -> str: - return self._location_xform(loc) - - def indirect_callees(self, elem: VCGElem) -> tuple[list[str], bool]: + def indirect_callees(self, elem: VCGElem) -> tuple[typing.Collection[QName], bool]: loc = elem.attrs.get("label", "") line = read_source(loc) @@ -390,10 +465,10 @@ class PluginApplication: placeholder = "__indirect_call" if m := re_call_other.fullmatch(line): placeholder += ":" + m.group("func") - placeholder += " at " + self.location_xform(elem.attrs.get("label", "")) - return [placeholder], False + placeholder += " at " + self._location_xform(elem.attrs.get("label", "")) + return [QName(placeholder)], False - def skip_call(self, chain: list[str], funcname: str) -> bool: + def skip_call(self, chain: typing.Sequence[QName], funcname: QName) -> bool: for plugin in self._plugins: if plugin.skip_call(chain, funcname): return True @@ -404,33 +479,36 @@ class PluginApplication: # Application-specific code -class AppPlugin: - def is_intrhandler(self, name: str) -> bool: - return name in [ - "rp2040_hwtimer_intrhandler", - "_cr_gdb_intrhandler", - "hostclock_handle_sig_alarm", - "hostnet_handle_sig_io", - ] +class CmdPlugin: + def is_intrhandler(self, name: QName) -> bool: + return False + + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] def extra_nodes(self) -> typing.Collection[Node]: return [] - def indirect_callees(self, loc: str, line: str) -> tuple[list[str], bool] | None: + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: if "/3rd-party/" in loc: return None if "srv->auth" in line: return [], False if "srv->rootdir" in line: - return ["get_root"], False + return [QName("get_root")], False return None - def skip_call(self, chain: list[str], call: str) -> bool: + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: return False class LibObjPlugin: - objcalls: dict[str, set[str]] # method_name => {method_impls} + objcalls: dict[str, set[QName]] # method_name => {method_impls} def __init__(self, arg_c_fnames: typing.Collection[str]) -> None: ifaces: dict[str, set[str]] = {} # iface_name => {method_names} @@ -439,7 +517,7 @@ class LibObjPlugin: re_lo_iface = re.compile(r"^\s*#\s*define\s+(?P<name>\S+)_LO_IFACE") re_lo_func = re.compile(r"LO_FUNC *\([^,]*, *(?P<name>[^,) ]+) *[,)]") for fname in arg_c_fnames: - with open(fname, "r") as fh: + with open(fname, "r", encoding="utf-8") as fh: while line := fh.readline(): if m := re_lo_iface.match(line): iface_name = m.group("name") @@ -460,98 +538,174 @@ class LibObjPlugin: r"^LO_IMPLEMENTATION_[HC]\s*\(\s*(?P<iface>[^, ]+)\s*,\s*(?P<impl_typ>[^,]+)\s*,\s*(?P<impl_name>[^, ]+)\s*[,)].*" ) for fname in arg_c_fnames: - with open(fname, "r") as fh: + with open(fname, "r", encoding="utf-8") as fh: for line in fh: line = line.strip() if m := re_lo_implementation.match(line): implementations[m.group("iface")].add(m.group("impl_name")) - objcalls: dict[str, set[str]] = {} # method_name => {method_impls} - for iface_name in ifaces: - for method_name in ifaces[iface_name]: + objcalls: dict[str, set[QName]] = {} # method_name => {method_impls} + for iface_name, iface in ifaces.items(): + for method_name in iface: if method_name not in objcalls: objcalls[method_name] = set() for impl_name in implementations[iface_name]: - 
objcalls[method_name].add(impl_name + "_" + method_name) + objcalls[method_name].add(QName(impl_name + "_" + method_name)) self.objcalls = objcalls - def is_intrhandler(self, name: str) -> bool: + def is_intrhandler(self, name: QName) -> bool: return False + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] + def extra_nodes(self) -> typing.Collection[Node]: return [] - def indirect_callees(self, loc: str, line: str) -> tuple[list[str], bool] | None: + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: re_call_objcall = re.compile(r"LO_CALL\((?P<obj>[^,]+), (?P<meth>[^,)]+)[,)].*") if "/3rd-party/" in loc: return None if m := re_call_objcall.fullmatch(line): if m.group("meth") in self.objcalls: - return sorted(self.objcalls[m.group("meth")]), False + return self.objcalls[m.group("meth")], False return [ - f"__indirect_call:{m.group('obj')}.vtable->{m.group('meth')}" + QName(f"__indirect_call:{m.group('obj')}.vtable->{m.group('meth')}") ], False return None - def skip_call(self, chain: list[str], call: str) -> bool: + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: return False class LibHWPlugin: pico_platform: str + libobj: LibObjPlugin - def __init__(self, arg_pico_platform: str) -> None: + def __init__(self, arg_pico_platform: str, libobj: LibObjPlugin) -> None: self.pico_platform = arg_pico_platform + self.libobj = libobj - def is_intrhandler(self, name: str) -> bool: - return False + def is_intrhandler(self, name: QName) -> bool: + return str(name.base()) in [ + "rp2040_hwtimer_intrhandler", + "hostclock_handle_sig_alarm", + "hostnet_handle_sig_io", + "gpioirq_handler", + "dmairq_handler", + ] + + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] def extra_nodes(self) -> typing.Collection[Node]: return [] - def indirect_callees(self, loc: str, line: str) -> tuple[list[str], bool] | None: + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: if "/3rd-party/" in loc: return None + for fn in ( + "io_readv", + "io_writev", + "io_close", + "io_close_read", + "io_close_write", + "io_readwritev", + ): + if f"{fn}(" in line: + return self.libobj.indirect_callees(loc, f"LO_CALL(x, {fn[3:]})") + if "io_read(" in line: + return self.libobj.indirect_callees(loc, "LO_CALL(x, readv)") + if "io_writev(" in line: + return self.libobj.indirect_callees(loc, "LO_CALL(x, writev)") if "trigger->cb(trigger->cb_arg)" in line: ret = [ - "alarmclock_sleep_intrhandler", + QName("alarmclock_sleep_intrhandler"), ] if self.pico_platform == "rp2040": ret += [ - "w5500_tcp_alarm_handler", - "w5500_udp_alarm_handler", + QName("w5500_tcp_alarm_handler"), + QName("w5500_udp_alarm_handler"), ] return ret, False + if "/rp2040_gpioirq.c:" in loc and "handler->fn" in line: + return [ + QName("w5500_intrhandler"), + ], False + if "/rp2040_dma.c:" in loc and "handler->fn" in line: + return [ + QName("rp2040_hwspi_intrhandler"), + ], False + return None + + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: + return False + + +class LibCRPlugin: + def is_intrhandler(self, name: QName) -> bool: + return str(name.base()) in ("_cr_gdb_intrhandler",) + + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] + + def extra_nodes(self) -> 
typing.Collection[Node]: + return [] + + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: return None - def skip_call(self, chain: list[str], call: str) -> bool: + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: return False class LibCRIPCPlugin: - def is_intrhandler(self, name: str) -> bool: + def is_intrhandler(self, name: QName) -> bool: return False + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] + def extra_nodes(self) -> typing.Collection[Node]: return [] - def indirect_callees(self, loc: str, line: str) -> tuple[list[str], bool] | None: + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: if "/3rd-party/" in loc: return None if "/chan.c:" in loc and "front->dequeue(" in line: return [ - "_cr_chan_dequeue", - "_cr_select_dequeue", + QName("_cr_chan_dequeue"), + QName("_cr_select_dequeue"), ], False return None - def skip_call(self, chain: list[str], call: str) -> bool: + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: return False class Lib9PPlugin: - tmessage_handlers: set[str] | None + tmessage_handlers: set[QName] | None lib9p_msgs: set[str] _CONFIG_9P_NUM_SOCKS: int | None CONFIG_9P_SRV_MAX_REQS: int | None @@ -582,7 +736,7 @@ class Lib9PPlugin: def config_h_get(varname: str) -> int | None: if config_h_fname: - with open(config_h_fname, "r") as fh: + with open(config_h_fname, "r", encoding="utf-8") as fh: for line in fh: line = line.rstrip() if line.startswith("#define"): @@ -597,23 +751,23 @@ class Lib9PPlugin: # Read sources ######################################################### - tmessage_handlers: set[str] | None = None + tmessage_handlers: set[QName] | None = None if lib9p_srv_c_fname: re_tmessage_handler = re.compile( r"^\s*\[LIB9P_TYP_T[^]]+\]\s*=\s*\(tmessage_handler\)\s*(?P<handler>\S+),\s*$" ) tmessage_handlers = set() - with open(lib9p_srv_c_fname, "r") as fh: + with open(lib9p_srv_c_fname, "r", encoding="utf-8") as fh: for line in fh: line = line.rstrip() if m := re_tmessage_handler.fullmatch(line): - tmessage_handlers.add(m.group("handler")) + tmessage_handlers.add(QName(m.group("handler"))) self.tmessage_handlers = tmessage_handlers lib9p_msgs: set[str] = set() if lib9p_generated_c_fname: re_lib9p_msg_entry = re.compile(r"^\s*_MSG_(?:[A-Z]+)\((?P<typ>\S+)\),$") - with open(lib9p_generated_c_fname, "r") as fh: + with open(lib9p_generated_c_fname, "r", encoding="utf-8") as fh: for line in fh: line = line.rstrip() if m := re_lib9p_msg_entry.fullmatch(line): @@ -621,22 +775,30 @@ class Lib9PPlugin: lib9p_msgs.add(typ) self.lib9p_msgs = lib9p_msgs - def thread_count(self, name: str) -> int: + def thread_count(self, name: QName) -> int: assert self._CONFIG_9P_NUM_SOCKS assert self.CONFIG_9P_SRV_MAX_REQS - if "read" in name: + if "read" in str(name.base()): return self._CONFIG_9P_NUM_SOCKS - elif "write" in name: + if "write" in str(name.base()): return self._CONFIG_9P_NUM_SOCKS * self.CONFIG_9P_SRV_MAX_REQS return 1 - def is_intrhandler(self, name: str) -> bool: + def is_intrhandler(self, name: QName) -> bool: return False + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] + def extra_nodes(self) -> typing.Collection[Node]: return [] - def indirect_callees(self, loc: str, line: str) -> tuple[list[str], bool] | None: + def indirect_callees( + self, loc: 
str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: if "/3rd-party/" in loc: return None if ( @@ -645,101 +807,171 @@ class Lib9PPlugin: and "tmessage_handlers[typ](" in line ): # Functions for disabled protocol extensions will be missing. - return sorted(self.tmessage_handlers), True + return self.tmessage_handlers, True if self.lib9p_msgs and "/9p.c:" in loc: for meth in ["validate", "unmarshal", "marshal"]: if line.startswith(f"tentry.{meth}("): # Functions for disabled protocol extensions will be missing. - return sorted(f"{meth}_{msg}" for msg in self.lib9p_msgs), True + return [QName(f"{meth}_{msg}") for msg in self.lib9p_msgs], True return None - def skip_call(self, chain: list[str], call: str) -> bool: - if "lib9p/srv.c:srv_util_pathfree" in call: + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: + if "lib9p/srv.c:srv_util_pathfree" in str(call): assert isinstance(self.CONFIG_9P_SRV_MAX_DEPTH, int) if len(chain) >= self.CONFIG_9P_SRV_MAX_DEPTH and all( - ("lib9p/srv.c:srv_util_pathfree" in c) + ("lib9p/srv.c:srv_util_pathfree" in str(c)) for c in chain[-self.CONFIG_9P_SRV_MAX_DEPTH :] ): return True re_msg_meth = re.compile( r"^lib9p_(?P<grp>[TR])msg_(?P<meth>validate|unmarshal|marshal)$" ) - wrapper = next((c for c in chain if re_msg_meth.match(c)), None) + wrapper = next((c for c in chain if re_msg_meth.match(str(c))), None) if wrapper: - m = re_msg_meth.match(wrapper) + m = re_msg_meth.match(str(wrapper)) assert m - deny = ":" + m.group("meth") + "_" + ("R" if m.group("grp") == "T" else "T") - if deny in call: + deny = m.group("meth") + "_" + ("R" if m.group("grp") == "T" else "T") + if str(call.base()).startswith(deny): return True return False class LibMiscPlugin: - def is_intrhandler(self, name: str) -> bool: + def is_intrhandler(self, name: QName) -> bool: return False + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] + def extra_nodes(self) -> typing.Collection[Node]: return [] - def indirect_callees(self, loc: str, line: str) -> tuple[list[str], bool] | None: + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: return None - def skip_call(self, chain: list[str], call: str) -> bool: + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: if ( len(chain) > 1 - and chain[-1] == "__assert_msg_fail" - and call.endswith(":__lm_printf") - and "__assert_msg_fail" in chain[:-1] + and str(chain[-1].base()) == "__assert_msg_fail" + and str(call.base()) == "__lm_printf" + and any(str(c.base()) == "__assert_msg_fail" for c in chain[:-1]) ): return True return False +class PicoFmtPlugin: + known_out: dict[str, str] + known_fct: dict[str, str] + + def __init__(self) -> None: + self.known_out = { + "": "_out_null", # XXX + "__wrap_sprintf": "_out_buffer", + "__wrap_snprintf": "_out_buffer", + "__wrap_vsnprintf": "_out_buffer", + "vfctprintf": "_out_fct", + } + self.known_fct = { + "stdio_vprintf": "stdio_buffered_printer", + "__wrap_vprintf": "stdio_buffered_printer", + } + + def is_intrhandler(self, name: QName) -> bool: + return False + + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] + + def extra_nodes(self) -> typing.Collection[Node]: + return [] + + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: + if "/3rd-party/pico-sdk/" not in loc: + return None + if 
"/printf.c:" in loc: + m = re_call_other.fullmatch(line) + call: str | None = m.group("func") if m else None + if call == "out": + return [QName(x) for x in self.known_out.values()], False + if "->fct" in line: + return [QName(x) for x in self.known_fct.values()], False + return None + + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: + if str(call.base()) in self.known_out.values(): + out = "" + for pcall in chain: + if str(pcall.base()) in self.known_out: + out = self.known_out[str(pcall.base())] + if ( + out == "_out_buffer" and str(call.base()) == "_out_null" + ): # XXX: Gross hack + out = "_out_null" + return str(call.base()) != out + if str(call.base()) in self.known_fct.values(): + fct = "" + for pcall in chain: + if str(pcall.base()) in self.known_fct: + fct = self.known_fct[str(pcall.base())] + return str(call.base()) != fct + return False + + class PicoSDKPlugin: - app_gpio_handlers: typing.Collection[str] - app_init_array: typing.Collection[str] - app_preinit_array: typing.Collection[str] + get_init_array: typing.Callable[[], typing.Collection[QName]] + app_init_array: typing.Collection[QName] | None + app_preinit_array: typing.Collection[QName] def __init__( self, *, - app_gpio_handlers: typing.Collection[str], - app_init_array: typing.Collection[str], + get_init_array: typing.Callable[[], typing.Collection[QName]], ) -> None: - self.app_gpio_handlers = app_gpio_handlers - self.app_init_array = app_init_array + # grep for '__attribute__((constructor))'. + self.get_init_array = get_init_array + self.app_init_array = None # git grep '^PICO_RUNTIME_INIT_FUNC\S*(' self.app_preinit_array = [ - # "runtime_init_mutex", # pico_mutex - # "runtime_init_default_alarm_pool", # pico_time - # "runtime_init_boot_locks_reset", # hardware_boot_lock - "runtime_init_per_core_irq_priorities", # hardware_irq - # "spinlock_set_extexclall", # hardware_sync_spin_lock - "__aeabi_bits_init", # pico_bit_ops - # "runtime_init_bootrom_locking_enable", # pico_bootrom, rp2350-only - # "runtime_init_pre_core_tls_setup", # pico_clib_interface, picolibc-only - # "__aeabi_double_init", # pico_double - # "__aeabi_float_init", # pico_float - "__aeabi_mem_init", # pico_mem_ops - "first_per_core_initializer", # pico_runtime + # QName("runtime_init_mutex"), # pico_mutex + # QName("runtime_init_default_alarm_pool"), # pico_time + # QName("runtime_init_boot_locks_reset"), # hardware_boot_lock + QName("runtime_init_per_core_irq_priorities"), # hardware_irq + # QName("spinlock_set_extexclall"), # hardware_sync_spin_lock + QName("__aeabi_bits_init"), # pico_bit_ops + # QName("runtime_init_bootrom_locking_enable"), # pico_bootrom, rp2350-only + # QName("runtime_init_pre_core_tls_setup"), # pico_clib_interface, picolibc-only + # QName("__aeabi_double_init"), # pico_double + # QName("__aeabi_float_init"), # pico_float + QName("__aeabi_mem_init"), # pico_mem_ops + QName("first_per_core_initializer"), # pico_runtime # pico_runtime_init - # "runtime_init_bootrom_reset", # rp2350-only - # "runtime_init_per_core_bootrom_reset", # rp2350-only - # "runtime_init_per_core_h3_irq_registers", # rp2350-only - "runtime_init_early_resets", - "runtime_init_usb_power_down", - # "runtime_init_per_core_enable_coprocessors", # PICO_RUNTIME_SKIP_INIT_PER_CORE_ENABLE_COPROCESSORS - "runtime_init_clocks", - "runtime_init_post_clock_resets", - "runtime_init_rp2040_gpio_ie_disable", - "runtime_init_spin_locks_reset", - "runtime_init_install_ram_vector_table", + # QName("runtime_init_bootrom_reset"), # rp2350-only + # 
QName("runtime_init_per_core_bootrom_reset"), # rp2350-only + # QName("runtime_init_per_core_h3_irq_registers"), # rp2350-only + QName("runtime_init_early_resets"), + QName("runtime_init_usb_power_down"), + # QName("runtime_init_per_core_enable_coprocessors"), # PICO_RUNTIME_SKIP_INIT_PER_CORE_ENABLE_COPROCESSORS + QName("runtime_init_clocks"), + QName("runtime_init_post_clock_resets"), + QName("runtime_init_rp2040_gpio_ie_disable"), + QName("runtime_init_spin_locks_reset"), + QName("runtime_init_install_ram_vector_table"), ] - def is_intrhandler(self, name: str) -> bool: - return name in [ - "gpio_default_irq_handler", + def is_intrhandler(self, name: QName) -> bool: + return str(name.base()) in [ "isr_invalid", "isr_nmi", "isr_hardfault", @@ -749,7 +981,15 @@ class PicoSDKPlugin: *[f"isr_irq{n}" for n in range(32)], ] - def indirect_callees(self, loc: str, line: str) -> tuple[list[str], bool] | None: + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] + + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: if "/3rd-party/pico-sdk/" not in loc or "/3rd-party/pico-sdk/lib/" in loc: return None m = re_call_other.fullmatch(line) @@ -757,65 +997,44 @@ class PicoSDKPlugin: match call: case "connect_internal_flash_func": - return ["rom_func_lookup(ROM_FUNC_CONNECT_INTERNAL_FLASH)"], False + return [ + QName("rom_func_lookup(ROM_FUNC_CONNECT_INTERNAL_FLASH)") + ], False case "flash_exit_xip_func": - return ["rom_func_lookup(ROM_FUNC_FLASH_EXIT_XIP)"], False + return [QName("rom_func_lookup(ROM_FUNC_FLASH_EXIT_XIP)")], False case "flash_range_erase_func": - return ["rom_func_lookup(ROM_FUNC_FLASH_RANGE_ERASE)"], False + return [QName("rom_func_lookup(ROM_FUNC_FLASH_RANGE_ERASE)")], False case "flash_flush_cache_func": - return ["rom_func_lookup(ROM_FUNC_FLASH_FLUSH_CACHE)"], False + return [QName("rom_func_lookup(ROM_FUNC_FLASH_FLUSH_CACHE)")], False case "rom_table_lookup": - return ["rom_hword_as_ptr(BOOTROM_TABLE_LOOKUP_OFFSET)"], False + return [QName("rom_hword_as_ptr(BOOTROM_TABLE_LOOKUP_OFFSET)")], False if "/flash.c:" in loc and "boot2_copyout" in line: - return ["_stage2_boot"], False - if "/gpio.c:" in loc and call == "callback": - return sorted(self.app_gpio_handlers), False - if "/printf.c:" in loc: - if call == "out": - return [ - "_out_buffer", - "_out_null", - "_out_fct", - ], False - if "->fct(" in line: - return ["stdio_buffered_printer"], False + return [QName("_stage2_boot")], False if "/stdio.c:" in loc: if call == "out_func": return [ - "stdio_out_chars_crlf", - "stdio_out_chars_no_crlf", + QName("stdio_out_chars_crlf"), + QName("stdio_out_chars_no_crlf"), ], False if call and (call.startswith("d->") or call.startswith("driver->")): _, meth = call.split("->", 1) match meth: case "out_chars": - return ["stdio_uart_out_chars"], False + return [QName("stdio_uart_out_chars")], False case "out_flush": - return ["stdio_uart_out_flush"], False + return [QName("stdio_uart_out_flush")], False case "in_chars": - return ["stdio_uart_in_chars"], False + return [QName("stdio_uart_in_chars")], False if "/newlib_interface.c:" in loc: if line == "*p)();": - return sorted(self.app_init_array), False + if self.app_init_array is None: + self.app_init_array = self.get_init_array() + return self.app_init_array, False if "/pico_runtime/runtime.c:" in loc: - return sorted(self.app_preinit_array), False + return self.app_preinit_array, False return None - def 
skip_call(self, chain: list[str], call: str) -> bool: - if call == "_out_buffer" or call == "_out_fct": - last = "" - for pcall in chain: - if pcall in [ - "__wrap_sprintf", - "__wrap_snprintf", - "__wrap_vsnprintf", - "vfctprintf", - ]: - last = pcall - if last == "vfctprintf": - return call != "_out_fct" - else: - return call == "_out_buffer" + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: return False def extra_nodes(self) -> typing.Collection[Node]: @@ -836,7 +1055,8 @@ class PicoSDKPlugin: synthetic_node("isr_svcall", 0, {"__unhandled_user_irq"}), synthetic_node("isr_pendsv", 0, {"__unhandled_user_irq"}), synthetic_node("isr_systick", 0, {"__unhandled_user_irq"}), - synthetic_node(f"__unhandled_user_irq", 0), + synthetic_node("__unhandled_user_irq", 0), + synthetic_node("_entry_point", 0, {"_reset_handler"}), synthetic_node("_reset_handler", 0, {"runtime_init", "main", "exit"}), ] @@ -948,7 +1168,7 @@ class PicoSDKPlugin: class TinyUSBDevicePlugin: - tud_drivers: dict[str, set[str]] + tud_drivers: dict[str, set[QName]] # method_name => {method_impls} def __init__(self, arg_c_fnames: typing.Collection[str]) -> None: usbd_c_fname = get_zero_or_one( @@ -968,7 +1188,7 @@ class TinyUSBDevicePlugin: r"^\s*#\s*define\s+(?P<k>CFG_TUD_(?:\S{3}|AUDIO|VIDEO|MIDI|VENDOR|USBTMC|DFU_RUNTIME|ECM_RNDIS))\s+(?P<v>\S+).*" ) tusb_config: dict[str, bool] = {} - with open(tusb_config_h_fname, "r") as fh: + with open(tusb_config_h_fname, "r", encoding="utf-8") as fh: in_table = False for line in fh: line = line.rstrip() @@ -977,14 +1197,14 @@ class TinyUSBDevicePlugin: v = m.group("v") tusb_config[k] = bool(int(v)) - tud_drivers: dict[str, set[str]] = {} + tud_drivers: dict[str, set[QName]] = {} re_tud_entry = re.compile( r"^\s+\.(?P<meth>\S+)\s*=\s*(?P<impl>[a-zA-Z0-9_]+)(?:,.*)?" 
) re_tud_if1 = re.compile(r"^\s*#\s*if (\S+)\s*") re_tud_if2 = re.compile(r"^\s*#\s*if (\S+)\s*\|\|\s*(\S+)\s*") re_tud_endif = re.compile(r"^\s*#\s*endif\s*") - with open(usbd_c_fname, "r") as fh: + with open(usbd_c_fname, "r", encoding="utf-8") as fh: in_table = False enabled = True for line in fh: @@ -1004,20 +1224,28 @@ class TinyUSBDevicePlugin: if meth not in tud_drivers: tud_drivers[meth] = set() if impl != "NULL": - tud_drivers[meth].add(impl) + tud_drivers[meth].add(QName(impl)) if line.startswith("}"): in_table = False elif " _usbd_driver[] = {" in line: in_table = True self.tud_drivers = tud_drivers - def is_intrhandler(self, name: str) -> bool: + def is_intrhandler(self, name: QName) -> bool: return False + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] + def extra_nodes(self) -> typing.Collection[Node]: return [] - def indirect_callees(self, loc: str, line: str) -> tuple[list[str], bool] | None: + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: if "/tinyusb/" not in loc or "/tinyusb/src/host/" in loc or "_host.c:" in loc: return None m = re_call_other.fullmatch(line) @@ -1026,25 +1254,34 @@ class TinyUSBDevicePlugin: if call == "_ctrl_xfer.complete_cb": return [ # "process_test_mode_cb", - "tud_vendor_control_xfer_cb", + QName("tud_vendor_control_xfer_cb"), *sorted(self.tud_drivers["control_xfer_cb"]), ], False - elif call.startswith("driver->"): + if call.startswith("driver->"): return sorted(self.tud_drivers[call[len("driver->") :]]), False - elif call == "event.func_call.func": + if call == "event.func_call.func": # callback from usb_defer_func() return [], False return None - def skip_call(self, chain: list[str], call: str) -> bool: + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: return False class NewlibPlugin: - def is_intrhandler(self, name: str) -> bool: + def is_intrhandler(self, name: QName) -> bool: return False + def init_array(self) -> typing.Collection[QName]: + return [QName("register_fini")] + + def extra_includes(self) -> typing.Collection[str]: + return [ + # register_fini() calls atexit(__libc_fini_array) + "__libc_fini_array", + ] + def extra_nodes(self) -> typing.Collection[Node]: # This is accurate to # /usr/arm-none-eabi/lib/thumb/v6-m/nofp/libg.a as of @@ -1078,33 +1315,54 @@ class NewlibPlugin: synthetic_node("_getpid_r", 8, {"_getpid"}), synthetic_node("random", 8), synthetic_node("register_fini", 8, {"atexit"}), + synthetic_node("atexit", 8, {"__register_exitproc"}), + synthetic_node( + "__register_exitproc", + 32, + { + "__retarget_lock_acquire_recursive", + "__retarget_lock_release_recursive", + }, + ), + synthetic_node("__libc_fini_array", 16, {"_fini"}), ] - def indirect_callees(self, loc: str, line: str) -> tuple[list[str], bool] | None: + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: return None - def skip_call(self, chain: list[str], call: str) -> bool: + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: return False class LibGCCPlugin: - def is_intrhandler(self, name: str) -> bool: + def is_intrhandler(self, name: QName) -> bool: return False + def init_array(self) -> typing.Collection[QName]: + return [] + + def extra_includes(self) -> typing.Collection[str]: + return [] + def extra_nodes(self) -> typing.Collection[Node]: - # This is accurate to - # 
/usr/lib/gcc/arm-none-eabi/14.2.0/thumb/v6-m/nofp/libgcc.a - # as of Parabola's arm-none-eabi-gcc 14.2.0-1. + # This is accurate to Parabola's arm-none-eabi-gcc 14.2.0-1. return [ + # /usr/lib/gcc/arm-none-eabi/14.2.0/thumb/v6-m/nofp/libgcc.a synthetic_node("__aeabi_idiv0", 0), synthetic_node("__aeabi_ldiv0", 0), synthetic_node("__aeabi_llsr", 0), + # /usr/lib/gcc/arm-none-eabi/14.2.0/thumb/v6-m/nofp/crti.o + synthetic_node("_fini", 24), ] - def indirect_callees(self, loc: str, line: str) -> tuple[list[str], bool] | None: + def indirect_callees( + self, loc: str, line: str + ) -> tuple[typing.Collection[QName], bool] | None: return None - def skip_call(self, chain: list[str], call: str) -> bool: + def skip_call(self, chain: typing.Sequence[QName], call: QName) -> bool: return False @@ -1122,23 +1380,26 @@ def main( lib9p_plugin = Lib9PPlugin(arg_base_dir, arg_c_fnames) - sbc_gpio_handlers = [ - "w5500_intrhandler", - ] - - def sbc_is_thread(name: str) -> int: - if name.endswith("_cr") and name != "lib9p_srv_read_cr": - if "9p" in name: + def sbc_is_thread(name: QName) -> int: + if str(name).endswith("_cr") and str(name.base()) != "lib9p_srv_read_cr": + if "9p" in str(name.base()) or "lib9p/tests/test_server/main.c:" in str( + name + ): return lib9p_plugin.thread_count(name) return 1 - if name == ("_reset_handler" if arg_pico_platform == "rp2040" else "main"): + if str(name.base()) == ( + "_entry_point" if arg_pico_platform == "rp2040" else "main" + ): return 1 return 0 + libobj_plugin = LibObjPlugin(arg_c_fnames) + plugins += [ - AppPlugin(), - LibObjPlugin(arg_c_fnames), - LibHWPlugin(arg_pico_platform), + CmdPlugin(), + libobj_plugin, + LibHWPlugin(arg_pico_platform, libobj_plugin), + LibCRPlugin(), LibCRIPCPlugin(), lib9p_plugin, LibMiscPlugin(), @@ -1147,10 +1408,17 @@ def main( # pico-sdk ####################################################### if arg_pico_platform == "rp2040": + + def get_init_array() -> typing.Collection[QName]: + ret: list[QName] = [] + for plugin in plugins: + ret.extend(plugin.init_array()) + return ret + plugins += [ + PicoFmtPlugin(), PicoSDKPlugin( - app_gpio_handlers=sbc_gpio_handlers, - app_init_array=["register_fini"], + get_init_array=get_init_array, ), TinyUSBDevicePlugin(arg_c_fnames), NewlibPlugin(), @@ -1159,20 +1427,29 @@ def main( # Tie it all together ############################################ - def thread_filter(name: str) -> int: - return sbc_is_thread(name) + def thread_filter(name: QName) -> tuple[int, bool]: + return sbc_is_thread(name), True - def intrhandler_filter(name: str) -> int: - name = name.rsplit(":", 1)[-1] + def intrhandler_filter(name: QName) -> tuple[int, bool]: for plugin in plugins: if plugin.is_intrhandler(name): - return 1 - return 0 + return 1, True + return 0, False - def misc_filter(name: str) -> int: - if name.endswith(":__lm_printf") or name == "__assert_msg_fail": - return 1 - return 0 + def misc_filter(name: QName) -> tuple[int, bool]: + if str(name.base()) in ["__lm_printf", "__assert_msg_fail"]: + return 1, False + return 0, False + + extra_includes: list[str] = [] + for plugin in plugins: + extra_includes.extend(plugin.extra_includes()) + + def extra_filter(name: QName) -> tuple[int, bool]: + nonlocal extra_includes + if str(name.base()) in extra_includes: + return 1, True + return 0, False def location_xform(loc: str) -> str: if not loc.startswith("/"): @@ -1187,6 +1464,7 @@ def main( "Threads": thread_filter, "Interrupt handlers": intrhandler_filter, "Misc": misc_filter, + "Extra": extra_filter, }, 
app=PluginApplication(location_xform, plugins), cfg_max_call_depth=100, @@ -1195,21 +1473,30 @@ def main( def print_group(grp_name: str) -> None: grp = result.groups[grp_name] + nsum = sum(v.nstatic * v.cnt for v in grp.rows.values()) + nmax = max(v.nstatic for v in grp.rows.values()) + # Figure sizes. - namelen = max([len(k) for k in grp.rows.keys()] + [len(grp_name) + 4]) - numlen = len(str(grp.nsum)) + namelen = max( + [len(location_xform(str(k))) for k in grp.rows.keys()] + [len(grp_name) + 4] + ) + numlen = len(str(nsum)) sep1 = ("=" * namelen) + " " + "=" * numlen sep2 = ("-" * namelen) + " " + "-" * numlen # Print. print("= " + grp_name + " " + sep1[len(grp_name) + 3 :]) - for name, num in sorted(grp.rows.items()): - if num == 0: + for qname, val in sorted(grp.rows.items()): + name = location_xform(str(qname)) + if val.nstatic == 0: continue - print(f"{name.ljust(namelen)} {str(num).rjust(numlen)}") + print( + f"{name:<{namelen}} {val.nstatic:>{numlen}}" + + (f" * {val.cnt}" if val.cnt != 1 else "") + ) print(sep2) - print(f"{'Total'.ljust(namelen)} {str(grp.nsum).rjust(numlen)}") - print(f"{'Maximum'.ljust(namelen)} {str(grp.nmax).rjust(numlen)}") + print(f"{'Total':<{namelen}} {nsum:>{numlen}}") + print(f"{'Maximum':<{namelen}} {nmax:>{numlen}}") print(sep1) def next_power_of_2(x: int) -> int: @@ -1221,32 +1508,77 @@ def main( print_group("Threads") print_group("Interrupt handlers") print("*/") - overhead = result.groups["Interrupt handlers"].nmax - rows: list[tuple[str, int, int]] = [] - for funcname, base in result.groups["Threads"].rows.items(): - rows.append((funcname.split(":")[-1], base, next_power_of_2(base + overhead))) - namelen = max(len(r[0]) for r in rows) - baselen = max(len(str(r[1])) for r in rows) - sizelen = max(len(str(r[2])) for r in rows) + intrstack = max( + v.nstatic for v in result.groups["Interrupt handlers"].rows.values() + ) + stack_guard_size = 16 * 2 + + class CrRow(typing.NamedTuple): + name: str + cnt: int + base: int + size: int + + rows: list[CrRow] = [] + mainrow: CrRow | None = None + for funcname, val in result.groups["Threads"].rows.items(): + name = str(funcname.base()) + base = val.nstatic + size = base + intrstack + if name in ("main", "_entry_point"): + mainrow = CrRow(name=name, cnt=1, base=base, size=size) + else: + size = next_power_of_2(size + stack_guard_size) - stack_guard_size + rows.append(CrRow(name=name, cnt=val.cnt, base=base, size=size)) + namelen = max(len(r.name) for r in rows) + baselen = max(len(str(r.base)) for r in rows) + sizesum = sum(r.cnt * (r.size + stack_guard_size) for r in rows) + sizelen = len(str(max(sizesum, mainrow.size if mainrow else 0))) + + def print_row(comment: bool, name: str, size: int, eqn: str | None = None) -> None: + prefix = "const size_t CONFIG_COROUTINE_STACK_SIZE_" + if comment: + print(f"/* {name}".ljust(len(prefix) + namelen), end="") + else: + print(f"{prefix}{name:<{namelen}}", end="") + print(f" = {size:>{sizelen}};", end="") + if comment: + print(" */", end="") + elif eqn: + print(" ", end="") + if eqn: + print(f" /* {eqn} */", end="") + print() + for row in sorted(rows): - if row[0] in ("main", "_reset_handler"): - continue - print("const size_t CONFIG_COROUTINE_STACK_SIZE_", end="") - print(f"{row[0].ljust(namelen)} =", end="") - print(f" {str(row[2]).rjust(sizelen)};", end="") - print(f" /* LM_NEXT_POWER_OF_2({str(row[1]).rjust(baselen)}+{overhead}) */") + print_row( + False, + row.name, + row.size, + 
f"LM_NEXT_POWER_OF_2({row.base:>{baselen}}+{intrstack}+{stack_guard_size})-{stack_guard_size}", + ) + print_row(True, "TOTAL (inc. stack guard)", sizesum) + if mainrow: + print_row( + True, + "MAIN/KERNEL", + mainrow.size, + f" {mainrow.base:>{baselen}}+{intrstack}", + ) print() print("/*") print_group("Misc") for funcname in sorted(result.missing): - print(f"warning: missing: {funcname}") + print(f"warning: missing: {location_xform(str(funcname))}") for funcname in sorted(result.dynamic): - print(f"warning: dynamic-stack-usage: {funcname}") + print(f"warning: dynamic-stack-usage: {location_xform(str(funcname))}") print("*/") print("") print("/*") + if result.groups["Extra"].rows: + print_group("Extra") for funcname in sorted(result.included_funcs): print(f"included: {funcname}") print("*/") @@ -1266,7 +1598,7 @@ if __name__ == "__main__": for obj_fname in obj_fnames: if re_c_obj_suffix.search(obj_fname): ci_fnames.add(re_c_obj_suffix.sub(".c.ci", obj_fname)) - with open(obj_fname + ".d", "r") as fh: + with open(obj_fname + ".d", "r", encoding="utf-8") as fh: c_fnames.update( fh.read().replace("\\\n", " ").split(":")[-1].split() ) |