Diffstat (limited to 'build-aux')
-rw-r--r--   build-aux/measurestack/analyze.py            225
-rw-r--r--   build-aux/measurestack/app_plugins.py        107
-rw-r--r--   build-aux/measurestack/test_app_plugins.py   259
-rw-r--r--   build-aux/measurestack/util.py                 4
4 files changed, 497 insertions, 98 deletions
diff --git a/build-aux/measurestack/analyze.py b/build-aux/measurestack/analyze.py
index a93874f..67c44ce 100644
--- a/build-aux/measurestack/analyze.py
+++ b/build-aux/measurestack/analyze.py
@@ -3,24 +3,103 @@
# Copyright (C) 2024-2025 Luke T. Shumaker <lukeshu@lukeshu.com>
# SPDX-License-Identifier: AGPL-3.0-or-later
+import random
import re
import sys
import typing
from . import vcg
+# Whether to print "//dbg-cache:" on cache writes
+dbg_cache = False
+# Whether to print the graph in a /* comment */ before processing it
+dbg_dumpgraph = False
+# Whether to print "//dbg-nstatic:" lines that trace nstatic() execution
+dbg_nstatic = False
+# Whether to disable nstatic() caching (but does NOT disable any cache-related debug logging)
+dbg_nocache = False
+# Whether to sort things for consistently-ordered execution, or shuffle things to detect bugs
+dbg_sort: typing.Literal["unsorted", "sorted", "shuffled"] = "unsorted"
+
# pylint: disable=unused-variable
__all__ = [
"BaseName",
"QName",
"UsageKind",
"Node",
+ "maybe_sorted",
"AnalyzeResultVal",
"AnalyzeResultGroup",
"AnalyzeResult",
"analyze",
]
+
+def dumps(x: typing.Any, depth: int = 0, compact: bool = False) -> str:
+ match x:
+ case int() | str() | None:
+ return repr(x)
+ case dict():
+ if len(x) == 0:
+ return "{}"
+ ret = "{"
+ if not compact:
+ ret += "\n"
+ for k, v in x.items():
+ if not compact:
+ ret += "\t" * (depth + 1)
+ ret += dumps(k, depth + 1, True)
+ ret += ":"
+ if not compact:
+ ret += " "
+ ret += dumps(v, depth + 1, compact)
+ ret += ","
+ if not compact:
+ ret += "\n"
+ if not compact:
+ ret += "\t" * depth
+ ret += "}"
+ return ret
+ case list():
+ if len(x) == 0:
+ return "[]"
+ ret = "["
+ if not compact:
+ ret += "\n"
+ for v in x:
+ if not compact:
+ ret += "\t" * (depth + 1)
+ ret += dumps(v, depth + 1, compact)
+ ret += ","
+ if not compact:
+ ret += "\n"
+ if not compact:
+ ret += "\t" * depth
+ ret += "]"
+ return ret
+ case set():
+ if len(x) == 0:
+ return "set()"
+ ret = "{"
+ if not compact:
+ ret += "\n"
+ for v in x:
+ if not compact:
+ ret += "\t" * (depth + 1)
+ ret += dumps(v, depth + 1, compact)
+ ret += ","
+ if not compact:
+ ret += "\n"
+ if not compact:
+ ret += "\t" * depth
+ ret += "}"
+ return ret
+ case _:
+ if hasattr(x, "__dict__"):
+ return f"{x.__class__.__name__}(*{dumps(x.__dict__, depth, compact)})"
+ return f"TODO({x.__class__.__name__})"
+
+
# types ########################################################################
@@ -152,28 +231,39 @@ class AnalyzeResult(typing.NamedTuple):
class SkipModel(typing.NamedTuple):
"""Running the skipmodel calls `.fn(chain, ...)` with the chain
- consisting of the last `.nchain` items (if .nchain is an int), or
- the chain starting with the *last* occurance of `.nchain` (if
- .nchain is a collection). If the chain is not that long or does
- not contain a member of the collection, then .fn is not called and
- the call is *not* skipped.
+ consisting of the last few items of the input chain.
+
+ If `.nchain` is an int:
+
+  - the chain is the last `.nchain` items of the input chain. If
+ the input chain is not that long, then `.fn` is not called and
+ the call is *not* skipped.
+ If `.nchain` is a collection:
+
+  - the chain starts with the *last* occurrence of a member of
+    `.nchain` in the input chain. If the input chain does not
+    contain a member of the collection, then `.fn` is called with
+    an empty chain.
"""
nchain: int | typing.Collection[BaseName]
- fn: typing.Callable[[typing.Sequence[QName], QName], bool]
-
- def __call__(self, chain: typing.Sequence[QName], call: QName) -> tuple[bool, int]:
- if isinstance(self.nchain, int):
- if len(chain) >= self.nchain:
- _chain = chain[-self.nchain :]
- return self.fn(_chain, call), len(_chain)
- else:
- for i in reversed(range(len(chain))):
- if chain[i].base() in self.nchain:
- _chain = chain[i - 1 :]
- return self.fn(_chain, call), len(_chain)
- return False, 0
+ fn: typing.Callable[[typing.Sequence[QName], Node, QName], bool]
+
+ def __call__(
+ self, chain: typing.Sequence[QName], node: Node, call: QName
+ ) -> tuple[bool, int]:
+ match self.nchain:
+ case int():
+ if len(chain) >= self.nchain:
+ _chain = chain[-self.nchain :]
+ return self.fn(_chain, node, call), len(_chain) + 1
+ return False, 0
+ case _:
+ for i in reversed(range(len(chain))):
+ if chain[i].base() in self.nchain:
+ _chain = chain[i:]
+ return self.fn(_chain, node, call), len(_chain) + 1
+ return self.fn([], node, call), 1
class Application(typing.Protocol):
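A minimal sketch of the updated SkipModel call shape: the model function now receives the caller's Node as a separate argument, and the returned chain length is len(_chain) + 1. The names "alpha"/"beta"/"gamma" are made up and the import paths are assumed:

    from measurestack import util
    from measurestack.analyze import QName, SkipModel

    def never_skip(chain, node, call):
        # `chain` is the truncated ancestor chain selected by .nchain;
        # `node` is the caller whose call to `call` is being considered.
        return False

    caller = util.synthetic_node("alpha", 8)   # placeholder caller node
    model = SkipModel(1, never_skip)           # int nchain: keep the last 1 ancestor
    skip, used = model([QName("beta")], caller, QName("gamma"))
    # never_skip() saw chain == [QName("beta")]; skip is False, used == 2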
@@ -235,6 +325,39 @@ class _Graph:
return self._resolve_cache[funcname]
+if typing.TYPE_CHECKING:
+ from _typeshed import SupportsRichComparisonT as _T_sortable
+
+_T = typing.TypeVar("_T")
+
+
+@typing.overload
+def maybe_sorted(
+ unsorted: typing.Iterable["_T_sortable"], /, *, key: None = None
+) -> typing.Iterable["_T_sortable"]: ...
+@typing.overload
+def maybe_sorted(
+ unsorted: typing.Iterable[_T], /, *, key: typing.Callable[[_T], "_T_sortable"]
+) -> typing.Iterable[_T]: ...
+
+
+def maybe_sorted(
+ unsorted: typing.Iterable[_T],
+ /,
+ *,
+ key: typing.Callable[[_T], "_T_sortable"] | None = None,
+) -> typing.Iterable[_T]:
+ match dbg_sort:
+ case "unsorted":
+ return unsorted
+ case "sorted":
+ return sorted(unsorted, key=key) # type: ignore
+ case "shuffled":
+ ret = [*unsorted]
+ random.shuffle(ret)
+ return ret
+
+
def _make_graph(
ci_fnames: typing.Collection[str],
app: Application,
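A quick sketch of what the new dbg_sort knob does to maybe_sorted(); the set literal is arbitrary and the import path is assumed:

    from measurestack import analyze

    data = {"b", "a", "c"}                    # set iteration order is arbitrary
    analyze.dbg_sort = "sorted"
    print(list(analyze.maybe_sorted(data)))   # ['a', 'b', 'c']: reproducible runs
    analyze.dbg_sort = "shuffled"
    print(list(analyze.maybe_sorted(data)))   # random order: shakes out order-dependence bugs
    analyze.dbg_sort = "unsorted"
    print(list(analyze.maybe_sorted(data)))   # passes the iterable through untouched (default)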
@@ -297,7 +420,7 @@ def _make_graph(
raise ValueError(f"unknown caller: {caller}")
if callee == QName("__indirect_call"):
callees, missing_ok = app.indirect_callees(elem)
- for callee in callees:
+ for callee in maybe_sorted(callees):
if callee not in graph[caller].calls:
graph[caller].calls[callee] = missing_ok
else:
@@ -305,12 +428,15 @@ def _make_graph(
case _:
raise ValueError(f"unknown elem type {elem.typ!r}")
- for ci_fname in ci_fnames:
+ for ci_fname in maybe_sorted(ci_fnames):
with open(ci_fname, "r", encoding="utf-8") as fh:
for elem in vcg.parse_vcg(fh):
handle_elem(elem)
- for node in app.extra_nodes():
+ def sort_key(node: Node) -> QName:
+ return node.funcname
+
+ for node in maybe_sorted(app.extra_nodes(), key=sort_key):
if node.funcname in graph:
raise ValueError(f"duplicate node {node.funcname}")
graph[node.funcname] = node
@@ -332,33 +458,33 @@ def analyze(
cfg_max_call_depth: int,
) -> AnalyzeResult:
graphdata = _make_graph(ci_fnames, app)
+ if dbg_dumpgraph:
+ print(f"/* {dumps(graphdata)} */")
missing: set[QName] = set()
dynamic: set[QName] = set()
included_funcs: set[QName] = set()
- dbg = False
-
track_inclusion: bool = True
skipmodels = app.skipmodels()
for name, model in skipmodels.items():
- if isinstance(model.nchain, int):
- assert model.nchain > 1
- else:
+ if not isinstance(model.nchain, int):
assert len(model.nchain) > 0
_nstatic_cache: dict[QName, int] = {}
def _nstatic(chain: list[QName], funcname: QName) -> tuple[int, int]:
- nonlocal dbg
nonlocal track_inclusion
assert funcname in graphdata.graph
+ def putdbg(msg: str) -> None:
+ print(f"//dbg-nstatic: {'- '*len(chain)}{msg}")
+
node = graphdata.graph[funcname]
- if dbg:
- print(f"//dbg: {'- '*len(chain)}{funcname}\t{node.nstatic}")
+ if dbg_nstatic:
+ putdbg(f"{funcname}\t{node.nstatic}")
if node.usage_kind == "dynamic" or node.ndynamic > 0:
dynamic.add(funcname)
if track_inclusion:
@@ -378,37 +504,52 @@ def analyze(
call_qname = graphdata.resolve_funcname(call_orig_qname)
if not call_qname:
if skipmodel:
- skip, _ = skipmodel(chain, call_orig_qname)
+ skip, _ = skipmodel(chain[:-1], node, call_orig_qname)
if skip:
- if dbg:
- print(
- f"//dbg: {'- '*len(chain)}{call_orig_qname}\tskip missing"
- )
+ if dbg_nstatic:
+ putdbg(f"{call_orig_qname}\tskip missing")
continue
if not call_missing_ok:
missing.add(call_orig_qname)
- if dbg:
- print(f"//dbg: {'- '*len(chain)}{call_orig_qname}\tmissing")
+ if dbg_nstatic:
+ putdbg(f"{call_orig_qname}\tmissing")
continue
# 2. Skip
if skipmodel:
- skip, skip_nchain = skipmodel(chain, call_qname)
+ skip, skip_nchain = skipmodel(chain[:-1], node, call_qname)
max_call_nchain = max(max_call_nchain, skip_nchain)
if skip:
- if dbg:
- print(f"//dbg: {'- '*len(chain)}{call_qname}\tskip")
+ if dbg_nstatic:
+ putdbg(f"{call_qname}\tskip")
continue
# 3. Call
- if skip_nchain == 0 and call_qname in _nstatic_cache:
- max_call_nstatic = max(max_call_nstatic, _nstatic_cache[call_qname])
+ if (
+ (not dbg_nocache)
+ and skip_nchain == 0
+ and call_qname in _nstatic_cache
+ ):
+ call_nstatic = _nstatic_cache[call_qname]
+ if dbg_nstatic:
+ putdbg(f"{call_qname}\ttotal={call_nstatic} (cache-read)")
+ max_call_nstatic = max(max_call_nstatic, call_nstatic)
else:
call_nstatic, call_nchain = _nstatic(chain, call_qname)
max_call_nstatic = max(max_call_nstatic, call_nstatic)
max_call_nchain = max(max_call_nchain, call_nchain)
if skip_nchain == 0 and call_nchain == 0:
- _nstatic_cache[call_qname] = call_nstatic
+ if dbg_nstatic:
+ putdbg(f"{call_qname}\ttotal={call_nstatic} (cache-write)")
+ if call_qname not in _nstatic_cache:
+ if dbg_cache:
+ print(f"//dbg-cache: {call_qname} = {call_nstatic}")
+ _nstatic_cache[call_qname] = call_nstatic
+ else:
+ assert dbg_nocache
+ assert _nstatic_cache[call_qname] == call_nstatic
+ elif dbg_nstatic:
+ putdbg(f"{call_qname}\ttotal={call_nstatic} (do-not-cache)")
chain.pop()
return node.nstatic + max_call_nstatic, max(0, max_call_nchain - 1)
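Taken together, the new module-level debug knobs can be flipped before calling analyze(); a rough sketch where my_ci_files, my_filters, and my_app are hypothetical placeholders for whatever the caller already has:

    from measurestack import analyze

    analyze.dbg_dumpgraph = True   # dump the parsed graph in a /* ... */ comment
    analyze.dbg_nstatic = True     # trace the nstatic() recursion as "//dbg-nstatic:" lines
    analyze.dbg_cache = True       # log "//dbg-cache:" on first-time cache writes
    analyze.dbg_nocache = True     # skip cache reads; recomputed values are cross-checked
                                   # against any previously cached value

    result = analyze.analyze(
        ci_fnames=my_ci_files,         # hypothetical: the VCG call-graph files
        app_func_filters=my_filters,   # hypothetical: {"Main": filter_fn, ...}
        app=my_app,                    # hypothetical: an object satisfying Application
        cfg_max_call_depth=32,
    )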
diff --git a/build-aux/measurestack/app_plugins.py b/build-aux/measurestack/app_plugins.py
index ae2dba9..8eda36c 100644
--- a/build-aux/measurestack/app_plugins.py
+++ b/build-aux/measurestack/app_plugins.py
@@ -55,17 +55,16 @@ class CmdPlugin:
return {}
-re_comment = re.compile(r"/\*.*?\*/")
-re_ws = re.compile(r"\s+")
-re_lo_iface = re.compile(r"^\s*#\s*define\s+(?P<name>\S+)_LO_IFACE")
-re_lo_func = re.compile(r"LO_FUNC *\([^,]*, *(?P<name>[^,) ]+) *[,)]")
-re_lo_implementation = re.compile(
- r"^LO_IMPLEMENTATION_[HC]\s*\(\s*(?P<iface>[^, ]+)\s*,\s*(?P<impl_typ>[^,]+)\s*,\s*(?P<impl_name>[^, ]+)\s*[,)].*"
-)
-re_call_objcall = re.compile(r"LO_CALL\((?P<obj>[^,]+), (?P<meth>[^,)]+)[,)].*")
-
-
class LibMiscPlugin:
+ re_comment = re.compile(r"/\*.*?\*/")
+ re_ws = re.compile(r"\s+")
+ re_lo_iface = re.compile(r"^\s*#\s*define\s+(?P<name>\S+)_LO_IFACE")
+ re_lo_func = re.compile(r"LO_FUNC *\([^,]*, *(?P<name>[^,) ]+) *[,)]")
+ re_lo_implementation = re.compile(
+ r"^LO_IMPLEMENTATION_[HC]\s*\(\s*(?P<iface>[^, ]+)\s*,\s*(?P<impl_typ>[^,]+)\s*,\s*(?P<impl_name>[^, ]+)\s*[,)].*"
+ )
+ re_call_objcall = re.compile(r"LO_CALL\((?P<obj>[^,]+), (?P<meth>[^,)]+)[,)].*")
+
objcalls: dict[str, set[QName]] # method_name => {method_impls}
def __init__(self, arg_c_fnames: typing.Collection[str]) -> None:
@@ -73,16 +72,16 @@ class LibMiscPlugin:
for fname in arg_c_fnames:
with open(fname, "r", encoding="utf-8") as fh:
while line := fh.readline():
- if m := re_lo_iface.match(line):
+ if m := self.re_lo_iface.match(line):
iface_name = m.group("name")
if iface_name not in ifaces:
ifaces[iface_name] = set()
while line.endswith("\\\n"):
line += fh.readline()
line = line.replace("\\\n", " ")
- line = re_comment.sub(" ", line)
- line = re_ws.sub(" ", line)
- for m2 in re_lo_func.finditer(line):
+ line = self.re_comment.sub(" ", line)
+ line = self.re_ws.sub(" ", line)
+ for m2 in self.re_lo_func.finditer(line):
ifaces[iface_name].add(m2.group("name"))
implementations: dict[str, set[str]] = {} # iface_name => {impl_names}
@@ -92,7 +91,7 @@ class LibMiscPlugin:
with open(fname, "r", encoding="utf-8") as fh:
for line in fh:
line = line.strip()
- if m := re_lo_implementation.match(line):
+ if m := self.re_lo_implementation.match(line):
implementations[m.group("iface")].add(m.group("impl_name"))
objcalls: dict[str, set[QName]] = {} # method_name => {method_impls}
@@ -121,7 +120,7 @@ class LibMiscPlugin:
) -> tuple[typing.Collection[QName], bool] | None:
if "/3rd-party/" in loc:
return None
- if m := re_call_objcall.fullmatch(line):
+ if m := self.re_call_objcall.fullmatch(line):
if m.group("meth") in self.objcalls:
return self.objcalls[m.group("meth")], False
return [
@@ -137,11 +136,11 @@ class LibMiscPlugin:
}
def _skipmodel___assert_msg_fail(
- self, chain: typing.Sequence[QName], call: QName
+ self, chain: typing.Sequence[QName], node: Node, call: QName
) -> bool:
if call.base() in [BaseName("__lm_printf"), BaseName("__lm_light_printf")]:
return any(
- c.base() == BaseName("__assert_msg_fail") for c in reversed(chain[:-1])
+ c.base() == BaseName("__assert_msg_fail") for c in reversed(chain)
)
return False
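A short sketch of how the reworked __assert_msg_fail skip model reads under the new signature; the chain values mirror the new test's fixtures, and synthetic_node() here only supplies a placeholder caller Node:

    from measurestack import app_plugins, util
    from measurestack.analyze import BaseName, QName

    plugin = app_plugins.LibMiscPlugin(arg_c_fnames=[])
    model = plugin.skipmodels()[BaseName("__assert_msg_fail")]
    caller = util.synthetic_node("__assert_msg_fail", 8)   # placeholder Node

    # First visit: no earlier __assert_msg_fail in the ancestor chain, so the
    # printf call is analyzed normally (not skipped).
    skip, _ = model([], caller, QName("__lm_light_printf"))
    assert not skip

    # Re-entry: __assert_msg_fail already appears in the ancestor chain, so
    # the nested printf call is skipped to cut the recursion.
    chain = [QName(c) for c in ("__assert_msg_fail", "__lm_light_printf",
                                "fmt_vfctprintf", "stdio_putchar")]
    skip, _ = model(chain, caller, QName("__lm_light_printf"))
    assert skip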
@@ -268,19 +267,18 @@ class LibCRIPCPlugin:
return {}
-re_tmessage_handler = re.compile(
- r"^\s*\[LIB9P_TYP_T[^]]+\]\s*=\s*\(tmessage_handler\)\s*(?P<handler>\S+),\s*$"
-)
-re_lib9p_msg_entry = re.compile(r"^\s*_MSG_(?:[A-Z]+)\((?P<typ>\S+)\),$")
-re_lib9p_caller = re.compile(
- r"^lib9p_(?P<grp>[TR])msg_(?P<meth>validate|unmarshal|marshal)$"
-)
-re_lib9p_callee = re.compile(
- r"^(?P<meth>validate|unmarshal|marshal)_(?P<msg>(?P<grp>[TR]).*)$"
-)
-
-
class Lib9PPlugin:
+ re_tmessage_handler = re.compile(
+ r"^\s*\[LIB9P_TYP_T[^]]+\]\s*=\s*\(tmessage_handler\)\s*(?P<handler>\S+),\s*$"
+ )
+ re_lib9p_msg_entry = re.compile(r"^\s*_MSG_(?:[A-Z]+)\((?P<typ>\S+)\),$")
+ re_lib9p_caller = re.compile(
+ r"^lib9p_(?P<grp>[TR])msg_(?P<meth>validate|unmarshal|marshal)$"
+ )
+ re_lib9p_callee = re.compile(
+ r"^(?P<meth>validate|unmarshal|marshal)_(?P<msg>(?P<grp>[TR]).*)$"
+ )
+
tmessage_handlers: set[QName] | None
lib9p_msgs: set[str]
_CONFIG_9P_MAX_CONNS: int | None
@@ -344,7 +342,7 @@ class Lib9PPlugin:
with open(lib9p_srv_c_fname, "r", encoding="utf-8") as fh:
for line in fh:
line = line.rstrip()
- if m := re_tmessage_handler.fullmatch(line):
+ if m := self.re_tmessage_handler.fullmatch(line):
tmessage_handlers.add(QName(m.group("handler")))
self.tmessage_handlers = tmessage_handlers
@@ -353,7 +351,7 @@ class Lib9PPlugin:
with open(lib9p_generated_c_fname, "r", encoding="utf-8") as fh:
for line in fh:
line = line.rstrip()
- if m := re_lib9p_msg_entry.fullmatch(line):
+ if m := self.re_lib9p_msg_entry.fullmatch(line):
typ = m.group("typ")
lib9p_msgs.add(typ)
self.lib9p_msgs = lib9p_msgs
@@ -401,15 +399,15 @@ class Lib9PPlugin:
def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
ret: dict[BaseName, analyze.SkipModel] = {
BaseName("_lib9p_validate"): analyze.SkipModel(
- 2,
+ 1,
self._skipmodel__lib9p_validate_unmarshal_marshal,
),
BaseName("_lib9p_unmarshal"): analyze.SkipModel(
- 2,
+ 1,
self._skipmodel__lib9p_validate_unmarshal_marshal,
),
BaseName("_lib9p_marshal"): analyze.SkipModel(
- 2,
+ 1,
self._skipmodel__lib9p_validate_unmarshal_marshal,
),
BaseName("_vfctprintf"): analyze.SkipModel(
@@ -419,18 +417,18 @@ class Lib9PPlugin:
return ret
def _skipmodel__lib9p_validate_unmarshal_marshal(
- self, chain: typing.Sequence[QName], call: QName
+ self, chain: typing.Sequence[QName], node: Node, call: QName
) -> bool:
- m_caller = re_lib9p_caller.fullmatch(str(chain[-2].base()))
+ m_caller = self.re_lib9p_caller.fullmatch(str(chain[-1].base()))
assert m_caller
- m_callee = re_lib9p_callee.fullmatch(str(call.base()))
+ m_callee = self.re_lib9p_callee.fullmatch(str(call.base()))
if not m_callee:
return False
return m_caller.group("grp") != m_callee.group("grp")
def _skipmodel__vfctprintf(
- self, chain: typing.Sequence[QName], call: QName
+ self, chain: typing.Sequence[QName], node: Node, call: QName
) -> bool:
if call.base() == BaseName("libfmt_conv_formatter"):
return any(c.base() in self.formatters for c in chain)
@@ -512,7 +510,7 @@ class PicoFmtPlugin:
return ret
def _skipmodel_fmt_state_putchar(
- self, chain: typing.Sequence[QName], call: QName
+ self, chain: typing.Sequence[QName], node: Node, call: QName
) -> bool:
if call.base() in self.known_fct.values():
fct: BaseName | None = None
@@ -763,16 +761,17 @@ class PicoSDKPlugin:
return ret
-re_tud_class = re.compile(
- r"^\s*#\s*define\s+(?P<k>CFG_TUD_(?:\S{3}|AUDIO|VIDEO|MIDI|VENDOR|USBTMC|DFU_RUNTIME|ECM_RNDIS))\s+(?P<v>\S+).*"
-)
-re_tud_entry = re.compile(r"^\s+\.(?P<meth>\S+)\s*=\s*(?P<impl>[a-zA-Z0-9_]+)(?:,.*)?")
-re_tud_if1 = re.compile(r"^\s*#\s*if (\S+)\s*")
-re_tud_if2 = re.compile(r"^\s*#\s*if (\S+)\s*\|\|\s*(\S+)\s*")
-re_tud_endif = re.compile(r"^\s*#\s*endif\s*")
-
-
class TinyUSBDevicePlugin:
+ re_tud_class = re.compile(
+ r"^\s*#\s*define\s+(?P<k>CFG_TUD_(?:\S{3}|AUDIO|VIDEO|MIDI|VENDOR|USBTMC|DFU_RUNTIME|ECM_RNDIS))\s+(?P<v>\S+).*"
+ )
+ re_tud_entry = re.compile(
+ r"^\s+\.(?P<meth>\S+)\s*=\s*(?P<impl>[a-zA-Z0-9_]+)(?:,.*)?"
+ )
+ re_tud_if1 = re.compile(r"^\s*#\s*if (\S+)\s*")
+ re_tud_if2 = re.compile(r"^\s*#\s*if (\S+)\s*\|\|\s*(\S+)\s*")
+ re_tud_endif = re.compile(r"^\s*#\s*endif\s*")
+
tud_drivers: dict[str, set[QName]] # method_name => {method_impls}
def __init__(self, arg_c_fnames: typing.Collection[str]) -> None:
@@ -794,7 +793,7 @@ class TinyUSBDevicePlugin:
in_table = False
for line in fh:
line = line.rstrip()
- if m := re_tud_class.fullmatch(line):
+ if m := self.re_tud_class.fullmatch(line):
k = m.group("k")
v = m.group("v")
tusb_config[k] = bool(int(v))
@@ -806,13 +805,13 @@ class TinyUSBDevicePlugin:
for line in fh:
line = line.rstrip()
if in_table:
- if m := re_tud_if1.fullmatch(line):
+ if m := self.re_tud_if1.fullmatch(line):
enabled = tusb_config[m.group(1)]
- elif m := re_tud_if2.fullmatch(line):
+ elif m := self.re_tud_if2.fullmatch(line):
enabled = tusb_config[m.group(1)] or tusb_config[m.group(2)]
- elif re_tud_endif.fullmatch(line):
+ elif self.re_tud_endif.fullmatch(line):
enabled = True
- if m := re_tud_entry.fullmatch(line):
+ if m := self.re_tud_entry.fullmatch(line):
meth = m.group("meth")
impl = m.group("impl")
if meth == "name" or not enabled:
diff --git a/build-aux/measurestack/test_app_plugins.py b/build-aux/measurestack/test_app_plugins.py
new file mode 100644
index 0000000..8aa0a6c
--- /dev/null
+++ b/build-aux/measurestack/test_app_plugins.py
@@ -0,0 +1,259 @@
+# build-aux/measurestack/test_app_plugins.py - Tests for app_plugins.py
+#
+# Copyright (C) 2025 Luke T. Shumaker <lukeshu@lukeshu.com>
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+# pylint: disable=unused-variable
+
+import typing
+
+from . import analyze, app_plugins, util, vcg
+from .analyze import BaseName, Node, QName, SkipModel
+
+
+def aprime_gen(l: int, n: int) -> typing.Sequence[int]:
+ """Return an `l`-length sequence of nonnegative
+ integers such that any `n`-length-or-shorter combination of
+ members with repeats allowed can be uniquely identified by its
+ sum.
+
+ (If that were "product" instead of "sum", the obvious solution
+ would be the first `l` primes.)
+
+ """
+ seq = [1]
+ while len(seq) < l:
+ x = seq[-1] * n + 1
+ seq.append(x)
+ return seq
+
+
+def aprime_decompose(
+ aprimes: typing.Sequence[int], tot: int
+) -> tuple[typing.Collection[int], typing.Collection[int]]:
+ ret_idx = []
+ ret_val = []
+ while tot:
+ idx = max(i for i in range(len(aprimes)) if aprimes[i] <= tot)
+ val = aprimes[idx]
+ ret_idx.append(idx)
+ ret_val.append(val)
+ tot -= val
+ return ret_idx, ret_val
+
+
+def aprime_assert(
+ aprimes: typing.Sequence[int], act_sum: int, exp_idxs: typing.Collection[int]
+) -> None:
+ act_idxs, act_vals = aprime_decompose(aprimes, act_sum)
+ exp_sum = sum(aprimes[i] for i in exp_idxs)
+ # exp_vals = [aprimes[i] for i in exp]
+
+ act_str = f"{act_sum}:{[f's[{v}]' for v in sorted(act_idxs)]}"
+ exp_str = f"{exp_sum}:{[f's[{v}]' for v in sorted(exp_idxs)]}"
+ if act_str != exp_str:
+ assert f"act={act_str}" == f"exp={exp_str}"
+
+
+def test_assert_msg_fail() -> None:
+ num_funcs = 7
+ max_call_depth = 7
+ s = aprime_gen(num_funcs, max_call_depth)
+
+ class TestApplication:
+ def extra_nodes(self) -> typing.Collection[Node]:
+ # 1 2 3 4 5 6 7 <= call_depth
+ # - main() s[0]
+ # - __assert_msg_fail() s[1] *
+ # - __lm_light_printf() s[3]
+ # - fmt_vfctprintf() s[6]
+ # - stdio_putchar() s[5]
+ # - __assert_msg_fail() s[1] **
+ # - __lm_abort() s[2]
+ # - stdio_flush() s[4] (inconsequential)
+ # - __lm_abort() s[2] (inconsequential)
+ # ----
+ # sum(s[i] for i in [0, 1, 3, 6, 5, 1, 2])
+ ret = [
+ # main.c
+ util.synthetic_node("main", s[0], {"__assert_msg_fail"}),
+ # assert.c
+ util.synthetic_node(
+ "__assert_msg_fail", s[1], {"__lm_light_printf", "__lm_abort"}
+ ),
+ # intercept.c / libfmt/libmisc.c
+ util.synthetic_node("__lm_abort", s[2]),
+ util.synthetic_node(
+ "__lm_light_printf", s[3], {"fmt_vfctprintf", "stdio_flush"}
+ ),
+ util.synthetic_node("stdio_flush", s[4]),
+ util.synthetic_node("stdio_putchar", s[5], {"__assert_msg_fail"}),
+ # printf.c
+ util.synthetic_node("fmt_vfctprintf", s[6], {"stdio_putchar"}),
+ ]
+ assert num_funcs == len(s) == len(ret) == len(set(n.nstatic for n in ret))
+ return ret
+
+ def indirect_callees(
+ self, elem: vcg.VCGElem
+ ) -> tuple[typing.Collection[QName], bool]:
+ return [], False
+
+ def skipmodels(self) -> dict[BaseName, SkipModel]:
+ models = app_plugins.LibMiscPlugin(arg_c_fnames=[]).skipmodels()
+ assert BaseName("__assert_msg_fail") in models
+ orig_model = models[BaseName("__assert_msg_fail")]
+
+ def wrapped_model_fn(
+ chain: typing.Sequence[QName], node: Node, call: QName
+ ) -> bool:
+ dbgstr = (
+ ("=>".join(str(c) for c in [*chain, node.funcname]))
+ + "=?=>"
+ + str(call)
+ )
+ assert dbgstr in [
+ "__assert_msg_fail=?=>__lm_light_printf",
+ "__assert_msg_fail=?=>__lm_abort",
+ "__assert_msg_fail=>__lm_light_printf=>fmt_vfctprintf=>stdio_putchar=>__assert_msg_fail=?=>__lm_light_printf",
+ "__assert_msg_fail=>__lm_light_printf=>fmt_vfctprintf=>stdio_putchar=>__assert_msg_fail=?=>__lm_abort",
+ ]
+ return orig_model.fn(chain, node, call)
+
+ models[BaseName("__assert_msg_fail")] = SkipModel(
+ orig_model.nchain, wrapped_model_fn
+ )
+ return models
+
+ def test_filter(name: QName) -> tuple[int, bool]:
+ if name.base() == BaseName("main"):
+ return 1, True
+ return 0, False
+
+ result = analyze.analyze(
+ ci_fnames=[],
+ app_func_filters={
+ "Main": test_filter,
+ },
+ app=TestApplication(),
+ cfg_max_call_depth=max_call_depth,
+ )
+
+ aprime_assert(
+ s, result.groups["Main"].rows[QName("main")].nstatic, [0, 1, 3, 6, 5, 1, 2]
+ )
+
+
+def test_fct() -> None:
+ num_funcs = 13
+ max_call_depth = 12
+ s = aprime_gen(num_funcs, max_call_depth)
+
+ class TestPlugin:
+ def is_intrhandler(self, name: QName) -> bool:
+ return False
+
+ def init_array(self) -> typing.Collection[QName]:
+ return []
+
+ def extra_includes(self) -> typing.Collection[BaseName]:
+ return []
+
+ def indirect_callees(
+ self, loc: str, line: str
+ ) -> tuple[typing.Collection[QName], bool] | None:
+ return None
+
+ def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
+ return {}
+
+ def extra_nodes(self) -> typing.Collection[Node]:
+ # 1. | a +s[0] | b +s[ 1] | c +s[ 2] |*
+ # 2. | fmt_vsnprintf +s[3] | vprintf +s[ 4] | __lm_light_printf +s[ 5] |*
+ # 3. | fmt_vfctprintf +s[6] | fmt_vfctprintf +s[ 6] | fmt_vfctprintf +s[ 6] |
+ # 4. | fmt_state_putchar +s[7] | fmt_state_putchar +s[ 7] | fmt_state_putchar +s[ 7] |
+ # 5. | _out_buffer +s[8] | stdio_buffered_printer +s[ 9] | libfmt_light_fct +s[10] |*
+ # 6. | | __assert_msg_fail +s[11] | __assert_msg_fail +s[11] |
+ # 7. | | a. __lm_light_printf +s[ 5] | a. __lm_light_printf +s[ 5] |
+ # 8. | | a. fmt_vfctprintf +s[ 6] | a. fmt_vfctprintf +s[ 6] |
+ # 9. | | a. fmt_state_putchar +s[ 7] | a. fmt_state_putchar +s[ 7] |
+ # 10. | | a. libfmt_light_fct +s[10] | a. libfmt_light_fct +s[10] |
+ # 11. | | a. __assert_msg_fail +s[11] | a. __assert_msg_fail +s[11] |
+ # 12. | | a. __lm_abort +s[12] | a. __lm_abort +s[12] |
+ # 7. | | b. __lm_abort | b. __lm_abort |
+ return [
+ # main.c
+ util.synthetic_node("a", s[0], {"fmt_vsnprintf"}), # _out_buffer
+ util.synthetic_node("b", s[1], {"vprintf"}), # stdio_buffered_printer
+ util.synthetic_node(
+ "c", s[2], {"__lm_light_printf"}
+ ), # libfmt_light_printf
+ # wrappers
+ util.synthetic_node("fmt_vsnprintf", s[3], {"fmt_vfctprintf"}),
+ util.synthetic_node("__wrap_vprintf", s[4], {"fmt_vfctprintf"}),
+ util.synthetic_node("__lm_light_printf", s[5], {"fmt_vfctprintf"}),
+ # printf.c
+ util.synthetic_node("fmt_vfctprintf", s[6], {"fmt_state_putchar"}),
+ util.synthetic_node(
+ "fmt_state_putchar",
+ s[7],
+ {"_out_buffer", "stdio_buffered_printer", "libfmt_light_fct"},
+ ),
+ # fcts
+ util.synthetic_node("_out_buffer", s[8]),
+ util.synthetic_node(
+ "stdio_buffered_printer", s[9], {"__assert_msg_fail"}
+ ),
+ util.synthetic_node("libfmt_light_fct", s[10], {"__assert_msg_fail"}),
+ # assert.c
+ util.synthetic_node(
+ "__assert_msg_fail",
+ s[11],
+ {"__lm_light_printf", "__lm_abort"},
+ ),
+ # intercept.c / libfmt/libmisc.c
+ util.synthetic_node("__lm_abort", s[12]),
+ ]
+
+ plugins: list[util.Plugin] = [
+ TestPlugin(),
+ app_plugins.LibMiscPlugin(arg_c_fnames=[]),
+ # fmt_vsnprintf => fct=_out_buffer
+ # if rp2040:
+ # __wrap_vprintf => fct=stdio_buffered_printer
+ # stdio_vprintf => fct=stdio_buffered_printer
+ # __lm_light_printf => fct=libfmt_light_fct
+ # if host:
+ # __lm_printf => fct=libfmt_libc_fct
+ # __lm_light_printf => fct=libfmt_libc_fct
+ app_plugins.PicoFmtPlugin("rp2040"),
+ ]
+
+ def test_filter(name: QName) -> tuple[int, bool]:
+ if str(name.base()) in ["a", "b", "c"]:
+ return 1, True
+ return 0, False
+
+ def _str_location_xform(loc: str) -> str:
+ return loc
+
+ result = analyze.analyze(
+ ci_fnames=[],
+ app_func_filters={
+ "Main": test_filter,
+ },
+ app=util.PluginApplication(_str_location_xform, plugins),
+ cfg_max_call_depth=max_call_depth,
+ )
+
+ aprime_assert(s, result.groups["Main"].rows[QName("a")].nstatic, [0, 3, 6, 7, 8])
+ aprime_assert(
+ s,
+ result.groups["Main"].rows[QName("b")].nstatic,
+ [1, 4, 6, 7, 9, 11, 5, 6, 7, 10, 11, 12],
+ )
+ aprime_assert(
+ s,
+ result.groups["Main"].rows[QName("c")].nstatic,
+ [2, 5, 6, 7, 10, 11, 5, 6, 7, 10, 11, 12],
+ )
diff --git a/build-aux/measurestack/util.py b/build-aux/measurestack/util.py
index 47b2617..0af3d02 100644
--- a/build-aux/measurestack/util.py
+++ b/build-aux/measurestack/util.py
@@ -7,7 +7,7 @@ import re
import typing
from . import analyze, vcg
-from .analyze import BaseName, Node, QName
+from .analyze import BaseName, Node, QName, maybe_sorted
# pylint: disable=unused-variable
__all__ = [
@@ -32,7 +32,7 @@ def synthetic_node(
n.nstatic = nstatic
n.ndynamic = 0
- n.calls = dict((QName(c), False) for c in calls)
+ n.calls = dict((QName(c), False) for c in maybe_sorted(calls))
return n