Diffstat (limited to 'build-aux')
-rw-r--r--  build-aux/measurestack/analyze.py           | 209
-rw-r--r--  build-aux/measurestack/app_plugins.py       | 197
-rw-r--r--  build-aux/measurestack/test_app_plugins.py  |  29
-rw-r--r--  build-aux/measurestack/testutil.py          |   2
-rw-r--r--  build-aux/measurestack/util.py              |   8
5 files changed, 220 insertions(+), 225 deletions(-)
diff --git a/build-aux/measurestack/analyze.py b/build-aux/measurestack/analyze.py
index 67c44ce..3dc1a26 100644
--- a/build-aux/measurestack/analyze.py
+++ b/build-aux/measurestack/analyze.py
@@ -27,7 +27,10 @@ __all__ = [
"QName",
"UsageKind",
"Node",
+ "NodeHandler",
+ "NodeHandleCB",
"maybe_sorted",
+ "handle_simple_node",
"AnalyzeResultVal",
"AnalyzeResultGroup",
"AnalyzeResult",
@@ -229,41 +232,16 @@ class AnalyzeResult(typing.NamedTuple):
included_funcs: set[QName]
-class SkipModel(typing.NamedTuple):
- """Running the skipmodel calls `.fn(chain, ...)` with the chain
- consisting of the last few items of the input chain.
-
- If `.nchain` is an int:
-
- - the chain is the last `.nchain` items or the input chain. If
- the input chain is not that long, then `.fn` is not called and
- the call is *not* skipped.
-
- If `.nchain` is a collection:
-
- - the chain starts with the *last* occurance of `.nchain` in the
- input chain. If the input chain does not contain a member of
- the collection, then .fn is called with an empty chain.
- """
+class NodeHandleCB(typing.Protocol):
+ def __call__(
+ self, chain: typing.Sequence[QName], missing_ok: bool = False
+ ) -> tuple[int, bool]: ...
- nchain: int | typing.Collection[BaseName]
- fn: typing.Callable[[typing.Sequence[QName], Node, QName], bool]
+class NodeHandler(typing.Protocol):
def __call__(
- self, chain: typing.Sequence[QName], node: Node, call: QName
- ) -> tuple[bool, int]:
- match self.nchain:
- case int():
- if len(chain) >= self.nchain:
- _chain = chain[-self.nchain :]
- return self.fn(_chain, node, call), len(_chain) + 1
- return False, 0
- case _:
- for i in reversed(range(len(chain))):
- if chain[i].base() in self.nchain:
- _chain = chain[i:]
- return self.fn(_chain, node, call), len(_chain) + 1
- return self.fn([], node, call), 1
+ self, handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
+ ) -> tuple[int, bool]: ...
class Application(typing.Protocol):
@@ -271,7 +249,7 @@ class Application(typing.Protocol):
def indirect_callees(
self, elem: vcg.VCGElem
) -> tuple[typing.Collection[QName], bool]: ...
- def skipmodels(self) -> dict[BaseName, SkipModel]: ...
+ def node_handlers(self) -> dict[BaseName, NodeHandler]: ...
# code #########################################################################
@@ -450,6 +428,28 @@ def _make_graph(
return ret
+def handle_simple_node(
+ handle: NodeHandleCB,
+ chain: typing.Sequence[QName],
+ node: Node,
+ skip_p: typing.Callable[[QName], bool] | None = None,
+) -> tuple[int, bool]:
+ cacheable = True
+ max_call_nstatic = 0
+ for call_qname, call_missing_ok in maybe_sorted(node.calls.items()):
+ if skip_p and skip_p(call_qname):
+ if dbg_nstatic:
+ print(f"//dbg-nstatic: {'- '*(len(chain)+1)}{call_qname}\tskip")
+ continue
+ call_nstatic, call_cacheable = handle(
+ [*chain, node.funcname, call_qname], call_missing_ok
+ )
+ max_call_nstatic = max(max_call_nstatic, call_nstatic)
+ if not call_cacheable:
+ cacheable = False
+ return node.nstatic + max_call_nstatic, cacheable
+
+
def analyze(
*,
ci_fnames: typing.Collection[str],
@@ -467,94 +467,69 @@ def analyze(
track_inclusion: bool = True
- skipmodels = app.skipmodels()
- for name, model in skipmodels.items():
- if not isinstance(model.nchain, int):
- assert len(model.nchain) > 0
-
- _nstatic_cache: dict[QName, int] = {}
-
- def _nstatic(chain: list[QName], funcname: QName) -> tuple[int, int]:
- nonlocal track_inclusion
-
- assert funcname in graphdata.graph
-
- def putdbg(msg: str) -> None:
- print(f"//dbg-nstatic: {'- '*len(chain)}{msg}")
-
- node = graphdata.graph[funcname]
- if dbg_nstatic:
- putdbg(f"{funcname}\t{node.nstatic}")
+ node_handlers = app.node_handlers()
+
+ def default_node_handler(
+ handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
+ ) -> tuple[int, bool]:
+ return handle_simple_node(handle, chain, node)
+
+ nstatic_cache: dict[QName, int] = {}
+
+ def node_handle_cb(
+ chain: typing.Sequence[QName], missing_ok: bool = False
+ ) -> tuple[int, bool]:
+ nonlocal nstatic_cache
+ assert len(chain) > 0
+
+ if len(chain) > cfg_max_call_depth:
+ raise ValueError(f"max call depth exceeded: {chain}")
+
+ call_orig_qname = chain[-1]
+ call_qname = graphdata.resolve_funcname(call_orig_qname)
+
+ def dbglog(msg: str) -> None:
+ if dbg_nstatic:
+ print(
+ f"//dbg-nstatic: {'- '*(len(chain)-1)}{call_qname or call_orig_qname}\t{msg}"
+ )
+
+ if not call_qname:
+ if not missing_ok:
+ missing.add(call_orig_qname)
+ dbglog("missing")
+ nstatic_cache[call_orig_qname] = 0
+ return 0, True
+
+ assert call_qname in graphdata.graph
+ if (not dbg_nocache) and call_qname in nstatic_cache:
+ nstatic = nstatic_cache[call_qname]
+ dbglog(f"total={nstatic} (cache-read)")
+ return nstatic, True
+ node = graphdata.graph[call_qname]
+ dbglog(str(node.nstatic))
if node.usage_kind == "dynamic" or node.ndynamic > 0:
- dynamic.add(funcname)
+ dynamic.add(call_qname)
if track_inclusion:
- included_funcs.add(funcname)
-
- max_call_nstatic = 0
- max_call_nchain = 0
-
- if node.calls:
- skipmodel = skipmodels.get(funcname.base())
- chain.append(funcname)
- if len(chain) == cfg_max_call_depth:
- raise ValueError(f"max call depth exceeded: {chain}")
- for call_orig_qname, call_missing_ok in node.calls.items():
- skip_nchain = 0
- # 1. Resolve
- call_qname = graphdata.resolve_funcname(call_orig_qname)
- if not call_qname:
- if skipmodel:
- skip, _ = skipmodel(chain[:-1], node, call_orig_qname)
- if skip:
- if dbg_nstatic:
- putdbg(f"{call_orig_qname}\tskip missing")
- continue
- if not call_missing_ok:
- missing.add(call_orig_qname)
- if dbg_nstatic:
- putdbg(f"{call_orig_qname}\tmissing")
- continue
-
- # 2. Skip
- if skipmodel:
- skip, skip_nchain = skipmodel(chain[:-1], node, call_qname)
- max_call_nchain = max(max_call_nchain, skip_nchain)
- if skip:
- if dbg_nstatic:
- putdbg(f"{call_qname}\tskip")
- continue
-
- # 3. Call
- if (
- (not dbg_nocache)
- and skip_nchain == 0
- and call_qname in _nstatic_cache
- ):
- call_nstatic = _nstatic_cache[call_qname]
- if dbg_nstatic:
- putdbg(f"{call_qname}\ttotal={call_nstatic} (cache-read)")
- max_call_nstatic = max(max_call_nstatic, call_nstatic)
- else:
- call_nstatic, call_nchain = _nstatic(chain, call_qname)
- max_call_nstatic = max(max_call_nstatic, call_nstatic)
- max_call_nchain = max(max_call_nchain, call_nchain)
- if skip_nchain == 0 and call_nchain == 0:
- if dbg_nstatic:
- putdbg(f"{call_qname}\ttotal={call_nstatic} (cache-write)")
- if call_qname not in _nstatic_cache:
- if dbg_cache:
- print(f"//dbg-cache: {call_qname} = {call_nstatic}")
- _nstatic_cache[call_qname] = call_nstatic
- else:
- assert dbg_nocache
- assert _nstatic_cache[call_qname] == call_nstatic
- elif dbg_nstatic:
- putdbg(f"{call_qname}\ttotal={call_nstatic} (do-not-cache)")
- chain.pop()
- return node.nstatic + max_call_nstatic, max(0, max_call_nchain - 1)
+ included_funcs.add(call_qname)
+
+ handler = node_handlers.get(call_qname.base(), default_node_handler)
+ nstatic, cacheable = handler(node_handle_cb, chain[:-1], node)
+ if cacheable:
+ dbglog(f"total={nstatic} (cache-write)")
+ if call_qname not in nstatic_cache:
+ if dbg_cache:
+ print(f"//dbg-cache: {call_qname} = {nstatic}")
+ nstatic_cache[call_qname] = nstatic
+ else:
+ assert dbg_nocache
+ assert nstatic_cache[call_qname] == nstatic
+ else:
+ dbglog(f"total={nstatic} (do-not-cache)")
+ return nstatic, cacheable
def nstatic(funcname: QName) -> int:
- return _nstatic([], funcname)[0]
+ return node_handle_cb([funcname])[0]
groups: dict[str, AnalyzeResultGroup] = {}
for grp_name, grp_filter in app_func_filters.items():
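
For orientation, the plugin-facing side of the new API composes roughly as follows. This is a minimal sketch based only on the NodeHandler / NodeHandleCB protocols and the handle_simple_node() helper added above; MyPlugin, my_func, and my_unreachable_callee are illustrative names, not code from this repository.

import typing

from . import analyze
from .analyze import BaseName, Node, NodeHandleCB, QName


class MyPlugin:
    def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
        def handle_my_func(
            handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
        ) -> tuple[int, bool]:
            # Prune callees known to be unreachable on the measured path.
            def skip_p(call_qname: QName) -> bool:
                return call_qname.base() == BaseName("my_unreachable_callee")

            # handle_simple_node() recurses into the remaining callees via
            # `handle` and returns (worst-case nstatic, cacheable).
            return analyze.handle_simple_node(handle, chain, node, skip_p)

        return {BaseName("my_func"): handle_my_func}

A handler receives the analyzer's callback (`handle`), the call chain leading up to the node, and the node itself; delegating to handle_simple_node() walks the node's callees and yields the worst-case static usage plus a flag saying whether analyze() may cache that result.
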
diff --git a/build-aux/measurestack/app_plugins.py b/build-aux/measurestack/app_plugins.py
index e365f82..12c0384 100644
--- a/build-aux/measurestack/app_plugins.py
+++ b/build-aux/measurestack/app_plugins.py
@@ -8,7 +8,7 @@ import subprocess
import typing
from . import analyze, util
-from .analyze import BaseName, Node, QName
+from .analyze import BaseName, Node, NodeHandleCB, QName
from .util import synthetic_node
# pylint: disable=unused-variable
@@ -51,7 +51,7 @@ class CmdPlugin:
return [QName("get_root")], False
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
return {}
@@ -128,22 +128,25 @@ class LibMiscPlugin:
], False
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
+ def handle___assert_msg_fail(
+ handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
+ ) -> tuple[int, bool]:
+ inner: bool = any(c.base() == BaseName("__assert_msg_fail") for c in chain)
+
+ def skip_p(call_qname: QName) -> bool:
+ return inner and call_qname.base() in [
+ BaseName("__lm_printf"),
+ BaseName("__lm_light_printf"),
+ ]
+
+ nstatic, _ = analyze.handle_simple_node(handle, chain, node, skip_p)
+ return nstatic, not inner
+
return {
- BaseName("__assert_msg_fail"): analyze.SkipModel(
- {BaseName("__assert_msg_fail")}, self._skipmodel___assert_msg_fail
- ),
+ BaseName("__assert_msg_fail"): handle___assert_msg_fail,
}
- def _skipmodel___assert_msg_fail(
- self, chain: typing.Sequence[QName], node: Node, call: QName
- ) -> bool:
- if call.base() in [BaseName("__lm_printf"), BaseName("__lm_light_printf")]:
- return any(
- c.base() == BaseName("__assert_msg_fail") for c in reversed(chain)
- )
- return False
-
class LibHWPlugin:
pico_platform: str
@@ -210,7 +213,7 @@ class LibHWPlugin:
], False
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
return {}
@@ -234,7 +237,7 @@ class LibCRPlugin:
) -> tuple[typing.Collection[QName], bool] | None:
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
return {}
@@ -263,7 +266,7 @@ class LibCRIPCPlugin:
], False
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
return {}
@@ -272,12 +275,6 @@ class Lib9PPlugin:
r"^\s*\[LIB9P_TYP_T[^]]+\]\s*=\s*\(tmessage_handler\)\s*(?P<handler>\S+),\s*$"
)
re_lib9p_msg_entry = re.compile(r"^\s*_MSG_(?:[A-Z]+)\((?P<typ>\S+)\),$")
- re_lib9p_caller = re.compile(
- r"^lib9p_(?P<grp>[TR])msg_(?P<meth>validate|unmarshal|marshal)$"
- )
- re_lib9p_callee = re.compile(
- r"^(?P<meth>validate|unmarshal|marshal)_(?P<msg>(?P<grp>[TR]).*)$"
- )
tmessage_handlers: set[QName] | None
lib9p_msgs: set[str]
@@ -396,33 +393,51 @@ class Lib9PPlugin:
return [QName(f"{meth}_{msg}") for msg in self.lib9p_msgs], True
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
- ret: dict[BaseName, analyze.SkipModel] = {
- BaseName("_lib9p_validate"): analyze.SkipModel(
- 1,
- self._skipmodel__lib9p_validate_unmarshal_marshal,
- ),
- BaseName("_lib9p_unmarshal"): analyze.SkipModel(
- 1,
- self._skipmodel__lib9p_validate_unmarshal_marshal,
- ),
- BaseName("_lib9p_marshal"): analyze.SkipModel(
- 1,
- self._skipmodel__lib9p_validate_unmarshal_marshal,
- ),
- }
- return ret
+ re_lib9p_caller = re.compile(
+ r"^lib9p_(?P<grp>[TR])msg_(?P<meth>validate|unmarshal|marshal)$"
+ )
+ re_lib9p_callee = re.compile(
+ r"^(?P<meth>validate|unmarshal|marshal)_(?P<msg>(?P<grp>[TR]).*)$"
+ )
- def _skipmodel__lib9p_validate_unmarshal_marshal(
- self, chain: typing.Sequence[QName], node: Node, call: QName
- ) -> bool:
- m_caller = self.re_lib9p_caller.fullmatch(str(chain[-1].base()))
- assert m_caller
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
+ def handle_9p_inner(
+ handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
+ ) -> tuple[int, bool]:
+ m_caller = self.re_lib9p_caller.fullmatch(str(chain[-1].base()))
+ assert m_caller
+ grp = m_caller.group("grp")
+
+ def skip_p(call_qname: QName) -> bool:
+ if m_callee := self.re_lib9p_callee.fullmatch(str(call_qname.base())):
+ if m_callee.group("grp") != grp:
+ return True
+ return False
- m_callee = self.re_lib9p_callee.fullmatch(str(call.base()))
- if not m_callee:
- return False
- return m_caller.group("grp") != m_callee.group("grp")
+ nstatic, _ = analyze.handle_simple_node(handle, chain, node, skip_p)
+ return nstatic, False
+
+ def handle_9p_outer(
+ handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
+ ) -> tuple[int, bool]:
+ nstatic, _ = analyze.handle_simple_node(handle, chain, node)
+ return nstatic, True
+
+ ret: dict[BaseName, analyze.NodeHandler] = {
+ # validate
+ BaseName("lib9p_Tmsg_validate"): handle_9p_outer,
+ BaseName("lib9p_Rmsg_validate"): handle_9p_outer,
+ BaseName("_lib9p_validate"): handle_9p_inner,
+            # unmarshal
+ BaseName("lib9p_Tmsg_unmarshal"): handle_9p_outer,
+ BaseName("lib9p_Rmsg_unmarshal"): handle_9p_outer,
+ BaseName("_lib9p_unmarshal"): handle_9p_inner,
+ # marshal
+ BaseName("lib9p_Tmsg_marshal"): handle_9p_outer,
+ BaseName("lib9p_Rmsg_marshal"): handle_9p_outer,
+ BaseName("_lib9p_marshal"): handle_9p_inner,
+ }
+ return ret
class PicoFmtPlugin:
@@ -495,35 +510,63 @@ class PicoFmtPlugin:
], False
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
- ret: dict[BaseName, analyze.SkipModel] = {
- BaseName("fmt_state_putchar"): analyze.SkipModel(
- self.known_fct.keys(), self._skipmodel_fmt_state_putchar
- ),
- BaseName("_vfctprintf"): analyze.SkipModel(
- self.wont_call_v, self._skipmodel__vfctprintf
- ),
- }
- return ret
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
+ putchar_cache: dict[BaseName | None, int] = {} # fct=>total_nstatic
- def _skipmodel_fmt_state_putchar(
- self, chain: typing.Sequence[QName], node: Node, call: QName
- ) -> bool:
- if call.base() in self.known_fct.values():
+ def handle_putchar(
+ handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
+ ) -> tuple[int, bool]:
fct: BaseName | None = None
- for pcall in reversed(chain):
+ cachekey: BaseName | None = None
+ for i in reversed(range(len(chain))):
+ pcall = chain[i]
if pcall.base() in self.known_fct:
fct = self.known_fct[pcall.base()]
- return call.base() != fct
- return True
- return False
+ cachekey = fct
+ if fct == BaseName("libfmt_light_fct"):
+ assert pcall.base() == BaseName("__lm_light_printf")
+ if i > 0 and chain[i - 1].base() == BaseName(
+ "__assert_msg_fail"
+ ):
+ cachekey = BaseName("inner/" + str(cachekey))
+ break
+ if cachekey in putchar_cache:
+ return putchar_cache[cachekey], False
+
+ def skip_p(call_qname: QName) -> bool:
+ return fct is None or call_qname.base() != fct
+
+ nstatic, _ = analyze.handle_simple_node(handle, chain, node, skip_p)
+ putchar_cache[cachekey] = nstatic
+ return nstatic, False
+
+ def handle__vfctprintf(
+ handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
+ ) -> tuple[int, bool]:
+ def skip_p(call_qname: QName) -> bool:
+ if call_qname.base() == BaseName("libfmt_conv_formatter"):
+ return any(c.base() in self.wont_call_v for c in chain)
+ return False
- def _skipmodel__vfctprintf(
- self, chain: typing.Sequence[QName], node: Node, call: QName
- ) -> bool:
- if call.base() == BaseName("libfmt_conv_formatter"):
- return any(c.base() in self.wont_call_v for c in chain)
- return False
+ nstatic, _ = analyze.handle_simple_node(handle, chain, node, skip_p)
+ return nstatic, False
+
+ def handle_top(
+ handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
+ ) -> tuple[int, bool]:
+ nstatic, _ = analyze.handle_simple_node(handle, chain, node)
+ in_fail: bool = any(
+ c.base() == BaseName("__assert_msg_fail") for c in chain
+ )
+ return nstatic, not in_fail
+
+ ret: dict[BaseName, analyze.NodeHandler] = {
+ BaseName("fmt_state_putchar"): handle_putchar,
+ BaseName("_vfctprintf"): handle__vfctprintf,
+ }
+ for top in self.known_fct:
+ ret[top] = handle_top
+ return ret
class PicoSDKPlugin:
@@ -632,7 +675,7 @@ class PicoSDKPlugin:
return self.app_preinit_array, False
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
return {}
def extra_nodes(self) -> typing.Collection[Node]:
@@ -865,7 +908,7 @@ class TinyUSBDevicePlugin:
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
return {}
@@ -932,7 +975,7 @@ class NewlibPlugin:
) -> tuple[typing.Collection[QName], bool] | None:
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
return {}
@@ -965,5 +1008,5 @@ class LibGCCPlugin:
) -> tuple[typing.Collection[QName], bool] | None:
return None
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
return {}
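
Across these plugins the recurring pattern is: build a skip_p predicate from the chain, delegate to analyze.handle_simple_node(), and report cacheable=False whenever the answer depends on who the caller was (as in handle___assert_msg_fail, handle_9p_inner, and handle_putchar above). A minimal sketch of that pattern, with some_caller and some_callee as illustrative names only:

import typing

from . import analyze
from .analyze import BaseName, Node, NodeHandleCB, QName


def handle_context_dependent(
    handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
) -> tuple[int, bool]:
    # The skip decision depends on whether some_caller is already on the chain.
    inner = any(c.base() == BaseName("some_caller") for c in chain)

    def skip_p(call_qname: QName) -> bool:
        return inner and call_qname.base() == BaseName("some_callee")

    nstatic, _ = analyze.handle_simple_node(handle, chain, node, skip_p)
    # Never let analyze() memoize a total that depends on who called us.
    return nstatic, not inner
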
diff --git a/build-aux/measurestack/test_app_plugins.py b/build-aux/measurestack/test_app_plugins.py
index da8be65..d0a8d2a 100644
--- a/build-aux/measurestack/test_app_plugins.py
+++ b/build-aux/measurestack/test_app_plugins.py
@@ -8,7 +8,7 @@
import typing
from . import analyze, app_plugins, testutil, util
-from .analyze import BaseName, Node, QName, SkipModel
+from .analyze import BaseName, NodeHandler, QName
def test_assert_msg_fail() -> None:
@@ -48,31 +48,8 @@ def test_assert_msg_fail() -> None:
graph_plugin = testutil.GraphProviderPlugin(max_call_depth, graph)
class SkipPlugin(testutil.NopPlugin):
- def skipmodels(self) -> dict[BaseName, SkipModel]:
- models = app_plugins.LibMiscPlugin(arg_c_fnames=[]).skipmodels()
- assert BaseName("__assert_msg_fail") in models
- orig_model = models[BaseName("__assert_msg_fail")]
-
- def wrapped_model_fn(
- chain: typing.Sequence[QName], node: Node, call: QName
- ) -> bool:
- dbgstr = (
- ("=>".join(str(c) for c in [*chain, node.funcname]))
- + "=?=>"
- + str(call)
- )
- assert dbgstr in [
- "__assert_msg_fail=?=>__lm_light_printf",
- "__assert_msg_fail=?=>__lm_abort",
- "__assert_msg_fail=>__lm_light_printf=>fmt_vfctprintf=>stdio_putchar=>__assert_msg_fail=?=>__lm_light_printf",
- "__assert_msg_fail=>__lm_light_printf=>fmt_vfctprintf=>stdio_putchar=>__assert_msg_fail=?=>__lm_abort",
- ]
- return orig_model.fn(chain, node, call)
-
- models[BaseName("__assert_msg_fail")] = SkipModel(
- orig_model.nchain, wrapped_model_fn
- )
- return models
+ def node_handlers(self) -> dict[BaseName, NodeHandler]:
+ return app_plugins.LibMiscPlugin(arg_c_fnames=[]).node_handlers()
def test_filter(name: QName) -> tuple[int, bool]:
if name.base() == BaseName("main"):
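
The chain-logging assertions from the removed wrapped_model_fn have no direct counterpart above; if a test wanted that behaviour back under the new API, it could wrap a NodeHandler in the same spirit. A sketch only, not part of this patch:

import typing

from .analyze import Node, NodeHandleCB, NodeHandler, QName


def wrap_handler(orig: NodeHandler) -> NodeHandler:
    def wrapped(
        handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
    ) -> tuple[int, bool]:
        # Record the chain the handler sees, then delegate unchanged.
        print("=>".join(str(c) for c in [*chain, node.funcname]))
        return orig(handle, chain, node)

    return wrapped
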
diff --git a/build-aux/measurestack/testutil.py b/build-aux/measurestack/testutil.py
index 751e57f..1566695 100644
--- a/build-aux/measurestack/testutil.py
+++ b/build-aux/measurestack/testutil.py
@@ -63,7 +63,7 @@ class NopPlugin:
) -> tuple[typing.Collection[analyze.QName], bool] | None:
return None
- def skipmodels(self) -> dict[analyze.BaseName, analyze.SkipModel]:
+ def node_handlers(self) -> dict[analyze.BaseName, analyze.NodeHandler]:
return {}
def extra_nodes(self) -> typing.Collection[analyze.Node]:
diff --git a/build-aux/measurestack/util.py b/build-aux/measurestack/util.py
index 0af3d02..ae2f998 100644
--- a/build-aux/measurestack/util.py
+++ b/build-aux/measurestack/util.py
@@ -82,7 +82,7 @@ class Plugin(typing.Protocol):
def indirect_callees(
self, loc: str, line: str
) -> tuple[typing.Collection[QName], bool] | None: ...
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]: ...
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]: ...
class PluginApplication:
@@ -118,8 +118,8 @@ class PluginApplication:
placeholder += " at " + self._location_xform(elem.attrs.get("label", ""))
return [QName(placeholder)], False
- def skipmodels(self) -> dict[BaseName, analyze.SkipModel]:
- ret: dict[BaseName, analyze.SkipModel] = {}
+ def node_handlers(self) -> dict[BaseName, analyze.NodeHandler]:
+ ret: dict[BaseName, analyze.NodeHandler] = {}
for plugin in self._plugins:
- ret.update(plugin.skipmodels())
+ ret.update(plugin.node_handlers())
return ret
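
One consequence of merging with dict.update() here: if two plugins both register a handler for the same BaseName, whichever plugin appears later in self._plugins silently wins. A small illustration of that merge order, with handler_a, handler_b, and the "foo" key as throwaway stand-ins rather than code from this patch:

import typing

from . import analyze
from .analyze import BaseName, Node, NodeHandleCB, QName


def handler_a(
    handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
) -> tuple[int, bool]:
    return analyze.handle_simple_node(handle, chain, node)


def handler_b(
    handle: NodeHandleCB, chain: typing.Sequence[QName], node: Node
) -> tuple[int, bool]:
    nstatic, _ = analyze.handle_simple_node(handle, chain, node)
    return nstatic, False  # context-dependent: never cache


merged: dict[BaseName, analyze.NodeHandler] = {}
for per_plugin in ({BaseName("foo"): handler_a}, {BaseName("foo"): handler_b}):
    merged.update(per_plugin)
assert merged[BaseName("foo")] is handler_b  # the later registration wins
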