texlive[76083] Master/texmf-dist: expltools (18aug25)

commits+karl at tug.org commits+karl at tug.org
Mon Aug 18 21:01:40 CEST 2025


Revision: 76083
          https://tug.org/svn/texlive?view=revision&revision=76083
Author:   karl
Date:     2025-08-18 21:01:40 +0200 (Mon, 18 Aug 2025)
Log Message:
-----------
expltools (18aug25)

Modified Paths:
--------------
    trunk/Master/texmf-dist/doc/support/expltools/CHANGES.md
    trunk/Master/texmf-dist/doc/support/expltools/README.md
    trunk/Master/texmf-dist/doc/support/expltools/project-proposal.pdf
    trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-02-lexical-analysis.md
    trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-03-syntactic-analysis.md
    trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-04-semantic-analysis.md
    trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-05-flow-analysis.md
    trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-99-caveats.md
    trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors.pdf
    trunk/Master/texmf-dist/scripts/expltools/explcheck-cli.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-config.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-config.toml
    trunk/Master/texmf-dist/scripts/expltools/explcheck-evaluation.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-format.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-issues.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-latex3.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-lexical-analysis.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-parsers.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-preprocessing.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-semantic-analysis.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-syntactic-analysis.lua
    trunk/Master/texmf-dist/scripts/expltools/explcheck-utils.lua

Added Paths:
-----------
    trunk/Master/texmf-dist/doc/support/expltools/e417.tex
    trunk/Master/texmf-dist/doc/support/expltools/e418.tex
    trunk/Master/texmf-dist/doc/support/expltools/e420.tex
    trunk/Master/texmf-dist/doc/support/expltools/e421.tex
    trunk/Master/texmf-dist/doc/support/expltools/e424.tex
    trunk/Master/texmf-dist/doc/support/expltools/e425-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/e425-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/e425-03.tex
    trunk/Master/texmf-dist/doc/support/expltools/e427-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/e427-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/s412-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/s412-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/s412-03.tex
    trunk/Master/texmf-dist/doc/support/expltools/s412-04.tex
    trunk/Master/texmf-dist/doc/support/expltools/s412-05.tex
    trunk/Master/texmf-dist/doc/support/expltools/s412-06.tex
    trunk/Master/texmf-dist/doc/support/expltools/s412-07.tex
    trunk/Master/texmf-dist/doc/support/expltools/s413-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/s413-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/s413-03.tex
    trunk/Master/texmf-dist/doc/support/expltools/s414-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/s414-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/s414-03.tex
    trunk/Master/texmf-dist/doc/support/expltools/s414-04.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-03.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-04.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-05.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-06.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-07.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-08.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-09.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-10.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-11.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-12.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-13.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-14.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-15.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-16.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-17.tex
    trunk/Master/texmf-dist/doc/support/expltools/t422-18.tex
    trunk/Master/texmf-dist/doc/support/expltools/w415-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/w415-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/w415-03.tex
    trunk/Master/texmf-dist/doc/support/expltools/w415-04.tex
    trunk/Master/texmf-dist/doc/support/expltools/w415-05.tex
    trunk/Master/texmf-dist/doc/support/expltools/w416.tex
    trunk/Master/texmf-dist/doc/support/expltools/w419-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/w419-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/w419-03.tex
    trunk/Master/texmf-dist/doc/support/expltools/w419-04.tex
    trunk/Master/texmf-dist/doc/support/expltools/w419-05.tex
    trunk/Master/texmf-dist/doc/support/expltools/w423-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/w423-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/w426.tex

Removed Paths:
-------------
    trunk/Master/texmf-dist/doc/support/expltools/s205-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/s205-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/s205-03.tex
    trunk/Master/texmf-dist/doc/support/expltools/s205-04.tex
    trunk/Master/texmf-dist/doc/support/expltools/s205-05.tex
    trunk/Master/texmf-dist/doc/support/expltools/s205-06.tex
    trunk/Master/texmf-dist/doc/support/expltools/s205-07.tex
    trunk/Master/texmf-dist/doc/support/expltools/s206-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/s206-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/s206-03.tex
    trunk/Master/texmf-dist/doc/support/expltools/s207-01.tex
    trunk/Master/texmf-dist/doc/support/expltools/s207-02.tex
    trunk/Master/texmf-dist/doc/support/expltools/s207-03.tex
    trunk/Master/texmf-dist/doc/support/expltools/s207-04.tex

Modified: trunk/Master/texmf-dist/doc/support/expltools/CHANGES.md
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/CHANGES.md	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/CHANGES.md	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,5 +1,158 @@
 # Changes
 
+## expltools 2025-08-18
+
+### explcheck v0.12.0
+
+#### Warnings and errors
+
+This version of explcheck has made the following changes to the document titled
+[_Warnings and errors for the expl3 analysis tool_][warnings-and-errors]:
+
+- Postpone planned issue E417 (Multiply declared variable or constant) to flow
+  analysis, under the identifier E519 and the same name. (#110, #112)
+
+- Postpone planned issue E242 (Multiply defined message) to flow analysis,
+  under the identifier E524 and the same name. (#110, #112)
+
+- Plan for a weaker version of issue E522 (Too few arguments supplied to
+  message) to semantic analysis under the identifier E425 and the same name.
+  (#110, #112)
+
+- Remove issues S205 (Malformed function name), S206 (Malformed variable or
+  constant name), and S207 (Malformed quark or scan mark name) and replan them
+  to semantic analysis under the identifiers S412, S413, and S414, respectively,
+  and the same names. (reported by @u-fischer in #109, added in #117)
+
+- Plan for a new issue E417 (Setting a variable as a constant). (#119)
+
+- Unplan issues W419 (Using a token list variable or constant without
+  an accessor) and E420 (Using non-token-list variable or constant without
+  an accessor). (#121)
+
+- Reclassify the planned errors E421 and E518 (Using an undefined variable or
+  constant) as warnings W421 and W518, respectively. (#121)
+
+- Unplan issues W424 and E521 (Setting an undefined message). (#127)
+
+- Reclassify and rename the planned errors E426 and E522 (Too few arguments
+  supplied to message) to W426 and W522 (Incorrect number of arguments supplied
+  to message), respectively. (#127)
+
+#### Development
+
+- Include contextual information in human-readable issue descriptions.
+  (suggested by @u-fischer at TUG 2025, reported in #110, added in #112)
+
+- Improve autodetection of expl3 for small example files. (c5ad7a4)
+
+  Previously, we added a new Lua option `min_expl3like_material`, which would
+  require at least 5 instances of expl3-like material for a file without
+  standard expl3 delimiters to be recognized as expl3. However, this penalizes
+  small example files, where there are only a few calls.
+
+  After this change, the option has been renamed to
+  `min_expl3like_material_count` and a new Lua option
+  `min_expl3like_material_ratio` has been added that specifies the minimum
+  portion of the file that must be occupied by expl3 material (defaults to 0.5,
+  i.e. 50%) before it is automatically recognized as expl3 regardless.
+
+- Make `% noqa` comments at the beginning of a file silence issues everywhere.
+  (suggested by @FrankMittelbach at TUG 2025, reported in #111, added in #116)
+
+- Add more support for semantic analysis. (#117..#122, #127)
+
+  This adds support for all remaining issues from Section 4 of the document
+  titled [_Warnings and errors for the expl3 analysis tool_][warnings-and-errors]:
+
+   1. S412 (Malformed function name)
+   2. S413 (Malformed variable or constant name)
+   3. S414 (Malformed quark or scan mark name)
+   4. W415 (Unused variable or constant)
+   5. W416 (Setting an undeclared variable)
+   6. E417 (Setting a variable as a constant)
+   7. E418 (Setting a constant)
+   8. W419 (Using an undeclared variable or constant)
+   9. E420 (Locally setting a global variable)
+  10. E421 (Globally setting a local variable)
+  11. T422 (Using a variable of an incompatible type)
+  12. W423 (Unused message)
+  13. E424 (Using an undefined message)
+  14. E425 (Incorrect parameters in message text)
+  15. W426 (Incorrect number of arguments supplied to message)
+  16. E427 (Comparison conditional without signature `:nnTF`)
+
+- Add Lua option `suppressed_issue_map`.
+
+  This option defines a mapping between issues that suppress one or more
+  other issues. At this point, this option only maps issue W200 ("Do not use"
+  argument specifiers) to issues S412, S413, and S414, so that defining
+  functions, variables, and constants with malformed names and a "do not use"
+  specifier (`:D`) only produces issue W200 and not also S412, S413, and S414.
+
+  In the future, this option will be heavily used for issues from the flow
+  analysis that have a weaker version in the semantic analysis. In these cases,
+  the weaker version will always suppress the stronger version of an issue.
+
+- Make the Lua option `ignored_options`, the command-line option
+  `--ignored-options`, the TeX comments `% noqa` and the Lua function
+  `issues:ignore()` treat the issue identifiers as prefixes. (#123, #125)
+
+  This allows you to e.g. ignore all style warnings on the current line with
+  `% noqa: s`, all general warnings that originate from the semantic analysis
+  with `--ignored-issues=W4`, etc.
+
+- Add Lua option `stop_after`. (#124, #126)
+
+  This option allows you to specify after which processing step the analysis
+  should stop. If an advanced processing step reports false positive issues
+  on a complex expl3 file, this option can be used to reduce the number of
+  false positive detections.
+
+- Add Lua option `stop_early_when_confused`. (#124, #126)
+
+  This option, which is enabled by default, allows the processing steps to
+  indicate that they are confused by the results of the previous processing
+  steps and stop any further processing. If an advanced processing step reports
+  false positive issues, then this option should stop the step from running
+  and reduce the number of false positive detections.
+
+#### Fixes
+
+- Prevent command-line option `--no-config-file` from raising the error
+  `Config file "" does not exist`.
+  (reported by @muzimuzhi in #107, fixed in 41446d0)
+
+- Do not report issue W401 (Unused private function) for well-known and
+  imported prefixes. (#115)
+
+- Correctly parse indirect applications of creator functions via
+  `\cs_generate_from_arg_count:NNnn`. (#118)
+
+- Properly use lazy matching and backtracking in control sequence name patterns
+  produced during the semantic analysis. (#120)
+
+  Previously, only wildcards at the end of a name would function properly (lazy
+  matching) and any partial matches by previous patterns would prevent any
+  potential matches by future patterns (backtracking). Both
+  limitations were due to parsing expression grammars (PEGs) being greedy and
+  non-backtracking by default. As a result, many wildcards would not match even
+  though they should have.
+
+#### Continuous integration
+
+- Rename GitHub Action `teatimeguest/setup-texlive-action at v3` to `TeX-Live/...`.
+  (reported by @pablogonz in markdown#576, fixed in 28ba10b5)
+
+- Bump actions/checkout and actions/download-artifact from 4 to 5.
+  (contributed by @dependabot in #113 and #114)
+
+- Check Lua code blocks in `README.md` with luacheck. (1d21b97, 42f7504, 7b97271)
+
+#### Distribution
+
+- Install Bash in the Docker image. (e8c4a08)
+
 ## expltools 2025-06-24
 
 ### explcheck v0.11.0
@@ -82,7 +235,7 @@
     renamed to T305, since it can be detected by the syntactic analysis already.
 
  [^e408]: By default, all standard library prefixes, defined by the parser
-    `expl3_standard_library_prefixes` as well as registered prefixes from the
+    `expl3_standard_library_prefixes`, as well as registered prefixes from the
     file `l3prefixes.csv` are excluded from this error.
 
     Besides well-known prefixes, you may also declare other imported prefixes
@@ -134,14 +287,14 @@
   fixed in #97 and #99)
 
 - Remove issue T400 (Expanding an unexpandable variable or constant) and create
-  a corresponding issue T305 for the syntactic analysis.
+  a corresponding issue T305 for the syntactic analysis. (#99)
 
 - Plan for a flow-aware variant E506 (Indirect function definition from an
-  undefined function) of issue E411 of the same name.
+  undefined function) of issue E411 of the same name. (#99)
 
 - Plan for issue E515 (Paragraph token in the parameter of a "nopar" function)
   and remove the item "Verifying the 'nopar' restriction on functions" from
-  Section "Caveats".
+  Section "Caveats". (#99)
 
 #### Fixes
 

Modified: trunk/Master/texmf-dist/doc/support/expltools/README.md
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/README.md	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/README.md	2025-08-18 19:01:40 UTC (rev 76083)
@@ -17,6 +17,7 @@
 7. [Lexical analysis and a public website listing issues in current TeX Live][12] from February 24, 2025
 8. [Syntactic analysis][13] from March 27, 2025
 9. [Two][15] [posts][16] about semantic analysis from April 25 and May 29, 2025
+10. [Expltools goes to India][17] from July 22, 2025
 
 The article [Expltools: Development tools for expl3 programmers][14], forthcoming in TUGboat 46(2) or (3), is a work in progress that summarizes the devlog posts and provides a coherent overview of the current state of the tool.
 
@@ -38,6 +39,7 @@
  [14]: https://github.com/Witiko/expltools-tug25-paper
  [15]: https://witiko.github.io/Expl3-Linter-8.5/
  [16]: https://witiko.github.io/Expl3-Linter-8.75/
+ [17]: https://witiko.github.io/Expl3-Linter-10/
 
 ## Usage
 
@@ -109,10 +111,10 @@
 
 You may configure the tool using command-line options.
 
-For example, the following command-line options would increase the maximum line length before the warning S103 (Line too long) is produced from 80 to 120 characters and also disable the warnings W100 (No standard delimiters) and S204 (Missing stylistic whitespaces).
+For example, the following command-line options would increase the maximum line length before the warning S103 (Line too long) is produced from 80 to 120 characters and also disable the warnings W100 (No standard delimiters) and all style warnings S\*.
 
 ``` sh
-$ explcheck --max-line-length=120 --ignored-issues=w100,S204 *.tex
+$ explcheck --max-line-length=120 --ignored-issues=w100,S *.tex
 ```
 
 Use the command `explcheck --help` to list the available options.
@@ -123,7 +125,7 @@
 ``` toml
 [defaults]
 max_line_length = 120
-ignored_issues = ["w100", "S204"]
+ignored_issues = ["w100", "S"]
 ```
 
 You may also configure the tool from within your Lua code.
@@ -130,15 +132,43 @@
 For example, here is how you would apply the same configuration in the Lua example from the previous section:
 
 ``` lua
-local options = { max_line_length = 120 }
+-- LuaTeX users must initialize Kpathsea Lua module searchers first.
+local using_luatex, kpse = pcall(require, "kpse")
+if using_luatex then
+  kpse.set_program_name("texlua", "explcheck")
+end
 
-issues:ignore("w100")
-issues:ignore("S204")
+-- Import explcheck.
+local new_issues = require("explcheck-issues")
 
+local preprocessing = require("explcheck-preprocessing")
+local lexical_analysis = require("explcheck-lexical-analysis")
+local syntactic_analysis = require("explcheck-syntactic-analysis")
+local semantic_analysis = require("explcheck-semantic-analysis")
+
+-- Process file "code.tex" and print warnings and errors.
+local filename = "code.tex"
+local options = {
+  max_line_length = 120,
+  ignored_issues = {"w100", "S"},
+}
+local issues = new_issues(filename, options)
+local results = {}
+
+local file = assert(io.open(filename, "r"))
+local content = assert(file:read("*a"))
+assert(file:close())
+
 preprocessing.process(filename, content, issues, results, options)
 lexical_analysis.process(filename, content, issues, results, options)
 syntactic_analysis.process(filename, content, issues, results, options)
 semantic_analysis.process(filename, content, issues, results, options)
+
+print(
+  "There were " .. #issues.warnings .. " warnings, "
+  .. "and " .. #issues.errors .. " errors "
+  .. "in the file " .. filename .. "."
+)
 ```
 
 Command-line options, configuration files, and Lua code allow you to ignore certain warnings and errors everywhere.
@@ -145,7 +175,7 @@
 To ignore them in just some of your expl3 code, you may use TeX comments.
 
 For example, a comment `% noqa` will ignore any issues on the current line.
-As another example, a comment `% noqa: w100, S204` will ignore the file-wide warning W100 and also the warning S204 on the current line.
+As another example, a comment `% noqa: w100, S` will ignore the file-wide warning W100 and also all style warnings on the current line.
 
 A list of all currently supported issues is available [here][10].
 

Added: trunk/Master/texmf-dist/doc/support/expltools/e417.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/e417.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/e417.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,3 @@
+\tl_const:Nn  % error on this line
+  \g_example_tl
+  { bar }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/e417.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/e418.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/e418.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/e418.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,3 @@
+\tl_gset:Nn  % error on this line
+  \c_example_tl
+  { bar }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/e418.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/e420.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/e420.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/e420.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,5 @@
+\tl_new:N
+  \g_example_tl
+\tl_set:Nn  % error on this line
+  \g_example_tl
+  { foo }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/e420.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/e421.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/e421.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/e421.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,5 @@
+\tl_new:N
+  \l_example_tl
+\tl_gset:Nn  % error on this line
+  \l_example_tl
+  { foo }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/e421.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/e424.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/e424.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/e424.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,3 @@
+\msg_info:nn  % error on this line
+  { foo }
+  { bar }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/e424.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/e425-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/e425-01.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/e425-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,4 @@
+\msg_new:nnn  % error on this line
+  { foo }
+  { bar }
+  { #5 }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/e425-01.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/e425-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/e425-02.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/e425-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,5 @@
+\msg_new:nnnn  % error on this line
+  { foo }
+  { bar }
+  { #4 }
+  { #5 }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/e425-02.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/e425-03.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/e425-03.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/e425-03.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,5 @@
+\msg_new:nnnn
+  { foo }
+  { bar }
+  { #1~#2 }
+  { #3~#4 }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/e425-03.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/e427-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/e427-01.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/e427-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,6 @@
+\cs_new:Nn
+  \example_foo:
+  { \prg_return_true: }
+\tl_sort:nN  % error on this line
+  { { foo } { bar } }
+  \example_foo:nnT


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/e427-01.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/e427-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/e427-02.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/e427-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,6 @@
+\cs_new:Nn
+  \example_foo:
+  { \prg_return_true: }
+\tl_sort:nN
+  { { foo } { bar } }
+  \example_foo:nnTF


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/e427-02.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Modified: trunk/Master/texmf-dist/doc/support/expltools/project-proposal.pdf
===================================================================
(Binary files differ)

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s205-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s205-01.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s205-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,3 +0,0 @@
-\cs_new:Nn
-  \description:  % warning on this line
-  { foo }

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s205-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s205-02.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s205-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,3 +0,0 @@
-\cs_gset:Npn
-  \module__description:  % warning on this line
-  { foo }

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s205-03.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s205-03.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s205-03.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,3 +0,0 @@
-\cs_set_eq:NN
-  \_module_description:  % warning on this line
-  \example_foo:

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s205-04.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s205-04.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s205-04.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,5 +0,0 @@
-\cs_generate_from_arg_count:NNnn
-  \__module_description:
-  \cs_new:Npn
-  { 0 }
-  { foo }

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s205-05.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s205-05.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s205-05.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,4 +0,0 @@
-\prg_new_conditional:Nn
-  \description:  % warning on this line
-  { p, T, F, TF }
-  { foo }

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s205-06.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s205-06.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s205-06.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,4 +0,0 @@
-\prg_gset_conditional:Npn
-  \module__description:  % warning on this line
-  { p, T, F, TF }
-  { foo }

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s205-07.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s205-07.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s205-07.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,4 +0,0 @@
-\prg_set_eq_conditional:NNn
-  \_module_description:  % warning on this line
-  \example_foo:
-  { p, T, F, TF }

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s206-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s206-01.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s206-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,7 +0,0 @@
-\tl_new:N
-  \g_description_tl  % warning on this line
-\box_new:N
-  \l__description_box  % warning on this line
-\int_const:Nn
-  \c_description  % warning on this line
-  { 123 }

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s206-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s206-02.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s206-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,7 +0,0 @@
-\regex_new:N
-  \g_module_description_regex
-\coffin_new:N
-  \l_module_description_coffin
-\str_const:Nn
-  \c__module_description_str
-  { foo }

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s206-03.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s206-03.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s206-03.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,9 +0,0 @@
-\tl_use:N
-  \l_tmpa_tl
-\int_gset:Nn
-  \g_tmpb_int
-  { 1 + 2 }
-\str_show:N
-  \g_tmpa_str
-\bool_set_true:N
-  \l_tmpa_bool

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s207-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s207-01.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s207-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,2 +0,0 @@
-\quark_new:N
-  \foo_bar  % error on this line

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s207-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s207-02.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s207-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,2 +0,0 @@
-\quark_new:N
-  \q_foo_bar

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s207-03.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s207-03.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s207-03.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,2 +0,0 @@
-\scan_new:N
-  \foo_bar  % error on this line

Deleted: trunk/Master/texmf-dist/doc/support/expltools/s207-04.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s207-04.tex	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/s207-04.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,2 +0,0 @@
-\scan_new:N
-  \s_foo_bar

Added: trunk/Master/texmf-dist/doc/support/expltools/s412-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s412-01.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s412-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,3 @@
+\cs_new:Nn
+  \description:  % warning on this line
+  { foo }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s412-01.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s412-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s412-02.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s412-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,3 @@
+\cs_gset:Npn
+  \module__description:  % warning on this line
+  { foo }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s412-02.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s412-03.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s412-03.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s412-03.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,6 @@
+\cs_new:Nn
+  \example_foo:
+  { bar }
+\cs_set_eq:NN
+  \_module_description:  % warning on this line
+  \example_foo:


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s412-03.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s412-04.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s412-04.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s412-04.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,5 @@
+\cs_generate_from_arg_count:NNnn
+  \__module_description:
+  \cs_new:Npn
+  { 0 }
+  { foo }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s412-04.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s412-05.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s412-05.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s412-05.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,4 @@
+\prg_new_conditional:Nn
+  \description:  % warning on this line
+  { p, T, F, TF }
+  { foo }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s412-05.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s412-06.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s412-06.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s412-06.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,4 @@
+\prg_gset_conditional:Npn
+  \module__description:  % warning on this line
+  { p, T, F, TF }
+  { foo }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s412-06.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s412-07.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s412-07.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s412-07.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,8 @@
+\prg_new_conditional:Nnn
+  \example_foo:
+  { p, T, F, TF }
+  { \prg_return_true: }
+\prg_set_eq_conditional:NNn
+  \_module_description:  % warning on this line
+  \example_foo:
+  { p, T, F, TF }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s412-07.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s413-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s413-01.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s413-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,7 @@
+\tl_new:N  % warning on this line
+  \g_description_tl
+\box_use:N  % warning on this line
+  \l__description_box
+\int_const:Nn  % warning on this line
+  \c_description
+  { 123 }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s413-01.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s413-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s413-02.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s413-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,7 @@
+\regex_new:N
+  \g_module_description_regex
+\coffin_new:N
+  \l_module_description_coffin
+\str_const:Nn
+  \c__module_description_str
+  { foo }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s413-02.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s413-03.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s413-03.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s413-03.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,9 @@
+\tl_use:N
+  \l_tmpa_tl
+\int_gset:Nn
+  \g_tmpb_int
+  { 1 + 2 }
+\str_show:N
+  \g_tmpa_str
+\bool_set_true:N
+  \l_tmpa_bool


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s413-03.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s414-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s414-01.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s414-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,2 @@
+\quark_new:N  % warning on this line
+  \foo_bar


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s414-01.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s414-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s414-02.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s414-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,2 @@
+\quark_new:N
+  \q_foo_bar


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s414-02.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s414-03.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s414-03.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s414-03.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,2 @@
+\scan_new:N  % error on this line
+  \foo_bar


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s414-03.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/s414-04.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/s414-04.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/s414-04.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,2 @@
+\scan_new:N
+  \s_foo_bar


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/s414-04.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-01.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,4 @@
+\tl_new:N  % error on this line
+  \l_example_str
+\str_new:N  % error on this line
+  \l_example_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-01.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-02.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,16 @@
+\tl_new:N
+  \l_example_tl
+\tl_count:N
+  \l_example_tl
+\str_count:N
+  \l_example_tl
+\seq_count:N  % error on this line
+  \l_example_tl
+\clist_count:N
+  \l_example_tl
+\prop_count:N  % error on this line
+  \l_example_tl
+\intarray_count:N  % error on this line
+  \l_example_tl
+\fparray_count:N  % error on this line
+  \l_example_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-02.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-03.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-03.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-03.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,16 @@
+\str_new:N
+  \l_example_str
+\tl_count:N
+  \l_example_str
+\str_count:N
+  \l_example_str
+\seq_count:N  % error on this line
+  \l_example_str
+\clist_count:N  % error on this line
+  \l_example_str
+\prop_count:N  % error on this line
+  \l_example_str
+\intarray_count:N  % error on this line
+  \l_example_str
+\fparray_count:N  % error on this line
+  \l_example_str


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-03.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-04.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-04.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-04.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,16 @@
+\int_new:N
+  \l_example_int
+\tl_count:N  % error on this line
+  \l_example_int
+\str_count:N  % error on this line
+  \l_example_int
+\seq_count:N  % error on this line
+  \l_example_int
+\clist_count:N  % error on this line
+  \l_example_int
+\prop_count:N  % error on this line
+  \l_example_int
+\intarray_count:N  % error on this line
+  \l_example_int
+\fparray_count:N  % error on this line
+  \l_example_int


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-04.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-05.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-05.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-05.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,16 @@
+\seq_new:N
+  \l_example_seq
+\tl_count:N  % error on this line
+  \l_example_seq
+\str_count:N  % error on this line
+  \l_example_seq
+\seq_count:N
+  \l_example_seq
+\clist_count:N  % error on this line
+  \l_example_seq
+\prop_count:N  % error on this line
+  \l_example_seq
+\intarray_count:N  % error on this line
+  \l_example_seq
+\fparray_count:N  % error on this line
+  \l_example_seq


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-05.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-06.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-06.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-06.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,16 @@
+\clist_new:N
+  \l_example_clist
+\tl_count:N
+  \l_example_clist
+\str_count:N  % error on this line
+  \l_example_clist
+\seq_count:N  % error on this line
+  \l_example_clist
+\clist_count:N
+  \l_example_clist
+\prop_count:N  % error on this line
+  \l_example_clist
+\intarray_count:N  % error on this line
+  \l_example_clist
+\fparray_count:N  % error on this line
+  \l_example_clist


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-06.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-07.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-07.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-07.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,16 @@
+\clist_new:N  % error on this line
+  \l_example_prop
+\tl_count:N  % error on this line
+  \l_example_prop
+\str_count:N  % error on this line
+  \l_example_prop
+\seq_count:N  % error on this line
+  \l_example_prop
+\clist_count:N  % error on this line
+  \l_example_prop
+\prop_count:N
+  \l_example_prop
+\intarray_count:N  % error on this line
+  \l_example_prop
+\fparray_count:N  % error on this line
+  \l_example_prop


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-07.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-08.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-08.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-08.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,17 @@
+\intarray_new:Nn
+  \g_example_intarray
+  { 5 }
+\tl_count:N  % error on this line
+  \g_example_intarray
+\str_count:N  % error on this line
+  \g_example_intarray
+\seq_count:N  % error on this line
+  \g_example_intarray
+\clist_count:N  % error on this line
+  \g_example_intarray
+\prop_count:N  % error on this line
+  \g_example_intarray
+\intarray_count:N
+  \g_example_intarray
+\fparray_count:N  % error on this line
+  \g_example_intarray


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-08.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-09.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-09.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-09.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,17 @@
+\fparray_new:Nn
+  \g_example_fparray
+  { 5 }
+\tl_count:N  % error on this line
+  \g_example_fparray
+\str_count:N  % error on this line
+  \g_example_fparray
+\seq_count:N  % error on this line
+  \g_example_fparray
+\clist_count:N  % error on this line
+  \g_example_fparray
+\prop_count:N  % error on this line
+  \g_example_fparray
+\intarray_count:N  % error on this line
+  \g_example_fparray
+\fparray_count:N
+  \g_example_fparray


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-09.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-10.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-10.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-10.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,5 @@
+\ior_new:N
+  \l_example_ior
+\iow_open:Nn  % error on this line
+  \l_example_ior
+  { example }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-10.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-11.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-11.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-11.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,8 @@
+\clist_new:N
+  \l_example_clist
+\tl_set:Nn
+  \l_tmpa_tl
+  { foo }
+\clist_set_eq:NN  % error on this line
+  \l_example_clist
+  \l_tmpa_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-11.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-12.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-12.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-12.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,6 @@
+\tl_set:Nn
+  \l_tmpa_tl
+  { foo }
+\seq_set_from_clist:NN  % error on this line
+  \l_tmpa_seq
+  \l_tmpa_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-12.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-13.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-13.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-13.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,15 @@
+\tl_set:Nn
+  \l_tmpa_tl
+  { foo }
+\regex_set:Nn
+  \l_tmpa_regex
+  { foo }
+\int_set:Nn
+  \l_tmpa_int
+  { 1 + 2 }
+\regex_show:N  % error on this line
+  \l_tmpa_tl
+\regex_show:N
+  \l_tmpa_regex
+\regex_show:N  % error on this line
+  \l_tmpa_int


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-13.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-14.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-14.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-14.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,6 @@
+\tl_set:Nn
+  \l_tmpa_tl
+  { foo }
+\int_set_eq:NN  % error on this line
+  \l_tmpa_int
+  \l_tmpa_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-14.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-15.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-15.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-15.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,8 @@
+\str_new:N
+  \l_example_str
+\tl_const:Nn
+  \c_example_tl
+  { foo }
+\str_set_eq:NN  % error on this line
+  \l_example_str
+  \c_example_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-15.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-16.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-16.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-16.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,19 @@
+\tl_new:N
+  \l_example_tl
+\str_new:N
+  \l_example_str
+\str_set:Nn
+  \l_example_str
+  { foo }
+\tl_set_eq:NN
+  \l_example_tl
+  \l_example_str
+\str_set_eq:NN
+  \l_example_tl
+  \l_example_str
+\tl_set_eq:NN  % error on this line
+  \l_example_str
+  \l_example_tl
+\str_set_eq:NN  % error on this line
+  \l_example_str
+  \l_example_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-16.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-17.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-17.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-17.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,6 @@
+\str_set_eq:NN  % error on this line
+  \l_example_tl
+  \l_example_tl
+\tl_set_eq:NN  % error on this line
+  \l_example_str
+  \l_example_str


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-17.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/t422-18.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/t422-18.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/t422-18.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,15 @@
+\seq_set_from_clist:NN
+  \l_tmpa_seq
+  \l_tmpa_clist
+\seq_set_from_clist:NN  % error on this line
+  \l_tmpa_seq
+  \l_tmpa_tl
+\seq_set_from_clist:NN  % error on this line
+  \l_tmpa_seq
+  \l_tmpa_seq
+\clist_set_from_seq:NN
+  \l_tmpa_clist
+  \l_tmpa_seq
+\clist_set_from_seq:NN  % error on this line
+  \l_tmpa_clist
+  \l_tmpa_int


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/t422-18.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w415-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w415-01.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w415-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,2 @@
+\tl_new:N  % warning on this line
+  \g_declared_but_undefined_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w415-01.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w415-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w415-02.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w415-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,5 @@
+\tl_new:N  % warning on this line
+  \g_defined_but_unused_tl
+\tl_gset:Nn
+  \g_defined_but_unused_tl
+  { foo }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w415-02.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w415-03.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w415-03.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w415-03.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,7 @@
+\tl_new:N
+  \g_defined_and_used_tl
+\tl_gset:Nn
+  \g_defined_and_used_tl
+  { foo }
+\tl_use:N
+  \g_defined_and_used_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w415-03.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w415-04.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w415-04.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w415-04.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,3 @@
+\tl_const:Nn  % warning on this line
+  \c_defined_but_unused_tl
+  { foo }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w415-04.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w415-05.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w415-05.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w415-05.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,5 @@
+\tl_const:Nn
+  \c_defined_and_used_tl
+  { foo }
+\tl_use:N
+  \c_defined_and_used_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w415-05.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w416.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w416.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w416.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,3 @@
+\tl_gset:Nn  % warning on this line
+  \g_example_tl
+  { bar }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w416.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w419-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w419-01.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w419-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,2 @@
+\tl_use:N  % error on this line
+  \g_undeclared_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w419-01.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w419-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w419-02.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w419-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,4 @@
+\tl_new:N
+  \g_declared_but_undefined_tl
+\tl_use:N
+  \g_declared_but_undefined_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w419-02.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w419-03.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w419-03.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w419-03.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,7 @@
+\tl_new:N
+  \g_defined_tl
+\tl_gset:Nn
+  \g_defined_tl
+  { foo }
+\tl_use:N
+  \g_defined_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w419-03.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w419-04.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w419-04.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w419-04.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,2 @@
+\tl_use:N  % error on this line
+  \c_undefined_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w419-04.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w419-05.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w419-05.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w419-05.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,5 @@
+\tl_const:Nn
+  \c_defined_tl
+  { foo }
+\tl_use:N
+  \c_defined_tl


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w419-05.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w423-01.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w423-01.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w423-01.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,4 @@
+\msg_new:nnn  % warning on this line
+  { foo }
+  { bar }
+  { baz }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w423-01.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w423-02.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w423-02.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w423-02.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,7 @@
+\msg_new:nnn
+  { bar }
+  { bar }
+  { baz }
+\msg_info:nn
+  { bar }
+  { bar }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w423-02.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: trunk/Master/texmf-dist/doc/support/expltools/w426.tex
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/w426.tex	                        (rev 0)
+++ trunk/Master/texmf-dist/doc/support/expltools/w426.tex	2025-08-18 19:01:40 UTC (rev 76083)
@@ -0,0 +1,22 @@
+\msg_new:nnn
+  { foo }
+  { bar }
+  { #1~#2 }
+\msg_info:nn  % warning on this line
+  { foo }
+  { bar }
+\msg_info:nnn  % warning on this line
+  { foo }
+  { bar }
+  { foo }
+\msg_info:nnnn
+  { foo }
+  { bar }
+  { foo }
+  { bar }
+\msg_info:nnnnn  % warning on this line
+  { foo }
+  { bar }
+  { foo }
+  { bar }
+  { baz }


Property changes on: trunk/Master/texmf-dist/doc/support/expltools/w426.tex
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Modified: trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-02-lexical-analysis.md
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-02-lexical-analysis.md	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-02-lexical-analysis.md	2025-08-18 19:01:40 UTC (rev 76083)
@@ -28,38 +28,6 @@
 
  /s204.tex
 
-## Malformed function name {.s label=s205}
-Some function have names that are not in the format `\texttt{\textbackslash\meta{module}\_\meta{description}:\meta{arg-spec}}`{=tex} [@latexteam2024programming, Section 3.2].
-
- /s205-01.tex
- /s205-02.tex
- /s205-03.tex
- /s205-04.tex
-
-This also extends to conditional functions:
-
- /s205-05.tex
- /s205-06.tex
- /s205-07.tex
-
-## Malformed variable or constant name {.s label=s206}
-Some expl3 variables and constants have names that are not in the format `\texttt{\textbackslash\meta{scope}\_\meta{module}\_\meta{description}\_\meta{type}}`{=tex} [@latexteam2024programming, Section 3.2], where the `\meta{module}`{=tex} part is optional.
-
- /s206-01.tex
- /s206-02.tex
-
-An exception is made for scratch variables [@latexteam2024interfaces, Section 1.1.1]:
-
- /s206-03.tex
-
-## Malformed quark or scan mark name {.s label=s207}
-Some expl3 quarks and scan marks have names that do not start with `\q_` and `\s_`, respectively [@latexteam2024programming, Chapter 19].
-
- /s207-01.tex
- /s207-02.tex
- /s207-03.tex
- /s207-04.tex
-
 ## Too many closing braces {.e label=e208}
 An expl3 part of the input file contains too many closing braces.
 

Modified: trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-03-syntactic-analysis.md
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-03-syntactic-analysis.md	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-03-syntactic-analysis.md	2025-08-18 19:01:40 UTC (rev 76083)
@@ -35,7 +35,7 @@
  /e304-01.tex
  /e304-02.tex
 
-### Expanding an unexpandable variable or constant {.t label=t305}
+## Expanding an unexpandable variable or constant {.t label=t305}
 A function with a `V`-type argument is called with a variable or constant that does not support `V`-type expansion [@latexteam2024interfaces, Section 1.1].
 
  /t305.tex

Modified: trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-04-semantic-analysis.md
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-04-semantic-analysis.md	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-04-semantic-analysis.md	2025-08-18 19:01:40 UTC (rev 76083)
@@ -55,495 +55,130 @@
  /e411-03.tex
  /e411-04.tex
 
-## Variables and constants
+### Malformed function name {.s label=s412}
+Some functions have names that are not in the format `\texttt{\textbackslash\meta{module}\_\meta{description}:\meta{arg-spec}}`{=tex} [@latexteam2024programming, Section 3.2].
 
-### Unused variable or constant {.w #unused-variable-or-constant}
-A variable or a constant is declared and perhaps defined but unused.
+ /s412-01.tex
+ /s412-02.tex
+ /s412-03.tex
+ /s412-04.tex
 
-``` tex
-\tl_new:N  % warning on this line
-  \g_declared_but_undefined_tl
-```
+This also extends to conditional functions:
 
-``` tex
-\tl_new:N  % warning on this line
-  \g_defined_but_unused_tl
-\tl_gset:Nn
-  \g_defined_but_unused_tl
-  { foo }
-```
+ /s412-05.tex
+ /s412-06.tex
+ /s412-07.tex
 
-``` tex
-\tl_new:N
-  \g_defined_but_unused_tl
-\tl_gset:Nn
-  \g_defined_but_unused_tl
-  { foo }
-\tl_use:N
-  \g_defined_but_unused_tl
-```
+## Variables and constants
 
-``` tex
-\tl_const:Nn  % warning on this line
-  \c_defined_but_unused_tl
-  { foo }
-```
+### Malformed variable or constant name {.s label=s413}
+Some expl3 variables and constants have names that are not in the format `\texttt{\textbackslash\meta{scope}\_\meta{module}\_\meta{description}\_\meta{type}}`{=tex} [@latexteam2024programming, Section 3.2], where the `\meta{module}`{=tex} part is optional.
 
-``` tex
-\tl_const:Nn
-  \c_defined_but_unused_tl
-  { foo }
-\tl_use:N
-  \c_defined_but_unused_tl
-```
+ /s413-01.tex
+ /s413-02.tex
+ /s413-03.tex
 
-### Setting an undeclared variable {.w #setting-undeclared-variable}
-An undeclared variable is set.
+### Malformed quark or scan mark name {.s label=s414}
+Some expl3 quarks and scan marks have names that do not start with `\q_` and `\s_`, respectively [@latexteam2024programming, Chapter 19].
 
-``` tex
-\tl_gset:Nn  % warning on this line
-  \g_example_tl
-  { bar }
-```
+ /s414-01.tex
+ /s414-02.tex
+ /s414-03.tex
+ /s414-04.tex
 
-### Setting a constant {.e}
-A constant is set.
+### Unused variable or constant {.w label=w415 #unused-variable-or-constant}
+A variable or a constant is declared and perhaps defined but unused.
 
-``` tex
-\tl_gset:Nn  % error on this line
-  \c_example_tl
-  { bar }
-```
+ /w415-01.tex
+ /w415-02.tex
+ /w415-03.tex
+ /w415-04.tex
+ /w415-05.tex
 
-### Using a token list variable or constant without an accessor {.w}
-A token list variable or constant is used without an accessor function.
+### Setting an undeclared variable {.w label=w416 #setting-undeclared-variable}
+An undeclared variable is set.
 
-``` tex
-\tl_set:Nn
-  \l_tmpa_tl
-  { world }
-Hello,~\l_tmpa_tl!  % warning on this line
-Hello,~\tl_use:N \l_tmpa_tl !
-```
+ /w416.tex
 
-This also applies to subtypes of token lists such as strings
-and comma-lists:
+### Setting a variable as a constant {.e label=e417}
+A variable is set as though it were a constant.
 
-``` tex
-\str_set:Nn
-  \l_tmpa_str
-  { world }
-Hello,~\l_tmpa_str!  % warning on this line
-Hello,~\str_use:N \l_tmpa_str !
-```
+ /e417.tex
 
-``` tex
-\clist_set:Nn
-  \l_tmpa_clist
-  { world }
-Hello,~\l_tmpa_clist!  % warning on this line
-Hello,~\clist_use:Nn \l_tmpa_clist { and } !
-```
+### Setting a constant {.e label=e418}
+A constant is set.
 
-### Using non-token-list variable or constant without an accessor {.e #using-variables-without-accessors}
-A non-token-list variable or constant is used without an accessor function.
+ /e418.tex
 
-``` tex
-Hello,~\l_tmpa_seq!  % error on this line
-Hello,~\seq_use:Nn \l_tmpa_seq { and } !
-```
-
-Note that boolean and integer variables may be used without accessor functions in boolean and integer expressions, respectively. Therefore, we may want to initially exclude them from this check to prevent false positives.
-
-### Multiply declared variable or constant {.e}
-A variable or constant is declared multiple times.
-
-``` tex
-\tl_new:N
-  \g_example_tl
-\tl_new:N  % error on this line
-  \g_example_tl
-```
-
-``` tex
-\tl_const:Nn
-  \c_example_tl
-  { foo }
-\tl_const:Nn  % error on this line
-  \c_example_tl
-  { bar }
-```
-
-### Using an undefined variable or constant {.e #using-undefined-variable-or-constant}
+### Using an undeclared variable or constant {.w label=w419 #using-undeclared-variable-or-constant}
 A variable or constant is used but undeclared or undefined.
 
-``` tex
-\tl_use:N  % error on this line
-  \g_undeclared_tl
-```
+ /w419-01.tex
+ /w419-02.tex
+ /w419-03.tex
+ /w419-04.tex
+ /w419-05.tex
 
-``` tex
-\tl_new:N
-  \g_declared_but_undefined_tl
-\tl_use:N  % error on this line
-  \g_declared_but_undefined_tl
-```
-
-``` tex
-\tl_new:N
-  \g_defined_tl
-\tl_gset:Nn
-  \g_defined_tl
-  { foo }
-\tl_use:N
-  \g_defined_tl
-```
-
-``` tex
-\tl_use:N  % error on this line
-  \c_undefined_tl
-```
-
-``` tex
-\tl_const:Nn
-  \c_defined_tl
-  { foo }
-\tl_use:N
-  \c_defined_tl
-```
-
-### Locally setting a global variable {.e}
+### Locally setting a global variable {.e label=e420}
 A global variable is locally set.
 
-``` tex
-\tl_new:N
-  \g_example_tl
-\tl_set:Nn  % error on this line
-  \g_example_tl
-  { foo }
-```
+ /e420.tex
 
-### Globally setting a local variable {.e}
+### Globally setting a local variable {.e label=e421}
 A local variable is globally set.
 
-``` tex
-\tl_new:N
-  \l_example_tl
-\tl_gset:Nn  % error on this line
-  \l_example_tl
-  { foo }
-```
+ /e421.tex
 
-### Using a variable of an incompatible type {.t}
+### Using a variable of an incompatible type {.t label=t422}
 A variable of one type is used where a variable of a different type should be used.
 
-``` tex
-\tl_new:N
-  \l_example_str  % error on this line
-```
+ /t422-01.tex
+ /t422-02.tex
+ /t422-03.tex
+ /t422-04.tex
+ /t422-05.tex
+ /t422-06.tex
+ /t422-07.tex
+ /t422-08.tex
+ /t422-09.tex
+ /t422-10.tex
+ /t422-11.tex
+ /t422-12.tex
+ /t422-13.tex
+ /t422-14.tex
+ /t422-15.tex
+ /t422-16.tex
+ /t422-17.tex
+ /t422-18.tex
 
-``` tex
-\tl_new:N
-  \l_example_tl
-\tl_count:N
-  \l_example_tl
-\str_count:N
-  \l_example_tl
-\seq_count:N
-  \l_example_tl  % error on this line
-\clist_count:N
-  \l_example_tl  % error on this line
-\prop_count:N
-  \l_example_tl  % error on this line
-\intarray_count:N
-  \l_example_tl  % error on this line
-\fparray_count:N
-  \l_example_tl  % error on this line
-```
-
-``` tex
-\str_new:N
-  \l_example_str
-\tl_count:N
-  \l_example_str
-\str_count:N
-  \l_example_str
-\seq_count:N
-  \l_example_str  % error on this line
-\clist_count:N
-  \l_example_str  % error on this line
-\prop_count:N
-  \l_example_str  % error on this line
-\intarray_count:N
-  \l_example_str  % error on this line
-\fparray_count:N
-  \l_example_str  % error on this line
-```
-
-``` tex
-\int_new:N
-  \l_example_int
-\tl_count:N
-  \l_example_int  % error on this line
-\str_count:N
-  \l_example_int  % error on this line
-\seq_count:N
-  \l_example_int  % error on this line
-\clist_count:N
-  \l_example_int  % error on this line
-\prop_count:N
-  \l_example_int  % error on this line
-\intarray_count:N
-  \l_example_int  % error on this line
-\fparray_count:N
-  \l_example_int  % error on this line
-```
-
-``` tex
-\seq_new:N
-  \l_example_seq
-\tl_count:N
-  \l_example_seq  % error on this line
-\str_count:N
-  \l_example_seq  % error on this line
-\seq_count:N
-  \l_example_seq
-\clist_count:N
-  \l_example_seq  % error on this line
-\prop_count:N
-  \l_example_seq  % error on this line
-\intarray_count:N
-  \l_example_seq  % error on this line
-\fparray_count:N
-  \l_example_seq  % error on this line
-```
-
-``` tex
-\clist_new:N
-  \l_example_clist
-\tl_count:N
-  \l_example_clist  % error on this line
-\str_count:N
-  \l_example_clist  % error on this line
-\seq_count:N
-  \l_example_clist  % error on this line
-\clist_count:N
-  \l_example_clist
-\prop_count:N
-  \l_example_clist  % error on this line
-\intarray_count:N
-  \l_example_clist  % error on this line
-\fparray_count:N
-  \l_example_clist  % error on this line
-```
-
-``` tex
-\clist_new:N
-  \l_example_prop
-\tl_count:N
-  \l_example_prop  % error on this line
-\str_count:N
-  \l_example_prop  % error on this line
-\seq_count:N
-  \l_example_prop  % error on this line
-\clist_count:N
-  \l_example_prop  % error on this line
-\prop_count:N
-  \l_example_prop
-\intarray_count:N
-  \l_example_prop  % error on this line
-\fparray_count:N
-  \l_example_prop  % error on this line
-```
-
-``` tex
-\intarray_new:Nn
-  \g_example_intarray
-  { 5 }
-\tl_count:N
-  \g_example_intarray  % error on this line
-\str_count:N
-  \g_example_intarray  % error on this line
-\seq_count:N
-  \g_example_intarray  % error on this line
-\clist_count:N
-  \g_example_intarray  % error on this line
-\prop_count:N
-  \g_example_intarray  % error on this line
-\intarray_count:N
-  \g_example_intarray
-\fparray_count:N
-  \g_example_intarray  % error on this line
-```
-
-``` tex
-\fparray_new:Nn
-  \g_example_fparray
-  { 5 }
-\tl_count:N
-  \g_example_fparray  % error on this line
-\str_count:N
-  \g_example_fparray  % error on this line
-\seq_count:N
-  \g_example_fparray  % error on this line
-\clist_count:N
-  \g_example_fparray  % error on this line
-\prop_count:N
-  \g_example_fparray  % error on this line
-\intarray_count:N
-  \g_example_fparray  % error on this line
-\fparray_count:N
-  \g_example_fparray
-```
-
-``` tex
-\ior_new:N
-  \l_example_ior
-\iow_open:Nn
-  \l_example_ior  % error on this line
-  { example }
-```
-
-``` tex
-\clist_new:N
-  \l_example_clist
-\tl_set:Nn
-  \l_tmpa_tl
-  { foo }
-\clist_set_eq:NN
-  \l_example_clist
-  \l_tmpa_tl  % error on this line
-```
-
-``` tex
-\tl_set:Nn
-  \l_tmpa_tl
-  { foo }
-\seq_set_from_clist:NN
-  \l_tmpa_seq
-  \l_tmpa_tl  % error on this line
-```
-
-``` tex
-\tl_set:Nn
-  \l_tmpa_tl
-  { foo }
-\regex_set:Nn
-  \l_tmpa_regex
-  { foo }
-\int_set:Nn
-  \l_tmpa_int
-  { 1 + 2 }
-\regex_show:N
-  \l_tmpa_tl
-\regex_show:N
-  \l_tmpa_regex
-\regex_show:N
-  \l_tmpa_int  % error on this line
-```
-
-``` tex
-\tl_set:Nn
-  \l_tmpa_tl
-  { foo }
-\int_set_eq:NN
-  \l_tmpa_int
-  \l_tmpa_tl  % error on this line
-```
-
 ## Messages
 
-### Unused message {.w #unused-message}
+### Unused message {.w label=w423 #unused-message}
 A message is defined but unused.
 
-``` tex
-\msg_new:nnn  % warning on this line
-  { foo }
-  { bar }
-  { baz }
-```
+ /w423-01.tex
+ /w423-02.tex
 
-``` tex
-\msg_new:nnn
-  { bar }
-  { bar }
-  { baz }
-\msg_info:nn
-  { bar }
-  { bar }
-```
-
-### Setting an undefined message {.w #setting-undefined-message}
-A message is set but undefined.
-
-``` tex
-\msg_set:nnn  % error on this line
-  { foo }
-  { bar }
-  { baz }
-```
-
-``` tex
-\msg_new:nnn
-  { foo }
-  { bar }
-  { baz }
-\msg_set:nnn
-  { foo }
-  { bar }
-  { baz }
-```
-
-### Multiply defined message {.e}
-A message is defined multiple times.
-
-``` tex
-\msg_new:nnn
-  { foo }
-  { bar }
-  { baz }
-\msg_new:nnn  % error on this line
-  { foo }
-  { bar }
-  { baz }
-```
-
-### Using an undefined message {.e #using-undefined-message}
+### Using an undefined message {.e label=e424 #using-undefined-message}
 A message is used but undefined.
 
-``` tex
-\msg_info:nn
-  { foo }
-  { bar }
-```
+ /e424.tex
 
-### Incorrect parameters in message text {.e #invalid-parameters-in-message-text}
+### Incorrect parameters in message text {.e label=e425 #invalid-parameters-in-message-text}
 Parameter tokens other than `#1`, `#2`, `#3`, and `#4` are specified in a message text.
 
-``` tex
-\msg_new:nnn
-  { foo }
-  { bar }
-  { #5 }  % error on this line
-```
+ /e425-01.tex
+ /e425-02.tex
+ /e425-03.tex
 
-``` tex
-\msg_new:nnnn
-  { foo }
-  { bar }
-  { #4 }
-  { #5 }  % error on this line
-```
+### Incorrect number of arguments supplied to message {.w label=w426 #incorrect-number-of-arguments-supplied-to-message}
+A message was supplied fewer or more arguments than there are parameters in the message text.
 
+ /w426.tex
+
 ## Sorting
-### Comparison conditional without signature `:nnTF` {.e}
+### Comparison conditional without signature `:nnTF` {.e label=e427}
 A sorting function is called with a conditional that has a signature different than `:nnTF` [@latexteam2024interfaces, Section 15.5.4].
 
-``` tex
-\cs_new:Nn
-  \example_foo:
-  { \prg_return_true: }
-\tl_sort:nN
-  { { foo } { bar } }
-  \example_foo:TF
-```
+ /e427-01.tex
+ /e427-02.tex

Modified: trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-05-flow-analysis.md
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-05-flow-analysis.md	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-05-flow-analysis.md	2025-08-18 19:01:40 UTC (rev 76083)
@@ -609,21 +609,45 @@
 
 This check is a stronger version of <#setting-undeclared-variable> and should prevent <#setting-undeclared-variable> from being emitted for this variable.
 
-### Using an undefined variable or constant {.e}
-A variable or constant is used before it has been defined.
+### Using an undeclared variable or constant {.w}
+A variable or constant is used before it has been declared.
 
 ``` tex
+\tl_use:N  % error on this line
+  \g_example_tl
 \tl_new:N
   \g_example_tl
+```
+
+``` tex
 \tl_use:N  % error on this line
+  \c_example_tl
+\tl_const:Nn
+  \c_example_tl
+  { foo }
+```
+
+This check is a stronger version of <#using-undeclared-variable-or-constant> and should only be emitted if <#using-undeclared-variable-or-constant> has not previously been emitted for this variable or constant.
+
+### Multiply declared variable or constant {.e}
+A variable or constant is declared multiple times.
+
+``` tex
+\tl_new:N
   \g_example_tl
-\tl_gset:Nn
+\tl_new:N  % error on this line
   \g_example_tl
+```
+
+``` tex
+\tl_const:Nn
+  \c_example_tl
   { foo }
+\tl_const:Nn  % error on this line
+  \c_example_tl
+  { bar }
 ```
 
-This check is a stronger version of <#using-undefined-variable-or-constant> and should only be emitted if <#using-undefined-variable-or-constant> has not previously been emitted for this variable or constant.
-
 ## Messages
 
 ### Unused message {.w}
@@ -645,22 +669,6 @@
 
 This check is a stronger version of <#unused-message> and should only be emitted if <#unused-message> has not previously been emitted for this message.
 
-### Setting an undefined message {.e}
-A message is set before it has been defined.
-
-``` tex
-\msg_set:nnn  % error on this line
-  { foo }
-  { bar }
-  { baz }
-\msg_new:nnn
-  { foo }
-  { bar }
-  { baz }
-```
-
-This check is a stronger version of <#setting-undefined-message> and should prevent <#setting-undefined-message> from being emitted for this message.
-
 ### Using an undefined message {.e}
 A message is used before it has been defined.
 
@@ -676,55 +684,50 @@
 
 This check is a stronger version of <#using-undefined-message> and should only be emitted if <#using-undefined-message> has not previously been emitted for this message.
 
-### Too few arguments supplied to message {.e #too-few-arguments-supplied-to-message}
-A message was supplied fewer arguments than there are parameters in the message text.
+### Incorrect number of arguments supplied to message {.w}
+A message was supplied fewer or more arguments than there are parameters in the message text.
 
 ``` tex
 \msg_new:nnn
   { foo }
   { bar }
-  { #1~#2 }
-\msg_info:nn  % error on this line
+  { #1 }
+\msg_set:nnn
   { foo }
   { bar }
+  { baz }
 \msg_info:nnn  % error on this line
   { foo }
   { bar }
   { baz }
-\msg_info:nnnn
-  { foo }
-  { bar }
-  { baz }
-  { baz }
 ```
 
-Since a message can be redefined, we need to track the (possibly many) definitions that can be active when we display a message.
-
 ``` tex
 \msg_new:nnn
   { foo }
   { bar }
   { #1 }
-\msg_set:nnn
+\msg_info:nnn
   { foo }
   { bar }
   { baz }
-\msg_info:nnn  % error on this line
+\msg_set:nnn
   { foo }
   { bar }
   { baz }
 ```
 
+This check is a stronger version of <#incorrect-number-of-arguments-supplied-to-message> and should only be emitted if <#incorrect-number-of-arguments-supplied-to-message> has not previously been emitted for this message.
+
+### Multiply defined message {.e}
+A message is defined multiple times.
+
 ``` tex
 \msg_new:nnn
   { foo }
   { bar }
-  { #1 }
-\msg_info:nnn
-  { foo }
-  { bar }
   { baz }
-\msg_set:nnn
+\msg_new:nnn  % error on this line
   { foo }
   { bar }
   { baz }

Modified: trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-99-caveats.md
===================================================================
--- trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-99-caveats.md	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors-99-caveats.md	2025-08-18 19:01:40 UTC (rev 76083)
@@ -6,7 +6,7 @@
   [@latexteam2024interfaces, sections 5.4–5.10]
 - Validation of parameters in (inline) functions
   (c.f. <#invalid-parameters-in-message-text>
-   and <#too-few-arguments-supplied-to-message>)
+   and <#incorrect-number-of-arguments-supplied-to-message>)
 - Shorthands such as `\~` and `\\` in message texts
   [@latexteam2024interfaces, sections 11.4 and 12.1.3]
 - Quotes in shell commands and file names
@@ -26,30 +26,26 @@
     - `\iow_wrap_allow_break:` and `\iow_indent:n`
       outside wrapped message text
       [@latexteam2024interfaces, Section 12.1.4]
+    - Token list and string variables without accessor
+      functions `\tl_use:N` and `\str_use:N`
     - Boolean variable without an accessor function
       `\bool_to_str:N` outside boolean expressions
       [@latexteam2024interfaces, Section 21.4]
-      (see <#using-variables-without-accessors>)
     - Integer variable without an accessor function
       `\int_use:N` outside integer or floating point
       expressions [@latexteam2024interfaces, Section 21.4]
-      (see <#using-variables-without-accessors>)
     - Dimension variable without an accessor function
       `\dim_use:N` outside dimension or floating point
       expressions [@latexteam2024interfaces, Section 26.7]
-      (see <#using-variables-without-accessors>)
     - Skip variable without an accessor function
       `\skip_use:N` outside skip or floating point expressions
       [@latexteam2024interfaces, Section 26.14]
-      (see <#using-variables-without-accessors>)
     - Muskip variable without an accessor function
       `\muskip_use:N` outside muskip or floating point
       expressions [@latexteam2024interfaces, Section 26.21]
-      (see <#using-variables-without-accessors>)
     - Floating point variable without an accessor function
       `\fp_use:N` outside floating point
       expressions [@latexteam2024interfaces, Section 29.3]
-      (see <#using-variables-without-accessors>)
     - Box variable without accessor functions
       `\box_use(_drop)?:N` or `\[hv]box_unpack(_drop)?:N`,
       or without a measuring function
@@ -60,6 +56,8 @@
       `\coffin_typeset:Nnnnn` outside dimension or
       floating point expressions
       [@latexteam2024interfaces, Section 36.4]
+    - Lonely variables of other types that may or may not
+      have accessor functions
 - Validation of literal expressions:
     - Comparison expressions in functions
       `\*_compare(_p:n|:nT?F?)`
@@ -136,4 +134,5 @@
       `.value_required:n` and never set keys with
       property `.value_forbidden:n`?
 - Horizontal box operation on a vertical box or vice
-  versa [@latexteam2024interfaces, Chapter 35]
+  versa [@latexteam2024interfaces, Chapter 35], similarly for
+  coffins

Modified: trunk/Master/texmf-dist/doc/support/expltools/warnings-and-errors.pdf
===================================================================
(Binary files differ)

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-cli.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-cli.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-cli.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -73,10 +73,7 @@
     local is_ok, error_message = xpcall(function()
 
       -- Set up the issue registry.
-      local issues = new_issues()
-      for _, issue_identifier in ipairs(get_option("ignored_issues", options, pathname)) do
-        issues:ignore(issue_identifier)
-      end
+      local issues = new_issues(pathname, options)
 
       -- Load an input file.
       local file = assert(io.open(pathname, "r"))
@@ -133,7 +130,7 @@
     .. '\t                               - "auto": Use context cues to determine whether no part or the whole input file\n'
     .. "\t                                 is in expl3.\n\n"
     .. "\t                           The default setting is --expl3-detection-strategy=" .. expl3_detection_strategy .. ".\n\n"
-    .. "\t--ignored-issues=ISSUES    A comma-list of warning and error identifiers that should not be reported.\n\n"
+    .. "\t--ignored-issues=ISSUES    A comma-list of issue identifiers (or just prefixes) that should not be reported.\n\n"
     .. "\t--make-at-letter[={true|false|auto}]\n\n"
     .. '\t                           How the at sign ("@") should be tokenized:\n\n'
     .. '\t                           - empty or "true": Tokenize "@" as a letter (catcode 11), like in LaTeX style files.\n'
@@ -151,7 +148,7 @@
 end
 
 local function print_version()
-  print("explcheck (expltools 2025-06-24) v0.11.0")
+  print("explcheck (expltools 2025-08-18) v0.12.0")
   print("Copyright (c) 2024-2025 Vít Starý Novotný")
   print("Licenses: LPPL 1.3 or later, GNU GPL v2 or later")
 end

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-config.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-config.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-config.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -34,7 +34,9 @@
   local pathname, must_exist
   if options_pathname ~= nil then
     pathname = options_pathname
-    must_exist = options_pathname ~= default_pathname  -- if the options specify a distinct pathname, it must exist
+    if options_pathname ~= "" and options_pathname ~= default_pathname then
+      must_exist = true  -- if the options specify a distinct pathname, it must exist
+    end
   else
     pathname = default_pathname
     must_exist = false

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-config.toml
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-config.toml	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-config.toml	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,15 +1,27 @@
 [defaults]
 config_file = ".explcheckrc"
+error_format = "%f:%l:%c: %t%n %m"
 expl3_detection_strategy = "auto"
-error_format = "%f:%l:%c: %t%n %m"
+fail_fast = true
 ignored_issues = []
 imported_prefixes = []
 make_at_letter = "auto"
 max_line_length = 80
+min_expl3like_material_count = 5
+min_expl3like_material_ratio = 0.5
+min_invalid_character_count = 5
+min_invalid_character_ratio = 0.1
+min_other_complex_tokens_count = 5
+min_other_complex_tokens_ratio = 0.5
+min_unclosed_grouping_count = 5
+min_unclosed_grouping_ratio = 0.2
 min_simple_tokens_in_csname_pattern = 5
-min_expl3like_material = 5
 porcelain = false
-fail_fast = true
+stop_early_when_confused = true
+stop_after = "semantic analysis"
+suppressed_issue_map = { w200 = [
+  "s412", "s413", "s414"
+] }
 terminal_width = 80
 verbose = false
 warnings_are_errors = false

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-evaluation.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-evaluation.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-evaluation.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -14,18 +14,8 @@
 local FileEvaluationResults = {}
 local AggregateEvaluationResults = {}
 
--- Create a new evaluation results for the analysis results of an individual file.
-function FileEvaluationResults.new(cls, content, analysis_results, issues)
-  -- Instantiate the class.
-  local self = {}
-  setmetatable(self, cls)
-  cls.__index = cls
-  -- Evaluate the pre-analysis information.
-  local num_total_bytes = #content
-  -- Evaluate the issues.
-  local num_warnings = #issues.warnings
-  local num_errors = #issues.errors
-  -- Evaluate the results of the preprocessing.
+-- Count the number of all expl3 bytes in analysis results.
+local function count_expl3_bytes(analysis_results)
   local num_expl_bytes
   if analysis_results.expl_ranges ~= nil then
     num_expl_bytes = 0
@@ -33,17 +23,11 @@
       num_expl_bytes = num_expl_bytes + #range
     end
   end
-  -- Evaluate the results of the lexical analysis.
-  local num_tokens
-  if analysis_results.tokens ~= nil then
-    num_tokens = 0
-    for _, part_tokens in ipairs(analysis_results.tokens) do
-      for _, token in ipairs(part_tokens) do
-        assert(token.type ~= ARGUMENT)
-        num_tokens = num_tokens + 1
-      end
-    end
-  end
+  return num_expl_bytes
+end
+
+-- Count the number of all and unclosed groupings in analysis results.
+local function count_groupings(analysis_results)
   local num_groupings, num_unclosed_groupings
   if analysis_results.groupings ~= nil then
     num_groupings, num_unclosed_groupings = 0, 0
@@ -56,9 +40,27 @@
       end
     end
   end
-  -- Evaluate the results of the syntactic analysis.
-  local num_calls, num_call_tokens
-  local num_calls_total
+  return num_groupings, num_unclosed_groupings
+end
+
+-- Count the number of all tokens in analysis results.
+local function count_tokens(analysis_results)
+  local num_tokens
+  if analysis_results.tokens ~= nil then
+    num_tokens = 0
+    for _, part_tokens in ipairs(analysis_results.tokens) do
+      for _, token in ipairs(part_tokens) do
+        assert(token.type ~= ARGUMENT)
+        num_tokens = num_tokens + 1
+      end
+    end
+  end
+  return num_tokens
+end
+
+-- Count the number of all top-level calls in analysis results.
+local function count_top_level_calls(analysis_results)
+  local num_calls, num_call_tokens, num_calls_total
   if analysis_results.calls ~= nil then
     num_calls, num_call_tokens = {}, {}
     num_calls_total = 0
@@ -75,6 +77,27 @@
       end
     end
   end
+  return num_calls, num_call_tokens, num_calls_total
+end
+
+-- Create a new evaluation results for the analysis results of an individual file.
+function FileEvaluationResults.new(cls, content, analysis_results, issues)
+  -- Instantiate the class.
+  local self = {}
+  setmetatable(self, cls)
+  cls.__index = cls
+  -- Evaluate the pre-analysis information.
+  local num_total_bytes = #content
+  -- Evaluate the issues.
+  local num_warnings = #issues.warnings
+  local num_errors = #issues.errors
+  -- Evaluate the results of the preprocessing.
+  local num_expl_bytes = count_expl3_bytes(analysis_results)
+  -- Evaluate the results of the lexical analysis.
+  local num_tokens = count_tokens(analysis_results)
+  local num_groupings, num_unclosed_groupings = count_groupings(analysis_results)
+  -- Evaluate the results of the syntactic analysis.
+  local num_calls, num_call_tokens, num_calls_total = count_top_level_calls(analysis_results)
   local num_replacement_text_calls, num_replacement_text_call_tokens
   local num_replacement_text_calls_total
   if analysis_results.replacement_texts ~= nil then
@@ -243,10 +266,13 @@
 end
 
 return {
+  count_expl3_bytes = count_expl3_bytes,
+  count_groupings = count_groupings,
+  count_tokens = count_tokens,
   new_file_results = function(...)
     return FileEvaluationResults:new(...)
   end,
   new_aggregate_results = function(...)
     return AggregateEvaluationResults:new(...)
-  end
+  end,
 }

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-format.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-format.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-format.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -86,7 +86,7 @@
 -- Convert a number to a string with thousand separators.
 local function separate_thousands(number)
   local initial_digit, following_digits = string.match(tostring(number), '^(%d)(%d*)$')
-	return initial_digit .. following_digits:reverse():gsub('(%d%d%d)', '%1,'):reverse()
+  return initial_digit .. following_digits:reverse():gsub('(%d%d%d)', '%1,'):reverse()
 end
 
 -- Transform short numbers to words and make long numbers more readable using thousand separators.
@@ -116,6 +116,25 @@
   end
 end
 
+-- Strip leading and trailing whitespace in a text string.
+local function strip(text)
+  return text:gsub("^[%s\n]*", ""):gsub("[%s\n]*$", "")
+end
+
+-- Strip leading and trailing whitespace, collapse internal whitespace and optionally shorten issue context.
+local function format_context(context, max_length)
+  local ellipsis = "..."
+  -- Strip any leading or trailing whitespace.
+  context = strip(context)
+  -- Collapse internal whitespace.
+  context = context:gsub("%s+", " ")
+  -- Strip all text that exceeds the max length.
+  if #context + #ellipsis > max_length then
+    return string.format("%s%s", context:sub(1, max_length - #ellipsis), ellipsis)
+  end
+  return context
+end
+
 -- Shorten a pathname, so that it does not exceed maximum length.
 local function format_pathname(pathname, max_length)
   -- First, replace path segments with `/.../`, keeping other segments.
@@ -173,6 +192,16 @@
   return text:gsub("\27%[[0-9]+m", "")
 end
 
+-- Format a number as a percentage.
+local function format_percentage(percentage)
+  local formatted_percentage = string.format("%.0f%%", percentage)
+  if percentage > 0 and formatted_percentage == "0%" then
+    return "<1%"
+  else
+    return formatted_percentage
+  end
+end
+
 -- Format a ratio as a percentage.
 local function format_ratio(numerator, denominator)
   assert(numerator <= denominator)
@@ -180,12 +209,7 @@
     return "100%"
   else
     assert(denominator > 0)
-    local formatted_percentage = string.format("%.0f%%", 100.0 * numerator / denominator)
-    if numerator > 0 and formatted_percentage == "0%" then
-      return "<1%"
-    else
-      return formatted_percentage
-    end
+    return format_percentage(100.0 * numerator / denominator)
   end
 end
 
@@ -424,9 +448,7 @@
       end
       -- Display the warnings/errors.
       for _, issue in ipairs(issues.sort(warnings_or_errors)) do
-        local code = issue[1]
-        local message = issue[2]
-        local range = issue[3]
+        local code, message, range, context = table.unpack(issue)
         local start_line_number, start_column_number = 1, 1
         local end_line_number, end_column_number = 1, 1
         if range ~= nil then
@@ -434,14 +456,21 @@
           end_line_number, end_column_number = utils.convert_byte_to_line_and_column(line_starting_byte_numbers, range:stop())
           end_column_number = end_column_number
         end
-        local position = ":" .. tostring(start_line_number) .. ":" .. tostring(start_column_number) .. ":"
-        local terminal_width = get_option('terminal_width', options, pathname)
-        local max_line_length = math.max(math.min(88, terminal_width), terminal_width - 16)
-        local reserved_position_length = 10
-        local reserved_suffix_length = 30
-        local label_indent = (" "):rep(4)
-        local suffix = code:upper() .. " " .. message
         if not porcelain then
+          local position = ":" .. tostring(start_line_number) .. ":" .. tostring(start_column_number) .. ":"
+          local max_line_length = get_option('terminal_width', options, pathname)
+          local reserved_position_length = 10
+          local reserved_message_length = 30
+          local reserved_context_length = 20
+          local max_context_length = 50
+          local label_indent = (" "):rep(4)
+          local formatted_message = code:upper() .. " " .. message
+          if context ~= nil then
+            formatted_message = formatted_message .. ": "
+            context = format_context(context, max_context_length)
+          else
+            context = ""
+          end
           local formatted_pathname = format_pathname(
             pathname,
             math.max(
@@ -450,7 +479,8 @@
                 - #label_indent
                 - reserved_position_length
                 - #(" ")
-                - math.max(#suffix, reserved_suffix_length)
+                - math.max(#formatted_message, reserved_message_length)
+                - math.max(#context, reserved_context_length)
               ), 1
             )
           )
@@ -465,12 +495,14 @@
                   - #label_indent
                   - #formatted_pathname
                   - #decolorize(position)
-                  - math.max(#suffix, reserved_suffix_length)
+                  - math.max(#formatted_message, reserved_message_length)
+                  - math.max(#context, reserved_context_length)
                 ), 1
               )
             )
-            .. suffix
-            .. (" "):rep(math.max(reserved_suffix_length - #suffix, 0))
+            .. formatted_message
+            .. (" "):rep(math.max(reserved_message_length - #formatted_message, 0))
+            .. context
           )
           io.write("\n" .. line)
         else
@@ -507,185 +539,213 @@
 
   -- Display additional information.
   if verbose and not porcelain then
-    local line_indent = (" "):rep(4)
-    print()
-    -- Display pre-evaluation information.
-    local num_total_bytes = evaluation_results.num_total_bytes
-    if num_total_bytes == 0 then
-      io.write(string.format("\n%sEmpty file", line_indent))
-      goto skip_remaining_additional_information
-    end
-    local formatted_file_size = string.format("%s %s", titlecase(humanize(num_total_bytes)), pluralize("byte", num_total_bytes))
-    io.write(string.format("\n%s%s %s", line_indent, colorize("File size:", BOLD), formatted_file_size))
-    -- Evaluate the evalution results of the preprocessing.
-    io.write(string.format("\n\n%s%s", line_indent, colorize("Preprocessing results:", BOLD)))
-    local seems_like_latex_style_file = analysis_results.seems_like_latex_style_file
-    if seems_like_latex_style_file ~= nil then
-      if seems_like_latex_style_file then
-        io.write(string.format("\n%s- Seems like a LaTeX style file", line_indent))
-      else
-        io.write(string.format("\n%s- Doesn't seem like a LaTeX style file", line_indent))
+  local line_indent = (" "):rep(4)
+    do
+      print()
+      -- Display pre-evaluation information.
+      local num_total_bytes = evaluation_results.num_total_bytes
+      if num_total_bytes == 0 then
+        io.write(string.format("\n%sEmpty file", line_indent))
+        goto skip_remaining_additional_information
       end
-    end
-    local num_expl_bytes = evaluation_results.num_expl_bytes
-    if num_expl_bytes == 0 or num_expl_bytes == nil then
-      io.write(string.format("\n%s- No expl3 material", line_indent))
-      goto skip_remaining_additional_information
-    end
-    local expl_ranges = analysis_results.expl_ranges
-    assert(expl_ranges ~= nil)
-    assert(#expl_ranges > 0)
-    io.write(string.format("\n%s- %s %s spanning ", line_indent, titlecase(humanize(#expl_ranges)), pluralize("expl3 part", #expl_ranges)))
-    if num_expl_bytes == num_total_bytes then
-      io.write("the whole file")
-    else
-      local formatted_expl_bytes = string.format("%s %s", humanize(num_expl_bytes), pluralize("byte", num_expl_bytes))
-      local formatted_expl_ratio = format_ratio(num_expl_bytes, num_total_bytes)
-      io.write(string.format("%s (%s of file size)", formatted_expl_bytes, formatted_expl_ratio))
-    end
-    if not (#expl_ranges == 1 and #expl_ranges[1] == num_total_bytes) then
-      io.write(":")
-      for part_number, range in ipairs(expl_ranges) do
-        local start_line_number, start_column_number = utils.convert_byte_to_line_and_column(line_starting_byte_numbers, range:start())
-        local end_line_number, end_column_number = utils.convert_byte_to_line_and_column(line_starting_byte_numbers, range:stop())
-        local formatted_range_start = string.format("%d:%d", start_line_number, start_column_number)
-        local formatted_range_end = string.format("%d:%d", end_line_number, end_column_number)
-        io.write(string.format("\n%s%d. Between ", line_indent:rep(2), part_number))
-        io.write(string.format("%s and %s", formatted_range_start, formatted_range_end))
+      local formatted_file_size = string.format("%s %s", titlecase(humanize(num_total_bytes)), pluralize("byte", num_total_bytes))
+      io.write(string.format("\n%s%s %s", line_indent, colorize("File size:", BOLD), formatted_file_size))
+      -- Evaluate the evaluation results of the preprocessing.
+      io.write(string.format("\n\n%s%s", line_indent, colorize("Preprocessing results:", BOLD)))
+      local seems_like_latex_style_file = analysis_results.seems_like_latex_style_file
+      if seems_like_latex_style_file ~= nil then
+        if seems_like_latex_style_file then
+          io.write(string.format("\n%s- Seems like a LaTeX style file", line_indent))
+        else
+          io.write(string.format("\n%s- Doesn't seem like a LaTeX style file", line_indent))
+        end
       end
-    end
-    -- Evaluate the evalution results of the lexical analysis.
-    local num_tokens = evaluation_results.num_tokens
-    if num_tokens == nil then
-      goto skip_remaining_additional_information
-    end
-    io.write(string.format("\n\n%s%s", line_indent, colorize("Lexical analysis results:", BOLD)))
-    if num_tokens == 0 then
-      io.write(string.format("\n%s- No tokens in expl3 parts", line_indent))
-      goto skip_remaining_additional_information
-    end
-    io.write(string.format("\n%s- %s %s in expl3 parts", line_indent, titlecase(humanize(num_tokens)), pluralize("token", num_tokens)))
-    local num_groupings = evaluation_results.num_groupings
-    if num_groupings ~= nil and num_groupings > 0 then
-      io.write(string.format("\n%s- %s %s", line_indent, titlecase(humanize(num_groupings)), pluralize("grouping", num_groupings)))
-      io.write(" in expl3 parts")
-      local num_unclosed_groupings = evaluation_results.num_unclosed_groupings
-      assert(num_unclosed_groupings ~= nil)
-      if num_unclosed_groupings > 0 then
-        local formatted_grouping_ratio = format_ratio(num_unclosed_groupings, num_groupings)
-        io.write(string.format(" (%s unclosed, %s of groupings)", humanize(num_unclosed_groupings), formatted_grouping_ratio))
+      local num_expl_bytes = evaluation_results.num_expl_bytes
+      if num_expl_bytes == 0 or num_expl_bytes == nil then
+        io.write(string.format("\n%s- No expl3 material", line_indent))
+        goto skip_remaining_additional_information
       end
-    end
-    -- Evaluate the evalution results of the syntactic analysis.
-    if evaluation_results.num_calls == nil then
-      goto skip_remaining_additional_information
-    end
-    io.write(string.format("\n\n%s%s", line_indent, colorize("Syntactic analysis results:", BOLD)))
-    if evaluation_results.num_calls_total == 0 then
-      io.write(string.format("\n%s- No top-level %s", line_indent, pluralize("call")))
-      goto skip_remaining_additional_information
-    end
-    for call_type, num_call_tokens in pairs_sorted_by_descending_values(evaluation_results.num_call_tokens) do
-      local num_calls = evaluation_results.num_calls[call_type]
-      assert(num_calls ~= nil)
-      assert(num_calls > 0)
-      assert(num_call_tokens ~= nil)
-      assert(num_call_tokens > 0)
-      io.write(string.format("\n%s- %s top-level %s ", line_indent, titlecase(humanize(num_calls)), pluralize(call_type, num_calls)))
-      io.write("spanning ")
-      if num_call_tokens == num_tokens then
-        io.write("all tokens")
+      local expl_ranges = analysis_results.expl_ranges
+      assert(expl_ranges ~= nil)
+      assert(#expl_ranges > 0)
+      io.write(
+        string.format(
+          "\n%s- %s %s spanning ",
+          line_indent,
+          titlecase(humanize(#expl_ranges)),
+          pluralize("expl3 part", #expl_ranges)
+        )
+      )
+      if num_expl_bytes == num_total_bytes then
+        io.write("the whole file")
       else
-        local formatted_call_tokens = string.format("%s %s", humanize(num_call_tokens), pluralize("token", num_call_tokens))
-        local formatted_token_ratio = format_ratio(num_call_tokens, num_tokens)
-        if num_expl_bytes == num_total_bytes then
-            io.write(string.format("%s (%s of file size)", formatted_call_tokens, formatted_token_ratio))
-        else
-          local formatted_byte_ratio = format_ratio(num_expl_bytes * num_call_tokens, num_total_bytes * num_tokens)
-          io.write(string.format("%s (%s of tokens, ~%s of file size)", formatted_call_tokens, formatted_token_ratio, formatted_byte_ratio))
+        local formatted_expl_bytes = string.format("%s %s", humanize(num_expl_bytes), pluralize("byte", num_expl_bytes))
+        local formatted_expl_ratio = format_ratio(num_expl_bytes, num_total_bytes)
+        io.write(string.format("%s (%s of file size)", formatted_expl_bytes, formatted_expl_ratio))
+      end
+      if not (#expl_ranges == 1 and #expl_ranges[1] == num_total_bytes) then
+        io.write(":")
+        for part_number, range in ipairs(expl_ranges) do
+          local start_line_number, start_column_number = utils.convert_byte_to_line_and_column(line_starting_byte_numbers, range:start())
+          local end_line_number, end_column_number = utils.convert_byte_to_line_and_column(line_starting_byte_numbers, range:stop())
+          local formatted_range_start = string.format("%d:%d", start_line_number, start_column_number)
+          local formatted_range_end = string.format("%d:%d", end_line_number, end_column_number)
+          io.write(string.format("\n%s%d. Between ", line_indent:rep(2), part_number))
+          io.write(string.format("%s and %s", formatted_range_start, formatted_range_end))
         end
       end
-    end
-    if evaluation_results.num_calls_total == nil or evaluation_results.num_calls_total == 0 then
-      goto skip_remaining_additional_information
-    end
-    -- Evaluate the evalution results of the semantic analysis.
-    if evaluation_results.num_statement_tokens == nil then
-      goto skip_remaining_additional_information
-    end
-    io.write(string.format("\n\n%s%s", line_indent, colorize("Semantic analysis results:", BOLD)))
-    if evaluation_results.num_statements_total == 0 then
-      io.write(string.format("\n%s- No top-level %s", line_indent, pluralize("statement")))
-      goto skip_remaining_additional_information
-    end
-    for statement_type, num_statement_tokens in pairs_sorted_by_descending_values(evaluation_results.num_statement_tokens) do
-      local num_statements = evaluation_results.num_statements[statement_type]
-      assert(num_statements ~= nil)
-      assert(num_statements > 0)
-      assert(num_statement_tokens ~= nil)
-      assert(num_statement_tokens > 0)
-      io.write(string.format("\n%s- %s top-level ", line_indent, titlecase(humanize(num_statements))))
-      io.write(string.format("%s spanning ", pluralize(statement_type, num_statements)))
-      if num_statement_tokens == num_tokens then
-        io.write("all tokens")
-      else
-        local formatted_statement_tokens = string.format(
-          "%s %s", humanize(num_statement_tokens), pluralize("token", num_statement_tokens))
-        local formatted_token_ratio = format_ratio(num_statement_tokens, num_tokens)
-        if num_expl_bytes == num_total_bytes then
-          io.write(string.format("%s (%s of file size)", formatted_statement_tokens, formatted_token_ratio))
+      -- Evaluate the evaluation results of the lexical analysis.
+      local num_tokens = evaluation_results.num_tokens
+      if num_tokens == nil then
+        goto skip_remaining_additional_information
+      end
+      io.write(string.format("\n\n%s%s", line_indent, colorize("Lexical analysis results:", BOLD)))
+      if num_tokens == 0 then
+        io.write(string.format("\n%s- No tokens in expl3 parts", line_indent))
+        goto skip_remaining_additional_information
+      end
+      io.write(string.format("\n%s- %s %s in expl3 parts", line_indent, titlecase(humanize(num_tokens)), pluralize("token", num_tokens)))
+      local num_groupings = evaluation_results.num_groupings
+      if num_groupings ~= nil and num_groupings > 0 then
+        io.write(string.format("\n%s- %s %s", line_indent, titlecase(humanize(num_groupings)), pluralize("grouping", num_groupings)))
+        io.write(" in expl3 parts")
+        local num_unclosed_groupings = evaluation_results.num_unclosed_groupings
+        assert(num_unclosed_groupings ~= nil)
+        if num_unclosed_groupings > 0 then
+          local formatted_grouping_ratio = format_ratio(num_unclosed_groupings, num_groupings)
+          io.write(string.format(" (%s unclosed, %s of groupings)", humanize(num_unclosed_groupings), formatted_grouping_ratio))
+        end
+      end
+      -- Evaluate the evaluation results of the syntactic analysis.
+      if evaluation_results.num_calls == nil then
+        goto skip_remaining_additional_information
+      end
+      io.write(string.format("\n\n%s%s", line_indent, colorize("Syntactic analysis results:", BOLD)))
+      if evaluation_results.num_calls_total == 0 then
+        io.write(string.format("\n%s- No top-level %s", line_indent, pluralize("call")))
+        goto skip_remaining_additional_information
+      end
+      for call_type, num_call_tokens in pairs_sorted_by_descending_values(evaluation_results.num_call_tokens) do
+        local num_calls = evaluation_results.num_calls[call_type]
+        assert(num_calls ~= nil)
+        assert(num_calls > 0)
+        assert(num_call_tokens ~= nil)
+        assert(num_call_tokens > 0)
+        io.write(string.format("\n%s- %s top-level %s ", line_indent, titlecase(humanize(num_calls)), pluralize(call_type, num_calls)))
+        io.write("spanning ")
+        if num_call_tokens == num_tokens then
+          io.write("all tokens")
         else
-          local formatted_byte_ratio = format_ratio(num_expl_bytes * num_statement_tokens, num_total_bytes * num_tokens)
-          io.write(string.format(
-            "%s (%s of tokens, ~%s of file size)", formatted_statement_tokens, formatted_token_ratio, formatted_byte_ratio))
+          local formatted_call_tokens = string.format("%s %s", humanize(num_call_tokens), pluralize("token", num_call_tokens))
+          local formatted_token_ratio = format_ratio(num_call_tokens, num_tokens)
+          if num_expl_bytes == num_total_bytes then
+              io.write(string.format("%s (%s of file size)", formatted_call_tokens, formatted_token_ratio))
+          else
+            local formatted_byte_ratio = format_ratio(num_expl_bytes * num_call_tokens, num_total_bytes * num_tokens)
+            io.write(
+              string.format(
+                "%s (%s of tokens, ~%s of file size)",
+                formatted_call_tokens,
+                formatted_token_ratio,
+                formatted_byte_ratio
+              )
+            )
+          end
         end
       end
-      if statement_type == FUNCTION_DEFINITION and evaluation_results.num_replacement_text_statements_total > 0 then
-        local seen_nested_function_definition = false
-        for nested_statement_type, num_nested_statement_tokens in
-            pairs_sorted_by_descending_values(evaluation_results.num_replacement_text_statement_tokens) do
-          local num_nested_statements = evaluation_results.num_replacement_text_statements[nested_statement_type]
-          local max_nesting_depth = evaluation_results.replacement_text_max_nesting_depth[nested_statement_type]
-          assert(num_nested_statements ~= nil)
-          assert(num_nested_statements > 0)
-          assert(num_nested_statement_tokens ~= nil)
-          assert(num_nested_statement_tokens > 0)
-          assert(max_nesting_depth ~= nil)
-          assert(max_nesting_depth > 0)
-          if nested_statement_type == FUNCTION_DEFINITION then
-            seen_nested_function_definition = true
+      if evaluation_results.num_calls_total == nil or evaluation_results.num_calls_total == 0 then
+        goto skip_remaining_additional_information
+      end
+      -- Evaluate the evaluation results of the semantic analysis.
+      if evaluation_results.num_statement_tokens == nil then
+        goto skip_remaining_additional_information
+      end
+      io.write(string.format("\n\n%s%s", line_indent, colorize("Semantic analysis results:", BOLD)))
+      if evaluation_results.num_statements_total == 0 then
+        io.write(string.format("\n%s- No top-level %s", line_indent, pluralize("statement")))
+        goto skip_remaining_additional_information
+      end
+      for statement_type, num_statement_tokens in pairs_sorted_by_descending_values(evaluation_results.num_statement_tokens) do
+        local num_statements = evaluation_results.num_statements[statement_type]
+        assert(num_statements ~= nil)
+        assert(num_statements > 0)
+        assert(num_statement_tokens ~= nil)
+        assert(num_statement_tokens > 0)
+        io.write(string.format("\n%s- %s top-level ", line_indent, titlecase(humanize(num_statements))))
+        io.write(string.format("%s spanning ", pluralize(statement_type, num_statements)))
+        if num_statement_tokens == num_tokens then
+          io.write("all tokens")
+        else
+          local formatted_statement_tokens = string.format(
+            "%s %s", humanize(num_statement_tokens), pluralize("token", num_statement_tokens))
+          local formatted_token_ratio = format_ratio(num_statement_tokens, num_tokens)
+          if num_expl_bytes == num_total_bytes then
+            io.write(string.format("%s (%s of file size)", formatted_statement_tokens, formatted_token_ratio))
+          else
+            local formatted_byte_ratio = format_ratio(num_expl_bytes * num_statement_tokens, num_total_bytes * num_tokens)
+            io.write(string.format(
+              "%s (%s of tokens, ~%s of file size)", formatted_statement_tokens, formatted_token_ratio, formatted_byte_ratio))
           end
-          io.write(string.format("\n%s- %s nested ", line_indent:rep(2), titlecase(humanize(num_nested_statements))))
-          io.write(string.format("%s ", pluralize(nested_statement_type, num_nested_statements)))
-          if max_nesting_depth > 1 and nested_statement_type == FUNCTION_DEFINITION then
-            io.write(string.format("with a maximum nesting depth of %s, ", humanize(max_nesting_depth)))
-          end
-          io.write(string.format(
-            "spanning %s %s", humanize(num_nested_statement_tokens), pluralize("token", num_nested_statement_tokens)
-          ))
-          if max_nesting_depth > 1 and nested_statement_type ~= FUNCTION_DEFINITION then
-            local num_nested_function_definition_statements = evaluation_results.num_replacement_text_statements[FUNCTION_DEFINITION]
-            assert(num_nested_function_definition_statements > 0)
+        end
+        if statement_type == FUNCTION_DEFINITION and evaluation_results.num_replacement_text_statements_total > 0 then
+          local seen_nested_function_definition = false
+          for nested_statement_type, num_nested_statement_tokens in
+              pairs_sorted_by_descending_values(evaluation_results.num_replacement_text_statement_tokens) do
+            local num_nested_statements = evaluation_results.num_replacement_text_statements[nested_statement_type]
+            local max_nesting_depth = evaluation_results.replacement_text_max_nesting_depth[nested_statement_type]
+            assert(num_nested_statements ~= nil)
+            assert(num_nested_statements > 0)
+            assert(num_nested_statement_tokens ~= nil)
+            assert(num_nested_statement_tokens > 0)
+            assert(max_nesting_depth ~= nil)
+            assert(max_nesting_depth > 0)
+            if nested_statement_type == FUNCTION_DEFINITION then
+              seen_nested_function_definition = true
+            end
+            io.write(string.format("\n%s- %s nested ", line_indent:rep(2), titlecase(humanize(num_nested_statements))))
+            io.write(string.format("%s ", pluralize(nested_statement_type, num_nested_statements)))
+            if max_nesting_depth > 1 and nested_statement_type == FUNCTION_DEFINITION then
+              io.write(string.format("with a maximum nesting depth of %s, ", humanize(max_nesting_depth)))
+            end
             io.write(string.format(
-              ", some in %s",
-              add_article(
-                pluralize(string.format("nested %s", FUNCTION_DEFINITION), num_nested_function_definition_statements),
-                num_nested_function_definition_statements,
-                seen_nested_function_definition,
-                false
-              )
+              "spanning %s %s", humanize(num_nested_statement_tokens), pluralize("token", num_nested_statement_tokens)
             ))
+            if max_nesting_depth > 1 and nested_statement_type ~= FUNCTION_DEFINITION then
+              local num_nested_function_definition_statements = evaluation_results.num_replacement_text_statements[FUNCTION_DEFINITION]
+              assert(num_nested_function_definition_statements > 0)
+              io.write(string.format(
+                ", some in %s",
+                add_article(
+                  pluralize(string.format("nested %s", FUNCTION_DEFINITION), num_nested_function_definition_statements),
+                  num_nested_function_definition_statements,
+                  seen_nested_function_definition,
+                  false
+                )
+              ))
+            end
           end
         end
       end
+      if evaluation_results.num_statements_total == nil or evaluation_results.num_statements_total == 0 then
+        goto skip_remaining_additional_information
+      end
     end
-    if evaluation_results.num_statements_total == nil or evaluation_results.num_statements_total == 0 then
-      goto skip_remaining_additional_information
+
+    ::skip_remaining_additional_information::
+
+    -- Display early stopping information.
+    if analysis_results.stopped_early then
+      io.write(
+        string.format(
+          '\n\n%sProcessing stopped %s because %s.',
+          line_indent,
+          analysis_results.stopped_early.when,
+          analysis_results.stopped_early.reason
+        )
+      )
     end
   end
 
-  ::skip_remaining_additional_information::
-
   if not porcelain and not is_last_file and (#all_issues > 0 or verbose) then
     print()
   end
@@ -692,6 +752,7 @@
 end
 
 return {
+  format_percentage = format_percentage,
   pluralize = pluralize,
   print_results = print_results,
   print_summary = print_summary,

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-issues.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-issues.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-issues.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,8 +1,10 @@
 -- A registry of warnings and errors identified by different processing steps.
 
+local get_option = require("explcheck-config").get_option
+
 local Issues = {}
 
-function Issues.new(cls)
+function Issues.new(cls, pathname, options)
   -- Instantiate the class.
   local self = {}
   setmetatable(self, cls)
@@ -11,7 +13,15 @@
   self.errors = {}
   self.warnings = {}
   self.seen_issues = {}
+  self.suppressed_issue_map = {}
+  for issue_identifier, suppressed_issues in pairs(get_option("suppressed_issue_map", options, pathname)) do
+    issue_identifier = self._normalize_identifier(issue_identifier)
+    self.suppressed_issue_map[issue_identifier] = suppressed_issues
+  end
   self.ignored_issues = {}
+  for _, issue_identifier in ipairs(get_option("ignored_issues", options, pathname)) do
+    self:ignore(issue_identifier)
+  end
   return self
 end
 
@@ -34,7 +44,7 @@
 end
 
 -- Add an issue to the table of issues.
-function Issues:add(identifier, message, range)
+function Issues:add(identifier, message, range, context)
   identifier = self._normalize_identifier(identifier)
 
   -- Discard duplicate issues.
@@ -52,8 +62,16 @@
     return
   end
 
+  -- Suppress any dependent issues.
+  if self.suppressed_issue_map[identifier] ~= nil then
+    for _, suppressed_issue_identifier in ipairs(self.suppressed_issue_map[identifier]) do
+      suppressed_issue_identifier = self._normalize_identifier(suppressed_issue_identifier)
+      self:ignore(suppressed_issue_identifier, range)
+    end
+  end
+
   -- Construct the issue.
-  local issue = {identifier, message, range}
+  local issue = {identifier, message, range, context}
 
   -- Determine if the issue should be ignored.
   for _, ignore_issue in ipairs(self.ignored_issues) do
@@ -68,9 +86,9 @@
 end
 
 -- Prevent issues from being present in the table of issues.
-function Issues:ignore(identifier, range)
-  if identifier ~= nil then
-    identifier = self._normalize_identifier(identifier)
+function Issues:ignore(identifier_prefix, range)
+  if identifier_prefix ~= nil then
+    identifier_prefix = self._normalize_identifier(identifier_prefix)
   end
 
   -- Determine which issues should be ignored.
@@ -82,13 +100,17 @@
     )
   end
   local function match_issue_identifier(issue_identifier)
-    return issue_identifier == identifier
+    -- Match the prefix of an issue, allowing us to ignore whole sets of issues with prefixes like "s" or "w4".
+    return issue_identifier:sub(1, #identifier_prefix) == identifier_prefix
   end
 
   local ignore_issue, issue_tables
-  if identifier == nil then
+  if identifier_prefix == nil and range == nil then
+    -- Prevent any issues.
+    issue_tables = {self.warnings, self.errors}
+    ignore_issue = function() return true end
+  elseif identifier_prefix == nil then
     -- Prevent any issues within the given range.
-    assert(range ~= nil)
     issue_tables = {self.warnings, self.errors}
     ignore_issue = function(issue)
       local issue_range = issue[3]
@@ -100,8 +122,8 @@
     end
   elseif range == nil then
     -- Prevent any issues with the given identifier.
-    assert(identifier ~= nil)
-    issue_tables = {self:_get_issue_table(identifier)}
+    assert(identifier_prefix ~= nil)
+    issue_tables = {self:_get_issue_table(identifier_prefix)}
     ignore_issue = function(issue)
       local issue_identifier = issue[1]
       return match_issue_identifier(issue_identifier)
@@ -108,8 +130,8 @@
     end
   else
     -- Prevent any issues with the given identifier that are also either within the given range or file-wide.
-    assert(range ~= nil and identifier ~= nil)
-    issue_tables = {self:_get_issue_table(identifier)}
+    assert(range ~= nil and identifier_prefix ~= nil)
+    issue_tables = {self:_get_issue_table(identifier_prefix)}
     ignore_issue = function(issue)
       local issue_identifier = issue[1]
       local issue_range = issue[3]
@@ -173,10 +195,7 @@
 function Issues.sort(warnings_and_errors)
   local sorted_warnings_and_errors = {}
   for _, issue in ipairs(warnings_and_errors) do
-    local identifier = issue[1]
-    local message = issue[2]
-    local range = issue[3]
-    table.insert(sorted_warnings_and_errors, {identifier, message, range})
+    table.insert(sorted_warnings_and_errors, issue)
   end
   table.sort(sorted_warnings_and_errors, function(a, b)
     local a_identifier, b_identifier = a[1], b[1]
@@ -186,6 +205,6 @@
   return sorted_warnings_and_errors
 end
 
-return function()
-  return Issues:new()
+return function(...)
+  return Issues:new(...)
 end

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-latex3.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-latex3.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-latex3.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,7 +1,7 @@
 -- LPEG parsers and other information extracted from LaTeX3 data files.
--- Generated on 2025-06-24 from the following files:
+-- Generated on 2025-08-18 from the following files:
 -- - "l3obsolete.txt" with the latest obsolete entry from 2025-05-25
--- - "l3prefixes.csv" with the latest updated prefix from 2025-05-12
+-- - "l3prefixes.csv" with the latest updated prefix from 2025-07-28
 
 local lpeg = require("lpeg")
 local P = lpeg.P
@@ -12,12 +12,12 @@
   local wildcard = any^0  -- luacheck: ignore wildcard
 
   -- luacheck: push no max line length
-  obsolete.deprecated_csname = (P("t") * (P("e") * (P("x") * (P("t") * (P("_") * (P("t") * (P("i") * (P("t") * (P("l") * (P("e") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("nn") + P("n"))))))))))))))) + P("l") * (P("_") * (P("m") * (P("i") * (P("x") * (P("e") * (P("d") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("nn") + P("n")))))))))))) + P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("nn") + P("n")))))))))))) + P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("c") * (P("n") * (P("TF") + P("F") + P("T")) + P("n")) + P("N") * (P("n") * (P("TF") + P("F") + P("T")) + P("n"))))))) + P("b") * (P("u") * (P("i") * (P("l") * (P("d") * (P("_") * (P("clear:N") + P("g") * (P("clear:N") + P("et:NN")))))))) + P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("nn") + P("n"))))))))))))))) + P("c") * (P("s_argument_spec:N") + P("h") * (P("a") * (P("r") * (P("_") * (P("mixed_case:N") + P("t") * (P("itlecase:N") + P("o") * (P("_") * (P("utfviii_bytes:n") + P("nfd:N")))) + P("s") * (P("t") * (P("r") * (P("_") * (P("mixed_case:N") + P("titlecase:N") + P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("f") * (P("o") * (P("l") * (P("d") * (P("_case:N") + P("case:N"))))))))) + P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("f") * (P("o") * (P("l") * (P("d") * (P("_case:N") + P("case:N")))))))))) + P("r") * (P("e") * (P("g") * (P("e") * (P("x") * (P("_") * (P("m") * (P("a") * (P("t") * (P("c") * (P("h") * (P(":") * (P("N") * (P("n") * (P("TF") + P("F") + P("T")) + P("n")) + P("n") * (P("n") * (P("TF") + P("F") + P("T")) + P("n")))))))))))))) + P("i") * (P("o") * (P("w") * (P("_") * 
(P("s") * (P("h") * (P("i") * (P("p") * (P("o"!
 ) * (P("u") * (P("t") * (P("_") * (P("x") * (P(":") * (P("c") * (P("n") + P("x")) + P("N") * (P("n") + P("x")))))))))))))))) + P("m") * (P("s") * (P("g") * (P("_") * (P("g") * (P("s") * (P("e") * (P("t") * (P(":") * (P("n") * (P("n") * (P("nn") + P("n")))))))))))) + P("l") * (P("_") * (P("k") * (P("e") * (P("y") * (P("s") * (P("_") * (P("key_tl") + P("path_tl")))))) + P("t") * (P("e") * (P("x") * (P("t") * (P("_") * (P("accents_tl") + P("letterlike_tl")))))))) + P("k") * (P("e") * (P("y") * (P("s") * (P("_") * (P("s") * (P("e") * (P("t") * (P("_") * (P("f") * (P("i") * (P("l") * (P("t") * (P("e") * (P("r") * (P(":") * (P("n") * (P("n") * (P("V") * (P("nN") + P("N")) + P("v") * (P("nN") + P("N")) + P("o") * (P("nN") + P("N")) + P("n") * (P("nN") + P("N")) + P("n") + P("V") + P("v") + P("o"))))))))))))))))))) + P("p") * (P("e") * (P("e") * (P("k") * (P("_") * (P("c") * (P("a") * (P("t") * (P("c") * (P("o") * (P("d") * (P("e") * (P("_") * (P("ignore_spaces:N") + P("remove_ignore_spaces:N")))))))) + P("h") * (P("a") * (P("r") * (P("c") * (P("o") * (P("d") * (P("e") * (P("_") * (P("ignore_spaces:N") + P("remove_ignore_spaces:N")))))))))) + P("m") * (P("e") * (P("a") * (P("n") * (P("i") * (P("n") * (P("g") * (P("_") * (P("ignore_spaces:N") + P("remove_ignore_spaces:N"))))))))))))) + P("d") * (P("f") * (P("_") * (P("o") * (P("b") * (P("j") * (P("e") * (P("c") * (P("t") * (P("_") * (P("w") * (P("r") * (P("i") * (P("t") * (P("e") * (P(":") * (P("n") * (P("n") + P("x")))))))) + P("new:nn"))))))))))) + P("r") * (P("o") * (P("p") * (P("_") * (P("g") * (P("p") * (P("u") * (P("t") * (P("_") * (P("i") * (P("f") * (P("_") * (P("n") * (P("e") * (P("w") * (P(":") * (P("c") * (P("Vn") + P("n") * (P("n") + P("V"))) + P("N") * (P("Vn") + P("n") * (P("n") + P("V"))))))))))))))) + P("p") * (P("u") * (P("t") * (P("_") * (P("i") * (P("f") * (P("_") * (P("n") * (P("e") * (P("w") * (P(":") * (P("c") * (P("Vn") + P("n") * (P("n") + P("V"))) + P("N") * (P("Vn") + P("n") * (P("n") + 
P("V"))))))))))))))))))) + P("s") * (P("y") * (P("s") * (P(!
 "_") * (P("i") * (P("f") * (P("_") * (P("t") * (P("i") * (P("m") * (P("e") * (P("r") * (P("_") * (P("e") * (P("x") * (P("i") * (P("s") * (P("t") * (P("_p:") + P(":") * (P("TF") + P("F") + P("T")) + P(":"))))))))))))))) + P("load_deprecation:") + P("finalise:")))) + P("t") * (P("r") * (P("_") * (P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("f") + P("n")))))))))))) + P("declare_eight_bit_encoding:nnn") + P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("f") + P("n")))))))))))) + P("f") * (P("o") * (P("l") * (P("d") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("n") + P("V"))))))) + P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("n") + P("V"))))))))))))) + P("e") * (P("q") * (P("_") * (P("i") * (P("n") * (P("d") * (P("e") * (P("x") * (P("e") * (P("d") * (P("_") * (P("m") * (P("a") * (P("p") * (P("_") * (P("inline:Nn") + P("function:NN"))))))))))))) + P("gset_map_x:NNn") + P("set_map_x:NNn")))))) * eof
+  obsolete.deprecated_csname = (P("k") * (P("e") * (P("y") * (P("s") * (P("_") * (P("s") * (P("e") * (P("t") * (P("_") * (P("f") * (P("i") * (P("l") * (P("t") * (P("e") * (P("r") * (P(":") * (P("n") * (P("n") * (P("o") * (P("nN") + P("N")) + P("v") * (P("nN") + P("N")) + P("V") * (P("nN") + P("N")) + P("n") * (P("nN") + P("N")) + P("n") + P("V") + P("v") + P("o"))))))))))))))))))) + P("l") * (P("_") * (P("k") * (P("e") * (P("y") * (P("s") * (P("_") * (P("key_tl") + P("path_tl")))))) + P("t") * (P("e") * (P("x") * (P("t") * (P("_") * (P("accents_tl") + P("letterlike_tl")))))))) + P("m") * (P("s") * (P("g") * (P("_") * (P("g") * (P("s") * (P("e") * (P("t") * (P(":") * (P("n") * (P("n") * (P("nn") + P("n")))))))))))) + P("i") * (P("o") * (P("w") * (P("_") * (P("s") * (P("h") * (P("i") * (P("p") * (P("o") * (P("u") * (P("t") * (P("_") * (P("x") * (P(":") * (P("c") * (P("n") + P("x")) + P("N") * (P("n") + P("x")))))))))))))))) + P("c") * (P("s_argument_spec:N") + P("h") * (P("a") * (P("r") * (P("_") * (P("s") * (P("t") * (P("r") * (P("_") * (P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("mixed_case:N") + P("f") * (P("o") * (P("l") * (P("d") * (P("_case:N") + P("case:N"))))) + P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("titlecase:N"))))) + P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("mixed_case:N") + P("f") * (P("o") * (P("l") * (P("d") * (P("_case:N") + P("case:N"))))) + P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("t") * (P("itlecase:N") + P("o") * (P("_") * (P("utfviii_bytes:n") + P("nfd:N"))))))))) + P("t") * (P("e") * (P("x") * (P("t") * (P("_") * (P("t") * (P("i") * (P("t") * (P("l") * (P("e") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("nn") + P("n"))))))))))))))) + P("l") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("c") * (P("n") * (P("TF") + P("F") + P("T")) + 
P("n")) + P("N") * (P("n") * (P("TF") + P("F")!
  + P("T")) + P("n"))))))) + P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("nn") + P("n")))))))))))) + P("m") * (P("i") * (P("x") * (P("e") * (P("d") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("nn") + P("n")))))))))))) + P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("nn") + P("n")))))))))))) + P("b") * (P("u") * (P("i") * (P("l") * (P("d") * (P("_") * (P("clear:N") + P("g") * (P("clear:N") + P("et:NN"))))))))))) + P("p") * (P("d") * (P("f") * (P("_") * (P("o") * (P("b") * (P("j") * (P("e") * (P("c") * (P("t") * (P("_") * (P("w") * (P("r") * (P("i") * (P("t") * (P("e") * (P(":") * (P("n") * (P("n") + P("x")))))))) + P("new:nn"))))))))))) + P("e") * (P("e") * (P("k") * (P("_") * (P("c") * (P("a") * (P("t") * (P("c") * (P("o") * (P("d") * (P("e") * (P("_") * (P("ignore_spaces:N") + P("remove_ignore_spaces:N")))))))) + P("h") * (P("a") * (P("r") * (P("c") * (P("o") * (P("d") * (P("e") * (P("_") * (P("ignore_spaces:N") + P("remove_ignore_spaces:N")))))))))) + P("m") * (P("e") * (P("a") * (P("n") * (P("i") * (P("n") * (P("g") * (P("_") * (P("ignore_spaces:N") + P("remove_ignore_spaces:N"))))))))))))) + P("r") * (P("o") * (P("p") * (P("_") * (P("g") * (P("p") * (P("u") * (P("t") * (P("_") * (P("i") * (P("f") * (P("_") * (P("n") * (P("e") * (P("w") * (P(":") * (P("c") * (P("Vn") + P("n") * (P("n") + P("V"))) + P("N") * (P("Vn") + P("n") * (P("n") + P("V"))))))))))))))) + P("p") * (P("u") * (P("t") * (P("_") * (P("i") * (P("f") * (P("_") * (P("n") * (P("e") * (P("w") * (P(":") * (P("c") * (P("Vn") + P("n") * (P("n") + P("V"))) + P("N") * (P("Vn") + P("n") * (P("n") + P("V"))))))))))))))))))) + P("s") * (P("t") * (P("r") * (P("_") * (P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("f") + P("n")))))))))))) + P("declare_eight_bit_encoding:nnn") + P("u") * (P("p") 
* (P("p") * (P("e") * (P("r") * (P("_") * (P("c"!
 ) * (P("a") * (P("s") * (P("e") * (P(":") * (P("f") + P("n")))))))))))) + P("f") * (P("o") * (P("l") * (P("d") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("n") + P("V"))))))) + P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("n") + P("V"))))))))))))) + P("y") * (P("s") * (P("_") * (P("load_deprecation:") + P("i") * (P("f") * (P("_") * (P("t") * (P("i") * (P("m") * (P("e") * (P("r") * (P("_") * (P("e") * (P("x") * (P("i") * (P("s") * (P("t") * (P("_p:") + P(":") * (P("TF") + P("F") + P("T")) + P(":"))))))))))))))) + P("finalise:")))) + P("e") * (P("q") * (P("_") * (P("gset_map_x:NNn") + P("i") * (P("n") * (P("d") * (P("e") * (P("x") * (P("e") * (P("d") * (P("_") * (P("m") * (P("a") * (P("p") * (P("_") * (P("inline:Nn") + P("function:NN"))))))))))))) + P("set_map_x:NNn"))))) + P("r") * (P("e") * (P("g") * (P("e") * (P("x") * (P("_") * (P("m") * (P("a") * (P("t") * (P("c") * (P("h") * (P(":") * (P("N") * (P("n") * (P("TF") + P("F") + P("T")) + P("n")) + P("n") * (P("n") * (P("TF") + P("F") + P("T")) + P("n"))))))))))))))) * eof
   -- luacheck: pop
 end
 
 -- luacheck: push no max line length
-local prefixes = (P("e") * (P("m") * (P("o") * (P("j") * (P("icite") + P("i"))) + P("pty")) + P("t") * (P("ex") + P("l")) + P("x") * (P("tblx") + P("sheets") + P("p") * (P("ltools") + P("l")) + P("wf") + P("fs") + P("p")) + P("l") * (P("ectrum") + P("se")) + P("n") * (P("otez") + P("umext"))) + P("d") * (P("e") * (P("riv") + P("nisbdoc") + P("bug")) + P("r") * (P("iver") + P("y")) + P("i") * (P("ffcoeff") + P("dec") + P("m")) + P("o") * (P("cument") + P("llar")) + P("u") * (P("c") * (P("k") * (P("say") + P("uments"))))) + P("c") * (P("ellprops") + P("dhh") + P("ctab") + P("ircumflex") + P("h") * (P("a") * (P("os") + P("r")) + P("e") * (P("m") * (P("macros") + P("num") + P("formula"))) + P("ronos") + P("k")) + P("md") + P("l") * (P("ist") + P("assics") + P("m")) + P("t") * (P("ex") + P("uthesis")) + P("a") * (P("scade") + P("tcode") + P("l")) + P("s") * (P("vsim") + P("l")) + P("o") * (P("l") * (P("o") * (P("n") + P("r"))) + P("d") * (P("e") * (P("point") + P("d") * (P("esc") + P("oc")) + P("list") + P("high")) + P("e")) + P("nteq") + P("okingunits") + P("ffin")) + P("s")) + P("b") * (P("e") * (P("arwear") + P("uron") + P("renis")) + P("lock") + P("a") * (P("skervald") + P("c") * (P("k") * (P("end") + P("slash"))) + P("bellatin")) + P("xjh") + P("o") * (P("ol") + P("x")) + P("itset")) + P("i") * (P("wonamath") + P("stqb") + P("o") * (P("r") + P("t") + P("w")) + P("n") * (P("itex") + P("tarray") + P("sert") + P("cgra") + P("f") + P("t")) + P("f")) + P("h") * (P("coffin") + P("box") + P("ash") + P("o") * (P("ok") + P("bete")) + P("yp")) + P("g") * (P("etree") + P("tl") + P("r") * (P("aph") + P("oup")) + P("a") * (P("therenum") + P("lley")) + P("hsystem") + P("zt")) + P("f") * (P("m") * (P("uft") + P("dug") + P("wao")) + P("d") * (P("u") * (P("logo") + P("doc")) + P("u")) + P("un") + P("l") * (P("ag") + P("tr")) + P("i") * (P("l") * (P("ehook") + P("lwith") + P("e")) + P("xtounicode") + P("ngering")) + P("alse") + P("o") * (P("n") * (P("t") * (P("s") * (P("izes") + 
P("pec") + P("cale"))))) + P("n") * (P("ot!
 e") + P("pct")) + P("i") + P("p")) + P("u") * (P("tex") + P("se") + P("ptex") + P("n") * (P("ravel") + P("derscore")) + P("f") * (P("tag") + P("grid") + P("combo")) + P("m")) + P("z") * (P("ugferd") + P("r") * (P("e") * (P("f") * (P("c") * (P("lever") + P("heck")) + P("vario")))) + P("h") * (P("num") + P("lipsum")) + P("xjt") + P("ero")) + P("a") * (P("d") * (P("f") * (P("arrows") + P("orn") + P("bullets"))) + P("cro") + P("r") * (P("senal") + P("ch") + P("ray")) + P("ffiliations") + P("mpersand") + P("l") * (P("ignment") + P("loc") + P("gobox")) + P("kshar") + P("pfs") + P("vm") + P("tsign")) + P("GS") + P("m") * (P("e") * (P("t") * (P("rix") + P("a")) + P("rmap")) + P("uskip") + P("crule") + P("s") * (P("vg") + P("g")) + P("a") * (P("t") * (P("hcolor") + P("h")) + P("r") * (P("k") * (P("down") + P("s") + P("s")) + P("k")) + P("x")) + P("hchem") + P("o") * (P("r") * (P("e") * (P("math") + P("writes"))) + P("de")) + P("i") * (P("n") * (P("ibox") + P("us")) + P("x"))) + P("ENdiagram") + P("UFca") + P("t") * (P("e") * (P("mplate") + P("xt") + P("rm") + P("x")) + P("cobox") + P("b") * (P("lr") + P("l")) + P("i") * (P("k") * (P("z") * (P("lings") + P("s") * (P("ymbols") + P("path")) + P("fill"))) + P("lde")) + P("wmk") + P("m") * (P("p") * (P("a") + P("b"))) + P("utodoc") + P("a") * (P("sks") + P("g")) + P("peg") + P("oken") + P("rue") + P("l")) + P("s") * (P("eq") + P("daps") + P("c") * (P("a") * (P("letextbullet") + P("n")) + P("hulma") + P("ontents") + P("ripture")) + P("i") * (P("unitx") + P("lly")) + P("hipout") + P("ys") + P("lcd") + P("k") * (P("el") + P("ip")) + P("ubstances") + P("anuml") + P("p") * (P("a") * (P("ce") + P("th")) + P("eg")) + P("o") * (P("cket") + P("rt")) + P("t") * (P("a") * (P("tistics") + P("rray")) + P("op") + P("ycmd") + P("m") + P("r"))) + P("r") * (P("e") * (P("cursion") + P("gex") + P("verse") + P("f")) + P("a") * (P("inbow") + P("wobjects") + P("ndomwalk")) + P("pgicons") + P("omande") + P("i") * (P("ght") + P("v") * (P("math") + 
P("book")))) + P("yoin") + P("x") * (P("marks") + P(!
 "template") + P("s") * (P("im") + P("b")) + P("p") * (P("a") * (P("tch") + P("rse")) + P("inyin") + P("eek") + P("g")) + P("e") * (P("CJK") + P("tex")) + P("frac")) + P("w") * (P("i") * (P("t") * (P("h") * (P("a") * (P("r") * (P("gs") + P("rows")))))) + P("heelchart") + P("stf")) + P("v") * (P("e") * (P("n") * (P("t") * (P("u") * (P("r") * (P("i") * (P("s") * (P("ii") + P("old")) + P("s"))))))) + P("coffin") + P("box")) + P("MOdiagram") + P("l") * (P("eft") + P("ltxmath") + P("wc") + P("angsci") + P("u") * (P("a") * (P("tex") + P("bridge")) + P("a")) + P("og") + P("i") * (P("ftarm") + P("bris"))) + P("k") * (P("e") * (P("y") * (P("val") + P("thms") + P("s")) + P("rnel")) + P("ivitendo") + P("gl") + P("not")) + P("j") * (P("iazhu") + P("ob") + P("sonparse")) + P("q") * (P("uark") + P("rbill")) + P("p") * (P("e") * (P("ek") + P("rcent")) + P("d") * (P("f") * (P("m") * (P("anagement") + P("eta")) + P("tex") + P("annot") + P("xform") + P("overlay") + P("file")) + P("f")) + P("seudo") + P("bs") + P("iton") + P("hone") + P("g") * (P("fmxfp") + P("f")) + P("latex") + P("kgploader") + P("a") * (P("r") * (P("ameter") + P("a"))) + P("t") * (P("ex") + P("x") * (P("cd") + P("tools"))) + P("o") * (P("stnotes") + P("l") * (P("y") * (P("glossia") + P("omino")))) + P("r") * (P("imargs") + P("o") * (P("oftrees") + P("perty") + P("p")) + P("elim") + P("g")) + P("i")) + P("o") * (P("verleaf") + P("ther") + P("c") * (P("g") * (P("xii") + P("base"))) + P("ne") + P("r")) + P("n") * (P("mc") + P("i") * (P("cematrix") + P("necolors") + P("l")) + P("wejm") + P("o") * (P("value") + P("testobib")) + P("a") * (P("medef") + P("n")) + P("o")) + P("e"))
+local prefixes = (P("u") * (P("se") + P("tex") + P("f") * (P("grid") + P("tag") + P("combo")) + P("ptex") + P("n") * (P("ravel") + P("derscore")) + P("m")) + P("m") * (P("crule") + P("uskip") + P("e") * (P("t") * (P("rix") + P("a")) + P("rmap")) + P("s") * (P("vg") + P("g")) + P("o") * (P("r") * (P("e") * (P("math") + P("writes"))) + P("de")) + P("hchem") + P("a") * (P("t") * (P("hcolor") + P("h")) + P("r") * (P("k") * (P("down") + P("s") + P("s")) + P("k")) + P("x")) + P("i") * (P("n") * (P("ibox") + P("us")) + P("x"))) + P("w") * (P("stf") + P("heelchart") + P("i") * (P("t") * (P("h") * (P("a") * (P("r") * (P("gs") + P("rows"))))))) + P("x") * (P("s") * (P("im") + P("b")) + P("template") + P("e") * (P("CJK") + P("tex")) + P("frac") + P("p") * (P("inyin") + P("a") * (P("tch") + P("rse")) + P("eek") + P("g")) + P("marks")) + P("yoin") + P("z") * (P("ero") + P("xjt") + P("h") * (P("num") + P("lipsum")) + P("ugferd") + P("r") * (P("e") * (P("f") * (P("c") * (P("lever") + P("heck")) + P("vario"))))) + P("c") * (P("l") * (P("ist") + P("assics") + P("m")) + P("md") + P("h") * (P("e") * (P("m") * (P("num") + P("macros") + P("formula"))) + P("a") * (P("os") + P("r")) + P("ronos") + P("k")) + P("ircumflex") + P("ctab") + P("dhh") + P("ellprops") + P("t") * (P("ex") + P("uthesis")) + P("o") * (P("l") * (P("o") * (P("n") + P("r"))) + P("ffin") + P("okingunits") + P("n") * (P("t") * (P("e") * (P("xt") + P("q")))) + P("d") * (P("e") * (P("point") + P("d") * (P("esc") + P("oc")) + P("list") + P("high")) + P("e"))) + P("s") * (P("vsim") + P("l")) + P("a") * (P("scade") + P("tcode") + P("l")) + P("s")) + P("d") * (P("e") * (P("riv") + P("nisbdoc") + P("bug")) + P("o") * (P("cument") + P("llar")) + P("u") * (P("c") * (P("k") * (P("say") + P("uments")))) + P("i") * (P("ffcoeff") + P("dec") + P("m")) + P("r") * (P("iver") + P("y"))) + P("e") * (P("l") * (P("ectrum") + P("se")) + P("m") * (P("o") * (P("j") * (P("icite") + P("i"))) + P("pty")) + P("n") * (P("otez") + P("umext")) + 
P("x") * (P("sheets") + P("tblx") + P("fs"!
 ) + P("wf") + P("p") * (P("ltools") + P("l")) + P("p")) + P("t") * (P("ex") + P("l"))) + P("f") * (P("un") + P("l") * (P("ag") + P("tr")) + P("m") * (P("wao") + P("dug") + P("uft")) + P("n") * (P("ote") + P("pct")) + P("o") * (P("n") * (P("t") * (P("s") * (P("cale") + P("pec") + P("izes"))))) + P("d") * (P("u") * (P("logo") + P("doc")) + P("u")) + P("alse") + P("i") * (P("l") * (P("ehook") + P("lwith") + P("e")) + P("xtounicode") + P("ngering")) + P("i") + P("p")) + P("g") * (P("tl") + P("etree") + P("zt") + P("hsystem") + P("a") * (P("therenum") + P("lley")) + P("r") * (P("aph") + P("oup"))) + P("ENdiagram") + P("a") * (P("kshar") + P("l") * (P("gobox") + P("loc") + P("ignment")) + P("mpersand") + P("cro") + P("d") * (P("f") * (P("orn") + P("arrows") + P("bullets"))) + P("ffiliations") + P("vm") + P("pfs") + P("tsign") + P("r") * (P("ch") + P("senal") + P("ray"))) + P("b") * (P("lock") + P("e") * (P("uron") + P("arwear") + P("renis")) + P("o") * (P("ol") + P("x")) + P("xjh") + P("a") * (P("c") * (P("k") * (P("end") + P("slash"))) + P("skervald") + P("bellatin")) + P("itset")) + P("k") * (P("gl") + P("ivitendo") + P("e") * (P("y") * (P("val") + P("thms") + P("s")) + P("rnel")) + P("not")) + P("l") * (P("ltxmath") + P("e") * (P("porello") + P("ft")) + P("wc") + P("og") + P("u") * (P("a") * (P("tex") + P("bridge")) + P("a")) + P("angsci") + P("i") * (P("ftarm") + P("bris"))) + P("MOdiagram") + P("n") * (P("mc") + P("o") * (P("value") + P("testobib")) + P("wejm") + P("i") * (P("cematrix") + P("necolors") + P("l")) + P("a") * (P("medef") + P("n")) + P("o")) + P("GS") + P("h") * (P("coffin") + P("ead") + P("o") * (P("ok") + P("bete")) + P("yp") + P("ash") + P("box")) + P("i") * (P("o") * (P("r") + P("t") + P("w")) + P("wonamath") + P("stqb") + P("n") * (P("sert") + P("tarray") + P("itex") + P("cgra") + P("f") + P("t")) + P("f")) + P("j") * (P("ob") + P("iazhu") + P("sonparse")) + P("s") * (P("k") * (P("el") + P("ip")) + P("lcd") + P("hipout") + P("i") * (P("unitx") + 
P("lly")) + P("ys") + P("c") * (P("ontents") + P("!
 hulma") + P("a") * (P("letextbullet") + P("n")) + P("ripture")) + P("daps") + P("eq") + P("ubstances") + P("o") * (P("cket") + P("rt")) + P("p") * (P("a") * (P("ce") + P("th")) + P("eg")) + P("anuml") + P("t") * (P("op") + P("a") * (P("tistics") + P("rray")) + P("ycmd") + P("m") + P("r"))) + P("t") * (P("m") * (P("p") * (P("a") + P("b"))) + P("wmk") + P("i") * (P("k") * (P("z") * (P("s") * (P("ymbols") + P("path")) + P("lings") + P("fill"))) + P("lde")) + P("cobox") + P("utodoc") + P("e") * (P("xt") + P("mplate") + P("rm") + P("x")) + P("rue") + P("oken") + P("peg") + P("a") * (P("sks") + P("lk") + P("g")) + P("b") * (P("lr") + P("l")) + P("l")) + P("UFca") + P("v") * (P("coffin") + P("e") * (P("n") * (P("t") * (P("u") * (P("r") * (P("i") * (P("s") * (P("ii") + P("old")) + P("s"))))))) + P("box")) + P("o") * (P("c") * (P("g") * (P("xii") + P("base"))) + P("ther") + P("verleaf") + P("ne") + P("r")) + P("p") * (P("kgploader") + P("latex") + P("g") * (P("fmxfp") + P("f")) + P("hone") + P("iton") + P("seudo") + P("d") * (P("f") * (P("tex") + P("m") * (P("anagement") + P("eta")) + P("file") + P("overlay") + P("xform") + P("annot")) + P("f")) + P("e") * (P("ek") + P("rcent")) + P("t") * (P("ex") + P("x") * (P("cd") + P("tools"))) + P("o") * (P("stnotes") + P("l") * (P("y") * (P("glossia") + P("omino")))) + P("r") * (P("o") * (P("oftrees") + P("perty") + P("p")) + P("imargs") + P("elim") + P("g")) + P("a") * (P("r") * (P("ameter") + P("a"))) + P("bs") + P("i")) + P("q") * (P("uark") + P("rbill")) + P("r") * (P("e") * (P("gex") + P("cursion") + P("verse") + P("f")) + P("omande") + P("pgicons") + P("a") * (P("wobjects") + P("inbow") + P("ndomwalk")) + P("i") * (P("ght") + P("v") * (P("math") + P("book")))) + P("e"))
 -- luacheck: pop
 
 return {

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-lexical-analysis.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-lexical-analysis.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-lexical-analysis.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -57,6 +57,35 @@
   end
 end
 
+-- Format a control sequence name as it appears in expl3 code.
+local function format_csname(csname)
+  return string.format("\\%s", csname)
+end
+
+-- Format a token as it appears in expl3 code.
+local function format_token(token, content)
+  assert(#token.byte_range > 0)
+  return content:sub(token.byte_range:start(), token.byte_range:stop())
+end
+
+-- Format a range of tokens as they appear in expl3 code.
+local function format_tokens(token_range, tokens, content)
+  if token_range == 0 then
+    return ""
+  end
+  local byte_range = token_range:new_range_from_subranges(get_token_byte_range(tokens), #content)
+  return content:sub(byte_range:start(), byte_range:stop())
+end
+
+-- Determine whether the lexical analysis step is too confused by the results
+-- of the previous steps to run.
+local function is_confused(_, results, _)
+  if #results.expl_ranges == 0 then
+    return true, "no expl3 material was detected"
+  end
+  return false
+end
+
 -- Tokenize the content and register any issues.
 local function lexical_analysis(pathname, content, issues, results, options)
 
@@ -115,6 +144,8 @@
     local current_grouping = groupings
     local parent_grouping
 
+    local num_invalid_characters = 0
+
     local state
 
     -- Determine the category code of the at sign ("@").
@@ -259,6 +290,7 @@
           character_index = #line_text + 1
         else
           if catcode == 15 then  -- invalid character
+            num_invalid_characters = num_invalid_characters + 1
             issues:add('e209', 'invalid characters', range)
           end
           if catcode == 1 or catcode == 2 then  -- begin/end grouping
@@ -314,11 +346,11 @@
       current_grouping.parent = nil
       current_grouping = parent_grouping
     end
-    return tokens, groupings
+    return tokens, groupings, num_invalid_characters
   end
 
   -- Tokenize the content.
-  local tokens, groupings = {}, {}
+  local tokens, groupings, num_invalid_characters = {}, {}, 0
   for _, range in ipairs(results.expl_ranges) do
     local lines = (function()
       local co = coroutine.create(function()
@@ -329,59 +361,28 @@
         return line_text, map_back
       end
     end)()
-    local part_tokens, part_groupings = get_tokens(lines)
+    local part_tokens, part_groupings, part_num_invalid_characters = get_tokens(lines)
     table.insert(tokens, part_tokens)
     table.insert(groupings, part_groupings)
+    num_invalid_characters = num_invalid_characters + part_num_invalid_characters
   end
 
   -- Record issues that are apparent after the lexical analysis.
   for _, part_tokens in ipairs(tokens) do
-    for token_index, token in ipairs(part_tokens) do
+    for _, token in ipairs(part_tokens) do
       if token.type == CONTROL_SEQUENCE then
         local _, _, argument_specifiers = token.payload:find(":([^:]*)")
         if argument_specifiers ~= nil then
           if lpeg.match(parsers.do_not_use_argument_specifiers, argument_specifiers) then
-            issues:add('w200', '"do not use" argument specifiers', token.byte_range)
-            issues:ignore('s206', token.byte_range)
-            -- TODO: Add a configuration option that would allow us to express that w200 silences s206,
-            --       so that we don't need to do this manually.
+            issues:add('w200', '"do not use" argument specifiers', token.byte_range, format_token(token, content))
           end
           if lpeg.match(parsers.argument_specifiers, argument_specifiers) == nil then
-            issues:add('e201', 'unknown argument specifiers', token.byte_range)
+            issues:add('e201', 'unknown argument specifiers', token.byte_range, format_token(token, content))
           end
         end
         if lpeg.match(obsolete.deprecated_csname, token.payload) ~= nil then
-          issues:add('w202', 'deprecated control sequences', token.byte_range)
+          issues:add('w202', 'deprecated control sequences', token.byte_range, format_token(token, content))
         end
-        if token_index + 1 <= #part_tokens then
-          local next_token = part_tokens[token_index + 1]
-          if next_token.type == CONTROL_SEQUENCE then
-            if (
-                  lpeg.match(parsers.expl3_function_definition_csname, token.payload) ~= nil
-                  and lpeg.match(parsers.expl3like_csname, next_token.payload) ~= nil
-                  and lpeg.match(parsers.expl3_expansion_csname, next_token.payload) == nil
-                  and lpeg.match(parsers.expl3_function_csname, next_token.payload) == nil
-                ) then
-              issues:add('s205', 'malformed function name', next_token.byte_range)
-            end
-            if (
-                  lpeg.match(parsers.expl3_variable_or_constant_use_csname, token.payload) ~= nil
-                  and lpeg.match(parsers.expl3like_csname, next_token.payload) ~= nil
-                  and lpeg.match(parsers.expl3_expansion_csname, next_token.payload) == nil
-                  and lpeg.match(parsers.expl3_scratch_variable_csname, next_token.payload) == nil
-                  and lpeg.match(parsers.expl3_variable_or_constant_csname, next_token.payload) == nil
-                ) then
-              issues:add('s206', 'malformed variable or constant name', next_token.byte_range)
-            end
-            if (
-                  lpeg.match(parsers.expl3_quark_or_scan_mark_definition_csname, token.payload) ~= nil
-                  and lpeg.match(parsers.expl3_quark_or_scan_mark_csname, next_token.payload) == nil
-                  and lpeg.match(parsers.expl3_expansion_csname, next_token.payload) == nil
-                ) then
-              issues:add('s207', 'malformed quark or scan mark name', next_token.byte_range)
-            end
-          end
-        end
       end
     end
   end
@@ -389,11 +390,17 @@
   -- Store the intermediate results of the analysis.
   results.tokens = tokens
   results.groupings = groupings
+  results.num_invalid_characters = num_invalid_characters
 end
 
 return {
+  format_csname = format_csname,
+  format_token = format_token,
+  format_tokens = format_tokens,
   get_token_byte_range = get_token_byte_range,
+  is_confused = is_confused,
   is_token_simple = is_token_simple,
+  name = "lexical analysis",
   process = lexical_analysis,
   token_types = token_types,
 }

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-parsers.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-parsers.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-parsers.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -15,6 +15,7 @@
 ---- Tokens
 local ampersand = P("&")
 local backslash = P([[\]])
+local slash = P("/")
 local circumflex = P("^")
 local colon = P(":")
 local comma = P(",")
@@ -223,7 +224,7 @@
   + Cc({})
 )
 
----- Function, variable, and constant names
+---- Expl3 control sequence names
 local expl3_function_csname = (
   (underscore * underscore)^-1 * letter^1  -- module
   * underscore
@@ -230,7 +231,7 @@
   * letter * (letter + underscore)^0  -- description
   * colon
   * argument_specifier^0  -- argspec
-  * (eof + -letter)
+  * #(eof + -letter)
 )
 
 local any_type = (
@@ -285,7 +286,9 @@
   + P("intarray")
   + P("io") * S("rw")
   + P("prop")
+  + P("quark")
   + P("regex")
+  + P("scan")
   + P("seq")
 )
 
@@ -355,46 +358,43 @@
   + P("use")
   + P("withargs")  -- part of the withargs package
 )
-local function expl3_well_known_function_csname(other_prefix_texts)
+local function expl3_well_known_csname(other_prefix_texts)
   local other_prefixes = fail
   for _, prefix_text in ipairs(other_prefix_texts) do
-    other_prefixes = other_prefixes + P(prefix_text)
+    other_prefixes = (
+      other_prefixes +
+      #(P(prefix_text) * (underscore + colon))
+      * P(prefix_text)
+    )
   end
-  return (
+  local prefix = (
+    #(expl3_standard_library_prefixes * (underscore + colon))
+    * expl3_standard_library_prefixes
+    + #(registered_prefixes * (underscore + colon))
+    * registered_prefixes
+    + other_prefixes
+  )
+  local well_known_function_csname = (
     P("__")^-1
+    * prefix
     * (
-      expl3_standard_library_prefixes * #(underscore + colon)
-      + registered_prefixes * #(underscore + colon)
-      + other_prefixes
-    )
-    * (
       underscore
       * (any - colon)^0
     )^0
     * colon
   )
-end
-
-local expl3_variable_or_constant_csname = (
-  S("cgl")  -- scope
-  * underscore
-  * (
-    underscore^-1 * letter^1  -- module
+  local well_known_variable_or_constant_csname = (
+    S("cgl")  -- scope
     * underscore
-    * letter * (letter + underscore * -#(expl3_variable_or_constant_type * eof))^0  -- description
+    * underscore^-1
+    * prefix
+    * underscore
   )
-  * underscore
-  * expl3_variable_or_constant_type
-  * eof
-)
-local expl3_scratch_variable_csname = (
-  S("gl")
-  * underscore
-  * P("tmp") * S("ab")
-  * underscore
-  * expl3_variable_or_constant_type
-  * eof
-)
+  return (
+    well_known_function_csname
+    + well_known_variable_or_constant_csname
+  )
+end
 
 local expl3like_function_with_underscores_csname = (
   underscore^0
@@ -488,11 +488,9 @@
 )
 
 ------ Explcheck issues
-local issue_code = (
+local issue_code_prefix = (
   S("EeSsTtWw")
-  * decimal_digit
-  * decimal_digit
-  * decimal_digit
+  * decimal_digit^-3
 )
 local ignored_issues = Ct(
   (
@@ -505,12 +503,12 @@
     P(":")
     * optional_spaces
     * (
-      Cs(issue_code)
+      Cs(issue_code_prefix)
       * optional_spaces
       * comma
       * optional_spaces
     )^0
-    * Cs(issue_code)
+    * Cs(issue_code_prefix)
     * optional_spaces
     + optional_spaces
   )
@@ -647,7 +645,8 @@
   * colon
 )
 
----- Assigning functions
+---- Functions and conditional functions
+------ Function definitions
 local expl3_function_definition_type_signifier = (
   P("new") * Cc(false) * Cc(true)  -- definition
   + (  -- assignment
@@ -695,7 +694,7 @@
   + Cc(false) * expl3_indirect_function_definition_csname
 )
 
----- Generating function variants
+------ Generating function variants
 local expl3_function_variant_definition_csname = Ct(
   (
     -- A non-conditional function
@@ -707,7 +706,7 @@
   * S("Nc")
 )
 
----- Function calls with Lua arguments
+------ Function calls with Lua arguments
 local expl3_function_call_with_lua_code_argument_csname = Ct(
   P("lua")
   * underscore
@@ -722,50 +721,157 @@
   + success
 )
 
----- Using variables/constants
-local expl3_variable_or_constant_use_csname = (
-  expl3_variable_or_constant_type
+------ Conditions in a conditional function definition
+local condition = (
+  P("p")
+  + P("T") * P("F")^-1
+  + P("F")
+)
+local conditions = comma_list(condition)
+
+---- Variables and constants
+------ Variable names
+local expl3_variable_or_constant_csname = (
+  S("cgl")  -- scope
   * underscore
   * (
-    P("const")
-    + P("new")
-    + P("g")^-1
-    * P("set")
+    underscore^-1 * letter^1  -- module
+    * underscore
+    * letter * (letter + underscore * -#(expl3_variable_or_constant_type * eof))^0  -- description
+  )
+  * underscore
+  * expl3_variable_or_constant_type  -- type
+  * eof
+)
+local expl3_variable_or_constant_csname_scope = (
+  C(S("cgl"))  -- scope
+  * underscore
+)
+local expl3_variable_or_constant_csname_type = (
+  (any - underscore)^0  -- scope
+  * underscore^1
+  * (any - underscore)^1  -- module and description
+  * (any - #(underscore * expl3_variable_or_constant_type * eof))^0
+  * underscore
+  * C(expl3_variable_or_constant_type)  -- type
+  * eof
+)
+local expl3_scratch_variable_csname = (
+  S("gl")
+  * underscore
+  * P("tmp") * S("ab")
+  * underscore
+  * expl3_variable_or_constant_type
+  * eof
+)
+local expl3_quark_or_scan_mark_csname = (
+  S("qs")
+  * underscore
+)
+
+------ Variable declarations
+local expl3_variable_declaration_csname = Ct(
+  C(expl3_variable_or_constant_type)
+  * underscore
+  * (
+    P("g")^-1
     * (
-      underscore
+      P("zero")
+      + P("clear")
+    )
+    * underscore
+  )^-1
+  * P("new:N")
+)
+
+------ Variable and constant definitions
+local expl3_variable_definition_csname = Ct(
+  C(expl3_variable_or_constant_type)
+  * underscore
+  * (
+    P("const") * Cc(true)^-3  -- constant definition
+    + Cc(false)  -- variable definition
+    * (
+      P("gset") * Cc(true)  -- global
+      + P("set") * Cc(false)  -- local
+    )
+    * (
+      Cc(false)  -- indirect
+      * underscore
       * (
         P("eq")
-        + P("true")
-        + P("false")
+        + P("from_")
+        * C(expl3_variable_or_constant_type)
       )
-    )^-1
+      + Cc(true)  -- direct
+    )
+  )
+  * P(":N")
+)
+
+------ Variable and constant use
+local expl3_variable_use_csname = Ct(
+  C(expl3_variable_or_constant_type)
+  * underscore
+  * (
+    P("count")
+    + P("open")
+    + P("show")
     + P("use")
-    + P("show")
   )
   * P(":N")
 )
 
----- Defining quarks and scan marks
-local expl3_quark_or_scan_mark_definition_csname = (
-  (
-    P("quark")
-    + P("scan")
+---- Messages
+------ Message names
+local function expl3_well_known_message_name(other_prefix_texts)
+  local other_prefixes = fail
+  for _, prefix_text in ipairs(other_prefix_texts) do
+    other_prefixes = (
+      other_prefixes
+      + #(P(prefix_text) * slash)
+      * P(prefix_text)
+    )
+  end
+  return (
+    #(expl3_standard_library_prefixes * slash)
+    * expl3_standard_library_prefixes
+    + #(registered_prefixes * slash)
+    * registered_prefixes
+    + other_prefixes
   )
-  * P("_new:N")
-  * eof
-)
-local expl3_quark_or_scan_mark_csname = (
-  S("qs")
+end
+
+------ Message definitions
+local expl3_message_definition = (
+  P("msg")
   * underscore
+  * (
+    P("new")
+    + P("set")
+  )
+  * colon
 )
 
----- Conditions in a conditional function definition
-local condition = (
-  P("p")
-  + P("T") * P("F")^-1
-  + P("F")
+------ Message use
+local expl3_message_use = (
+  P("msg")
+  * underscore
+  * (
+    P("none")
+    + P("show")
+    + P("term")
+    + P("log")
+    + P("info")
+    + P("note")
+    + P("warning")
+    + P("error")
+    + P("expandable_error")
+    + P("critical")
+    + P("fatal")
+  )
+  * colon
 )
-local conditions = comma_list(condition)
 
 return {
   any = any,
@@ -777,10 +883,13 @@
   decimal_digit = decimal_digit,
   deprecated_argument_specifiers = deprecated_argument_specifiers,
   determine_expl3_catcode = determine_expl3_catcode,
+  double_superscript_convention = double_superscript_convention,
   do_not_use_argument_specifiers = do_not_use_argument_specifiers,
-  double_superscript_convention = double_superscript_convention,
   endinput = endinput,
   eof = eof,
+  expl3like_csname = expl3like_csname,
+  expl3like_function_csname = expl3like_function_csname,
+  expl3like_material = expl3like_material,
   expl3_catcodes = expl3_catcodes,
   expl3_endlinechar = expl3_endlinechar,
   expl3_expansion_csname = expl3_expansion_csname,
@@ -788,16 +897,19 @@
   expl3_function_csname = expl3_function_csname,
   expl3_function_definition_csname = expl3_function_definition_csname,
   expl3_function_variant_definition_csname = expl3_function_variant_definition_csname,
-  expl3like_csname = expl3like_csname,
-  expl3like_function_csname = expl3like_function_csname,
-  expl3like_material = expl3like_material,
   expl3_maybe_unexpandable_csname = expl3_maybe_unexpandable_csname,
+  expl3_message_definition = expl3_message_definition,
+  expl3_message_use = expl3_message_use,
   expl3_quark_or_scan_mark_csname = expl3_quark_or_scan_mark_csname,
-  expl3_quark_or_scan_mark_definition_csname = expl3_quark_or_scan_mark_definition_csname,
   expl3_scratch_variable_csname = expl3_scratch_variable_csname,
+  expl3_variable_declaration_csname = expl3_variable_declaration_csname,
+  expl3_variable_definition_csname = expl3_variable_definition_csname,
   expl3_variable_or_constant_csname = expl3_variable_or_constant_csname,
-  expl3_variable_or_constant_use_csname = expl3_variable_or_constant_use_csname,
-  expl3_well_known_function_csname = expl3_well_known_function_csname,
+  expl3_variable_or_constant_csname_scope = expl3_variable_or_constant_csname_scope,
+  expl3_variable_or_constant_csname_type = expl3_variable_or_constant_csname_type,
+  expl3_variable_use_csname = expl3_variable_use_csname,
+  expl3_well_known_csname = expl3_well_known_csname,
+  expl3_well_known_message_name = expl3_well_known_message_name,
   expl_syntax_off = expl_syntax_off,
   expl_syntax_on = expl_syntax_on,
   fail = fail,
@@ -807,8 +919,8 @@
   newline = newline,
   N_or_n_type_argument_specifier = N_or_n_type_argument_specifier,
   N_or_n_type_argument_specifiers = N_or_n_type_argument_specifiers,
+  N_type_argument_specifier = N_type_argument_specifier,
   n_type_argument_specifier = n_type_argument_specifier,
-  N_type_argument_specifier = N_type_argument_specifier,
   provides = provides,
   space = space,
   success = success,

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-preprocessing.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-preprocessing.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-preprocessing.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -41,11 +41,16 @@
     local transformed_index = 0
     local numbers_of_bytes_removed = {}
     local transformed_text_table = {}
+    local content_started = false
     for index, text_position in ipairs(lpeg.match(parsers.commented_lines, content)) do
       local span_size = text_position - transformed_index - 1
       if span_size > 0 then
         if index % 2 == 1 then  -- chunk of text
-          table.insert(transformed_text_table, content:sub(transformed_index + 1, text_position - 1))
+          local chunk_text = content:sub(transformed_index + 1, text_position - 1)
+          if content_started or chunk_text:find("%S") ~= nil then
+            content_started = true
+          end
+          table.insert(transformed_text_table, chunk_text)
         else  -- comment
           local comment_text = content:sub(transformed_index + 1, text_position - 1)
           local ignored_issues = lpeg.match(parsers.ignored_issues, comment_text)
@@ -53,18 +58,23 @@
           if ignored_issues ~= nil then
             local comment_line_number = utils.convert_byte_to_line_and_column(line_starting_byte_numbers, transformed_index + 1)
             assert(comment_line_number <= #line_starting_byte_numbers)
-            local comment_range_start = line_starting_byte_numbers[comment_line_number]
-            local comment_range_end, comment_range
-            if(comment_line_number + 1 <= #line_starting_byte_numbers) then
-              comment_range_end = line_starting_byte_numbers[comment_line_number + 1]
-              comment_range = new_range(comment_range_start, comment_range_end, EXCLUSIVE, #content)
-            else
-              comment_range_end = #content
-              comment_range = new_range(comment_range_start, comment_range_end, INCLUSIVE, #content)
+            -- If the comment appears before any content other than indentation and comments, ignore all issues everywhere.
+            local comment_range = nil
+            -- Otherwise, ignore the issues only on this line, except for file-wide issues, which are always ignored everywhere.
+            if content_started then
+              local comment_range_start = line_starting_byte_numbers[comment_line_number]
+              local comment_range_end
+              if(comment_line_number + 1 <= #line_starting_byte_numbers) then
+                comment_range_end = line_starting_byte_numbers[comment_line_number + 1]
+                comment_range = new_range(comment_range_start, comment_range_end, EXCLUSIVE, #content)
+              else
+                comment_range_end = #content
+                comment_range = new_range(comment_range_start, comment_range_end, INCLUSIVE, #content)
+              end
             end
-            if #ignored_issues == 0 then  -- ignore all issues on this line
+            if #ignored_issues == 0 then  -- ignore all issues
               issues:ignore(nil, comment_range)
-            else  -- ignore specific issues on this line or everywhere (for file-wide issues)
+            else  -- ignore specific issues
               for _, identifier in ipairs(ignored_issues) do
                 issues:ignore(identifier, comment_range)
               end
@@ -110,13 +120,17 @@
     end
   end
 
-  local function unexpected_pattern(pattern, code, message, test)
+  local function unexpected_pattern(pattern, code, message, test, include_context)
     return Ct(Cp() * pattern * Cp()) / function(range_table)
-      if not input_ended and (test == nil or test()) then
-        local range_start, range_end = range_table[#range_table - 1], range_table[#range_table]
-        local range = new_range(range_start, range_end, EXCLUSIVE, #transformed_content, map_back, #content)
-        issues:add(code, message, range)
+      local range_start, range_end = range_table[#range_table - 1], range_table[#range_table]
+      local context
+      if include_context then
+        context = transformed_content:sub(range_start, range_end - 1)
       end
+      local range = new_range(range_start, range_end, EXCLUSIVE, #transformed_content, map_back, #content)
+      if not input_ended and (test == nil or test(range)) then
+        issues:add(code, message, range, context)
+      end
     end
   end
 
@@ -176,7 +190,7 @@
     )
   end
 
-  local num_expl3like_material = 0
+  local expl3like_material_count, expl3like_material_bytes = 0, 0
   local analysis_grammar = P{
     "Root";
     Root = (
@@ -204,10 +218,12 @@
             parsers.expl3like_material,
             "e102",
             "expl3 material in non-expl3 parts",
-            function()
-              num_expl3like_material = num_expl3like_material + 1
+            function(byte_range)
+              expl3like_material_count = expl3like_material_count + 1
+              expl3like_material_bytes = expl3like_material_bytes + #byte_range
               return true
-            end
+            end,
+            true
           )
         + (
           V"Any"
@@ -294,7 +310,12 @@
     elseif expl3_detection_strategy == "auto" then
       -- Use context clues to determine whether no part or the whole
       -- input file is in expl3.
-      if num_expl3like_material >= get_option('min_expl3like_material', options, pathname) then
+      local expl3like_material_ratio = 0
+      if #content > 0 then
+        expl3like_material_ratio = expl3like_material_bytes / #content
+      end
+      if expl3like_material_count >= get_option('min_expl3like_material_count', options, pathname)
+          or expl3like_material_ratio >= get_option('min_expl3like_material_ratio', options, pathname) then
         issues:add('w100', 'no standard delimiters')
         local range = new_range(1, #content, INCLUSIVE, #content)
         table.insert(expl_ranges, range)
@@ -309,27 +330,29 @@
     local offset = expl_range:start() - 1
 
     local function line_too_long(range_start, range_end)
-        local range = new_range(offset + range_start, offset + range_end, EXCLUSIVE, #transformed_content, map_back, #content)
-        issues:add('s103', 'line too long', range)
-      end
+      local range = new_range(offset + range_start, offset + range_end, EXCLUSIVE, #transformed_content, map_back, #content)
+      issues:add('s103', 'line too long', range)
+    end
 
-      local overline_lines_grammar = (
-        (
-          Cp() * parsers.linechar^(get_option('max_line_length', options, pathname) + 1) * Cp() / line_too_long
-          + parsers.linechar^0
-        )
-        * parsers.newline
-      )^0
+    local overline_lines_grammar = (
+      (
+        Cp() * parsers.linechar^(get_option('max_line_length', options, pathname) + 1) * Cp() / line_too_long
+        + parsers.linechar^0
+      )
+      * parsers.newline
+    )^0
 
-      lpeg.match(overline_lines_grammar, transformed_content:sub(expl_range:start(), expl_range:stop()))
-    end
-
-    -- Store the intermediate results of the analysis.
-    results.line_starting_byte_numbers = line_starting_byte_numbers
-    results.expl_ranges = expl_ranges
-    results.seems_like_latex_style_file = seems_like_latex_style_file
+    lpeg.match(overline_lines_grammar, transformed_content:sub(expl_range:start(), expl_range:stop()))
   end
 
-  return {
-  process = preprocessing
+  -- Store the intermediate results of the analysis.
+  results.line_starting_byte_numbers = line_starting_byte_numbers
+  results.expl_ranges = expl_ranges
+  results.seems_like_latex_style_file = seems_like_latex_style_file
+end
+
+return {
+  is_confused = function() return false end,
+  name = "preprocessing",
+  process = preprocessing,
 }

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-semantic-analysis.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-semantic-analysis.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-semantic-analysis.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -10,10 +10,13 @@
 local get_token_byte_range = lexical_analysis.get_token_byte_range
 local is_token_simple = lexical_analysis.is_token_simple
 local token_types = lexical_analysis.token_types
+local format_csname = lexical_analysis.format_csname
 
+local count_parameters_in_replacement_text = syntactic_analysis.count_parameters_in_replacement_text
 local extract_text_from_tokens = syntactic_analysis.extract_text_from_tokens
 
 local CONTROL_SEQUENCE = token_types.CONTROL_SEQUENCE
+local CHARACTER = token_types.CHARACTER
 
 local new_range = ranges.new_range
 local range_flags = ranges.range_flags
@@ -34,6 +37,11 @@
 local statement_types = {
   FUNCTION_DEFINITION = "function definition",
   FUNCTION_VARIANT_DEFINITION = "function variant definition",
+  VARIABLE_DECLARATION = "variable declaration",
+  VARIABLE_DEFINITION = "variable or constant definition",
+  VARIABLE_USE = "variable or constant use",
+  MESSAGE_DEFINITION = "message definition",
+  MESSAGE_USE = "message use",
   OTHER_STATEMENT = "other statement",
   OTHER_TOKENS_SIMPLE = "block of other simple tokens",
   OTHER_TOKENS_COMPLEX = "block of other complex tokens",
@@ -41,6 +49,14 @@
 
 local FUNCTION_DEFINITION = statement_types.FUNCTION_DEFINITION
 local FUNCTION_VARIANT_DEFINITION = statement_types.FUNCTION_VARIANT_DEFINITION
+
+local VARIABLE_DECLARATION = statement_types.VARIABLE_DECLARATION
+local VARIABLE_DEFINITION = statement_types.VARIABLE_DEFINITION
+local VARIABLE_USE = statement_types.VARIABLE_USE
+
+local MESSAGE_DEFINITION = statement_types.MESSAGE_DEFINITION
+local MESSAGE_USE = statement_types.MESSAGE_USE
+
 local OTHER_STATEMENT = statement_types.OTHER_STATEMENT
 local OTHER_TOKENS_SIMPLE = statement_types.OTHER_TOKENS_SIMPLE
 local OTHER_TOKENS_COMPLEX = statement_types.OTHER_TOKENS_COMPLEX
@@ -47,14 +63,21 @@
 
 local statement_subtypes = {
   FUNCTION_DEFINITION = {
-    DIRECT = "direct function definition",
-    INDIRECT = "indirect function definition",
-  }
+    DIRECT = "direct " .. FUNCTION_DEFINITION,
+    INDIRECT = "indirect " .. FUNCTION_DEFINITION,
+  },
+  VARIABLE_DEFINITION = {
+    DIRECT = "direct " .. VARIABLE_DEFINITION,
+    INDIRECT = "indirect " .. VARIABLE_DEFINITION,
+  },
 }
 
 local FUNCTION_DEFINITION_DIRECT = statement_subtypes.FUNCTION_DEFINITION.DIRECT
 local FUNCTION_DEFINITION_INDIRECT = statement_subtypes.FUNCTION_DEFINITION.INDIRECT
 
+local VARIABLE_DEFINITION_DIRECT = statement_subtypes.VARIABLE_DEFINITION.DIRECT
+local VARIABLE_DEFINITION_INDIRECT = statement_subtypes.VARIABLE_DEFINITION.INDIRECT
+
 local statement_confidences = {
   DEFINITELY = 1,
   MAYBE = 0.5,
@@ -73,24 +96,158 @@
 local TEXT = csname_types.TEXT
 local PATTERN = csname_types.PATTERN
 
+-- Determine whether an expl3 type is a subtype of another type.
+local function is_subtype(subtype, supertype)
+  if subtype == supertype then
+    return true
+  elseif (subtype == "str" or subtype == "clist") and supertype == "tl" then
+    return true
+  elseif (subtype == "ior" or subtype == "iow") and supertype == "int" then
+    return true
+  -- Without tracking the data flow, we can't distinguish between h?box and v?box, we just know !(hbox <= vbox) and !(vbox <= hbox).
+  elseif subtype:sub(-3) == "box" and supertype:sub(-3) == "box" and math.min(#subtype, #supertype) == 3 then
+    return true
+  -- Without tracking the data flow, we can't distinguish between h?coffin and v?coffin, we just know !(hcoffin <= vcoffin)
+  -- and !(vcoffin <= hcoffin).
+  elseif subtype:sub(-6) == "coffin" and supertype:sub(-6) == "coffin" and math.min(#subtype, #supertype) == 6 then
+    return true
+  else
+    return false
+  end
+end
+
+-- Determine whether an expl3 type can perhaps be used by a function of another type.
+local function is_maybe_compatible_type(first_type, second_type)
+  return is_subtype(first_type, second_type) or is_subtype(second_type, first_type)
+end
+
+-- Determine the type of a span of tokens as either "simple text" [1, p. 383] with no expected side effects or
+-- a more complex material that may have side effects and presents a boundary between chunks of well-understood
+-- expl3 statements.
+--
+--  [1]: Donald Ervin Knuth. 1986. TeX: The Program. Addison-Wesley, USA.
+--
+local function classify_tokens(tokens, token_range)
+  for _, token in token_range:enumerate(tokens) do
+    if not is_token_simple(token) then
+      return OTHER_TOKENS_COMPLEX  -- context material
+    end
+  end
+  return OTHER_TOKENS_SIMPLE  -- simple material
+end
+
+-- Determine whether the semantic analysis step is too confused by the results
+-- of the previous steps to run.
+local function is_confused(pathname, results, options)
+  local format_percentage = require("explcheck-format").format_percentage
+  local evaluation = require("explcheck-evaluation")
+  local count_tokens = evaluation.count_tokens
+  local num_tokens = count_tokens(results)
+  assert(num_tokens ~= nil)
+  assert(results.tokens ~= nil and results.calls ~= nil)
+  local num_other_complex_tokens = 0
+  for part_number, part_calls in ipairs(results.calls) do
+    local part_tokens = results.tokens[part_number]
+    for _, call in ipairs(part_calls) do
+      if call.type == OTHER_TOKENS then
+        for _, token in call.token_range:enumerate(part_tokens) do
+          if not is_token_simple(token) then
+            num_other_complex_tokens = num_other_complex_tokens + 1
+          end
+        end
+      end
+    end
+  end
+  if num_tokens > 0 then
+    local other_complex_token_ratio = num_other_complex_tokens / num_tokens
+    local min_other_complex_tokens_count = get_option('min_other_complex_tokens_count', options, pathname)
+    local min_other_complex_tokens_ratio = get_option('min_other_complex_tokens_ratio', options, pathname)
+    if num_other_complex_tokens >= min_other_complex_tokens_count and other_complex_token_ratio >= min_other_complex_tokens_ratio then
+      local reason = string.format(
+        "too much complex material (%s >= %s) wasn't recognized as calls",
+        format_percentage(100.0 * other_complex_token_ratio),
+        format_percentage(100.0 * min_other_complex_tokens_ratio)
+      )
+      return true, reason
+    end
+  end
+  return false
+end
+
 -- Determine the meaning of function calls and register any issues.
 local function semantic_analysis(pathname, content, issues, results, options)
 
-  -- Determine the type of a span of tokens as either "simple text" [1, p. 383] with no expected side effects or
-  -- a more complex material that may have side effects and presents a boundary between chunks of well-understood
-  -- expl3 statements.
-  --
-  --  [1]: Donald Ervin Knuth. 1986. TeX: The Program. Addison-Wesley, USA.
-  --
-  local function classify_tokens(tokens, token_range)
-    for _, token in token_range:enumerate(tokens) do
-      if not is_token_simple(token) then  -- complex material
-        return OTHER_TOKENS_COMPLEX
+  -- Convert tokens from a range into a PEG pattern.
+  local function extract_pattern_from_tokens(token_range, transformed_tokens, map_forward)
+    -- First, extract subpatterns and text transcripts for the simple material.
+    local subpatterns, subpattern, transcripts, num_simple_tokens = {}, parsers.success, {}, 0
+    local previous_token_was_simple = true
+    for _, token in token_range:enumerate(transformed_tokens, map_forward) do
+      if is_token_simple(token) then  -- simple material
+        subpattern = subpattern * lpeg.P(token.payload)
+        table.insert(transcripts, token.payload)
+        num_simple_tokens = num_simple_tokens + 1
+        previous_token_was_simple = true
+      else  -- complex material
+        if previous_token_was_simple then
+          table.insert(subpatterns, subpattern)
+          subpattern = parsers.success
+          table.insert(transcripts, "*")
+        end
+        previous_token_was_simple = false
       end
     end
-    return OTHER_TOKENS_SIMPLE  -- simple material
+    if previous_token_was_simple then
+      table.insert(subpatterns, subpattern)
+    end
+    local transcript = table.concat(transcripts)
+    -- Next, build up the pattern from the back, simulating lazy `.*?` using negative lookaheads.
+    local subpattern_separators = {}
+    for subpattern_number = #subpatterns, 2, -1 do
+      local rest = subpatterns[subpattern_number]
+      for separator_number = 1, #subpattern_separators do
+        rest = rest * subpattern_separators[#subpattern_separators - separator_number + 1]
+        rest = rest * subpatterns[subpattern_number + separator_number]
+      end
+      local separator = (parsers.any - #rest)^0
+      table.insert(subpattern_separators, separator)
+    end
+    local pattern = parsers.success
+    for subpattern_number = 1, #subpatterns do
+      pattern = pattern * subpatterns[subpattern_number]
+      if subpattern_number < #subpatterns then
+        pattern = pattern * subpattern_separators[#subpattern_separators - subpattern_number + 1]
+      elseif not previous_token_was_simple then
+        pattern = pattern * parsers.any^0
+      end
+    end
+    return pattern, transcript, num_simple_tokens
   end
 
+  -- Try and convert tokens from a range into a csname.
+  local function _extract_name_from_tokens(token_range, transformed_tokens, map_forward)
+    local text = extract_text_from_tokens(token_range, transformed_tokens, map_forward)
+    local csname
+    if text ~= nil then  -- simple material
+      csname = {
+        payload = text,
+        transcript = text,
+        type = TEXT
+      }
+    else  -- complex material
+      local pattern, transcript, num_simple_tokens = extract_pattern_from_tokens(token_range, transformed_tokens, map_forward)
+      if num_simple_tokens < get_option("min_simple_tokens_in_csname_pattern", options, pathname) then  -- too few simple tokens, give up
+        return nil
+      end
+      csname = {
+        payload = pattern,
+        transcript = transcript,
+        type = PATTERN
+      }
+    end
+    return csname
+  end
+
   -- Extract statements from function calls and record them. For all identified function definitions, also record replacement texts.
   local function record_statements_and_replacement_texts(tokens, transformed_tokens, calls, first_map_back, first_map_forward)
     local statements = {}
@@ -100,6 +257,16 @@
       local call_range = new_range(call_number, call_number, INCLUSIVE, #calls)
       local byte_range = call.token_range:new_range_from_subranges(get_token_byte_range(tokens), #content)
 
+      -- Map a token range from the tokens to the transformed tokens.
+      local function transform_token_range(token_range)
+        return new_range(
+          first_map_forward(token_range:start()),
+          first_map_forward(token_range:stop()),
+          INCLUSIVE + MAYBE_EMPTY,
+          #transformed_tokens
+        )
+      end
+
       -- Try and convert tokens from an argument into a text.
       local function extract_text_from_argument(argument)
         assert(lpeg.match(parsers.n_type_argument_specifier, argument.specifier) ~= nil)
@@ -106,6 +273,11 @@
         return extract_text_from_tokens(argument.token_range, transformed_tokens, first_map_forward)
       end
 
+      -- Try and convert tokens from a range into a csname.
+      local function extract_name_from_tokens(token_range)
+        return _extract_name_from_tokens(token_range, transformed_tokens, first_map_forward)
+      end
+
       -- Extract the name of a control sequence from a call argument.
       local function extract_csname_from_argument(argument)
         local csname
@@ -114,9 +286,13 @@
           if csname_token.type ~= CONTROL_SEQUENCE then  -- the N-type argument is not a control sequence, give up
             return nil
           end
-          csname = csname_token.payload
+          csname = {
+            payload = csname_token.payload,
+            transcript = csname_token.payload,
+            type = TEXT
+          }
         elseif argument.specifier == "c" then
-          csname = extract_text_from_argument(argument)
+          csname = extract_name_from_tokens(argument.token_range)
           if csname == nil then  -- the c-type argument contains complex material, give up
             return nil
           end
@@ -129,31 +305,72 @@
 
       -- Split an expl3 control sequence name to a stem and the argument specifiers.
       local function parse_expl3_csname(csname)
-        local _, _, csname_stem, argument_specifiers = csname:find("([^:]*):([^:]*)")
-        return csname_stem, argument_specifiers
+        if csname.type == TEXT then
+          local _, _, csname_stem, argument_specifiers = csname.payload:find("([^:]*):([^:]*)")
+          if csname_stem == nil then
+            return nil
+          else
+            return csname_stem, {
+              payload = argument_specifiers,
+              transcript = argument_specifiers,
+              type = TEXT
+            }
+          end
+        elseif csname.type == PATTERN then
+          return nil
+        else
+          error('Unexpected csname type "' .. csname.type .. '"')
+        end
       end
 
       -- Determine whether a function is private or public based on its name.
       local function is_function_private(csname)
-        return csname:sub(1, 2) == "__"
+        if csname.type == TEXT then
+          return csname.payload:sub(1, 2) == "__"
+        elseif csname.type == PATTERN then
+          return csname.transcript:sub(1, 2) == "__"
+        else
+          error('Unexpected csname type "' .. csname.type .. '"')
+        end
       end
 
       -- Replace the argument specifiers in an expl3 control sequence name.
       local function replace_argument_specifiers(csname_stem, argument_specifiers)
         local csname
-        if type(argument_specifiers) == 'string' then
-          csname = string.format("%s:%s", csname_stem, argument_specifiers)
+        local transcript = string.format("%s:%s", csname_stem, argument_specifiers.transcript)
+        if argument_specifiers.type == TEXT then
+          csname = {
+            payload = string.format("%s:%s", csname_stem, argument_specifiers.payload),
+            transcript = transcript,
+            type = TEXT
+          }
+        elseif argument_specifiers.type == PATTERN then
+          csname = {
+            payload = lpeg.P(csname_stem) * lpeg.P(":") * argument_specifiers.payload,
+            transcript = transcript,
+            type = PATTERN
+          }
         else
-          local transcript = string.format("%s:%s", csname_stem, argument_specifiers.transcript)
+          error('Unexpected argument specifiers type "' .. argument_specifiers.type .. '"')
+        end
+        return csname
+      end
+
+      -- Determine the control sequence name of a conditional function given a base control sequence name and a condition.
+      local function get_conditional_function_csname(csname_stem, argument_specifiers, condition)
+        local csname
+        if condition == "p" then  -- predicate function
+          local format = "%s_p:%s"
+          local transcript = string.format(format, csname_stem, argument_specifiers.transcript)
           if argument_specifiers.type == TEXT then
             csname = {
-              payload = string.format("%s:%s", csname_stem, argument_specifiers.payload),
+              payload = string.format(format, csname_stem, argument_specifiers.payload),
               transcript = transcript,
               type = TEXT
             }
           elseif argument_specifiers.type == PATTERN then
             csname = {
-              payload = lpeg.P(csname_stem) * lpeg.P(":") * argument_specifiers.payload,
+              payload = lpeg.P(csname_stem) * lpeg.P("_p:") * argument_specifiers.payload,
               transcript = transcript,
               type = PATTERN
             }
@@ -160,100 +377,59 @@
           else
             error('Unexpected argument specifiers type "' .. argument_specifiers.type .. '"')
           end
-        end
-        return csname
-      end
-
-      -- Determine the control sequence name of a conditional function given a base control sequence name and a condition.
-      local function get_conditional_function_csname(csname_stem, argument_specifiers, condition)
-        local csname
-        if condition == "p" then  -- predicate function
-          local format = "%s_p:%s"
-          if type(argument_specifiers) == 'string' then
-            csname = string.format(format, csname_stem, argument_specifiers)
-          else
-            local transcript = string.format(format, csname_stem, argument_specifiers.transcript)
-            if argument_specifiers.type == TEXT then
-              csname = {
-                payload = string.format(format, csname_stem, argument_specifiers.payload),
-                transcript = transcript,
-                type = TEXT
-              }
-            elseif argument_specifiers.type == PATTERN then
-              csname = {
-                payload = lpeg.P(csname_stem) * lpeg.P("_p:") * argument_specifiers.payload,
-                transcript = transcript,
-                type = PATTERN
-              }
-            else
-              error('Unexpected argument specifiers type "' .. argument_specifiers.type .. '"')
-            end
-          end
         elseif condition == "T" then  -- true-branch conditional function
           local format = "%s:%sT"
-          if type(argument_specifiers) == 'string' then
-            csname = string.format(format, csname_stem, argument_specifiers)
+          local transcript = string.format(format, csname_stem, argument_specifiers.transcript)
+          if argument_specifiers.type == TEXT then
+            csname = {
+              payload = string.format(format, csname_stem, argument_specifiers.payload),
+              transcript = transcript,
+              type = TEXT
+            }
+          elseif argument_specifiers.type == PATTERN then
+            csname = {
+              payload = lpeg.P(csname_stem) * lpeg.P(":") * argument_specifiers.payload * lpeg.P("T"),
+              transcript = transcript,
+              type = PATTERN
+            }
           else
-            local transcript = string.format(format, csname_stem, argument_specifiers.transcript)
-            if argument_specifiers.type == TEXT then
-              csname = {
-                payload = string.format(format, csname_stem, argument_specifiers.payload),
-                transcript = transcript,
-                type = TEXT
-              }
-            elseif argument_specifiers.type == PATTERN then
-              csname = {
-                payload = lpeg.P(csname_stem) * lpeg.P(":") * argument_specifiers.payload * lpeg.P("T"),
-                transcript = transcript,
-                type = PATTERN
-              }
-            else
-              error('Unexpected argument specifiers type "' .. argument_specifiers.type .. '"')
-            end
+            error('Unexpected argument specifiers type "' .. argument_specifiers.type .. '"')
           end
         elseif condition == "F" then  -- false-branch conditional function
           local format = "%s:%sF"
-          if type(argument_specifiers) == 'string' then
-            csname = string.format(format, csname_stem, argument_specifiers)
+          local transcript = string.format(format, csname_stem, argument_specifiers.transcript)
+          if argument_specifiers.type == TEXT then
+            csname = {
+              payload = string.format(format, csname_stem, argument_specifiers.payload),
+              transcript = transcript,
+              type = TEXT
+            }
+          elseif argument_specifiers.type == PATTERN then
+            csname = {
+              payload = lpeg.P(csname_stem) * lpeg.P(":") * argument_specifiers.payload * lpeg.P("F"),
+              transcript = transcript,
+              type = PATTERN
+            }
           else
-            local transcript = string.format(format, csname_stem, argument_specifiers.transcript)
-            if argument_specifiers.type == TEXT then
-              csname = {
-                payload = string.format(format, csname_stem, argument_specifiers.payload),
-                transcript = transcript,
-                type = TEXT
-              }
-            elseif argument_specifiers.type == PATTERN then
-              csname = {
-                payload = lpeg.P(csname_stem) * lpeg.P(":") * argument_specifiers.payload * lpeg.P("F"),
-                transcript = transcript,
-                type = PATTERN
-              }
-            else
-              error('Unexpected argument specifiers type "' .. argument_specifiers.type .. '"')
-            end
+            error('Unexpected argument specifiers type "' .. argument_specifiers.type .. '"')
           end
         elseif condition == "TF" then  -- true-and-false-branch conditional function
           local format = "%s:%sTF"
-          if type(argument_specifiers) == 'string' then
-            csname = string.format(format, csname_stem, argument_specifiers)
+          local transcript = string.format(format, csname_stem, argument_specifiers.transcript)
+          if argument_specifiers.type == TEXT then
+            csname = {
+              payload = string.format(format, csname_stem, argument_specifiers.payload),
+              transcript = transcript,
+              type = TEXT
+            }
+          elseif argument_specifiers.type == PATTERN then
+            csname = {
+              payload = lpeg.P(csname_stem) * lpeg.P(":") * argument_specifiers.payload * lpeg.P("TF"),
+              transcript = transcript,
+              type = PATTERN,
+            }
           else
-            local transcript = string.format(format, csname_stem, argument_specifiers.transcript)
-            if argument_specifiers.type == TEXT then
-              csname = {
-                payload = string.format(format, csname_stem, argument_specifiers.payload),
-                transcript = transcript,
-                type = TEXT
-              }
-            elseif argument_specifiers.type == PATTERN then
-              csname = {
-                payload = lpeg.P(csname_stem) * lpeg.P(":") * argument_specifiers.payload * lpeg.P("TF"),
-                transcript = transcript,
-                type = PATTERN,
-              }
-            else
-              error('Unexpected argument specifiers type "' .. argument_specifiers.type .. '"')
-            end
+            error('Unexpected argument specifiers type "' .. argument_specifiers.type .. '"')
           end
         else
           error('Unexpected condition "' .. condition .. '"')
@@ -298,9 +474,10 @@
       local function parse_variant_argument_specifiers(csname, argument)
         -- extract the argument specifiers from the csname
         local _, base_argument_specifiers = parse_expl3_csname(csname)
-        if base_argument_specifiers == nil then
+        if base_argument_specifiers == nil or base_argument_specifiers.type ~= TEXT then
           return nil  -- we couldn't parse the csname, give up
         end
+        base_argument_specifiers = base_argument_specifiers.payload
 
         local variant_argument_specifiers
 
@@ -326,7 +503,8 @@
                 argument_specifiers, base_argument_specifiers:sub(#argument_specifiers + 1)
               )
             else  -- variant argument specifiers are longer than base specifiers
-              issues:add("t403", "function variant of incompatible type", byte_range)
+              local context = string.format("%s -> %s", base_argument_specifiers, argument_specifiers)
+              issues:add("t403", "function variant of incompatible type", byte_range, context)
               return nil  -- give up
             end
           end
@@ -352,10 +530,11 @@
                   break  -- skip further checks
                 end
               end
+              local context = string.format("%s -> %s", base_argument_specifiers, argument_specifiers)
               if any_deprecated_specifier then
-                issues:add("w410", "function variant of deprecated type", byte_range)
+                issues:add("w410", "function variant of deprecated type", byte_range, context)
               else
-                issues:add("t403", "function variant of incompatible type", byte_range)
+                issues:add("t403", "function variant of incompatible type", byte_range, context)
                 return nil  -- variant argument specifier is incompatible with base argument specifier, give up
               end
             end
@@ -396,6 +575,7 @@
       end
 
       if call.type == CALL then  -- a function call
+
         -- Ignore error S204 (Missing stylistic whitespaces) in Lua code.
         for _, arguments_number in ipairs(lpeg.match(parsers.expl3_function_call_with_lua_code_argument_csname, call.csname)) do
           local lua_code_argument = call.arguments[arguments_number]
@@ -405,9 +585,29 @@
           end
         end
 
+        -- Report using a comparison conditional without the signature `:nnTF`.
+        if call.csname == 'tl_sort:nN' and #call.arguments == 2 then
+          -- determine the name of the comparison conditional
+          local csname_argument = call.arguments[2]
+          local csname = extract_csname_from_argument(csname_argument)
+          if csname ~= nil then
+            local _, argument_specifiers = parse_expl3_csname(csname)
+            if argument_specifiers ~= nil and argument_specifiers.type == TEXT and argument_specifiers.payload ~= 'nnTF' then
+              issues:add('e427', 'comparison conditional without signature `:nnTF`', byte_range, argument_specifiers.payload)
+            end
+          end
+        end
+
         local function_variant_definition = lpeg.match(parsers.expl3_function_variant_definition_csname, call.csname)
         local function_definition = lpeg.match(parsers.expl3_function_definition_csname, call.csname)
 
+        local variable_declaration = lpeg.match(parsers.expl3_variable_declaration_csname, call.csname)
+        local variable_definition = lpeg.match(parsers.expl3_variable_definition_csname, call.csname)
+        local variable_use = lpeg.match(parsers.expl3_variable_use_csname, call.csname)
+
+        local message_definition = lpeg.match(parsers.expl3_message_definition, call.csname)
+        local message_use = lpeg.match(parsers.expl3_message_use, call.csname)
+
         -- Process a function variant definition.
         if function_variant_definition ~= nil then
           local is_conditional = table.unpack(function_variant_definition)
@@ -476,14 +676,13 @@
           -- Process a direct function definition.
           if is_direct then
             -- determine the properties of the defined function
+            local defined_csname_argument = call.arguments[1]
             local _, _, is_creator_function = table.unpack(function_definition)
             local is_conditional, maybe_redefinition, is_global, is_protected, is_nopar
-            local defined_csname_argument, num_parameters
+            local num_parameters
             if is_creator_function == true then  -- direct application of a creator function
-              defined_csname_argument = call.arguments[1]
               _, is_conditional, _, maybe_redefinition, is_global, is_protected, is_nopar = table.unpack(function_definition)
             else  -- indirect application of a creator function
-              defined_csname_argument = call.arguments[2]
               local num_parameter_argument = call.arguments[3]
               if num_parameter_argument ~= nil and num_parameter_argument.specifier == "n" then
                 local num_parameters_text = extract_text_from_argument(num_parameter_argument)
@@ -491,11 +690,14 @@
                   num_parameters = tonumber(num_parameters_text)
                 end
               end
-              local creator_function_csname = extract_csname_from_argument(call.arguments[1])
-              if creator_function_csname == nil then  -- couldn't determine the name of the creator function, give up
+              local creator_function_csname = extract_csname_from_argument(call.arguments[2])
+              if (  -- couldn't determine the name of the creator function, give up
+                    creator_function_csname == nil
+                    or creator_function_csname.type ~= TEXT
+                  ) then
                 goto other_statement
               end
-              local actual_function_definition = lpeg.match(parsers.expl3_function_definition_csname, creator_function_csname)
+              local actual_function_definition = lpeg.match(parsers.expl3_function_definition_csname, creator_function_csname.payload)
               if actual_function_definition == nil then  -- couldn't understand the creator function, give up
                 goto other_statement
               end
@@ -521,8 +723,12 @@
                   num_parameters = updated_num_parameters
                 end
               end
-              if argument_specifiers ~= nil and lpeg.match(parsers.N_or_n_type_argument_specifiers, argument_specifiers) ~= nil then
-                update_num_parameters(#argument_specifiers)
+              if (
+                    argument_specifiers ~= nil
+                    and argument_specifiers.type == TEXT
+                    and lpeg.match(parsers.N_or_n_type_argument_specifiers, argument_specifiers.payload) ~= nil
+                  ) then
+                update_num_parameters(#argument_specifiers.payload)
               end
               for _, argument in ipairs(call.arguments) do  -- next, try to look for p-type "TeX parameter" argument specifiers
                 if argument.specifier == "p" and argument.num_parameters ~= nil then
@@ -534,18 +740,12 @@
                 goto skip_replacement_text  -- record partial information
               end
               -- parse the replacement text and record the function definition
-              local mapped_replacement_text_token_range = new_range(
-                first_map_forward(replacement_text_argument.token_range:start()),
-                first_map_forward(replacement_text_argument.token_range:stop()),
-                INCLUSIVE + MAYBE_EMPTY,
-                #transformed_tokens
-              )
               local doubly_transformed_tokens, second_map_back, second_map_forward = transform_replacement_text_tokens(
                 content,
                 transformed_tokens,
                 issues,
                 num_parameters,
-                mapped_replacement_text_token_range
+                transform_token_range(replacement_text_argument.token_range)
               )
               if doubly_transformed_tokens == nil then  -- we couldn't parse the replacement text
                 goto skip_replacement_text  -- record partial information
@@ -572,10 +772,13 @@
               -- determine the defined csnames
               for _, condition_table in ipairs(conditions) do
                 local condition, confidence = table.unpack(condition_table)
+                if defined_csname_stem == nil or argument_specifiers == nil then  -- we couldn't parse the csname, give up
+                  goto other_statement
+                end
+                local effectively_defined_csname = get_conditional_function_csname(defined_csname_stem, argument_specifiers, condition)
                 if condition == "p" and is_protected then
-                  issues:add("e404", "protected predicate function", byte_range)
+                  issues:add("e404", "protected predicate function", byte_range, format_csname(effectively_defined_csname))
                 end
-                local effectively_defined_csname = get_conditional_function_csname(defined_csname_stem, argument_specifiers, condition)
                 table.insert(effectively_defined_csnames, {effectively_defined_csname, confidence})
               end
             else  -- non-conditional function
@@ -671,6 +874,217 @@
           goto continue
         end
 
+        -- Process a variable declaration.
+        if variable_declaration ~= nil then
+          local variable_type = table.unpack(variable_declaration)
+          -- determine the name of the declared variable
+          local declared_csname_argument = call.arguments[1]
+          local declared_csname = extract_csname_from_argument(declared_csname_argument)
+          if declared_csname == nil then  -- we couldn't extract the csname, give up
+            goto other_statement
+          end
+          if (
+                declared_csname.type == TEXT
+                and lpeg.match(parsers.expl3_expansion_csname, declared_csname.payload) ~= nil  -- there appears to be expansion, give up
+              ) then
+            goto other_statement
+          end
+          local confidence = declared_csname.type == TEXT and DEFINITELY or MAYBE
+          local statement = {
+            type = VARIABLE_DECLARATION,
+            call_range = call_range,
+            confidence = confidence,
+            -- The following attributes are specific to the type.
+            declared_csname = declared_csname,
+            variable_type = variable_type,
+          }
+          table.insert(statements, statement)
+          goto continue
+        end
+
+        -- Process a variable or constant definition.
+        if variable_definition ~= nil then
+          local variable_type, is_constant, is_global, is_direct = table.unpack(variable_definition)
+          -- determine the name of the declared variable
+          local defined_csname_argument = call.arguments[1]
+          local defined_csname = extract_csname_from_argument(defined_csname_argument)
+          if defined_csname == nil then  -- we couldn't extract the csname, give up
+            goto other_statement
+          end
+          if (
+                defined_csname.type == TEXT
+                and lpeg.match(parsers.expl3_expansion_csname, defined_csname.payload) ~= nil  -- there appears to be expansion, give up
+              ) then
+            goto other_statement
+          end
+          -- detect mutability mismatches
+          local defined_csname_scope = lpeg.match(parsers.expl3_variable_or_constant_csname_scope, defined_csname.transcript)
+          if defined_csname_scope ~= nil then
+            if is_constant and (defined_csname_scope == "g" or defined_csname_scope == "l") then
+              issues:add('e417', 'setting a variable as a constant', byte_range, format_csname(defined_csname.transcript))
+            end
+            if not is_constant and defined_csname_scope == "c" then
+              issues:add('e418', 'setting a constant', byte_range, format_csname(defined_csname.transcript))
+            end
+            if not is_global and defined_csname_scope == "g" then
+              issues:add('e420', 'locally setting a global variable', byte_range, format_csname(defined_csname.transcript))
+            end
+            if is_global and defined_csname_scope == "l" then
+              issues:add('e421', 'globally setting a local variable', byte_range, format_csname(defined_csname.transcript))
+            end
+          end
+          local confidence = defined_csname.type == TEXT and DEFINITELY or MAYBE
+          local statement
+          if is_direct then
+            -- determine the definition text
+            local definition_text_argument = call.arguments[2]
+            if definition_text_argument == nil then  -- we couldn't extract the definition text, give up
+              goto other_statement
+            end
+            statement = {
+              type = VARIABLE_DEFINITION,
+              call_range = call_range,
+              confidence = confidence,
+              -- The following attributes are specific to the type.
+              subtype = VARIABLE_DEFINITION_DIRECT,
+              variable_type = variable_type,
+              is_constant = is_constant,
+              is_global = is_global,
+              defined_csname = defined_csname,
+              -- The following attributes are specific to the subtype.
+              definition_text_argument = definition_text_argument,
+            }
+          else
+            local base_variable_type = variable_definition[5] or variable_type
+            -- determine the name of the base variable or constant
+            local base_csname_argument = call.arguments[2]
+            local base_csname = extract_csname_from_argument(base_csname_argument)
+            if base_csname == nil then  -- we couldn't extract the csname, give up
+              goto other_statement
+            end
+            statement = {
+              type = VARIABLE_DEFINITION,
+              call_range = call_range,
+              confidence = confidence,
+              -- The following attributes are specific to the type.
+              subtype = VARIABLE_DEFINITION_INDIRECT,
+              variable_type = variable_type,
+              is_constant = is_constant,
+              is_global = is_global,
+              defined_csname = defined_csname,
+              -- The following attributes are specific to the subtype.
+              base_csname = base_csname,
+              base_variable_type = base_variable_type,
+            }
+          end
+          table.insert(statements, statement)
+          goto continue
+        end
+
+        -- Process a variable use.
+        if variable_use ~= nil then
+          local variable_type = table.unpack(variable_use)
+          -- determine the name of the used variable
+          local used_csname_argument = call.arguments[1]
+          local used_csname = extract_csname_from_argument(used_csname_argument)
+          if used_csname == nil then  -- we couldn't extract the csname, give up
+            goto other_statement
+          end
+          if (
+                used_csname.type == TEXT
+                and lpeg.match(parsers.expl3_expansion_csname, used_csname.payload) ~= nil  -- there appears to be expansion, give up
+              ) then
+            goto other_statement
+          end
+          local confidence = used_csname.type == TEXT and DEFINITELY or MAYBE
+          local statement = {
+            type = VARIABLE_USE,
+            call_range = call_range,
+            confidence = confidence,
+            -- The following attributes are specific to the type.
+            used_csname = used_csname,
+            variable_type = variable_type,
+          }
+          table.insert(statements, statement)
+          goto continue
+        end
+
+        -- Process a message definition.
+        if message_definition ~= nil then
+          if #call.arguments < 3 or #call.arguments > 4 then  -- we couldn't find the expected number of arguments, give up
+            goto other_statement
+          end
+          local module_argument, message_argument, text_argument, more_text_argument = table.unpack(call.arguments)
+          -- determine the number of parameters in the message text
+          local num_text_parameters
+            = count_parameters_in_replacement_text(transformed_tokens, transform_token_range(text_argument.token_range))
+          if more_text_argument ~= nil then
+            num_text_parameters = math.max(
+              num_text_parameters,
+              count_parameters_in_replacement_text(transformed_tokens, transform_token_range(more_text_argument.token_range))
+            )
+          end
+          if num_text_parameters > 4 then  -- too many parameters, register an error
+            issues:add('e425', 'incorrect parameters in message text', byte_range, string.format('#%d', num_text_parameters))
+          end
+          -- parse the module and message names
+          local module_name = extract_name_from_tokens(module_argument.token_range)
+          if module_name == nil then  -- we couldn't parse the module name, give up
+            goto other_statement
+          end
+          local message_name = extract_name_from_tokens(message_argument.token_range)
+          if message_name == nil then  -- we couldn't parse the message name, give up
+            goto other_statement
+          end
+          local confidence = module_name.type == TEXT and message_name.type == TEXT and DEFINITELY or MAYBE
+          local statement = {
+            type = MESSAGE_DEFINITION,
+            call_range = call_range,
+            confidence = confidence,
+            -- The following attributes are specific to the type.
+            module_name = module_name,
+            message_name = message_name,
+            text_argument = text_argument,
+            more_text_argument = more_text_argument,
+            num_text_parameters = num_text_parameters,
+          }
+          table.insert(statements, statement)
+          goto continue
+        end
+
+        if message_use ~= nil then
+          if #call.arguments < 2 or #call.arguments > 6 then  -- we couldn't find the expected number of arguments, give up
+            goto other_statement
+          end
+          -- parse the module and message names
+          local module_argument, message_argument = table.unpack(call.arguments)
+          local module_name = extract_name_from_tokens(module_argument.token_range)
+          if module_name == nil then  -- we couldn't parse the module name, give up
+            goto other_statement
+          end
+          local message_name = extract_name_from_tokens(message_argument.token_range)
+          if message_name == nil then  -- we couldn't parse the message name, give up
+            goto other_statement
+          end
+          -- collect the text arguments
+          local text_arguments = {}
+          for i = 3, #call.arguments do
+            table.insert(text_arguments, call.arguments[i])
+          end
+          local confidence = module_name.type == TEXT and message_name.type == TEXT and DEFINITELY or MAYBE
+          local statement = {
+            type = MESSAGE_USE,
+            call_range = call_range,
+            confidence = confidence,
+            -- The following attributes are specific to the type.
+            module_name = module_name,
+            message_name = message_name,
+            text_arguments = text_arguments,
+          }
+          table.insert(statements, statement)
+          goto continue
+        end
+
         ::other_statement::
         local statement = {
           type = OTHER_STATEMENT,
@@ -779,8 +1193,9 @@
     local part_statements = statements[part_number]
     table.insert(call_segments, part_calls)
     table.insert(statement_segments, part_statements)
+    local part_groupings = results.groupings[part_number]
     local part_tokens = results.tokens[part_number]
-    table.insert(token_segments, {part_tokens, part_tokens, identity})
+    table.insert(token_segments, {part_groupings, part_tokens, part_tokens, identity, identity})
     local part_replacement_texts = replacement_texts[part_number]
     for replacement_text_number, nested_calls in ipairs(part_replacement_texts.calls) do
       local nested_statements = part_replacement_texts.statements[replacement_text_number]
@@ -787,84 +1202,112 @@
       table.insert(call_segments, nested_calls)
       table.insert(statement_segments, nested_statements)
       local replacement_text_tokens = part_replacement_texts.tokens[replacement_text_number]
-      table.insert(token_segments, {part_tokens, replacement_text_tokens.transformed_tokens, replacement_text_tokens.map_forward})
+      table.insert(
+        token_segments,
+        {
+          part_groupings,
+          part_tokens,
+          replacement_text_tokens.transformed_tokens,
+          replacement_text_tokens.map_forward,
+          replacement_text_tokens.map_back,
+        }
+      )
     end
   end
 
   --- Make a pass over the segments, building up information.
-  local defined_private_functions = {}
 
   ---- Collect information about symbols that were definitely defined.
+  local defined_private_function_texts = {}
   local called_functions_and_variants = {}
-  local defined_private_function_variant_texts, defined_private_function_variant_pattern = {}, parsers.fail
-  local defined_private_function_variant_byte_ranges = {}
-  local variant_base_csnames, indirect_definition_base_csnames = {}, {}
+  local defined_csname_texts = {}
+  local defined_private_function_variant_texts = {}
+  local defined_private_function_variant_byte_ranges, defined_private_function_variant_csnames = {}, {}
+  local variant_base_csname_texts, indirect_definition_base_csname_texts = {}, {}
 
+  local declared_defined_and_used_variable_csname_texts = {}
+  local declared_variable_csname_texts, declared_variable_csname_transcripts = {}, {}
+  local defined_variable_csname_texts = {}
+  local defined_variable_csname_transcripts, defined_variable_base_csname_transcripts = {}, {}
+  local used_variable_csname_texts, used_variable_csname_transcripts = {}, {}
+
+  local defined_message_name_texts, defined_message_nums_text_parameters = {}, {}
+  local used_message_name_texts, used_message_nums_text_arguments = {}, {}
+
   ---- Collect information about symbols that may have been defined.
+  local maybe_defined_private_function_variant_pattern = parsers.fail
   local maybe_defined_csname_texts, maybe_defined_csname_pattern = {}, parsers.fail
   local maybe_used_csname_texts, maybe_used_csname_pattern = {}, parsers.fail
 
+  local maybe_declared_variable_csname_texts = {}
+  local maybe_declared_variable_csname_pattern = parsers.fail
+  local maybe_used_variable_csname_texts = {}
+  local maybe_used_variable_csname_pattern = parsers.fail
+
+  local maybe_defined_message_name_texts, maybe_defined_message_name_pattern = {}, parsers.fail
+  local maybe_used_message_name_texts, maybe_used_message_name_pattern = {}, parsers.fail
+
   for segment_number, segment_statements in ipairs(statement_segments) do
     local segment_calls = call_segments[segment_number]
-    local segment_tokens, segment_transformed_tokens, map_forward = table.unpack(token_segments[segment_number])
+    local segment_groupings, segment_tokens, segment_transformed_tokens, map_forward, map_back
+      = table.unpack(token_segments[segment_number])
 
-    -- Convert tokens from a range into a PEG pattern.
-    local function extract_pattern_from_tokens(token_range)
-      local pattern, transcripts, num_simple_tokens = parsers.success, {}, 0
-      local previous_token_was_simple = true
-      for _, token in token_range:enumerate(segment_transformed_tokens, map_forward) do
-        if is_token_simple(token) then  -- simple material
-          pattern = pattern * lpeg.P(token.payload)
-          table.insert(transcripts, token.payload)
-          num_simple_tokens = num_simple_tokens + 1
-          previous_token_was_simple = true
-        else  -- complex material
-          if previous_token_was_simple then
-            pattern = pattern * parsers.any^0
-            table.insert(transcripts, "*")
-          end
-          previous_token_was_simple = false
-        end
-      end
-      local transcript = table.concat(transcripts)
-      return pattern, transcript, num_simple_tokens
-    end
-
-    -- Try and convert tokens from a range into a csname.
-    local function extract_csname_from_tokens(token_range)
-      local text = extract_text_from_tokens(token_range, segment_transformed_tokens, map_forward)
-      local csname
-      if text ~= nil then  -- simple material
-        csname = {
-          payload = text,
-          transcript = text,
-          type = TEXT
+    -- Merge a module name and a message name into a combined fully qualified name.
+    local function combine_module_and_message_names(module_name, message_name)
+      local transcript = string.format("%s/%s", module_name.transcript, message_name.transcript)
+      if module_name.type == TEXT and message_name.type == TEXT then
+        return {
+          payload = string.format("%s/%s", module_name.payload, message_name.payload),
+          transcript = transcript,
+          type = TEXT,
         }
-      else  -- complex material
-        local pattern, transcript, num_simple_tokens = extract_pattern_from_tokens(token_range)
-        if num_simple_tokens < get_option("min_simple_tokens_in_csname_pattern", options, pathname) then  -- too few simple tokens, give up
-          return nil
+      else
+        local message_name_pattern
+        if module_name.type == TEXT then
+          message_name_pattern = lpeg.P(module_name.payload)
+        elseif module_name.type == PATTERN then
+          message_name_pattern = module_name.payload
+        else
+          error('Unexpected module name type "' .. module_name.type .. '"')
         end
-        csname = {
-          payload = pattern,
+        message_name_pattern = message_name_pattern * lpeg.P("/")
+        if message_name.type == TEXT then
+          message_name_pattern = message_name_pattern * lpeg.P(message_name.payload)
+        elseif message_name.type == PATTERN then
+          message_name_pattern = message_name_pattern * message_name.payload
+        else
+          error('Unexpected message name type "' .. message_name.type .. '"')
+        end
+        return {
+          payload = message_name_pattern,
           transcript = transcript,
-          type = PATTERN
+          type = PATTERN,
         }
       end
-      return csname
     end
 
+    -- Try and convert tokens from a range into a csname.
+    local function extract_name_from_tokens(token_range)
+      return _extract_name_from_tokens(token_range, segment_transformed_tokens, map_forward)
+    end
+
     -- Process an argument and record control sequence name usage and definitions.
     local function process_argument_tokens(argument)
       -- Record control sequence name usage.
       --- Extract text from tokens within c- and v-type arguments.
       if argument.specifier == "c" or argument.specifier == "v" then
-        local csname = extract_csname_from_tokens(argument.token_range)
+        local csname = extract_name_from_tokens(argument.token_range)
         if csname ~= nil then
           if csname.type == TEXT then
             maybe_used_csname_texts[csname.payload] = true
           elseif csname.type == PATTERN then
-            maybe_used_csname_pattern = maybe_used_csname_pattern + csname.payload
+            maybe_used_csname_pattern = (
+              maybe_used_csname_pattern
+              + #(csname.payload * parsers.eof)
+              * lpeg.Cc(true)
+            )
+          else
+            error('Unexpected csname type "' .. csname.type .. '"')
           end
         end
       end
@@ -876,18 +1319,101 @@
           end
         end
       end
-      -- Record control sequence name definitions.
+      -- Record control sequence name definitions and message name definitions and uses.
       --- Scan control sequence tokens within N- and n-type arguments.
       if lpeg.match(parsers.N_or_n_type_argument_specifier, argument.specifier) ~= nil then
         for token_number, token in argument.token_range:enumerate(segment_transformed_tokens, map_forward) do
-          if token.type == CONTROL_SEQUENCE then
-            if token_number + 1 <= #segment_transformed_tokens then
-              local next_token = segment_transformed_tokens[token_number + 1]
-              if (
-                    next_token.type == CONTROL_SEQUENCE
-                    and lpeg.match(parsers.expl3_function_definition_csname, token.payload) ~= nil
-                  ) then
-                maybe_defined_csname_texts[next_token.payload] = true
+          if token.type == CONTROL_SEQUENCE then  -- control sequence, process it directly
+            local next_token_number = token_number + 1
+            if next_token_number <= #segment_transformed_tokens then
+              local next_token = segment_transformed_tokens[next_token_number]
+              -- Record control sequence name definitions.
+              if next_token.type == CONTROL_SEQUENCE then
+                -- Record potential function definitions.
+                if lpeg.match(parsers.expl3_function_definition_csname, token.payload) ~= nil then
+                  maybe_defined_csname_texts[next_token.payload] = true
+                end
+                -- Record potential variable declarations and definitions.
+                if lpeg.match(parsers.expl3_variable_declaration_csname, token.payload) ~= nil then
+                  maybe_declared_variable_csname_texts[next_token.payload] = true
+                  maybe_defined_csname_texts[next_token.payload] = true
+                end
+                local variable_definition = lpeg.match(parsers.expl3_variable_definition_csname, token.payload)
+                if variable_definition ~= nil then
+                  local _, is_constant = table.unpack(variable_definition)
+                  if is_constant then
+                    maybe_declared_variable_csname_texts[next_token.payload] = true
+                  end
+                  maybe_defined_csname_texts[next_token.payload] = true
+                end
+              -- Record message name definitions and uses.
+              elseif next_token.type == CHARACTER and next_token.catcode == 1 then  -- begin grouping, try to collect the module name
+                local message_definition = lpeg.match(parsers.expl3_message_definition, token.payload)
+                local message_use = lpeg.match(parsers.expl3_message_use, token.payload)
+                if message_definition ~= nil or message_use ~= nil then
+                  local next_grouping = segment_groupings[map_back(next_token_number)]
+                  assert(next_grouping ~= nil)
+                  assert(map_forward(next_grouping.start) == next_token_number)
+                  if next_grouping.stop ~= nil then  -- balanced text
+                    local module_name_token_range = new_range(
+                      next_grouping.start + 1,
+                      next_grouping.stop - 1,
+                      INCLUSIVE + MAYBE_EMPTY,
+                      #segment_tokens
+                    )
+                    local next_next_token_number = map_forward(next_grouping.stop) + 1
+                    if next_next_token_number <= #segment_transformed_tokens then
+                      local next_next_token = segment_transformed_tokens[next_next_token_number]
+                      if next_next_token.type == CHARACTER  -- begin grouping, try to collect the message name
+                          and next_next_token.catcode == 1 then
+                        local next_next_grouping = segment_groupings[map_back(next_next_token_number)]
+                        assert(next_next_grouping ~= nil)
+                        assert(map_forward(next_next_grouping.start) == next_next_token_number)
+                        if next_next_grouping.stop ~= nil then  -- balanced text
+                          local message_name_token_range = new_range(
+                            next_next_grouping.start + 1,
+                            next_next_grouping.stop - 1,
+                            INCLUSIVE + MAYBE_EMPTY,
+                            #segment_tokens
+                          )
+                          local module_name = extract_name_from_tokens(module_name_token_range)
+                          local message_name = extract_name_from_tokens(message_name_token_range)
+                          if module_name ~= nil and message_name ~= nil then
+                            local combined_name = combine_module_and_message_names(module_name, message_name)
+                            -- Record potential message definitions.
+                            if message_definition ~= nil then
+                              if combined_name.type == TEXT then
+                                maybe_defined_message_name_texts[combined_name.payload] = true
+                              elseif combined_name.type == PATTERN then
+                                maybe_defined_message_name_pattern = (
+                                  maybe_defined_message_name_pattern
+                                  + #(combined_name.payload * parsers.eof)
+                                  * lpeg.Cc(true)
+                                )
+                              else
+                                error('Unexpected message name type "' .. combined_name.type .. '"')
+                              end
+                            end
+                            -- Record potential message uses.
+                            if message_use ~= nil then
+                              if combined_name.type == TEXT then
+                                maybe_used_message_name_texts[combined_name.payload] = true
+                              elseif combined_name.type == PATTERN then
+                                maybe_used_message_name_pattern = (
+                                  maybe_used_message_name_pattern
+                                  + #(combined_name.payload * parsers.eof)
+                                  * lpeg.Cc(true)
+                                )
+                              else
+                                error('Unexpected message name type "' .. combined_name.type .. '"')
+                              end
+                            end
+                          end
+                        end
+                      end
+                    end
+                  end
+                end
               end
             end
           end
@@ -901,51 +1427,216 @@
       -- Process a function variant definition.
       if statement.type == FUNCTION_VARIANT_DEFINITION then
         -- Record base control sequence names of variants, both as control sequence name usage and separately.
-        table.insert(variant_base_csnames, {statement.base_csname, byte_range})
-        maybe_used_csname_texts[statement.base_csname] = true
+        if statement.base_csname.type == TEXT then
+          table.insert(variant_base_csname_texts, {statement.base_csname.payload, byte_range})
+          maybe_used_csname_texts[statement.base_csname.payload] = true
+        elseif statement.base_csname.type == PATTERN then
+          maybe_used_csname_pattern = (
+            maybe_used_csname_pattern
+            + #(statement.base_csname.payload * parsers.eof)
+            * lpeg.Cc(true)
+          )
+        else
+          error('Unexpected csname type "' .. statement.base_csname.type .. '"')
+        end
         -- Record control sequence name definitions.
         if statement.defined_csname.type == TEXT then
+          table.insert(defined_csname_texts, {statement.defined_csname.payload, byte_range})
           maybe_defined_csname_texts[statement.defined_csname.payload] = true
         elseif statement.defined_csname.type == PATTERN then
-          maybe_defined_csname_pattern = maybe_defined_csname_pattern + statement.defined_csname.payload
+          maybe_defined_csname_pattern = (
+            maybe_defined_csname_pattern
+            + #(statement.defined_csname.payload * parsers.eof)
+            * lpeg.Cc(true)
+          )
         else
           error('Unexpected csname type "' .. statement.defined_csname.type .. '"')
         end
         -- Record private function variant definitions.
-        if statement.confidence == DEFINITELY and statement.is_private then
+        if statement.defined_csname.type == TEXT and statement.is_private then
           table.insert(defined_private_function_variant_byte_ranges, byte_range)
-          local defined_private_function_variant = {
-            number = #defined_private_function_variant_byte_ranges,
-            csname = statement.defined_csname
-          }
-          if statement.defined_csname.type == TEXT then
-            table.insert(defined_private_function_variant_texts, defined_private_function_variant)
-          elseif statement.defined_csname.type == PATTERN then
-            defined_private_function_variant_pattern = (
-              defined_private_function_variant_pattern
-              + statement.defined_csname.payload
-              / defined_private_function_variant
-            )
-          else
-            error('Unexpected csname type "' .. statement.defined_csname.type .. '"')
-          end
+          table.insert(defined_private_function_variant_csnames, statement.defined_csname)
+          local private_function_variant_number = #defined_private_function_variant_byte_ranges
+          table.insert(defined_private_function_variant_texts, private_function_variant_number)
         end
       -- Process a function definition.
       elseif statement.type == FUNCTION_DEFINITION then
         -- Record the base control sequences used in indirect function definitions.
         if statement.subtype == FUNCTION_DEFINITION_INDIRECT then
-          maybe_used_csname_texts[statement.base_csname] = true
-          table.insert(indirect_definition_base_csnames, {statement.base_csname, byte_range})
+          if statement.base_csname.type == TEXT then
+            maybe_used_csname_texts[statement.base_csname.payload] = true
+            table.insert(indirect_definition_base_csname_texts, {statement.base_csname.payload, byte_range})
+          elseif statement.base_csname.type == PATTERN then
+            maybe_used_csname_pattern = (
+              maybe_used_csname_pattern
+              + #(statement.base_csname.payload * parsers.eof)
+              * lpeg.Cc(true)
+            )
+          else
+            error('Unexpected csname type "' .. statement.base_csname.type .. '"')
+          end
         end
         -- Record control sequence name usage and definitions.
-        maybe_defined_csname_texts[statement.defined_csname] = true
+        if statement.defined_csname.type == TEXT then
+          maybe_defined_csname_texts[statement.defined_csname.payload] = true
+          table.insert(defined_csname_texts, {statement.defined_csname.payload, byte_range})
+        end
         if statement.subtype == FUNCTION_DEFINITION_DIRECT and statement.replacement_text_number == nil then
           process_argument_tokens(statement.replacement_text_argument)
         end
         -- Record private function definition.
-        if statement.confidence == DEFINITELY and statement.is_private then
-          table.insert(defined_private_functions, {statement.defined_csname, byte_range})
+        if statement.defined_csname.type == TEXT and statement.is_private then
+          table.insert(defined_private_function_texts, {statement.defined_csname.payload, byte_range})
         end
+      -- Process a variable declaration.
+      elseif statement.type == VARIABLE_DECLARATION then
+        -- Record variable names.
+        table.insert(
+          declared_variable_csname_transcripts,
+          {statement.variable_type, statement.declared_csname.transcript, byte_range}
+        )
+        if statement.declared_csname.type == TEXT then
+          table.insert(
+            declared_defined_and_used_variable_csname_texts,
+            {statement.variable_type, statement.declared_csname.payload, byte_range}
+          )
+          table.insert(declared_variable_csname_texts, {statement.declared_csname.payload, byte_range})
+          maybe_declared_variable_csname_texts[statement.declared_csname.payload] = true
+        elseif statement.declared_csname.type == PATTERN then
+          maybe_declared_variable_csname_pattern = (
+            maybe_declared_variable_csname_pattern
+            + #(statement.declared_csname.payload * parsers.eof)
+            * lpeg.Cc(true)
+          )
+        else
+          error('Unexpected csname type "' .. statement.base_csname.type .. '"')
+        end
+      -- Process a variable or constant definition.
+      elseif statement.type == VARIABLE_DEFINITION then
+        -- Record variable names.
+        if statement.is_constant then
+          table.insert(
+            declared_variable_csname_transcripts,
+            {statement.variable_type, statement.defined_csname.transcript, byte_range}
+          )
+        else
+          table.insert(
+            defined_variable_csname_transcripts,
+            {statement.variable_type, statement.defined_csname.transcript, byte_range}
+          )
+          if statement.subtype == VARIABLE_DEFINITION_INDIRECT then
+            table.insert(
+              defined_variable_base_csname_transcripts,
+              {statement.base_variable_type, statement.base_csname.transcript, byte_range}
+            )
+          end
+        end
+        if statement.defined_csname.type == TEXT then
+          table.insert(
+            declared_defined_and_used_variable_csname_texts,
+            {statement.variable_type, statement.defined_csname.payload, byte_range})
+          table.insert(
+            defined_variable_csname_texts,
+            {statement.defined_csname.payload, byte_range}
+          )
+          if statement.is_constant then
+            maybe_declared_variable_csname_texts[statement.defined_csname.payload] = true
+            table.insert(
+              declared_variable_csname_texts,
+              {statement.defined_csname.payload, byte_range}
+            )
+          end
+        end
+        -- Record control sequence name usage and definitions.
+        if statement.subtype == VARIABLE_DEFINITION_DIRECT then
+          process_argument_tokens(statement.definition_text_argument)
+        end
+      -- Process a variable or constant use.
+      elseif statement.type == VARIABLE_USE then
+        -- Record variable names.
+        table.insert(
+          used_variable_csname_transcripts,
+          {statement.variable_type, statement.used_csname.transcript, byte_range}
+        )
+        if statement.used_csname.type == TEXT then
+          table.insert(
+            declared_defined_and_used_variable_csname_texts,
+            {statement.variable_type, statement.used_csname.payload, byte_range}
+          )
+          table.insert(used_variable_csname_texts, {statement.used_csname.payload, byte_range})
+          maybe_used_variable_csname_texts[statement.used_csname.payload] = true
+        elseif statement.used_csname.type == PATTERN then
+          maybe_used_variable_csname_pattern = (
+            maybe_used_variable_csname_pattern
+            + #(statement.used_csname.payload * parsers.eof)
+            * lpeg.Cc(true)
+          )
+        else
+          error('Unexpected csname type "' .. statement.defined_csname.type .. '"')
+        end
+      -- Process a message definition.
+      elseif statement.type == MESSAGE_DEFINITION then
+        -- Record message names.
+        local message_name = combine_module_and_message_names(statement.module_name, statement.message_name)
+        if message_name.type == TEXT then
+          maybe_defined_message_name_texts[message_name.payload] = true
+          table.insert(defined_message_name_texts, {message_name.payload, byte_range})
+        elseif message_name.type == PATTERN then
+          maybe_defined_message_name_pattern = (
+            maybe_defined_message_name_pattern
+            + #(message_name.payload * parsers.eof)
+            * lpeg.Cc(true)
+          )
+        else
+          error('Unexpected message name type "' .. message_name.type .. '"')
+        end
+        -- Record numbers of text parameters.
+        if message_name.type == TEXT then
+          if defined_message_nums_text_parameters[message_name.payload] == nil then
+            defined_message_nums_text_parameters[message_name.payload] = {
+              min = statement.num_text_parameters,
+              max = statement.num_text_parameters,
+            }
+          else
+            defined_message_nums_text_parameters[message_name.payload].min = math.min(
+              defined_message_nums_text_parameters[message_name.payload].min,
+              statement.num_text_parameters
+            )
+            defined_message_nums_text_parameters[message_name.payload].max = math.max(
+              defined_message_nums_text_parameters[message_name.payload].max,
+              statement.num_text_parameters
+            )
+          end
+        end
+        -- Record control sequence name usage and definitions.
+        process_argument_tokens(statement.text_argument)
+        if statement.more_text_argument ~= nil then
+          process_argument_tokens(statement.more_text_argument)
+        end
+      -- Process a message use.
+      elseif statement.type == MESSAGE_USE then
+        -- Record message names.
+        local message_name = combine_module_and_message_names(statement.module_name, statement.message_name)
+        if message_name.type == TEXT then
+          maybe_used_message_name_texts[message_name.payload] = true
+          table.insert(used_message_name_texts, {message_name.payload, byte_range})
+        elseif message_name.type == PATTERN then
+          maybe_used_message_name_pattern = (
+            maybe_used_message_name_pattern
+            + #(message_name.payload * parsers.eof)
+            * lpeg.Cc(true)
+          )
+        else
+          error('Unexpected message name type "' .. message_name.type .. '"')
+        end
+        -- Record numbers of text parameters.
+        if message_name.type == TEXT then
+          table.insert(used_message_nums_text_arguments, {message_name.payload, #statement.text_arguments, byte_range})
+        end
+        -- Record control sequence name usage and definitions.
+        for _, argument in ipairs(statement.text_arguments) do
+          process_argument_tokens(argument)
+        end
       -- Process an unrecognized statement.
       elseif statement.type == OTHER_STATEMENT then
         -- Record control sequence name usage and definitions.
@@ -970,17 +1661,18 @@
     end
   end
 
-  -- Finalize PEG patterns.
-  maybe_defined_csname_pattern = maybe_defined_csname_pattern * parsers.eof
-  maybe_used_csname_pattern = maybe_used_csname_pattern * parsers.eof
-  defined_private_function_variant_pattern = defined_private_function_variant_pattern * parsers.eof
+  --- Report issues apparent from the collected information.
+  local imported_prefixes = get_option('imported_prefixes', options, pathname)
+  local expl3_well_known_csname = parsers.expl3_well_known_csname(imported_prefixes)
+  local expl3_well_known_message_name = parsers.expl3_well_known_message_name(imported_prefixes)
 
-  --- Report issues apparent from the collected information.
   ---- Report unused private functions.
-  for _, defined_private_function in ipairs(defined_private_functions) do
-    local defined_csname, byte_range = table.unpack(defined_private_function)
-    if not maybe_used_csname_texts[defined_csname] and lpeg.match(maybe_used_csname_pattern, defined_csname) == nil then
-      issues:add('w401', 'unused private function', byte_range)
+  for _, defined_private_function_text in ipairs(defined_private_function_texts) do
+    local defined_csname, byte_range = table.unpack(defined_private_function_text)
+    if lpeg.match(expl3_well_known_csname, defined_csname) == nil
+        and not maybe_used_csname_texts[defined_csname]
+        and lpeg.match(maybe_used_csname_pattern, defined_csname) == nil then
+      issues:add('w401', 'unused private function', byte_range, format_csname(defined_csname))
     end
   end
 
@@ -989,42 +1681,42 @@
   for private_function_variant_number, _ in ipairs(defined_private_function_variant_byte_ranges) do
     used_private_function_variants[private_function_variant_number] = false
   end
-  for _, defined_private_function_variant in ipairs(defined_private_function_variant_texts) do
-    assert(defined_private_function_variant.csname.type == TEXT)
-    if maybe_used_csname_texts[defined_private_function_variant.csname.payload]
-        or lpeg.match(maybe_used_csname_pattern, defined_private_function_variant.csname.payload) ~= nil then
-      used_private_function_variants[defined_private_function_variant.number] = true
+  for _, private_function_variant_number in ipairs(defined_private_function_variant_texts) do
+    local csname = defined_private_function_variant_csnames[private_function_variant_number]
+    assert(csname.type == TEXT)
+    if maybe_used_csname_texts[csname.payload] or lpeg.match(maybe_used_csname_pattern, csname.payload) ~= nil then
+      used_private_function_variants[private_function_variant_number] = true
     end
   end
   for maybe_used_csname, _ in pairs(maybe_used_csname_texts) do
-    -- NOTE: Although we might want to also test whether "defined_private_function_variant_pattern" and
+    -- NOTE: Although we might want to also test whether "maybe_defined_private_function_variant_pattern" and
     -- "maybe_used_csname_pattern" overlap, intersection is undecidable for parsing expression languages (PELs). In
     -- theory, we could use regular expressions instead of PEG patterns, since intersection is decidable for regular
     -- languages. In practice, there are no Lua libraries that would implement the required algorithms. Therefore, it
     -- seems more practical to just accept that low-confidence function variant definitions and function uses don't
     -- interact, not just because of the technical difficulty but also because the combined confidence is just too low.
-    local defined_private_function_variant = lpeg.match(defined_private_function_variant_pattern, maybe_used_csname)
-    if defined_private_function_variant ~= nil then
-      assert(defined_private_function_variant.csname.type == PATTERN)
-      used_private_function_variants[defined_private_function_variant.number] = true
+    local private_function_variant_number = lpeg.match(maybe_defined_private_function_variant_pattern, maybe_used_csname)
+    if private_function_variant_number ~= nil then
+      local csname = defined_private_function_variant_csnames[private_function_variant_number]
+      assert(csname.type == PATTERN)
+      used_private_function_variants[private_function_variant_number] = true
     end
   end
   for private_function_variant_number, byte_range in ipairs(defined_private_function_variant_byte_ranges) do
+    local csname = defined_private_function_variant_csnames[private_function_variant_number]
+    assert(csname.type == TEXT or csname.type == PATTERN)
     if not used_private_function_variants[private_function_variant_number] then
-      issues:add('w402', 'unused private function variant', byte_range)
+      issues:add('w402', 'unused private function variant', byte_range, format_csname(csname.transcript))
     end
   end
 
-  local imported_prefixes = get_option('imported_prefixes', options, pathname)
-  local expl3_well_known_function_csname = parsers.expl3_well_known_function_csname(imported_prefixes)
-
   ---- Report function variants for undefined functions.
-  for _, variant_base_csname in ipairs(variant_base_csnames) do
-    local base_csname, byte_range = table.unpack(variant_base_csname)
-    if lpeg.match(expl3_well_known_function_csname, base_csname) == nil
+  for _, variant_base_csname_text in ipairs(variant_base_csname_texts) do
+    local base_csname, byte_range = table.unpack(variant_base_csname_text)
+    if lpeg.match(expl3_well_known_csname, base_csname) == nil
         and not maybe_defined_csname_texts[base_csname]
-        and not lpeg.match(maybe_defined_csname_pattern, base_csname) then
-      issues:add('e405', 'function variant for an undefined function', byte_range)
+        and lpeg.match(maybe_defined_csname_pattern, base_csname) == nil then
+      issues:add('e405', 'function variant for an undefined function', byte_range, format_csname(base_csname))
     end
   end
 
@@ -1032,24 +1724,185 @@
   for _, called_function_or_variant in ipairs(called_functions_and_variants) do
     local csname, byte_range = table.unpack(called_function_or_variant)
     if lpeg.match(parsers.expl3like_function_csname, csname) ~= nil
-        and lpeg.match(expl3_well_known_function_csname, csname) == nil
+        and lpeg.match(expl3_well_known_csname, csname) == nil
         and not maybe_defined_csname_texts[csname]
-        and not lpeg.match(maybe_defined_csname_pattern, csname) then
-      issues:add('e408', 'calling an undefined function', byte_range)
+        and lpeg.match(maybe_defined_csname_pattern, csname) == nil then
+      issues:add('e408', 'calling an undefined function', byte_range, format_csname(csname))
     end
   end
 
   ---- Report indirect function definitions from undefined base functions.
-  for _, indirect_definition_base_csname in ipairs(indirect_definition_base_csnames) do
-    local csname, byte_range = table.unpack(indirect_definition_base_csname)
+  for _, indirect_definition_base_csname_text in ipairs(indirect_definition_base_csname_texts) do
+    local csname, byte_range = table.unpack(indirect_definition_base_csname_text)
     if lpeg.match(parsers.expl3like_function_csname, csname) ~= nil
-        and lpeg.match(expl3_well_known_function_csname, csname) == nil
+        and lpeg.match(expl3_well_known_csname, csname) == nil
         and not maybe_defined_csname_texts[csname]
-        and not lpeg.match(maybe_defined_csname_pattern, csname) then
-      issues:add('e411', 'indirect function definition from an undefined function', byte_range)
+        and lpeg.match(maybe_defined_csname_pattern, csname) == nil then
+      issues:add('e411', 'indirect function definition from an undefined function', byte_range, format_csname(csname))
     end
   end
 
+  ---- Report malformed function names.
+  for _, defined_csname_text in ipairs(defined_csname_texts) do
+    local defined_csname, byte_range = table.unpack(defined_csname_text)
+    if (
+          lpeg.match(parsers.expl3like_csname, defined_csname) ~= nil
+          and lpeg.match(expl3_well_known_csname, defined_csname) == nil
+          and lpeg.match(parsers.expl3_function_csname, defined_csname) == nil
+        ) then
+      issues:add('s412', 'malformed function name', byte_range, format_csname(defined_csname))
+    end
+  end
+
+  ---- Report malformed variable and constant names.
+  for _, declared_defined_and_used_variable_csname_text in ipairs(declared_defined_and_used_variable_csname_texts) do
+    local variable_type, variable_csname, byte_range = table.unpack(declared_defined_and_used_variable_csname_text)
+    if variable_type == "quark" or variable_type == "scan" then
+      if lpeg.match(parsers.expl3_quark_or_scan_mark_csname, variable_csname) == nil then
+        issues:add('s414', 'malformed quark or scan mark name', byte_range, format_csname(variable_csname))
+      end
+    else
+      if (
+            lpeg.match(parsers.expl3like_csname, variable_csname) ~= nil
+            and lpeg.match(parsers.expl3_scratch_variable_csname, variable_csname) == nil
+            and lpeg.match(parsers.expl3_variable_or_constant_csname, variable_csname) == nil
+          ) then
+        issues:add('s413', 'malformed variable or constant', byte_range, format_csname(variable_csname))
+      end
+    end
+  end
+
+  ---- Report unused variables and constants.
+  for _, declared_variable_csname_text in ipairs(declared_variable_csname_texts) do
+    local variable_csname, byte_range = table.unpack(declared_variable_csname_text)
+    if (
+          lpeg.match(parsers.expl3like_csname, variable_csname) ~= nil
+          and not maybe_used_variable_csname_texts[variable_csname]
+          and lpeg.match(maybe_used_variable_csname_pattern, variable_csname) == nil
+          and not maybe_used_csname_texts[variable_csname]
+          and lpeg.match(maybe_used_csname_pattern, variable_csname) == nil
+        ) then
+      issues:add('w415', 'unused variable or constant', byte_range, format_csname(variable_csname))
+    end
+  end
+
+  ---- Report undeclared variables.
+  for _, defined_variable_csname_text in ipairs(defined_variable_csname_texts) do
+    local variable_csname, byte_range = table.unpack(defined_variable_csname_text)
+    if (
+          lpeg.match(parsers.expl3like_csname, variable_csname) ~= nil
+          and lpeg.match(expl3_well_known_csname, variable_csname) == nil
+          and lpeg.match(parsers.expl3_scratch_variable_csname, variable_csname) == nil
+          and not maybe_declared_variable_csname_texts[variable_csname]
+          and lpeg.match(maybe_declared_variable_csname_pattern, variable_csname) == nil
+        ) then
+      issues:add('w416', 'setting an undeclared variable', byte_range, format_csname(variable_csname))
+    end
+  end
+
+  ---- Report using undefined variables or constants.
+  for _, used_variable_csname_text in ipairs(used_variable_csname_texts) do
+    local variable_csname, byte_range = table.unpack(used_variable_csname_text)
+    if (
+          lpeg.match(parsers.expl3like_csname, variable_csname) ~= nil
+          and lpeg.match(expl3_well_known_csname, variable_csname) == nil
+          and lpeg.match(parsers.expl3_scratch_variable_csname, variable_csname) == nil
+          and not maybe_declared_variable_csname_texts[variable_csname]
+          and lpeg.match(maybe_declared_variable_csname_pattern, variable_csname) == nil
+        ) then
+      issues:add('w419', 'using an undeclared variable or constant', byte_range, format_csname(variable_csname))
+    end
+  end
+
+  ---- Report using variables and constants of incompatible types.
+  for _, declared_variable_csname_transcript in ipairs(declared_variable_csname_transcripts) do
+    local declaration_type, csname_transcript, byte_range = table.unpack(declared_variable_csname_transcript)
+    local csname_type = lpeg.match(parsers.expl3_variable_or_constant_csname_type, csname_transcript)
+    if csname_type ~= nil then
+      -- For declarations, we require that the declaration type <= the variable type.
+      -- For example, `\str_new:N \g_example_tl` is OK but `\tl_new:N \g_example_str` is not.
+      local subtype, supertype = declaration_type, csname_type
+      if not is_subtype(subtype, supertype) then
+        local context = string.format("!(%s <= %s)", subtype, supertype)
+        issues:add('t422', 'using a variable of an incompatible type', byte_range, context)
+      end
+    end
+  end
+  for _, defined_variable_csname_transcript in ipairs(defined_variable_csname_transcripts) do
+    local definition_type, csname_transcript, byte_range = table.unpack(defined_variable_csname_transcript)
+    local csname_type = lpeg.match(parsers.expl3_variable_or_constant_csname_type, csname_transcript)
+    if csname_type ~= nil then
+      -- For definitions, we require that the definition type <= the defined variable type.
+      -- For example, `\clist_gset:Nn \g_example_tl ...` is OK but `\tl_gset:Nn \g_example_clist ...` is not.
+      local subtype, supertype = definition_type, csname_type
+      if not is_subtype(subtype, supertype) then
+        local context = string.format("!(%s <= %s)", subtype, supertype)
+        issues:add('t422', 'using a variable of an incompatible type', byte_range, context)
+      end
+    end
+  end
+  for _, defined_variable_base_csname_transcript in ipairs(defined_variable_base_csname_transcripts) do
+    local definition_type, csname_transcript, byte_range = table.unpack(defined_variable_base_csname_transcript)
+    local csname_type = lpeg.match(parsers.expl3_variable_or_constant_csname_type, csname_transcript)
+    if csname_type ~= nil then
+      -- Additionally, for indirect definitions, we also require that the base variable type <= the definition type.
+      -- For example, `\tl_gset_eq:NN ... \g_example_str` is OK but `\str_gset_eq:NN ... \g_example_tl` is not.
+      local subtype, supertype = csname_type, definition_type
+      if not is_subtype(subtype, supertype) then
+        local context = string.format("!(%s <= %s)", subtype, supertype)
+        issues:add('t422', 'using a variable of an incompatible type', byte_range, context)
+      end
+    end
+  end
+  for _, used_variable_csname_transcript in ipairs(used_variable_csname_transcripts) do
+    local use_type, csname_transcript, byte_range = table.unpack(used_variable_csname_transcript)
+    local csname_type = lpeg.match(parsers.expl3_variable_or_constant_csname_type, csname_transcript)
+    -- For uses, we require a potential compatibility between the use type and the variable type.
+    -- For example, both `\str_count:N \g_example_tl` and `\tl_count:N \g_example_str` are OK.
+    if csname_type ~= nil and not is_maybe_compatible_type(use_type, csname_type) then
+      local context = string.format("!(%s ~= %s)", use_type, csname_type)
+      issues:add('t422', 'using a variable of an incompatible type', byte_range, context)
+    end
+  end
+
+  -- Report unused messages.
+  for _, defined_message_name_text in ipairs(defined_message_name_texts) do
+    local message_name_text, byte_range = table.unpack(defined_message_name_text)
+    if (
+          not maybe_used_message_name_texts[message_name_text]
+          and lpeg.match(maybe_used_message_name_pattern, message_name_text) == nil
+        ) then
+      issues:add('w423', 'unused message', byte_range, message_name_text)
+    end
+  end
+
+  -- Report using an undefined message.
+  for _, used_message_name_text in ipairs(used_message_name_texts) do
+    local message_name_text, byte_range = table.unpack(used_message_name_text)
+    if (
+          lpeg.match(expl3_well_known_message_name, message_name_text) == nil
+          and not maybe_defined_message_name_texts[message_name_text]
+          and lpeg.match(maybe_defined_message_name_pattern, message_name_text) == nil
+        ) then
+      issues:add('e424', 'using an undefined message', byte_range, message_name_text)
+    end
+  end
+
+  -- Report supplying incorrect numbers of arguments to a message.
+  for _, used_message_num_text_arguments in ipairs(used_message_nums_text_arguments) do
+    local message_name_text, num_arguments, byte_range = table.unpack(used_message_num_text_arguments)
+    local num_parameters = defined_message_nums_text_parameters[message_name_text]
+    if num_parameters ~= nil and (num_arguments < num_parameters.min or num_arguments > num_parameters.max) then
+      local context
+      if num_arguments < num_parameters.min then
+        context = string.format('%d < %d', num_arguments, num_parameters.min)
+      else
+        context = string.format('%d > %d', num_arguments, num_parameters.max)
+      end
+      issues:add('w426', 'incorrect number of arguments supplied to message', byte_range, context)
+    end
+  end
+
   -- Store the intermediate results of the analysis.
   results.statements = statements
   results.replacement_texts = replacement_texts
@@ -1057,6 +1910,8 @@
 
 return {
   csname_types = csname_types,
+  is_confused = is_confused,
+  name = "semantic analysis",
   process = semantic_analysis,
   statement_types = statement_types,
   statement_confidences = statement_confidences,

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-syntactic-analysis.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-syntactic-analysis.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-syntactic-analysis.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -1,5 +1,6 @@
 -- The syntactic analysis step of static analysis converts TeX tokens into a tree of function calls.
 
+local get_option = require("explcheck-config").get_option
 local lexical_analysis = require("explcheck-lexical-analysis")
 local ranges = require("explcheck-ranges")
 local parsers = require("explcheck-parsers")
@@ -8,6 +9,8 @@
 local get_token_byte_range = lexical_analysis.get_token_byte_range
 local is_token_simple = lexical_analysis.is_token_simple
 local token_types = lexical_analysis.token_types
+local format_token = lexical_analysis.format_token
+local format_tokens = lexical_analysis.format_tokens
 
 local new_range = ranges.new_range
 local range_flags = ranges.range_flags
@@ -52,6 +55,35 @@
   return text
 end
 
+-- Count the number of parameters in a replacement text.
+local function count_parameters_in_replacement_text(tokens, replacement_text_token_range)
+  if #replacement_text_token_range == 0 then
+    return 0
+  end
+  local max_parameter_number = 0
+  local token_number = replacement_text_token_range:start()
+  while token_number <= replacement_text_token_range:stop() do
+    local token = tokens[token_number]
+    local next_token_number = token_number + 1
+    if token.type == CHARACTER and token.catcode == 6 then  -- parameter
+      if next_token_number > replacement_text_token_range:stop() then  -- not followed by anything, the replacement text is invalid
+        break
+      end
+      local next_token = tokens[next_token_number]
+      if next_token.type == CHARACTER and next_token.catcode == 6 then  -- followed by another parameter
+        next_token_number = next_token_number + 1
+      elseif next_token.type == CHARACTER and lpeg.match(parsers.decimal_digit, next_token.payload) then  -- followed by a digit
+        local next_digit = tonumber(next_token.payload)
+        assert(next_digit ~= nil)
+        max_parameter_number = math.max(max_parameter_number, next_digit)
+        next_token_number = next_token_number + 1
+      end
+    end
+    token_number = next_token_number
+  end
+  return max_parameter_number
+end
+
 -- Transform parameter tokens in a replacement text.
 local function transform_replacement_text_tokens(content, tokens, issues, num_parameters, replacement_text_token_range)
   local deleted_token_numbers, transformed_tokens = {}, {}
@@ -90,7 +122,7 @@
           table.insert(deleted_token_numbers, next_token_number)
           next_token_number = next_token_number + 1
         else  -- an incorrect digit, the replacement text is invalid
-          issues:add('e304', 'unexpected parameter number', next_token.byte_range)
+          issues:add('e304', 'unexpected parameter number', next_token.byte_range, format_token(next_token, content))
           return nil
         end
       elseif next_token.type == ARGUMENT then  -- followed by a function call argument
@@ -144,6 +176,46 @@
   return transformed_tokens, map_back, map_forward
 end
 
+-- Determine whether the syntactic analysis step is too confused by the results
+-- of the previous steps to run.
+local function is_confused(pathname, results, options)
+  local format_percentage = require("explcheck-format").format_percentage
+  local evaluation = require("explcheck-evaluation")
+  local count_groupings = evaluation.count_groupings
+  local num_groupings, num_unclosed_groupings = count_groupings(results)
+  assert(num_groupings ~= nil and num_unclosed_groupings ~= nil)
+  if num_groupings > 0 then
+    local unclosed_grouping_ratio = num_unclosed_groupings / num_groupings
+    local min_unclosed_grouping_count = get_option('min_unclosed_grouping_count', options, pathname)
+    local min_unclosed_grouping_ratio = get_option('min_unclosed_grouping_ratio', options, pathname)
+    if num_unclosed_groupings >= min_unclosed_grouping_count and unclosed_grouping_ratio >= min_unclosed_grouping_ratio then
+      local reason = string.format(
+        "there were too many unclosed groupings (%s >= %s)",
+        format_percentage(100.0 * unclosed_grouping_ratio),
+        format_percentage(100.0 * min_unclosed_grouping_ratio)
+      )
+      return true, reason
+    end
+  end
+  local count_expl3_bytes = evaluation.count_expl3_bytes
+  local num_characters, num_invalid_characters = count_expl3_bytes(results), results.num_invalid_characters
+  assert(num_characters ~= nil and num_invalid_characters ~= nil)
+  if num_characters > 0 then
+    local invalid_character_ratio = num_invalid_characters / num_characters
+    local min_invalid_character_count = get_option('min_invalid_character_count', options, pathname)
+    local min_invalid_character_ratio = get_option('min_invalid_character_ratio', options, pathname)
+    if num_invalid_characters >= min_invalid_character_count and invalid_character_ratio >= min_invalid_character_ratio then
+      local reason = string.format(
+        "there were too many invalid characters (%s >= %s)",
+        format_percentage(100.0 * invalid_character_ratio),
+        format_percentage(100.0 * min_invalid_character_ratio)
+      )
+      return true, reason
+    end
+  end
+  return false
+end
+
 -- Extract function calls from TeX tokens and groupings.
 local function get_calls(tokens, transformed_tokens, token_range, map_back, map_forward, issues, groupings, content)
   local calls = {}
@@ -187,7 +259,7 @@
           if next_digit == num_parameters + 1 then  -- a correct digit, increment the number of parameters
             num_parameters = num_parameters + 1
           else  -- an incorrect digit, the parameter text is invalid
-            issues:add('e304', 'unexpected parameter number', next_token.byte_range)
+            issues:add('e304', 'unexpected parameter number', next_token.byte_range, format_token(next_token, content))
             return nil
           end
         elseif next_token.type == ARGUMENT then  -- followed by a function call argument
@@ -300,7 +372,7 @@
 
   while token_number <= transformed_token_range_end do
     local token = transformed_tokens[token_number]
-    local next_token, next_next_token, next_token_range
+    local next_token, next_next_token, next_token_range, context
     if token.type == CONTROL_SEQUENCE then  -- a control sequence
       local original_csname = token.payload
       local csname, next_token_number, ignored_token_number = normalize_csname(original_csname)
@@ -314,7 +386,12 @@
             for _, argument_token in argument.token_range:enumerate(transformed_tokens, map_forward) do
               if argument_token.type == CONTROL_SEQUENCE and
                   lpeg.match(parsers.expl3_maybe_unexpandable_csname, argument_token.payload) ~= nil then
-                issues:add('t305', 'expanding an unexpandable variable or constant', argument_token.byte_range)
+                issues:add(
+                  't305',
+                  'expanding an unexpandable variable or constant',
+                  argument_token.byte_range,
+                  format_token(argument_token, content)
+                )
               end
             end
           elseif argument.specifier == "v" then
@@ -321,7 +398,12 @@
             local argument_text = extract_text_from_tokens(argument.token_range, transformed_tokens, map_forward)
             if argument_text ~= nil and lpeg.match(parsers.expl3_maybe_unexpandable_csname, argument_text) ~= nil then
               local argument_byte_range = argument.token_range:new_range_from_subranges(get_token_byte_range(tokens), #content)
-              issues:add('t305', 'expanding an unexpandable variable or constant', argument_byte_range)
+              issues:add(
+                't305',
+                'expanding an unexpandable variable or constant',
+                argument_byte_range,
+                format_tokens(argument.outer_token_range or argument.token_range, tokens, content)
+              )
             end
           end
           table.insert(arguments, argument)
@@ -426,12 +508,14 @@
                   #transformed_tokens
                 )
                 if #next_token_range == 1 then  -- a single token, record it
-                    issues:add('w303', 'braced N-type function call argument', next_token.byte_range)
-                    record_argument({
-                      specifier = argument_specifier,
-                      token_range = new_range(next_grouping.start + 1, next_grouping.stop - 1, INCLUSIVE, #tokens),
-                    })
-                    next_token_number = map_forward(next_grouping.stop)
+                  context = format_tokens(new_range(next_grouping.start, next_grouping.stop, INCLUSIVE, #tokens), tokens, content)
+                  issues:add('w303', 'braced N-type function call argument', next_token.byte_range, context)
+                  record_argument({
+                    specifier = argument_specifier,
+                    token_range = new_range(next_grouping.start + 1, next_grouping.stop - 1, INCLUSIVE, #tokens),
+                    outer_token_range = new_range(next_grouping.start, next_grouping.stop, INCLUSIVE, #tokens),
+                  })
+                  next_token_number = map_forward(next_grouping.stop)
                 elseif #next_token_range == 2 and  -- two tokens
                     transformed_tokens[next_token_range:start()].type == CHARACTER and
                     transformed_tokens[next_token_range:start()].catcode == 6 and  -- a parameter
@@ -448,7 +532,8 @@
                     csname, next_token_number, ignored_token_number = original_csname, token_number + 1, nil
                     goto retry_control_sequence
                   else
-                    issues:add('e300', 'unexpected function call argument', next_token.byte_range)
+                    context = format_tokens(new_range(next_grouping.start, next_grouping.stop, INCLUSIVE, #tokens), tokens, content)
+                    issues:add('e300', 'unexpected function call argument', next_token.byte_range, context)
                     goto skip_other_token
                   end
                 end
@@ -495,17 +580,10 @@
                 end
                 goto skip_other_token
               else  -- a balanced text, record it
-                next_token_range = new_range(
-                  map_forward(next_grouping.start + 1),
-                  map_forward(next_grouping.stop - 1),
-                  INCLUSIVE + MAYBE_EMPTY,
-                  #transformed_tokens,
-                  map_back,
-                  #tokens
-                )
                 record_argument({
                   specifier = argument_specifier,
-                  token_range = next_token_range,
+                  token_range = new_range(next_grouping.start + 1, next_grouping.stop - 1, INCLUSIVE + MAYBE_EMPTY, #tokens),
+                  outer_token_range = new_range(next_grouping.start, next_grouping.stop, INCLUSIVE, #tokens),
                 })
                 next_token_number = map_forward(next_grouping.stop)
               end
@@ -529,7 +607,7 @@
                 end
               end
               -- an unbraced n-type argument, record it
-              issues:add('w302', 'unbraced n-type function call argument', next_token.byte_range)
+              issues:add('w302', 'unbraced n-type function call argument', next_token.byte_range, format_token(next_token, content))
               next_token_range = new_range(next_token_number, next_token_number, INCLUSIVE, #transformed_tokens, map_back, #tokens)
               record_argument({
                 specifier = argument_specifier,
@@ -600,9 +678,12 @@
 end
 
 return {
+  count_parameters_in_replacement_text = count_parameters_in_replacement_text,
   extract_text_from_tokens = extract_text_from_tokens,
   get_calls = get_calls,
   get_call_token_range = get_call_token_range,
+  is_confused = is_confused,
+  name = "syntactic analysis",
   process = syntactic_analysis,
   call_types = call_types,
   transform_replacement_text_tokens = transform_replacement_text_tokens,

Modified: trunk/Master/texmf-dist/scripts/expltools/explcheck-utils.lua
===================================================================
--- trunk/Master/texmf-dist/scripts/expltools/explcheck-utils.lua	2025-08-18 08:48:03 UTC (rev 76082)
+++ trunk/Master/texmf-dist/scripts/expltools/explcheck-utils.lua	2025-08-18 19:01:40 UTC (rev 76083)
@@ -48,17 +48,41 @@
 -- Run all processing steps.
 local function process_with_all_steps(pathname, content, issues, analysis_results, options)
   local get_option = require("explcheck-config").get_option
-  local preprocessing = require("explcheck-preprocessing")
-  local lexical_analysis = require("explcheck-lexical-analysis")
-  local syntactic_analysis = require("explcheck-syntactic-analysis")
-  local semantic_analysis = require("explcheck-semantic-analysis")
-  local steps = {preprocessing, lexical_analysis, syntactic_analysis, semantic_analysis}
-  for _, step in ipairs(steps) do
+  local fail_fast = get_option('fail_fast', options, pathname)
+  local stop_after = get_option('stop_after', options, pathname)
+  local stop_early_when_confused = get_option('stop_early_when_confused', options, pathname)
+  local step_filenames = {'preprocessing', 'lexical-analysis', 'syntactic-analysis', 'semantic-analysis'}
+  for step_number, step_filename in ipairs(step_filenames) do
+    local step = require(string.format('explcheck-%s', step_filename))
+    -- If a processing step is confused, skip it and all following steps.
+    if stop_early_when_confused then
+      local is_confused, reason = step.is_confused(pathname, analysis_results, options)
+      if is_confused then
+        assert(reason ~= nil)
+        analysis_results.stopped_early = {
+          when = string.format("before the %s", step.name),
+          reason = reason,
+        }
+        break
+      end
+    end
     step.process(pathname, content, issues, analysis_results, options)
     -- If a processing step ended with error, skip all following steps.
-    if #issues.errors > 0 and get_option('fail_fast', options, pathname) then
-      return
+    if step_number < #step_filenames and fail_fast and #issues.errors > 0 then
+      analysis_results.stopped_early = {
+        when = string.format("after %s", step.name),
+        reason = "it ended with errors and the option `fail_fast` was enabled",
+      }
+      break
     end
+    -- If a processing step is supposed to be the last step, skip all following steps.
+    if step_number < #step_filenames and (stop_after == step_filename or stop_after == step.name) then
+      analysis_results.stopped_early = {
+        when = string.format("after %s", step.name),
+        reason = "that was the final step according to the option `stop_after`",
+      }
+      break
+    end
   end
 end
 



More information about the tex-live-commits mailing list.