texlive[60194] Master/texmf-dist: lua-uni-algos (8aug21)

commits+karl at tug.org commits+karl at tug.org
Sun Aug 8 22:55:17 CEST 2021


Revision: 60194
          http://tug.org/svn/texlive?view=revision&revision=60194
Author:   karl
Date:     2021-08-08 22:55:17 +0200 (Sun, 08 Aug 2021)
Log Message:
-----------
lua-uni-algos (8aug21)

Modified Paths:
--------------
    trunk/Master/texmf-dist/doc/luatex/lua-uni-algos/README.md
    trunk/Master/texmf-dist/doc/luatex/lua-uni-algos/lua-uni-algos.pdf
    trunk/Master/texmf-dist/doc/luatex/lua-uni-algos/lua-uni-algos.tex
    trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-algos.lua
    trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-case.lua
    trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-graphemes.lua
    trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua
    trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-parse.lua

Modified: trunk/Master/texmf-dist/doc/luatex/lua-uni-algos/README.md
===================================================================
--- trunk/Master/texmf-dist/doc/luatex/lua-uni-algos/README.md	2021-08-08 20:55:00 UTC (rev 60193)
+++ trunk/Master/texmf-dist/doc/luatex/lua-uni-algos/README.md	2021-08-08 20:55:17 UTC (rev 60194)
@@ -1,12 +1,12 @@
 # The lua-uni-algos Package
 
-Version: v0.3
+Version: v0.4
 
-Date: 2021-07-05
+Date: 2021-08-08
 
 Author: Marcel Krüger
 
-License: LPPL v1.3c
+License: LPPL v1.3
 
 A collection of small Lua modules implementing some of the most generic Unicode algorithms for use with LuaTeX.
 This package tries to reduce duplicated work by collecting a set of small utilities which can be useful for many LuaTeX packages dealing with Unicode strings.

Modified: trunk/Master/texmf-dist/doc/luatex/lua-uni-algos/lua-uni-algos.pdf
===================================================================
(Binary files differ)

Modified: trunk/Master/texmf-dist/doc/luatex/lua-uni-algos/lua-uni-algos.tex
===================================================================
--- trunk/Master/texmf-dist/doc/luatex/lua-uni-algos/lua-uni-algos.tex	2021-08-08 20:55:00 UTC (rev 60193)
+++ trunk/Master/texmf-dist/doc/luatex/lua-uni-algos/lua-uni-algos.tex	2021-08-08 20:55:17 UTC (rev 60194)
@@ -2,7 +2,7 @@
 \usepackage{doc, shortvrb, metalogo, hyperref, fontspec}
 % \setmainfont{Noto Serif}
 % \setmonofont{FreeMono}
-\title{Unicode algorithms for Lua\TeX\thanks{This document corresponds to \pkg{lua-uni-algos} v0.3.}}
+\title{Unicode algorithms for Lua\TeX\thanks{This document corresponds to \pkg{lua-uni-algos} v0.4.}}
 \author{Marcel Krüger\thanks{E-Mail: \href{mailto:tex at 2krueger.de}{\nolinkurl{tex at 2krueger.de}}}}
 \MakeShortVerb\|
 \newcommand\pkg{\texttt}
@@ -51,12 +51,18 @@
 (This example is shown in Latin Modern Mono which has the (for this purpose) very useful property of not handling combining character very well.
 In a well-behaving font, the `...C` and `...D` lines should look the same.)
 
-Additionally for NFC direct normalization of Lua\TeX\ node lists is supported.
+Additionally, direct normalization of Lua\TeX\ node lists is supported.
 There are two functions |normalize.node.NFC| and |normalize.direct.NFC| taking upto four parameters: The first parameter is the head of the node list to be converted.
 The second parameter is the font id of the affected character nodes. Only non-protected glyph nodes of the specified font will be normalized. Pass |nil| for the font
 to normalize without respecting the font in the process. The third parameter is an optional table. If it is not |nil|, normalization is supressed if it might add glyph
 which map to |false| (or |nil|) in this table. If the forth argument is |true|, normalization will never join two glyph nodes with different attributes.
 
+For NFD and NFKD equivalent functions exist without the last parameter (since they never compose nodes, they never have to deal with composing nodes with different
+attributes).
+
+NFKC is not supported for node list normalization since the author is not convinced that there is any use case for it. (Probably there isn't any use case for node list
+NFKD normalization either, but that was easy to implement while NFKC would need separate data tables.)
+
 \section{Case folding}
 For case folding load the Lua module |lua-uni-case|.
 You can either load it directly with

Modified: trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-algos.lua
===================================================================
--- trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-algos.lua	2021-08-08 20:55:00 UTC (rev 60193)
+++ trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-algos.lua	2021-08-08 20:55:17 UTC (rev 60194)
@@ -1,5 +1,5 @@
 -- lua-uni-algos.lua
--- Copyright 2020 Marcel Krüger
+-- Copyright 2020--2021 Marcel Krüger
 --
 -- This work may be distributed and/or modified under the
 -- conditions of the LaTeX Project Public License, either version 1.3

Modified: trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-case.lua
===================================================================
--- trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-case.lua	2021-08-08 20:55:00 UTC (rev 60193)
+++ trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-case.lua	2021-08-08 20:55:17 UTC (rev 60194)
@@ -1,5 +1,5 @@
 -- lua-uni-graphemes.lua
--- Copyright 2020 Marcel Krüger
+-- Copyright 2020--2021 Marcel Krüger
 --
 -- This work may be distributed and/or modified under the
 -- conditions of the LaTeX Project Public License, either version 1.3

Modified: trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-graphemes.lua
===================================================================
--- trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-graphemes.lua	2021-08-08 20:55:00 UTC (rev 60193)
+++ trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-graphemes.lua	2021-08-08 20:55:17 UTC (rev 60194)
@@ -1,5 +1,5 @@
 -- lua-uni-graphemes.lua
--- Copyright 2020 Marcel Krüger
+-- Copyright 2020--2021 Marcel Krüger
 --
 -- This work may be distributed and/or modified under the
 -- conditions of the LaTeX Project Public License, either version 1.3

Modified: trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua
===================================================================
--- trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua	2021-08-08 20:55:00 UTC (rev 60193)
+++ trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua	2021-08-08 20:55:17 UTC (rev 60194)
@@ -1,5 +1,5 @@
 -- lua-uni-normalize.lua
--- Copyright 2020 Marcel Krüger
+-- Copyright 2020--2021 Marcel Krüger
 --
 -- This work may be distributed and/or modified under the
 -- conditions of the LaTeX Project Public License, either version 1.3
@@ -21,8 +21,10 @@
 local codes = utf8.codes
 local unpack = table.unpack
 
-kpse.set_program_name'kpsewhich'
-local ccc, composition_mapping, decomposition_mapping, compatibility_mapping do
+if tex.initialize then
+  kpse.set_program_name'kpsewhich'
+end
+local ccc, composition_mapping, decomposition_mapping, compatibility_mapping, nfc_qc do
   local function doubleset(ts, key, v1, kind, v2)
     ts[1][key] = v1
     ts[3][key] = v2
@@ -73,7 +75,31 @@
     [0x1D1BF] = true, [0x1D1C0] = true,
   }
 
+  -- We map for NFC_QC:
+  --   No -> false
+  --   Maybe -> true
+  --   Yes -> nil
+  -- since Yes should be the default.
+  nfc_qc = {}
   for cp, decomp in next, decomposition_mapping do
+    if ccc[cp] or ccc[decomp[1]] then
+      nfc_qc[cp] = false
+    elseif #decomp == 1 then
+      nfc_qc[cp] = false
+    elseif composition_exclusions[cp] then
+      nfc_qc[cp] = false
+    else
+      nfc_qc[decomp[2]] = true
+    end
+  end
+  for i=0x1161, 0x1175 do
+    nfc_qc[i] = true
+  end
+  for i=0x11A8, 0x11C2 do
+    nfc_qc[i] = true
+  end
+
+  for cp, decomp in next, decomposition_mapping do
     if #decomp > 1 and not (composition_exclusions[cp] or ccc[decomp[1]]) then
       local mapping = composition_mapping[decomp[1]]
       if not mapping then
@@ -144,6 +170,25 @@
   for cp, decomp in next, compatibility_mapping do
     fixup_decomp(cp, decomp)
   end
+
+  --[[ To verify that nfc_qc is correctly generated
+  local ref_nfc_qc = p.parse_file('DerivedNormalizationProps', l.Cf(
+    l.Ct'' * (
+      l.Cg(p.fields(p.codepoint_range,
+                    'NFC_QC',
+                    'N' * l.Cc(false) + 'M' * l.Cc(true))) + p.ignore_line
+    )^0 * -1, p.multiset))
+  for k,v in next, ref_nfc_qc do
+    if nfc_qc[k] ~= v then
+      print('MISMATCH1', k, v, nfc_qc[k])
+    end
+  end
+  for k,v in next, nfc_qc do
+    if ref_nfc_qc[k] ~= v then
+      print('MISMATCH2', k, v)
+    end
+  end
+  ]]
 end
 
 local function ccc_reorder(codepoints, i, j, k)
@@ -270,81 +315,111 @@
   return to_nfc_generic(s, compatibility_mapping)
 end
 
+if tex.initialize then
+  return {
+    NFD = to_nfd,
+    NFC = to_nfc,
+    NFKD = to_nfkd,
+    NFKC = to_nfkc,
+  }
+end
+
+local direct = node.direct
+local node_new = direct.new
+local node_copy = direct.copy
+local is_char = direct.is_char
+local setchar = direct.setchar
+local insert_after = direct.insert_after
+local insert_before = direct.insert_before
+local getnext = direct.getnext
+local remove = direct.remove
+local free = direct.free
+local getattrlist = direct.getattributelist
+local getprev = direct.getprev
+local setprev = direct.setprev
+local getboth = direct.getboth
+local setlink = direct.setlink
+
 -- allowed_characters only works reliably if it's closed under canonical decomposition mappings
 -- but it should fail in reasonable ways as long as it's at least closed under full canonical decompositions
+--
+-- This could be adapted to NFKC as above except that we would either need to handle Hangul syllables
+-- while iterating over starter_decomposition or adapt ~5 entries in compatibility_mapping to not decompose the syllables.
+-- We don't do this currently since I don't see a usecase for NFKC normalized nodes.
 local function nodes_to_nfc(head, f, allowed_characters, preserve_attr)
   if not head then return head end
-  local tmp_node = node.new'temp'
+  local tmp_node = node_new'temp'
   -- This is more complicated since we want to ensure that nodes (including their attributes and properties) are preserved whenever possible
   --
   -- We use three passes:
-  -- 1. Decompose composition exclusions etc.
+  -- 1&2. Decompose everything with NFC_Quick_Check == No and reorder marks
+  local last_ccc
   local n = head
+  local prev = getprev(head)
+  setlink(tmp_node, head)
+  local require_work
   while n do
-    local char = node.is_char(n, f)
+    local char = is_char(n, f)
     if char then
-      local decomposed = decomposition_mapping[char]
-      if decomposed then
-        local compose_lookup = composition_mapping[decomposed[1]]
-        if not (compose_lookup and compose_lookup[decomposed[2]]) then
-          local available = true
-          if allowed_characters then
-            -- This is probably buggy for werd fonts
-            for i=1, #decomposed do
-              if not allowed_characters[decomposed[i]] then
-                available = false
-                break
-              end
+      local qc = nfc_qc[char]
+      if qc == false then
+        local decomposed = decomposition_mapping[char]
+        local available = true
+        if allowed_characters then
+          -- This is probably buggy for weird fonts
+          for i=1, #decomposed do
+            if not allowed_characters[decomposed[i]] then
+              available = false
+              break
             end
           end
-          if available then
-            -- Here we never want to compose again, so we can decompose directly
-            n.char = decomposed[1]
-            for i=2, #decomposed do
-              local nn = node.copy(n)
-              nn.char = decomposed[i]
-              node.insert_after(head, n, nn)
-              n = nn
-            end
+        end
+        if available then
+          -- Here we never want to compose again, so we can decompose directly
+          local n = n
+          char = decomposed[1]
+          qc = nfc_qc[char]
+          setchar(n, char)
+          for i=2, #decomposed do
+            local nn = node_copy(n)
+            setchar(nn, decomposed[i])
+            insert_after(head, n, nn)
+            n = nn
           end
         end
       end
-    end
-    n = n.next
-  end
-  -- 2. Reorder marks
-  local last_ccc
-  n = head
-  local prev = head.prev
-  tmp_node.next, head.prev = head, tmp_node
-  while n do
-    local char = node.is_char(n, f)
-    if char then
+      -- Now reorder marks. The goal here is to reduce the overhead
+      -- in the common case that no reordering is needed
       local this_ccc = ccc[char]
       if last_ccc and this_ccc and last_ccc > this_ccc then
         local nn = n
         while nn ~= tmp_node do
-          nn = nn.prev
-          local nn_char = node.is_char(nn, f)
+          nn = getprev(nn)
+          local nn_char = is_char(nn, f)
           if not nn_char then break end
           local nn_ccc = ccc[nn_char]
           if not nn_ccc or nn_ccc <= this_ccc then break end
         end
-        local before, after = n.prev, n.next
-        node.insert_after(head, nn, n)
-        before.next = after
-        if after then after.prev = before end
+        local before, after = getboth(n)
+        insert_after(head, nn, n)
+        setlink(before, after)
         n = after
       else
-        n = n.next
+        n = getnext(n)
         last_ccc = this_ccc
       end
+      require_work = require_work or qc
     else
-      n = n.next
+      n = getnext(n)
       last_ccc = nil
     end
   end
-  head, head.prev = tmp_node.next, prev
+  head = getnext(tmp_node)
+  setprev(head, prev)
+  if not require_work then
+    free(tmp_node)
+    return head
+  end
   -- 3. The rest: Maybe decompose and then compose again
   local starter_n, starter, lookup
   local starter_decomposition
@@ -352,20 +427,21 @@
   local i -- index into starter_decomposition
   local i_ccc
   n = head
-  node.insert_after(head, nil, tmp_node)
+  insert_after(head, nil, tmp_node)
   repeat
-    local char = node.is_char(n, f)
+    local char = is_char(n, f)
     local this_ccc = ccc[char] or 300
+    local is_composed -- Did we generate char through composition?
     while i and i_ccc <= this_ccc do
       local new_starter = lookup and lookup[starter_decomposition[i]]
       if new_starter and (not allowed_characters or allowed_characters[new_starter]) then
         starter = new_starter
-        starter_n.char = starter
+        setchar(starter_n, starter)
         lookup = composition_mapping[starter]
       else
-        local nn = node.copy(starter_n)
-        nn.char = starter_decomposition[i]
-        node.insert_before(head, n, nn)
+        local nn = node_copy(starter_n)
+        setchar(nn, starter_decomposition[i])
+        insert_before(head, n, nn)
         last_ccc = i_ccc
       end
       i = i + 1
@@ -379,13 +455,14 @@
     if char then
       if lookup and (this_ccc == 300) == (this_ccc == last_ccc) then
         local new_starter = lookup[char]
-        if new_starter and (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or starter_n.attr == n.attr) then
-          local last = n.prev
-          node.remove(head, n)
-          node.free(n)
+        if new_starter and (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or getattrlist(starter_n) == getattrlist(n)) then
+          local last = getprev(n)
+          remove(head, n)
+          free(n)
           n = last
           starter = new_starter
-          starter_n.char, char = starter, starter
+          setchar(starter_n, starter)
+          char, is_composed = starter, true
           lookup = composition_mapping[starter]
         else
           last_ccc = this_ccc
@@ -394,21 +471,23 @@
       elseif not lookup and this_ccc == 300 and last_ccc == 300 then
         if starter >= 0x1100 and starter <= 0x1112 and char >= 0x1161 and char <= 0x1175 then -- L + V -> LV
           local new_starter = ((starter - 0x1100) * 21 + char - 0x1161) * 28 + 0xAC00
-          if (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or starter_n.attr == n.attr) then
-            node.remove(head, n)
-            node.free(n)
-            starter = starter
-            starter_n.char, char = starter, starter
+          if (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or getattrlist(starter_n) == getattrlist(n)) then
+            remove(head, n)
+            free(n)
+            starter = new_starter
+            setchar(starter_n, starter)
+            char, is_composed = starter, true
             lookup = composition_mapping[starter]
             n = starter_n
           end
         elseif char >= 0x11A8 and char <= 0x11C2 and starter >= 0xAC00 and starter <= 0xD7A3 and (starter-0xAC00) % 28 == 0 then -- LV + T -> LVT
           local new_starter = starter + char - 0x11A7
-          if (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or starter_n.attr == n.attr) then
-            node.remove(head, n)
-            node.free(n)
+          if (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or getattrlist(starter_n) == getattrlist(n)) then
+            remove(head, n)
+            free(n)
             starter = new_starter
-            starter_n.char, char = starter, starter
+            setchar(starter_n, starter)
+            char, is_composed = starter, true
             lookup = composition_mapping[starter]
             n = starter_n
           end
@@ -418,39 +497,142 @@
       end
       if this_ccc == 300 then
         starter_n = n
-        starter_decomposition = decomposition_mapping[char]
-        if allowed_characters and starter_decomposition then
-          for i=1, #starter_decomposition do
-            if not allowed_characters[starter_decomposition[i]] then
-              starter_decomposition = nil
-              break
+        if is_composed then -- If we just composed starter, we don't want to decompose it again
+          starter = char
+        else
+          starter_decomposition = decomposition_mapping[char]
+          if allowed_characters and starter_decomposition then
+            for i=1, #starter_decomposition do
+              if not allowed_characters[starter_decomposition[i]] then
+                starter_decomposition = nil
+                break
+              end
             end
           end
+          starter = starter_decomposition and starter_decomposition[1] or char
+          setchar(starter_n, starter)
+          if starter_decomposition then
+            i, i_ccc = 2, ccc[starter_decomposition[2]] or 300
+          else
+            i, i_ccc = nil
+          end
         end
-        starter = starter_decomposition and starter_decomposition[1] or char
-        starter_n.char = starter
         lookup = composition_mapping[starter]
-        if starter_decomposition then
-          i, i_ccc = 2, ccc[starter_decomposition[2]] or 300
-        else
-          i, i_ccc = nil
-        end
       end
     else
       starter, lookup, last_ccc, last_decomposition, i, i_ccc = nil
     end
     if n == tmp_node then
-      node.remove(head, tmp_node)
+      remove(head, tmp_node)
       break
     end
-    n = n.next
+    n = getnext(n)
   until false
-  node.free(tmp_node)
+  free(tmp_node)
   return head
 end
 
-local todirect, tonode = node.direct.todirect, node.direct.tonode
+-- This is almost the same as the first loop from nodes_to_nfc, just without checking for NFC_QC and decomposing everything instead.
+-- Also we have to decompose Hangul syllables.
+-- No preserve_attr parameter since we never compose.
+local function nodes_to_nfd_generic(decomposition_mapping, head, f, allowed_characters)
+  if not head then return head end
+  local tmp_node = node_new'temp'
+  -- This is more complicated since we want to ensure that nodes (including their attributes and properties) are preserved whenever possible
+  --
+  -- We use three passes:
+  -- 1&2. Decompose everything with NFC_Quick_Check == No and reorder marks
+  local last_ccc
+  local n = head
+  local prev = getprev(head)
+  setlink(tmp_node, head)
+  while n do
+    local char = is_char(n, f)
+    if char then
+      local decomposed = decomposition_mapping[char]
+      if decomposed then
+        local available = true
+        if allowed_characters then
+          -- This is probably buggy for weird fonts
+          for i=1, #decomposed do
+            if not allowed_characters[decomposed[i]] then
+              available = false
+              break
+            end
+          end
+        end
+        if available then
+          local n = n
+          char = decomposed[1]
+          setchar(n, char)
+          for i=2, #decomposed do
+            local nn = node_copy(n)
+            setchar(nn, decomposed[i])
+            insert_after(head, n, nn)
+            n = nn
+          end
+        end
+      elseif char >= 0xAC00 and char <= 0xD7A3 then -- Hangul clusters. In this case we update n since we never need to reorder them anyway
+        local c = char - 0xAC00
+        local t = 0x11A7 + c % 28
+        c = c // 28
+        local l = 0x1100 + c // 21
+        local v = 0x1161 + c % 21
+        if not allowed_characters or (allowed_characters[l] and allowed_characters[v] and (t == 0x11A7 or allowed_characters[t])) then
+          setchar(n, l)
+          local nn = node_copy(n)
+          setchar(nn, v)
+          insert_after(head, n, nn)
+          n = nn
+          char = v
+          if t ~= 0x11A7 then
+            nn = node_copy(n)
+            setchar(nn, t)
+            insert_after(head, n, nn)
+            n = nn
+            char = t
+          end
+        end
+      end
+      -- Now reorder marks. The goal here is to reduce the overhead
+      -- in the common case that no reordering is needed
+      local this_ccc = ccc[char]
+      if last_ccc and this_ccc and last_ccc > this_ccc then
+        local nn = n
+        while nn ~= tmp_node do
+          nn = getprev(nn)
+          local nn_char = is_char(nn, f)
+          if not nn_char then break end
+          local nn_ccc = ccc[nn_char]
+          if not nn_ccc or nn_ccc <= this_ccc then break end
+        end
+        local before, after = getboth(n)
+        insert_after(head, nn, n)
+        setlink(before, after)
+        n = after
+      else
+        n = getnext(n)
+        last_ccc = this_ccc
+      end
+    else
+      n = getnext(n)
+      last_ccc = nil
+    end
+  end
+  head = getnext(tmp_node)
+  setprev(head, prev)
+  free(tmp_node)
+  return head
+end
+local function nodes_to_nfd(head, f, allowed_characters)
+  return nodes_to_nfd_generic(decomposition_mapping, head, f, allowed_characters)
+end
+local function nodes_to_nfkd(head, f, allowed_characters)
+  return nodes_to_nfd_generic(compatibility_mapping, head, f, allowed_characters)
+end
 
+local todirect, tonode = direct.todirect, direct.tonode
+
 return {
   NFD = to_nfd,
   NFC = to_nfc,
@@ -457,10 +639,14 @@
   NFKD = to_nfkd,
   NFKC = to_nfkc,
   node = {
-    NFC = nodes_to_nfc,
+    NFC = function(head, ...) return tonode(nodes_to_nfc(todirect(head), ...)) end,
+    NFD = function(head, ...) return tonode(nodes_to_nfd(todirect(head), ...)) end,
+    NFKD = function(head, ...) return tonode(nodes_to_nfkd(todirect(head), ...)) end,
   },
   direct = {
-    NFC = function(head, ...) return todirect(nodes_to_nfc(tonode(head), ...)) end,
+    NFC = nodes_to_nfc,
+    NFD = nodes_to_nfd,
+    NFKD = nodes_to_nfkd,
   },
 }
 -- print(require'inspect'{to_nfd{0x1E0A}, to_nfc{0x1E0A}})

Modified: trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-parse.lua
===================================================================
--- trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-parse.lua	2021-08-08 20:55:00 UTC (rev 60193)
+++ trunk/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-parse.lua	2021-08-08 20:55:17 UTC (rev 60194)
@@ -1,5 +1,5 @@
 -- lua-uni-parse.lua
--- Copyright 2020 Marcel Krüger
+-- Copyright 2020--2021 Marcel Krüger
 --
 -- This work may be distributed and/or modified under the
 -- conditions of the LaTeX Project Public License, either version 1.3



More information about the tex-live-commits mailing list.