
Use lexer for edit's syntax highlighting

This is slightly more accurate for long strings and comments. Note that
we still work a line at a time (and in a non-incremental manner), so
this doesn't actually support multi-line strings (#1396).

We do now treat goto as a keyword (fixes #1653). We don't currently
support labels — those *technically* aren't a token (`:: foo --[[ a
comment ]] ::` is a valid label!), but maybe we could special-case the
short `::foo::` form.
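
To make the label caveat concrete: a short-form special case could
pattern-match `::name::` directly and leave everything else to the normal
token stream. A hypothetical sketch, not part of this commit (`try_label`
is an invented name):

```
-- Hypothetical sketch: recognise only the short "::name::" form, with no
-- whitespace or comments between the colons and the name.
local function try_label(line, pos)
    local label = line:match("^::[%a_][%w_]*::", pos)
    if label then return pos + #label - 1 end -- position of the final ":"
    return nil -- not a short label; lex "::" as ordinary tokens
end
```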
Jonathan Coates
2025-06-15 13:18:45 +01:00
parent ff363dca5a
commit 69353a4fcf
3 changed files with 53 additions and 77 deletions

View File

@@ -238,7 +238,7 @@ local function lex_token(context, str, pos)
         if end_pos then return tokens.STRING, end_pos end
 
         context.report(errors.unfinished_long_string, pos, boundary_pos, boundary_pos - pos)
-        return tokens.ERROR, #str
+        return tokens.STRING, #str
     elseif pos + 1 == boundary_pos then -- Just a "["
         return tokens.OSQUARE, pos
     else -- Malformed long string, for instance "[="
@@ -260,7 +260,7 @@ local function lex_token(context, str, pos)
         if end_pos then return tokens.COMMENT, end_pos end
 
         context.report(errors.unfinished_long_comment, pos, boundary_pos, boundary_pos - comment_pos)
-        return tokens.ERROR, #str
+        return tokens.COMMENT, #str
     end
 end
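
The effect of both hunks: an unfinished long string or comment is still
reported as an error, but the returned token now carries its real kind, so
consumers such as the highlighter below can colour the rest of the line
sensibly. A minimal sketch of the new behaviour, assuming only the modules
and signatures that appear elsewhere in this commit:

```
-- Minimal sketch: lex the unfinished long string from the spec example
-- below ("return [["), discarding error reports as the edit program does.
local tokens = require "cc.internal.syntax.parser".tokens
local lex_one = require "cc.internal.syntax.lexer".lex_one

local context = { line = function() end, report = function() end }

-- Position 8 is the opening "[[" of the unfinished long string.
local token, _, finish = lex_one(context, "return [[", 8)
assert(token == tokens.STRING) -- previously tokens.ERROR
assert(finish == 9)            -- the token now spans to the end of the input
```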

View File

@@ -175,86 +175,62 @@ local function save(_sPath, fWrite)
     return ok, err, fileerr
 end
 
-local tKeywords = {
-    ["and"] = true,
-    ["break"] = true,
-    ["do"] = true,
-    ["else"] = true,
-    ["elseif"] = true,
-    ["end"] = true,
-    ["false"] = true,
-    ["for"] = true,
-    ["function"] = true,
-    ["if"] = true,
-    ["in"] = true,
-    ["local"] = true,
-    ["nil"] = true,
-    ["not"] = true,
-    ["or"] = true,
-    ["repeat"] = true,
-    ["return"] = true,
-    ["then"] = true,
-    ["true"] = true,
-    ["until"] = true,
-    ["while"] = true,
+local tokens = require "cc.internal.syntax.parser".tokens
+local lex_one = require "cc.internal.syntax.lexer".lex_one
+
+local token_colours = {
+    [tokens.STRING] = stringColour,
+    [tokens.COMMENT] = commentColour,
+    -- Keywords
+    [tokens.AND] = keywordColour,
+    [tokens.BREAK] = keywordColour,
+    [tokens.DO] = keywordColour,
+    [tokens.ELSE] = keywordColour,
+    [tokens.ELSEIF] = keywordColour,
+    [tokens.END] = keywordColour,
+    [tokens.FALSE] = keywordColour,
+    [tokens.FOR] = keywordColour,
+    [tokens.FUNCTION] = keywordColour,
+    [tokens.GOTO] = keywordColour,
+    [tokens.IF] = keywordColour,
+    [tokens.IN] = keywordColour,
+    [tokens.LOCAL] = keywordColour,
+    [tokens.NIL] = keywordColour,
+    [tokens.NOT] = keywordColour,
+    [tokens.OR] = keywordColour,
+    [tokens.REPEAT] = keywordColour,
+    [tokens.RETURN] = keywordColour,
+    [tokens.THEN] = keywordColour,
+    [tokens.TRUE] = keywordColour,
+    [tokens.UNTIL] = keywordColour,
+    [tokens.WHILE] = keywordColour,
 }
+-- Fill in the remaining tokens.
+for _, token in pairs(tokens) do
+    if not token_colours[token] then token_colours[token] = textColour end
+end
 
-local function tryWrite(sLine, regex, colour)
-    local match = string.match(sLine, regex)
-    if match then
-        if type(colour) == "number" then
-            term.setTextColour(colour)
-        else
-            term.setTextColour(colour(match))
-        end
-        term.write(match)
-        term.setTextColour(textColour)
-        return string.sub(sLine, #match + 1)
-    end
-    return nil
-end
+local lex_context = { line = function() end, report = function() end }
 
-local function tryWriteString(sLine)
-    local quotationChar = string.sub(sLine, 1, 1)
-    if quotationChar ~= '"' and quotationChar ~= "'" then
-        return nil
-    end
-
-    -- Scan through the rest of the string, skipping over escapes,
-    -- until we find the closing quote.
-    local i = 2
-    while i <= #sLine do
-        local nextChar = string.sub(sLine, i, i)
-        if nextChar == "\\" then
-            i = i + 2 -- Skip over escapes
-        elseif nextChar == quotationChar then
-            break
-        else
-            i = i + 1
-        end
-    end
-
-    term.setTextColor(stringColour)
-    term.write(string.sub(sLine, 1, i))
-    term.setTextColor(textColour)
-    return string.sub(sLine, i + 1)
-end
-
-local function writeHighlighted(sLine)
-    while #sLine > 0 do
-        sLine =
-            tryWrite(sLine, "^%-%-%[%[.-%]%]", commentColour) or
-            tryWrite(sLine, "^%-%-.*", commentColour) or
-            tryWriteString(sLine) or
-            tryWrite(sLine, "^%[%[.-%]%]", stringColour) or
-            tryWrite(sLine, "^[%w_]+", function(match)
-                if tKeywords[match] then
-                    return keywordColour
-                end
-                return textColour
-            end) or
-            tryWrite(sLine, "^[^%w_]", textColour)
-    end
-end
+local function writeHighlighted(line)
+    local pos, colour = 1, nil
+    while true do
+        local token, _, finish = lex_one(lex_context, line, pos)
+        if not token then break end
+
+        local new_colour = token_colours[token]
+        if new_colour ~= colour then
+            term.setTextColor(new_colour)
+            colour = new_colour
+        end
+        term.write(line:sub(pos, finish))
+
+        pos = finish + 1
+    end
+    term.write(line:sub(pos))
+end
 
 local tCompletions
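
As the commit message notes, this is still line-at-a-time: `writeHighlighted`
restarts `lex_one` at position 1 on every line, so no lexer state crosses a
line break (#1396). Tracking `colour` and only calling `term.setTextColor`
when it changes also keeps terminal calls down on mostly-plain lines. A
sketch of the per-line limitation:

```
-- Each call lexes its line from scratch, so a long string opened on the
-- first line is forgotten by the second, which is coloured as plain code.
writeHighlighted("local s = [[an unfinished long string")
writeHighlighted("still inside the string]]")
```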

View File

@@ -67,7 +67,7 @@ This comment was never finished.
  1 | --[=[
    | ^^^^^ Comment was started here.
 We expected a closing delimiter (]=]) somewhere after this comment was started.
-1:1-1:5 ERROR --[=[
+1:1-1:5 COMMENT --[=[
 ```
 
 Nested comments are rejected, just as Lua 5.1 does:
@@ -191,7 +191,7 @@ This string was never finished.
  1 | return [[
    | ^^ String was started here.
 We expected a closing delimiter (]]) somewhere after this string was started.
-1:8-1:9 ERROR [[
+1:8-1:9 STRING [[
 ```
 
 We also handle malformed opening strings: