
Be lazy in reporting errors in the lexer

Instead of reporting an error with `.report(f(...))`, we now do
`.report(f, ...)`. This allows consumers to skip building error messages
when they are not needed, such as when just doing syntax highlighting.
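
For illustration, the difference between the eager and the lazy form can be sketched as follows. The factory and the two `report` implementations below are illustrative stand-ins, not the real `cc.internal.syntax` code:

```lua
-- Sketch only: a message "factory" plus two possible reporters.
local function malformed_number(start_pos, end_pos)
    -- The message table is only built when this function is called.
    return { ("Malformed number between %d and %d."):format(start_pos, end_pos) }
end

-- A parser-style reporter that wants the message: realise it, then use it.
local function report_and_print(msg, ...)
    if type(msg) == "function" then msg = msg(...) end
    print(msg[1])
end

-- A highlighter-style reporter that never needs messages.
local function report_ignore() end

-- New, lazy form: only reporters that care invoke the factory.
report_and_print(malformed_number, 5, 9) --> Malformed number between 5 and 9.
report_ignore(malformed_number, 5, 9)    -- factory never called, nothing allocated
-- Old, eager form: the message is built even though it is then discarded.
report_ignore(malformed_number(5, 9))
```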
Jonathan Coates 2023-06-25 15:48:57 +01:00
parent 7ffdbb2316
commit 54ab98473f
4 changed files with 24 additions and 19 deletions

View File

@@ -21,6 +21,8 @@ local error_printer = require "cc.internal.error_printer"
local error_sentinel = {}
local function make_context(input)
expect(1, input, "string")
local context = {}
local lines = { 1 }
@@ -73,8 +75,9 @@ local function parse(input, start_symbol)
expect(2, start_symbol, "number")
local context = make_context(input)
function context.report(msg)
expect(1, msg, "table")
function context.report(msg, ...)
expect(1, msg, "table", "function")
if type(msg) == "function" then msg = msg(...) end
error_printer(context, msg)
error(error_sentinel)
end
@@ -110,8 +113,9 @@ local function parse_repl(input)
local context = make_context(input)
local last_error = nil
function context.report(msg)
expect(1, msg, "table")
function context.report(msg, ...)
expect(1, msg, "table", "function")
if type(msg) == "function" then msg = msg(...) end
last_error = msg
error(error_sentinel)
end

View File

@@ -96,7 +96,7 @@ local function lex_number(context, str, start)
local contents = sub(str, start, pos - 1)
if not tonumber(contents) then
-- TODO: Separate error for "2..3"?
context.report(errors.malformed_number(start, pos - 1))
context.report(errors.malformed_number, start, pos - 1)
end
return tokens.NUMBER, pos - 1
@@ -118,14 +118,14 @@ local function lex_string(context, str, start_pos, quote)
return tokens.STRING, pos
elseif c == "\n" or c == "\r" or c == "" then
-- We don't call newline here, as that's done for the next token.
context.report(errors.unfinished_string(start_pos, pos, quote))
context.report(errors.unfinished_string, start_pos, pos, quote)
return tokens.STRING, pos - 1
elseif c == "\\" then
c = sub(str, pos + 1, pos + 1)
if c == "\n" or c == "\r" then
pos = newline(context, str, pos + 1, c)
elseif c == "" then
context.report(errors.unfinished_string_escape(start_pos, pos, quote))
context.report(errors.unfinished_string_escape, start_pos, pos, quote)
return tokens.STRING, pos
elseif c == "z" then
pos = pos + 2
@@ -133,7 +133,7 @@ local function lex_string(context, str, start_pos, quote)
local next_pos, _, c = find(str, "([%S\r\n])", pos)
if not next_pos then
context.report(errors.unfinished_string(start_pos, #str, quote))
context.report(errors.unfinished_string, start_pos, #str, quote)
return tokens.STRING, #str
end
@@ -196,7 +196,7 @@ local function lex_long_str(context, str, start, len)
elseif c == "[" then
local ok, boundary_pos = lex_long_str_boundary(str, pos + 1, "[")
if ok and boundary_pos - pos == len and len == 1 then
context.report(errors.nested_long_str(pos, boundary_pos))
context.report(errors.nested_long_str, pos, boundary_pos)
end
pos = boundary_pos
@@ -238,12 +238,12 @@ local function lex_token(context, str, pos)
local end_pos = lex_long_str(context, str, boundary_pos + 1, boundary_pos - pos)
if end_pos then return tokens.STRING, end_pos end
context.report(errors.unfinished_long_string(pos, boundary_pos, boundary_pos - pos))
context.report(errors.unfinished_long_string, pos, boundary_pos, boundary_pos - pos)
return tokens.ERROR, #str
elseif pos + 1 == boundary_pos then -- Just a "["
return tokens.OSQUARE, pos
else -- Malformed long string, for instance "[="
context.report(errors.malformed_long_string(pos, boundary_pos, boundary_pos - pos))
context.report(errors.malformed_long_string, pos, boundary_pos, boundary_pos - pos)
return tokens.ERROR, boundary_pos
end
@@ -260,7 +260,7 @@ local function lex_token(context, str, pos)
local end_pos = lex_long_str(context, str, boundary_pos + 1, boundary_pos - comment_pos)
if end_pos then return tokens.COMMENT, end_pos end
context.report(errors.unfinished_long_comment(pos, boundary_pos, boundary_pos - comment_pos))
context.report(errors.unfinished_long_comment, pos, boundary_pos, boundary_pos - comment_pos)
return tokens.ERROR, #str
end
end
@@ -317,18 +317,18 @@ local function lex_token(context, str, pos)
if end_pos - pos <= 3 then
local contents = sub(str, pos, end_pos)
if contents == "&&" then
context.report(errors.wrong_and(pos, end_pos))
context.report(errors.wrong_and, pos, end_pos)
return tokens.AND, end_pos
elseif contents == "||" then
context.report(errors.wrong_or(pos, end_pos))
context.report(errors.wrong_or, pos, end_pos)
return tokens.OR, end_pos
elseif contents == "!=" or contents == "<>" then
context.report(errors.wrong_ne(pos, end_pos))
context.report(errors.wrong_ne, pos, end_pos)
return tokens.NE, end_pos
end
end
context.report(errors.unexpected_character(pos))
context.report(errors.unexpected_character, pos)
return tokens.ERROR, end_pos
end
end
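
The `errors.*` values passed to `report` above are message factories: plain functions which only build their message table when invoked. A rough sketch of one, assuming (as the final hunk of this commit suggests) that a message is a list of strings and `annotate` tables; the annotation fields other than `tag` are guesses, not the real `cc.internal.syntax.errors` layout:

```lua
-- Hypothetical factory in the spirit of cc.internal.syntax.errors: the table
-- below is only allocated when a reporter actually calls the function.
local function unfinished_string(start_pos, end_pos, quote)
    return {
        "This string was never finished.",
        -- "annotate" matches the tag checked in the test harness below; the
        -- position and msg fields here are assumptions for illustration.
        { tag = "annotate", start_pos = start_pos, end_pos = end_pos, msg = "String started with " .. quote .. " here." },
    }
end

-- The lexer then passes the factory and its arguments straight through:
-- context.report(unfinished_string, start_pos, pos, quote)
```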

View File

@@ -244,7 +244,7 @@ local function handle_error(context, stack, stack_n, token, token_start, token_end)
end
end
context.report(errors.unexpected_token(token, token_start, token_end))
context.report(errors.unexpected_token, token, token_start, token_end)
return false
end

View File

@@ -49,8 +49,9 @@ local function capture_parser(input, print_tokens, start)
end
local context = make_context(input)
function context.report(message)
expect(3, message, "table")
function context.report(message, ...)
expect(3, message, "table", "function")
if type(message) == "function" then message = message(...) end
for _, msg in ipairs(message) do
if type(msg) == "table" and msg.tag == "annotate" then