🐐 Line numbers in errors

This commit is contained in:
2026-04-25 20:21:16 +02:00
parent 382492a6fc
commit 272881e6cf
8 changed files with 176 additions and 71 deletions

View File

@@ -318,8 +318,15 @@ def fraction_to_frac(f, magnvm=False, svbnvlla=False) -> str:
class Node(BaseBox):
pos = None # (lineno, colno) — set in parser productions
def eval(self, vtable):
return self._eval(vtable.copy())
try:
return self._eval(vtable.copy())
except CentvrionError as e:
if e.lineno is None and self.pos is not None:
e.lineno, e.colno = self.pos
raise
def _eval(self, vtable):
raise NotImplementedError
@@ -1321,7 +1328,7 @@ class TemptaStatement(Node):
if vtable["#return"] is not None or vtable["#break"] or vtable["#continue"]:
return vtable, last_val
except CentvrionError as e:
vtable[self.error_var.name] = ValStr(str(e))
vtable[self.error_var.name] = ValStr(e.msg)
for statement in self.catch_statements:
vtable, last_val = statement.eval(vtable)
if vtable["#return"] is not None or vtable["#break"] or vtable["#continue"]:

View File

@@ -7,6 +7,14 @@ from centvrion.ast_nodes import (
num_to_int, frac_to_fraction,
)
def _err(node, msg):
    """Build a CentvrionError carrying the source position of *node*, when known.

    Nodes without a stamped ``pos`` attribute (or with ``pos = None``) yield a
    position-less error, exactly as before.
    """
    pos = getattr(node, "pos", None)
    if pos is None:
        return CentvrionError(msg)
    return CentvrionError(msg, pos[0], pos[1])
_BINOP_FN = {
"SYMBOL_PLUS": "cent_add",
"SYMBOL_MINUS": "cent_sub",
@@ -84,7 +92,7 @@ def emit_expr(node, ctx):
if isinstance(node, Fractio):
if not ctx.has_module("FRACTIO"):
raise CentvrionError("Cannot use fraction literals without 'FRACTIO' module")
raise _err(node, "Cannot use fraction literals without 'FRACTIO' module")
tmp = ctx.fresh_tmp()
magnvm = "MAGNVM" in ctx.modules
svbnvlla = "SVBNVLLA" in ctx.modules
@@ -318,25 +326,25 @@ def _emit_builtin(node, ctx):
f"CentValue {tmp} = cent_ordina_cmp({param_vars[0]}, {param_vars[1]}, _scope);"
)
else:
raise CentvrionError("ORDINA takes 1 or 2 arguments")
raise _err(node, "ORDINA takes 1 or 2 arguments")
case "MVTA":
if len(param_vars) != 2:
raise CentvrionError("MVTA takes II arguments")
raise _err(node, "MVTA takes II arguments")
lines.append(
f"CentValue {tmp} = cent_mvta({param_vars[0]}, {param_vars[1]}, _scope);"
)
case "CRIBRA":
if len(param_vars) != 2:
raise CentvrionError("CRIBRA takes II arguments")
raise _err(node, "CRIBRA takes II arguments")
lines.append(
f"CentValue {tmp} = cent_cribra({param_vars[0]}, {param_vars[1]}, _scope);"
)
case "CONFLA":
if len(param_vars) != 3:
raise CentvrionError("CONFLA takes III arguments")
raise _err(node, "CONFLA takes III arguments")
lines.append(
f"CentValue {tmp} = cent_confla({param_vars[0]}, {param_vars[1]}, {param_vars[2]}, _scope);"
)
@@ -456,7 +464,7 @@ def _emit_invoca(node, ctx):
lines.append(f"CentScope {call_scope_var} = cent_scope_copy(&_scope);")
param_names = ctx.functions[c_func_name]
if len(param_vars) != len(param_names):
raise CentvrionError(
raise _err(node,
f"Function '{node.callee.name}' expects {len(param_names)} argument(s), "
f"got {len(param_vars)}"
)

View File

@@ -11,6 +11,14 @@ def emit_stmt(node, ctx):
Emit C code for a CENTVRION statement node.
Returns lines — list of C statements.
"""
body = _emit_stmt_body(node, ctx)
pos = getattr(node, "pos", None)
if pos is not None:
return [f"_cent_current_line = {pos[0]};"] + body
return body
def _emit_stmt_body(node, ctx):
if isinstance(node, Designa):
val_lines, val_var = emit_expr(node.value, ctx)
return val_lines + [f'cent_scope_set(&_scope, "{node.id.name}", {val_var});']

View File

@@ -35,6 +35,7 @@ static uint32_t cent_rng_next(void) {
jmp_buf _cent_try_stack[CENT_TRY_STACK_MAX];
int _cent_try_depth = 0;
const char *_cent_error_msg = NULL;
int _cent_current_line = 0;
/* ------------------------------------------------------------------ */
/* Arena allocator */
@@ -74,13 +75,20 @@ void *cent_arena_alloc(CentArena *a, size_t n) {
/* Error handling */
/* ------------------------------------------------------------------ */
/* Print a fatal CENTVRION diagnostic to stderr and terminate the process.
 * `kind` is a short category label (e.g. "error", "type error") and `msg` is
 * the message text.  When the global `_cent_current_line` is positive it is
 * appended as "at line N"; 0 means no line is known, so the suffix is omitted.
 * Never returns (exit(1)). */
static void _cent_die(const char *kind, const char *msg) {
    if (_cent_current_line > 0)
        fprintf(stderr, "CENTVRION %s: %s at line %d\n", kind, msg, _cent_current_line);
    else
        fprintf(stderr, "CENTVRION %s: %s\n", kind, msg);
    exit(1);
}
void cent_type_error(const char *msg) {
if (_cent_try_depth > 0) {
_cent_error_msg = msg;
longjmp(_cent_try_stack[_cent_try_depth - 1], 1);
}
fprintf(stderr, "CENTVRION type error: %s\n", msg);
exit(1);
_cent_die("type error", msg);
}
void cent_runtime_error(const char *msg) {
@@ -88,8 +96,7 @@ void cent_runtime_error(const char *msg) {
_cent_error_msg = msg;
longjmp(_cent_try_stack[_cent_try_depth - 1], 1);
}
fprintf(stderr, "CENTVRION error: %s\n", msg);
exit(1);
_cent_die("error", msg);
}
/* ------------------------------------------------------------------ */
@@ -101,8 +108,11 @@ CentValue cent_scope_get(CentScope *s, const char *name) {
if (strcmp(s->names[i], name) == 0)
return s->vals[i];
}
fprintf(stderr, "CENTVRION error: undefined variable '%s'\n", name);
exit(1);
size_t bufsz = strlen(name) + 32;
char *buf = cent_arena_alloc(cent_arena, bufsz);
snprintf(buf, bufsz, "undefined variable '%s'", name);
cent_runtime_error(buf);
return cent_null(); /* unreachable */
}
void cent_scope_set(CentScope *s, const char *name, CentValue v) {
@@ -236,8 +246,10 @@ long cent_roman_to_int(const char *s) {
}
}
if (!matched) {
fprintf(stderr, "CENTVRION error: invalid Roman numeral: %s\n", s);
exit(1);
size_t bufsz = strlen(s) + 32;
char *buf = cent_arena_alloc(cent_arena, bufsz);
snprintf(buf, bufsz, "invalid Roman numeral: %s", s);
cent_runtime_error(buf);
}
}
return result;

View File

@@ -154,6 +154,9 @@ extern jmp_buf _cent_try_stack[];
extern int _cent_try_depth;
extern const char *_cent_error_msg;
/* Updated at the start of every emitted statement; 0 means "no line known". */
extern int _cent_current_line;
void cent_type_error(const char *msg); /* type mismatch → longjmp or exit(1) */
void cent_runtime_error(const char *msg); /* runtime fault → longjmp or exit(1) */

View File

@@ -1 +1,13 @@
class CentvrionError(Exception): pass
class CentvrionError(Exception):
    """Base error for the CENTVRION language, optionally carrying a source position."""

    def __init__(self, msg, lineno=None, colno=None):
        super().__init__(msg)
        self.msg = msg        # bare message text, without any position suffix
        self.lineno = lineno  # source line number, or None if unknown
        self.colno = colno    # source column number, or None if unknown

    def __str__(self):
        # Append as much position information as is available; a column is
        # only reported when a line is also known.
        suffix = ""
        if self.lineno is not None:
            suffix = f" at line {self.lineno}"
            if self.colno is not None:
                suffix += f", column {self.colno}"
        return self.msg + suffix

View File

@@ -42,7 +42,30 @@ def _unescape(s):
return ''.join(out)
def _parse_interpolated(raw_value):
def _at(node, src):
"""Stamp a (lineno, colno) onto a freshly built AST node.
`src` can be an rply Token (uses .source_pos) or another Node (copies .pos).
"""
if src is None:
return node
pos = getattr(src, "pos", None)
if pos is not None:
node.pos = pos
return node
sp = getattr(src, "source_pos", None)
if sp is not None:
node.pos = (sp.lineno, sp.colno)
return node
def _parse_interpolated(raw_value, source_pos=None):
lineno = source_pos.lineno if source_pos is not None else None
colno = source_pos.colno if source_pos is not None else None
def _err(msg):
return CentvrionError(msg, lineno, colno)
quote_char = raw_value[0]
inner = raw_value[1:-1]
@@ -79,15 +102,15 @@ def _parse_interpolated(raw_value):
depth -= 1
j += 1
if depth != 0:
raise CentvrionError("Unclosed '{' in interpolated string")
raise _err("Unclosed '{' in interpolated string")
expr_src = inner[i + 1:j - 1]
tokens = Lexer().get_lexer().lex(expr_src + "\n")
program = Parser().parse(tokens)
if len(program.statements) != 1:
raise CentvrionError("Interpolation must contain exactly one expression")
raise _err("Interpolation must contain exactly one expression")
stmt = program.statements[0]
if not isinstance(stmt, ast_nodes.ExpressionStatement):
raise CentvrionError("Interpolation must contain an expression, not a statement")
raise _err("Interpolation must contain an expression, not a statement")
parts.append(stmt.expression)
i = j
elif ch == '}':
@@ -95,7 +118,7 @@ def _parse_interpolated(raw_value):
current.append('}')
i += 2
continue
raise CentvrionError("Unmatched '}' in string (use '}}' for literal '}')")
raise _err("Unmatched '}' in string (use '}}' for literal '}')")
else:
current.append(ch)
i += 1
@@ -154,7 +177,7 @@ class Parser():
@self.pg.production('module_call : KEYWORD_CVM MODULE')
def module_call(tokens):
return ast_nodes.ModuleCall(tokens[1].value)
return _at(ast_nodes.ModuleCall(tokens[1].value), tokens[0])
# Statements
@@ -172,7 +195,7 @@ class Parser():
@self.pg.production('statement : KEYWORD_DESIGNA id KEYWORD_VT expression')
def statement_designa(tokens):
return ast_nodes.Designa(tokens[1], tokens[3])
return _at(ast_nodes.Designa(tokens[1], tokens[3]), tokens[0])
@self.pg.production('index_chain : SYMBOL_LBRACKET expression SYMBOL_RBRACKET')
def index_chain_single(tokens):
@@ -184,39 +207,39 @@ class Parser():
@self.pg.production('statement : KEYWORD_DESIGNA id index_chain KEYWORD_VT expression')
def statement_designa_index(tokens):
return ast_nodes.DesignaIndex(tokens[1], tokens[2], tokens[4])
return _at(ast_nodes.DesignaIndex(tokens[1], tokens[2], tokens[4]), tokens[0])
@self.pg.production('statement : KEYWORD_DESIGNA id SYMBOL_COMMA id_list_rest KEYWORD_VT expression')
def statement_designa_destructure(tokens):
return ast_nodes.DesignaDestructure([tokens[1]] + tokens[3], tokens[5])
return _at(ast_nodes.DesignaDestructure([tokens[1]] + tokens[3], tokens[5]), tokens[0])
@self.pg.production('statement : id KEYWORD_AVGE expression')
def statement_avge(tokens):
return ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_PLUS"))
return _at(ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_PLUS")), tokens[0])
@self.pg.production('statement : id KEYWORD_MINVE expression')
def statement_minve(tokens):
return ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_MINUS"))
return _at(ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_MINUS")), tokens[0])
@self.pg.production('statement : id KEYWORD_MVLTIPLICA expression')
def statement_mvltiplica(tokens):
return ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_TIMES"))
return _at(ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_TIMES")), tokens[0])
@self.pg.production('statement : id KEYWORD_DIVIDE expression')
def statement_divide(tokens):
return ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_DIVIDE"))
return _at(ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_DIVIDE")), tokens[0])
@self.pg.production('statement : expression')
def statement_expression(tokens):
return ast_nodes.ExpressionStatement(tokens[0])
return _at(ast_nodes.ExpressionStatement(tokens[0]), tokens[0])
@self.pg.production('statement : KEYWORD_DEFINI id ids KEYWORD_VT SYMBOL_LCURL statements SYMBOL_RCURL')
def defini(tokens):
return ast_nodes.Defini(tokens[1], tokens[2], tokens[5])
return _at(ast_nodes.Defini(tokens[1], tokens[2], tokens[5]), tokens[0])
@self.pg.production('statement : KEYWORD_REDI expressions')
def redi(tokens):
return ast_nodes.Redi(tokens[1])
return _at(ast_nodes.Redi(tokens[1]), tokens[0])
@self.pg.production('statement : per_statement')
@self.pg.production('statement : dum_statement')
@@ -227,20 +250,20 @@ class Parser():
return tokens[0]
@self.pg.production('statement : KEYWORD_ERVMPE')
def erumpe(_):
return ast_nodes.Erumpe()
def erumpe(tokens):
return _at(ast_nodes.Erumpe(), tokens[0])
@self.pg.production('statement : KEYWORD_CONTINVA')
def continva(_):
return ast_nodes.Continva()
def continva(tokens):
return _at(ast_nodes.Continva(), tokens[0])
@self.pg.production('si_statement : KEYWORD_SI expression KEYWORD_TVNC SYMBOL_LCURL statements SYMBOL_RCURL')
@self.pg.production('si_statement : KEYWORD_SI expression KEYWORD_TVNC SYMBOL_LCURL statements SYMBOL_RCURL aluid_statement')
def si_statement(tokens):
if len(tokens) == 7:
return ast_nodes.SiStatement(tokens[1], tokens[4], tokens[6])
return _at(ast_nodes.SiStatement(tokens[1], tokens[4], tokens[6]), tokens[0])
else:
return ast_nodes.SiStatement(tokens[1], tokens[4], None)
return _at(ast_nodes.SiStatement(tokens[1], tokens[4], None), tokens[0])
@self.pg.production('aluid_statement : KEYWORD_ALIVD si_statement')
def aluid_si(tokens):
@@ -252,34 +275,34 @@ class Parser():
@self.pg.production('dum_statement : KEYWORD_DVM expression KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
def dum(tokens):
return ast_nodes.DumStatement(tokens[1], tokens[4])
return _at(ast_nodes.DumStatement(tokens[1], tokens[4]), tokens[0])
# AETERNVM is sugar for `DVM FALSITAS` — same AST, no observable difference.
@self.pg.production('dum_statement : KEYWORD_AETERNVM KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
def aeternvm(tokens):
return ast_nodes.DumStatement(ast_nodes.Bool(False), tokens[3])
return _at(ast_nodes.DumStatement(ast_nodes.Bool(False), tokens[3]), tokens[0])
@self.pg.production('per_statement : KEYWORD_PER id SYMBOL_COMMA id_list_rest KEYWORD_IN expression KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
def per_destructure(tokens):
return ast_nodes.PerStatement(tokens[5], [tokens[1]] + tokens[3], tokens[8])
return _at(ast_nodes.PerStatement(tokens[5], [tokens[1]] + tokens[3], tokens[8]), tokens[0])
@self.pg.production('per_statement : KEYWORD_PER id KEYWORD_IN expression KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
def per(tokens):
return ast_nodes.PerStatement(tokens[3], tokens[1], tokens[6])
return _at(ast_nodes.PerStatement(tokens[3], tokens[1], tokens[6]), tokens[0])
@self.pg.production('tempta_statement : KEYWORD_TEMPTA SYMBOL_LCURL statements SYMBOL_RCURL KEYWORD_CAPE id SYMBOL_LCURL statements SYMBOL_RCURL')
def tempta(tokens):
return ast_nodes.TemptaStatement(tokens[2], tokens[5], tokens[7])
return _at(ast_nodes.TemptaStatement(tokens[2], tokens[5], tokens[7]), tokens[0])
@self.pg.production('donicum_statement : KEYWORD_DONICVM id KEYWORD_VT expression KEYWORD_VSQVE expression KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
def donicum(tokens):
range_array = ast_nodes.DataRangeArray(tokens[3], tokens[5])
return ast_nodes.PerStatement(range_array, tokens[1], tokens[8])
range_array = _at(ast_nodes.DataRangeArray(tokens[3], tokens[5]), tokens[0])
return _at(ast_nodes.PerStatement(range_array, tokens[1], tokens[8]), tokens[0])
@self.pg.production('donicum_statement : KEYWORD_DONICVM id KEYWORD_VT expression KEYWORD_VSQVE expression KEYWORD_GRADV expression KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
def donicum_step(tokens):
range_array = ast_nodes.DataRangeArray(tokens[3], tokens[5], tokens[7])
return ast_nodes.PerStatement(range_array, tokens[1], tokens[10])
range_array = _at(ast_nodes.DataRangeArray(tokens[3], tokens[5], tokens[7]), tokens[0])
return _at(ast_nodes.PerStatement(range_array, tokens[1], tokens[10]), tokens[0])
# expressions
@self.pg.production('expressions : SYMBOL_LPARENS expression_list')
@@ -311,28 +334,29 @@ class Parser():
@self.pg.production('expression : BUILTIN expressions')
def expression_builtin(tokens):
return ast_nodes.BuiltIn(tokens[0].value, tokens[1])
return _at(ast_nodes.BuiltIn(tokens[0].value, tokens[1]), tokens[0])
@self.pg.production('expression : DATA_STRING')
def expression_string(tokens):
return _parse_interpolated(tokens[0].value)
node = _parse_interpolated(tokens[0].value, tokens[0].source_pos)
return _at(node, tokens[0])
@self.pg.production('expression : DATA_NUMERAL')
def expression_numeral(tokens):
return ast_nodes.Numeral(tokens[0].value)
return _at(ast_nodes.Numeral(tokens[0].value), tokens[0])
@self.pg.production('expression : DATA_FRACTION')
def expression_fraction(tokens):
return ast_nodes.Fractio(tokens[0].value)
return _at(ast_nodes.Fractio(tokens[0].value), tokens[0])
@self.pg.production('expression : KEYWORD_FALSITAS')
@self.pg.production('expression : KEYWORD_VERITAS')
def expression_bool(tokens):
return ast_nodes.Bool(tokens[0].name == "KEYWORD_VERITAS")
return _at(ast_nodes.Bool(tokens[0].name == "KEYWORD_VERITAS"), tokens[0])
@self.pg.production('expression : KEYWORD_NVLLVS')
def expression_nullus(_):
return ast_nodes.Nullus()
def expression_nullus(tokens):
return _at(ast_nodes.Nullus(), tokens[0])
@self.pg.production('expression : expression SYMBOL_AT expression')
@self.pg.production('expression : expression SYMBOL_AMPERSAND expression')
@@ -350,23 +374,23 @@ class Parser():
@self.pg.production('expression : expression KEYWORD_ET expression')
@self.pg.production('expression : expression KEYWORD_AVT expression')
def binop(tokens):
return ast_nodes.BinOp(tokens[0], tokens[2], tokens[1].name)
return _at(ast_nodes.BinOp(tokens[0], tokens[2], tokens[1].name), tokens[0])
@self.pg.production('expression : SYMBOL_MINUS expression', precedence='UMINUS')
def unary_minus(tokens):
return ast_nodes.UnaryMinus(tokens[1])
return _at(ast_nodes.UnaryMinus(tokens[1]), tokens[0])
@self.pg.production('expression : KEYWORD_NON expression', precedence='UNOT')
def unary_not(tokens):
return ast_nodes.UnaryNot(tokens[1])
return _at(ast_nodes.UnaryNot(tokens[1]), tokens[0])
@self.pg.production('expression : KEYWORD_INVOCA expression expressions')
def invoca(tokens):
return ast_nodes.Invoca(tokens[1], tokens[2])
return _at(ast_nodes.Invoca(tokens[1], tokens[2]), tokens[0])
@self.pg.production('expression : KEYWORD_FVNCTIO ids KEYWORD_VT SYMBOL_LCURL statements SYMBOL_RCURL')
def fvnctio(tokens):
return ast_nodes.Fvnctio(tokens[1], tokens[4])
return _at(ast_nodes.Fvnctio(tokens[1], tokens[4]), tokens[0])
@self.pg.production('expression : SYMBOL_LPARENS expression SYMBOL_RPARENS')
def parens(tokens):
@@ -382,40 +406,40 @@ class Parser():
@self.pg.production('expression : KEYWORD_TABVLA SYMBOL_LCURL opt_newline SYMBOL_RCURL')
def dict_literal_empty(tokens):
return ast_nodes.DataDict([])
return _at(ast_nodes.DataDict([]), tokens[0])
@self.pg.production('expression : KEYWORD_TABVLA SYMBOL_LCURL opt_newline dict_items opt_newline SYMBOL_RCURL')
def dict_literal(tokens):
return ast_nodes.DataDict(tokens[3])
return _at(ast_nodes.DataDict(tokens[3]), tokens[0])
@self.pg.production('expression : SYMBOL_LBRACKET SYMBOL_RBRACKET')
@self.pg.production('expression : SYMBOL_LBRACKET newlines SYMBOL_RBRACKET')
def array_empty(_):
return ast_nodes.DataArray([])
def array_empty(tokens):
return _at(ast_nodes.DataArray([]), tokens[0])
@self.pg.production('expression : SYMBOL_LBRACKET array_items opt_newline SYMBOL_RBRACKET')
def array(tokens):
return ast_nodes.DataArray(tokens[1])
return _at(ast_nodes.DataArray(tokens[1]), tokens[0])
@self.pg.production('expression : SYMBOL_LBRACKET newlines array_items opt_newline SYMBOL_RBRACKET')
def array_leading_newline(tokens):
return ast_nodes.DataArray(tokens[2])
return _at(ast_nodes.DataArray(tokens[2]), tokens[0])
@self.pg.production('expression : SYMBOL_LBRACKET expression KEYWORD_VSQVE expression SYMBOL_RBRACKET')
def range_array(tokens):
return ast_nodes.DataRangeArray(tokens[1], tokens[3])
return _at(ast_nodes.DataRangeArray(tokens[1], tokens[3]), tokens[0])
@self.pg.production('expression : SYMBOL_LBRACKET expression KEYWORD_VSQVE expression KEYWORD_GRADV expression SYMBOL_RBRACKET')
def range_array_step(tokens):
return ast_nodes.DataRangeArray(tokens[1], tokens[3], tokens[5])
return _at(ast_nodes.DataRangeArray(tokens[1], tokens[3], tokens[5]), tokens[0])
@self.pg.production('expression : expression SYMBOL_LBRACKET expression SYMBOL_RBRACKET', precedence='INDEX')
def array_index(tokens):
return ast_nodes.ArrayIndex(tokens[0], tokens[2])
return _at(ast_nodes.ArrayIndex(tokens[0], tokens[2]), tokens[0])
@self.pg.production('expression : expression SYMBOL_LBRACKET expression KEYWORD_VSQVE expression SYMBOL_RBRACKET', precedence='INDEX')
def array_slice(tokens):
return ast_nodes.ArraySlice(tokens[0], tokens[2], tokens[4])
return _at(ast_nodes.ArraySlice(tokens[0], tokens[2], tokens[4]), tokens[0])
# ids
@self.pg.production('ids : SYMBOL_LPARENS id_list')
@@ -443,7 +467,7 @@ class Parser():
@self.pg.production("id : ID")
def id_expression(tokens):
return ast_nodes.ID(tokens[0].value)
return _at(ast_nodes.ID(tokens[0].value), tokens[0])
@self.pg.error
def error_handle(token):

View File

@@ -174,3 +174,34 @@ class TestCompilerErrors(unittest.TestCase):
@parameterized.expand(compiler_error_tests)
def test_compiler_errors(self, source, error_type):
run_compiler_error_test(self, source)
class TestErrorLineNumbers(unittest.TestCase):
    """End-to-end checks that runtime errors report the offending source line."""

    def test_interpreter_error_includes_line(self):
        # `y` is undefined and referenced on line 2 of the program, so the
        # interpreter must raise a CentvrionError whose text contains "at line 2".
        source = "DESIGNA x VT III\nDIC(y)\n"
        tokens = Lexer().get_lexer().lex(source)
        program = Parser().parse(tokens)
        with self.assertRaisesRegex(CentvrionError, r"at line 2"):
            program.eval()

    def test_compiled_error_includes_line(self):
        # Same program, compiled to C and run as a binary: it must exit with a
        # non-zero status and print "at line 2" on stderr.
        source = "DESIGNA x VT III\nDIC(y)\n"
        tokens = Lexer().get_lexer().lex(source)
        program = Parser().parse(tokens)
        c_source = compile_program(program)
        # delete=False so the paths outlive the `with` blocks; both files are
        # removed in the `finally` below.
        with tempfile.NamedTemporaryFile(suffix=".c", delete=False, mode="w") as tmp_c:
            tmp_c.write(c_source)
            tmp_c_path = tmp_c.name
        with tempfile.NamedTemporaryFile(suffix="", delete=False) as tmp_bin:
            tmp_bin_path = tmp_bin.name
        try:
            # check=True: a compile failure raises CalledProcessError and fails
            # the test; capture_output keeps gcc noise out of the test log.
            subprocess.run(
                ["gcc", "-O2", tmp_c_path, _RUNTIME_C, "-o", tmp_bin_path, "-lcurl", "-lmicrohttpd"],
                check=True, capture_output=True,
            )
            proc = subprocess.run([tmp_bin_path], capture_output=True, text=True)
            self.assertNotEqual(proc.returncode, 0)
            self.assertIn("at line 2", proc.stderr)
        finally:
            os.unlink(tmp_c_path)
            os.unlink(tmp_bin_path)