🐐 Line numbers in errors
@@ -318,8 +318,15 @@ def fraction_to_frac(f, magnvm=False, svbnvlla=False) -> str:


 class Node(BaseBox):
+    pos = None  # (lineno, colno) — set in parser productions
+
     def eval(self, vtable):
-        return self._eval(vtable.copy())
+        try:
+            return self._eval(vtable.copy())
+        except CentvrionError as e:
+            if e.lineno is None and self.pos is not None:
+                e.lineno, e.colno = self.pos
+            raise

     def _eval(self, vtable):
         raise NotImplementedError
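
Reviewer note: a minimal runnable model of the eval/_eval split above, with CentvrionError as added later in this commit and BadNode as a hypothetical stand-in for a real node class:

    class CentvrionError(Exception):
        def __init__(self, msg, lineno=None, colno=None):
            self.msg, self.lineno, self.colno = msg, lineno, colno
            super().__init__(msg)

    class Node:
        pos = None  # (lineno, colno) stamped by the parser

        def eval(self, vtable):
            try:
                return self._eval(dict(vtable))
            except CentvrionError as e:
                if e.lineno is None and self.pos is not None:
                    e.lineno, e.colno = self.pos  # innermost stamped node wins
                raise

    class BadNode(Node):  # hypothetical: always fails
        def _eval(self, vtable):
            raise CentvrionError("boom")

    n = BadNode()
    n.pos = (2, 1)
    try:
        n.eval({})
    except CentvrionError as e:
        assert (e.lineno, e.colno) == (2, 1)

The `e.lineno is None` guard is what makes the innermost positioned node win: once an inner frame stamps the error, outer frames leave it alone.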
@@ -1321,7 +1328,7 @@ class TemptaStatement(Node):
                 if vtable["#return"] is not None or vtable["#break"] or vtable["#continue"]:
                     return vtable, last_val
         except CentvrionError as e:
-            vtable[self.error_var.name] = ValStr(str(e))
+            vtable[self.error_var.name] = ValStr(e.msg)
             for statement in self.catch_statements:
                 vtable, last_val = statement.eval(vtable)
                 if vtable["#return"] is not None or vtable["#break"] or vtable["#continue"]:
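
Reviewer note: the switch from str(e) to e.msg matters because, with the __str__ added later in this commit, str(e) would bake the position into the value that CAPE binds. Assuming the new CentvrionError:

    e = CentvrionError("division by zero", 3)
    str(e)   # 'division by zero at line 3' (position would leak into the program)
    e.msg    # 'division by zero' (what the CAPE variable should observe)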
@@ -7,6 +7,14 @@ from centvrion.ast_nodes import (
     num_to_int, frac_to_fraction,
 )

+def _err(node, msg):
+    """Build a CentvrionError stamped with a node's source position, if any."""
+    pos = getattr(node, "pos", None)
+    if pos is not None:
+        return CentvrionError(msg, pos[0], pos[1])
+    return CentvrionError(msg)
+
+
 _BINOP_FN = {
     "SYMBOL_PLUS": "cent_add",
     "SYMBOL_MINUS": "cent_sub",
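
Reviewer note: a usage sketch for _err; the node and its position here are hypothetical, normally the parser stamps pos:

    node = ast_nodes.Numeral("VII")
    node.pos = (4, 9)
    err = _err(node, "MVTA takes II arguments")
    assert (err.lineno, err.colno) == (4, 9)
    # a node without pos yields a bare CentvrionError(msg)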
@@ -84,7 +92,7 @@ def emit_expr(node, ctx):

     if isinstance(node, Fractio):
         if not ctx.has_module("FRACTIO"):
-            raise CentvrionError("Cannot use fraction literals without 'FRACTIO' module")
+            raise _err(node, "Cannot use fraction literals without 'FRACTIO' module")
         tmp = ctx.fresh_tmp()
         magnvm = "MAGNVM" in ctx.modules
         svbnvlla = "SVBNVLLA" in ctx.modules
@@ -318,25 +326,25 @@ def _emit_builtin(node, ctx):
                     f"CentValue {tmp} = cent_ordina_cmp({param_vars[0]}, {param_vars[1]}, _scope);"
                 )
             else:
-                raise CentvrionError("ORDINA takes 1 or 2 arguments")
+                raise _err(node, "ORDINA takes 1 or 2 arguments")

         case "MVTA":
             if len(param_vars) != 2:
-                raise CentvrionError("MVTA takes II arguments")
+                raise _err(node, "MVTA takes II arguments")
             lines.append(
                 f"CentValue {tmp} = cent_mvta({param_vars[0]}, {param_vars[1]}, _scope);"
             )

         case "CRIBRA":
             if len(param_vars) != 2:
-                raise CentvrionError("CRIBRA takes II arguments")
+                raise _err(node, "CRIBRA takes II arguments")
             lines.append(
                 f"CentValue {tmp} = cent_cribra({param_vars[0]}, {param_vars[1]}, _scope);"
             )

         case "CONFLA":
             if len(param_vars) != 3:
-                raise CentvrionError("CONFLA takes III arguments")
+                raise _err(node, "CONFLA takes III arguments")
             lines.append(
                 f"CentValue {tmp} = cent_confla({param_vars[0]}, {param_vars[1]}, {param_vars[2]}, _scope);"
             )
@@ -456,7 +464,7 @@ def _emit_invoca(node, ctx):
     lines.append(f"CentScope {call_scope_var} = cent_scope_copy(&_scope);")
     param_names = ctx.functions[c_func_name]
     if len(param_vars) != len(param_names):
-        raise CentvrionError(
+        raise _err(node,
             f"Function '{node.callee.name}' expects {len(param_names)} argument(s), "
             f"got {len(param_vars)}"
         )
@@ -11,6 +11,14 @@ def emit_stmt(node, ctx):
     Emit C code for a CENTVRION statement node.
     Returns lines — list of C statements.
     """
+    body = _emit_stmt_body(node, ctx)
+    pos = getattr(node, "pos", None)
+    if pos is not None:
+        return [f"_cent_current_line = {pos[0]};"] + body
+    return body
+
+
+def _emit_stmt_body(node, ctx):
     if isinstance(node, Designa):
         val_lines, val_var = emit_expr(node.value, ctx)
         return val_lines + [f'cent_scope_set(&_scope, "{node.id.name}", {val_var});']
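
Reviewer note: with this wrapper, every positioned statement compiles to a store into _cent_current_line followed by its body. Roughly, for a Designa node stamped at line 2 (the temporary name below is illustrative, not the emitter's exact output):

    emit_stmt(node, ctx)
    # ['_cent_current_line = 2;',
    #  'cent_scope_set(&_scope, "x", _tmp0);']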
@@ -35,6 +35,7 @@ static uint32_t cent_rng_next(void) {
 jmp_buf _cent_try_stack[CENT_TRY_STACK_MAX];
 int _cent_try_depth = 0;
 const char *_cent_error_msg = NULL;
+int _cent_current_line = 0;

 /* ------------------------------------------------------------------ */
 /* Arena allocator */
@@ -74,13 +75,20 @@ void *cent_arena_alloc(CentArena *a, size_t n) {
 /* Error handling */
 /* ------------------------------------------------------------------ */

+static void _cent_die(const char *kind, const char *msg) {
+    if (_cent_current_line > 0)
+        fprintf(stderr, "CENTVRION %s: %s at line %d\n", kind, msg, _cent_current_line);
+    else
+        fprintf(stderr, "CENTVRION %s: %s\n", kind, msg);
+    exit(1);
+}
+
 void cent_type_error(const char *msg) {
     if (_cent_try_depth > 0) {
         _cent_error_msg = msg;
         longjmp(_cent_try_stack[_cent_try_depth - 1], 1);
     }
-    fprintf(stderr, "CENTVRION type error: %s\n", msg);
-    exit(1);
+    _cent_die("type error", msg);
 }

 void cent_runtime_error(const char *msg) {
@@ -88,8 +96,7 @@ void cent_runtime_error(const char *msg) {
         _cent_error_msg = msg;
         longjmp(_cent_try_stack[_cent_try_depth - 1], 1);
     }
-    fprintf(stderr, "CENTVRION error: %s\n", msg);
-    exit(1);
+    _cent_die("error", msg);
 }

 /* ------------------------------------------------------------------ */
@@ -101,8 +108,11 @@ CentValue cent_scope_get(CentScope *s, const char *name) {
         if (strcmp(s->names[i], name) == 0)
             return s->vals[i];
     }
-    fprintf(stderr, "CENTVRION error: undefined variable '%s'\n", name);
-    exit(1);
+    size_t bufsz = strlen(name) + 32;
+    char *buf = cent_arena_alloc(cent_arena, bufsz);
+    snprintf(buf, bufsz, "undefined variable '%s'", name);
+    cent_runtime_error(buf);
+    return cent_null(); /* unreachable */
 }

 void cent_scope_set(CentScope *s, const char *name, CentValue v) {
@@ -236,8 +246,10 @@ long cent_roman_to_int(const char *s) {
             }
         }
         if (!matched) {
-            fprintf(stderr, "CENTVRION error: invalid Roman numeral: %s\n", s);
-            exit(1);
+            size_t bufsz = strlen(s) + 32;
+            char *buf = cent_arena_alloc(cent_arena, bufsz);
+            snprintf(buf, bufsz, "invalid Roman numeral: %s", s);
+            cent_runtime_error(buf);
         }
     }
     return result;
@@ -154,6 +154,9 @@ extern jmp_buf _cent_try_stack[];
 extern int _cent_try_depth;
 extern const char *_cent_error_msg;

+/* Updated at the start of every emitted statement; 0 means "no line known". */
+extern int _cent_current_line;
+
 void cent_type_error(const char *msg); /* type mismatch → longjmp or exit(1) */
 void cent_runtime_error(const char *msg); /* runtime fault → longjmp or exit(1) */

@@ -1 +1,13 @@
-class CentvrionError(Exception): pass
+class CentvrionError(Exception):
+    def __init__(self, msg, lineno=None, colno=None):
+        self.msg = msg
+        self.lineno = lineno
+        self.colno = colno
+        super().__init__(msg)
+
+    def __str__(self):
+        if self.lineno is None:
+            return self.msg
+        if self.colno is None:
+            return f"{self.msg} at line {self.lineno}"
+        return f"{self.msg} at line {self.lineno}, column {self.colno}"
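
Reviewer note: the three formatting levels the new __str__ produces:

    str(CentvrionError("undefined variable 'y'"))        # undefined variable 'y'
    str(CentvrionError("undefined variable 'y'", 2))     # undefined variable 'y' at line 2
    str(CentvrionError("undefined variable 'y'", 2, 5))  # undefined variable 'y' at line 2, column 5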
@@ -42,7 +42,30 @@ def _unescape(s):
     return ''.join(out)


-def _parse_interpolated(raw_value):
+def _at(node, src):
+    """Stamp a (lineno, colno) onto a freshly built AST node.
+
+    `src` can be an rply Token (uses .source_pos) or another Node (copies .pos).
+    """
+    if src is None:
+        return node
+    pos = getattr(src, "pos", None)
+    if pos is not None:
+        node.pos = pos
+        return node
+    sp = getattr(src, "source_pos", None)
+    if sp is not None:
+        node.pos = (sp.lineno, sp.colno)
+    return node
+
+
+def _parse_interpolated(raw_value, source_pos=None):
+    lineno = source_pos.lineno if source_pos is not None else None
+    colno = source_pos.colno if source_pos is not None else None
+
+    def _err(msg):
+        return CentvrionError(msg, lineno, colno)
+
     quote_char = raw_value[0]
     inner = raw_value[1:-1]

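
Reviewer note: _at accepts either position source, so parser rewrites keep the original location. A sketch with hypothetical values:

    # from another node: copies .pos (used when a production wraps another production's result)
    inner = ast_nodes.ID("x")
    inner.pos = (3, 1)
    wrapped = _at(ast_nodes.UnaryMinus(inner), inner)
    assert wrapped.pos == (3, 1)

    # from an rply token: reads .source_pos, as the productions below do with tokens[0]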
@@ -79,15 +102,15 @@ def _parse_interpolated(raw_value):
                 depth -= 1
             j += 1
         if depth != 0:
-            raise CentvrionError("Unclosed '{' in interpolated string")
+            raise _err("Unclosed '{' in interpolated string")
         expr_src = inner[i + 1:j - 1]
         tokens = Lexer().get_lexer().lex(expr_src + "\n")
         program = Parser().parse(tokens)
         if len(program.statements) != 1:
-            raise CentvrionError("Interpolation must contain exactly one expression")
+            raise _err("Interpolation must contain exactly one expression")
         stmt = program.statements[0]
         if not isinstance(stmt, ast_nodes.ExpressionStatement):
-            raise CentvrionError("Interpolation must contain an expression, not a statement")
+            raise _err("Interpolation must contain an expression, not a statement")
         parts.append(stmt.expression)
         i = j
     elif ch == '}':
@@ -95,7 +118,7 @@ def _parse_interpolated(raw_value):
                 current.append('}')
                 i += 2
                 continue
-            raise CentvrionError("Unmatched '}' in string (use '}}' for literal '}')")
+            raise _err("Unmatched '}' in string (use '}}' for literal '}')")
         else:
             current.append(ch)
             i += 1
@@ -154,7 +177,7 @@ class Parser():

         @self.pg.production('module_call : KEYWORD_CVM MODULE')
         def module_call(tokens):
-            return ast_nodes.ModuleCall(tokens[1].value)
+            return _at(ast_nodes.ModuleCall(tokens[1].value), tokens[0])


         # Statements
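
Reviewer note: every production below follows the same convention: build the node, then _at(node, tokens[0]), so each construct is reported at its first token. Where the first grammar symbol is itself a node rather than a token (the AVGE/MINVE-style compound assignments), tokens[0] carries a .pos instead of a .source_pos and _at copies that. A quick way to see where positions originate, assuming rply numbers lines and columns from 1:

    tok = next(iter(Lexer().get_lexer().lex("DESIGNA x VT I\n")))
    tok.source_pos.lineno, tok.source_pos.colno   # (1, 1)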
@@ -172,7 +195,7 @@ class Parser():

         @self.pg.production('statement : KEYWORD_DESIGNA id KEYWORD_VT expression')
         def statement_designa(tokens):
-            return ast_nodes.Designa(tokens[1], tokens[3])
+            return _at(ast_nodes.Designa(tokens[1], tokens[3]), tokens[0])

         @self.pg.production('index_chain : SYMBOL_LBRACKET expression SYMBOL_RBRACKET')
         def index_chain_single(tokens):
@@ -184,39 +207,39 @@ class Parser():

         @self.pg.production('statement : KEYWORD_DESIGNA id index_chain KEYWORD_VT expression')
         def statement_designa_index(tokens):
-            return ast_nodes.DesignaIndex(tokens[1], tokens[2], tokens[4])
+            return _at(ast_nodes.DesignaIndex(tokens[1], tokens[2], tokens[4]), tokens[0])

         @self.pg.production('statement : KEYWORD_DESIGNA id SYMBOL_COMMA id_list_rest KEYWORD_VT expression')
         def statement_designa_destructure(tokens):
-            return ast_nodes.DesignaDestructure([tokens[1]] + tokens[3], tokens[5])
+            return _at(ast_nodes.DesignaDestructure([tokens[1]] + tokens[3], tokens[5]), tokens[0])

         @self.pg.production('statement : id KEYWORD_AVGE expression')
         def statement_avge(tokens):
-            return ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_PLUS"))
+            return _at(ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_PLUS")), tokens[0])

         @self.pg.production('statement : id KEYWORD_MINVE expression')
         def statement_minve(tokens):
-            return ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_MINUS"))
+            return _at(ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_MINUS")), tokens[0])

         @self.pg.production('statement : id KEYWORD_MVLTIPLICA expression')
         def statement_mvltiplica(tokens):
-            return ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_TIMES"))
+            return _at(ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_TIMES")), tokens[0])

         @self.pg.production('statement : id KEYWORD_DIVIDE expression')
         def statement_divide(tokens):
-            return ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_DIVIDE"))
+            return _at(ast_nodes.Designa(tokens[0], ast_nodes.BinOp(tokens[0], tokens[2], "SYMBOL_DIVIDE")), tokens[0])

         @self.pg.production('statement : expression')
         def statement_expression(tokens):
-            return ast_nodes.ExpressionStatement(tokens[0])
+            return _at(ast_nodes.ExpressionStatement(tokens[0]), tokens[0])

         @self.pg.production('statement : KEYWORD_DEFINI id ids KEYWORD_VT SYMBOL_LCURL statements SYMBOL_RCURL')
         def defini(tokens):
-            return ast_nodes.Defini(tokens[1], tokens[2], tokens[5])
+            return _at(ast_nodes.Defini(tokens[1], tokens[2], tokens[5]), tokens[0])

         @self.pg.production('statement : KEYWORD_REDI expressions')
         def redi(tokens):
-            return ast_nodes.Redi(tokens[1])
+            return _at(ast_nodes.Redi(tokens[1]), tokens[0])

         @self.pg.production('statement : per_statement')
         @self.pg.production('statement : dum_statement')
@@ -227,20 +250,20 @@ class Parser():
             return tokens[0]

         @self.pg.production('statement : KEYWORD_ERVMPE')
-        def erumpe(_):
-            return ast_nodes.Erumpe()
+        def erumpe(tokens):
+            return _at(ast_nodes.Erumpe(), tokens[0])

         @self.pg.production('statement : KEYWORD_CONTINVA')
-        def continva(_):
-            return ast_nodes.Continva()
+        def continva(tokens):
+            return _at(ast_nodes.Continva(), tokens[0])

         @self.pg.production('si_statement : KEYWORD_SI expression KEYWORD_TVNC SYMBOL_LCURL statements SYMBOL_RCURL')
         @self.pg.production('si_statement : KEYWORD_SI expression KEYWORD_TVNC SYMBOL_LCURL statements SYMBOL_RCURL aluid_statement')
         def si_statement(tokens):
             if len(tokens) == 7:
-                return ast_nodes.SiStatement(tokens[1], tokens[4], tokens[6])
+                return _at(ast_nodes.SiStatement(tokens[1], tokens[4], tokens[6]), tokens[0])
             else:
-                return ast_nodes.SiStatement(tokens[1], tokens[4], None)
+                return _at(ast_nodes.SiStatement(tokens[1], tokens[4], None), tokens[0])

         @self.pg.production('aluid_statement : KEYWORD_ALIVD si_statement')
         def aluid_si(tokens):
@@ -252,34 +275,34 @@ class Parser():

         @self.pg.production('dum_statement : KEYWORD_DVM expression KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
         def dum(tokens):
-            return ast_nodes.DumStatement(tokens[1], tokens[4])
+            return _at(ast_nodes.DumStatement(tokens[1], tokens[4]), tokens[0])

         # AETERNVM is sugar for `DVM FALSITAS` — same AST, no observable difference.
         @self.pg.production('dum_statement : KEYWORD_AETERNVM KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
         def aeternvm(tokens):
-            return ast_nodes.DumStatement(ast_nodes.Bool(False), tokens[3])
+            return _at(ast_nodes.DumStatement(ast_nodes.Bool(False), tokens[3]), tokens[0])

         @self.pg.production('per_statement : KEYWORD_PER id SYMBOL_COMMA id_list_rest KEYWORD_IN expression KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
         def per_destructure(tokens):
-            return ast_nodes.PerStatement(tokens[5], [tokens[1]] + tokens[3], tokens[8])
+            return _at(ast_nodes.PerStatement(tokens[5], [tokens[1]] + tokens[3], tokens[8]), tokens[0])

         @self.pg.production('per_statement : KEYWORD_PER id KEYWORD_IN expression KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
         def per(tokens):
-            return ast_nodes.PerStatement(tokens[3], tokens[1], tokens[6])
+            return _at(ast_nodes.PerStatement(tokens[3], tokens[1], tokens[6]), tokens[0])

         @self.pg.production('tempta_statement : KEYWORD_TEMPTA SYMBOL_LCURL statements SYMBOL_RCURL KEYWORD_CAPE id SYMBOL_LCURL statements SYMBOL_RCURL')
         def tempta(tokens):
-            return ast_nodes.TemptaStatement(tokens[2], tokens[5], tokens[7])
+            return _at(ast_nodes.TemptaStatement(tokens[2], tokens[5], tokens[7]), tokens[0])

         @self.pg.production('donicum_statement : KEYWORD_DONICVM id KEYWORD_VT expression KEYWORD_VSQVE expression KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
         def donicum(tokens):
-            range_array = ast_nodes.DataRangeArray(tokens[3], tokens[5])
-            return ast_nodes.PerStatement(range_array, tokens[1], tokens[8])
+            range_array = _at(ast_nodes.DataRangeArray(tokens[3], tokens[5]), tokens[0])
+            return _at(ast_nodes.PerStatement(range_array, tokens[1], tokens[8]), tokens[0])

         @self.pg.production('donicum_statement : KEYWORD_DONICVM id KEYWORD_VT expression KEYWORD_VSQVE expression KEYWORD_GRADV expression KEYWORD_FAC SYMBOL_LCURL statements SYMBOL_RCURL')
         def donicum_step(tokens):
-            range_array = ast_nodes.DataRangeArray(tokens[3], tokens[5], tokens[7])
-            return ast_nodes.PerStatement(range_array, tokens[1], tokens[10])
+            range_array = _at(ast_nodes.DataRangeArray(tokens[3], tokens[5], tokens[7]), tokens[0])
+            return _at(ast_nodes.PerStatement(range_array, tokens[1], tokens[10]), tokens[0])

         # expressions
         @self.pg.production('expressions : SYMBOL_LPARENS expression_list')
@@ -311,28 +334,29 @@ class Parser():

         @self.pg.production('expression : BUILTIN expressions')
         def expression_builtin(tokens):
-            return ast_nodes.BuiltIn(tokens[0].value, tokens[1])
+            return _at(ast_nodes.BuiltIn(tokens[0].value, tokens[1]), tokens[0])

         @self.pg.production('expression : DATA_STRING')
         def expression_string(tokens):
-            return _parse_interpolated(tokens[0].value)
+            node = _parse_interpolated(tokens[0].value, tokens[0].source_pos)
+            return _at(node, tokens[0])

         @self.pg.production('expression : DATA_NUMERAL')
         def expression_numeral(tokens):
-            return ast_nodes.Numeral(tokens[0].value)
+            return _at(ast_nodes.Numeral(tokens[0].value), tokens[0])

         @self.pg.production('expression : DATA_FRACTION')
         def expression_fraction(tokens):
-            return ast_nodes.Fractio(tokens[0].value)
+            return _at(ast_nodes.Fractio(tokens[0].value), tokens[0])

         @self.pg.production('expression : KEYWORD_FALSITAS')
         @self.pg.production('expression : KEYWORD_VERITAS')
         def expression_bool(tokens):
-            return ast_nodes.Bool(tokens[0].name == "KEYWORD_VERITAS")
+            return _at(ast_nodes.Bool(tokens[0].name == "KEYWORD_VERITAS"), tokens[0])

         @self.pg.production('expression : KEYWORD_NVLLVS')
-        def expression_nullus(_):
-            return ast_nodes.Nullus()
+        def expression_nullus(tokens):
+            return _at(ast_nodes.Nullus(), tokens[0])

         @self.pg.production('expression : expression SYMBOL_AT expression')
         @self.pg.production('expression : expression SYMBOL_AMPERSAND expression')
@@ -350,23 +374,23 @@ class Parser():
         @self.pg.production('expression : expression KEYWORD_ET expression')
         @self.pg.production('expression : expression KEYWORD_AVT expression')
         def binop(tokens):
-            return ast_nodes.BinOp(tokens[0], tokens[2], tokens[1].name)
+            return _at(ast_nodes.BinOp(tokens[0], tokens[2], tokens[1].name), tokens[0])

         @self.pg.production('expression : SYMBOL_MINUS expression', precedence='UMINUS')
         def unary_minus(tokens):
-            return ast_nodes.UnaryMinus(tokens[1])
+            return _at(ast_nodes.UnaryMinus(tokens[1]), tokens[0])

         @self.pg.production('expression : KEYWORD_NON expression', precedence='UNOT')
         def unary_not(tokens):
-            return ast_nodes.UnaryNot(tokens[1])
+            return _at(ast_nodes.UnaryNot(tokens[1]), tokens[0])

         @self.pg.production('expression : KEYWORD_INVOCA expression expressions')
         def invoca(tokens):
-            return ast_nodes.Invoca(tokens[1], tokens[2])
+            return _at(ast_nodes.Invoca(tokens[1], tokens[2]), tokens[0])

         @self.pg.production('expression : KEYWORD_FVNCTIO ids KEYWORD_VT SYMBOL_LCURL statements SYMBOL_RCURL')
         def fvnctio(tokens):
-            return ast_nodes.Fvnctio(tokens[1], tokens[4])
+            return _at(ast_nodes.Fvnctio(tokens[1], tokens[4]), tokens[0])

         @self.pg.production('expression : SYMBOL_LPARENS expression SYMBOL_RPARENS')
         def parens(tokens):
@@ -382,40 +406,40 @@ class Parser():

         @self.pg.production('expression : KEYWORD_TABVLA SYMBOL_LCURL opt_newline SYMBOL_RCURL')
         def dict_literal_empty(tokens):
-            return ast_nodes.DataDict([])
+            return _at(ast_nodes.DataDict([]), tokens[0])

         @self.pg.production('expression : KEYWORD_TABVLA SYMBOL_LCURL opt_newline dict_items opt_newline SYMBOL_RCURL')
         def dict_literal(tokens):
-            return ast_nodes.DataDict(tokens[3])
+            return _at(ast_nodes.DataDict(tokens[3]), tokens[0])

         @self.pg.production('expression : SYMBOL_LBRACKET SYMBOL_RBRACKET')
         @self.pg.production('expression : SYMBOL_LBRACKET newlines SYMBOL_RBRACKET')
-        def array_empty(_):
-            return ast_nodes.DataArray([])
+        def array_empty(tokens):
+            return _at(ast_nodes.DataArray([]), tokens[0])

         @self.pg.production('expression : SYMBOL_LBRACKET array_items opt_newline SYMBOL_RBRACKET')
         def array(tokens):
-            return ast_nodes.DataArray(tokens[1])
+            return _at(ast_nodes.DataArray(tokens[1]), tokens[0])

         @self.pg.production('expression : SYMBOL_LBRACKET newlines array_items opt_newline SYMBOL_RBRACKET')
         def array_leading_newline(tokens):
-            return ast_nodes.DataArray(tokens[2])
+            return _at(ast_nodes.DataArray(tokens[2]), tokens[0])

         @self.pg.production('expression : SYMBOL_LBRACKET expression KEYWORD_VSQVE expression SYMBOL_RBRACKET')
         def range_array(tokens):
-            return ast_nodes.DataRangeArray(tokens[1], tokens[3])
+            return _at(ast_nodes.DataRangeArray(tokens[1], tokens[3]), tokens[0])

         @self.pg.production('expression : SYMBOL_LBRACKET expression KEYWORD_VSQVE expression KEYWORD_GRADV expression SYMBOL_RBRACKET')
         def range_array_step(tokens):
-            return ast_nodes.DataRangeArray(tokens[1], tokens[3], tokens[5])
+            return _at(ast_nodes.DataRangeArray(tokens[1], tokens[3], tokens[5]), tokens[0])

         @self.pg.production('expression : expression SYMBOL_LBRACKET expression SYMBOL_RBRACKET', precedence='INDEX')
         def array_index(tokens):
-            return ast_nodes.ArrayIndex(tokens[0], tokens[2])
+            return _at(ast_nodes.ArrayIndex(tokens[0], tokens[2]), tokens[0])

         @self.pg.production('expression : expression SYMBOL_LBRACKET expression KEYWORD_VSQVE expression SYMBOL_RBRACKET', precedence='INDEX')
         def array_slice(tokens):
-            return ast_nodes.ArraySlice(tokens[0], tokens[2], tokens[4])
+            return _at(ast_nodes.ArraySlice(tokens[0], tokens[2], tokens[4]), tokens[0])

         # ids
         @self.pg.production('ids : SYMBOL_LPARENS id_list')
@@ -443,7 +467,7 @@ class Parser():

         @self.pg.production("id : ID")
         def id_expression(tokens):
-            return ast_nodes.ID(tokens[0].value)
+            return _at(ast_nodes.ID(tokens[0].value), tokens[0])

         @self.pg.error
         def error_handle(token):
@@ -174,3 +174,34 @@ class TestCompilerErrors(unittest.TestCase):
     @parameterized.expand(compiler_error_tests)
     def test_compiler_errors(self, source, error_type):
         run_compiler_error_test(self, source)
+
+
+class TestErrorLineNumbers(unittest.TestCase):
+    def test_interpreter_error_includes_line(self):
+        source = "DESIGNA x VT III\nDIC(y)\n"
+        tokens = Lexer().get_lexer().lex(source)
+        program = Parser().parse(tokens)
+        with self.assertRaisesRegex(CentvrionError, r"at line 2"):
+            program.eval()
+
+    def test_compiled_error_includes_line(self):
+        source = "DESIGNA x VT III\nDIC(y)\n"
+        tokens = Lexer().get_lexer().lex(source)
+        program = Parser().parse(tokens)
+        c_source = compile_program(program)
+        with tempfile.NamedTemporaryFile(suffix=".c", delete=False, mode="w") as tmp_c:
+            tmp_c.write(c_source)
+            tmp_c_path = tmp_c.name
+        with tempfile.NamedTemporaryFile(suffix="", delete=False) as tmp_bin:
+            tmp_bin_path = tmp_bin.name
+        try:
+            subprocess.run(
+                ["gcc", "-O2", tmp_c_path, _RUNTIME_C, "-o", tmp_bin_path, "-lcurl", "-lmicrohttpd"],
+                check=True, capture_output=True,
+            )
+            proc = subprocess.run([tmp_bin_path], capture_output=True, text=True)
+            self.assertNotEqual(proc.returncode, 0)
+            self.assertIn("at line 2", proc.stderr)
+        finally:
+            os.unlink(tmp_c_path)
+            os.unlink(tmp_bin_path)