Files
centvrion/parser.py
NikolajDanger 935c90f645
2022-06-08 13:30:17 +02:00

187 lines
7.2 KiB
Python

from multiprocessing.dummy import Array
from rply import ParserGenerator
from lexer import all_tokens
import ast_nodes
# Unique token names accepted by the parser (deduplicated with a set;
# element order is unspecified, as with the original set() round-trip).
ALL_TOKENS = list({token[0] for token in all_tokens})
class Parser():
    """LALR parser for the Centurion language, built on rply.

    Usage: construct, call ``parse()`` once to register every grammar
    production, then ``get_parser()`` to build the parser table.
    """

    def __init__(self):
        # Precedence groups, lowest binding first: keyword operators
        # (including equality 'est'), then symbolic +/-, then * and /.
        # NOTE(review): keyword plus/minus deliberately bind looser than
        # their symbolic counterparts — confirm this split is intended.
        self.pg = ParserGenerator(
            ALL_TOKENS,
            precedence=[
                ('left', ["KEYWORD_PLUS", "KEYWORD_MINUS", "KEYWORD_EST"]),
                ('left', ["SYMBOL_PLUS", "SYMBOL_MINUS"]),
                ('left', ["SYMBOL_TIMES", "SYMBOL_DIVIDE"])
            ]
        )

    def parse(self):
        """Register all grammar productions on the generator."""

        @self.pg.production('program : opt_newline module_calls statements')
        def program(p):
            # Negative indices: p[-2] is the module-call list,
            # p[-1] the statement list (skips the optional newline).
            return ast_nodes.Program(p[-2], p[-1])

        @self.pg.production('opt_newline : ')
        @self.pg.production('opt_newline : NEWLINE')
        def opt_newline(_):
            # Optional newline carries no semantic value.
            return None

        @self.pg.production('module_calls : ')
        @self.pg.production('module_calls : module_call NEWLINE module_calls')
        def module_calls(p):
            # Right-recursive list: cons the head onto the parsed tail.
            return [p[0]] + p[2] if p else []

        @self.pg.production('module_call : KEYWORD_VOCA MODULE')
        def module_call(p):
            return ast_nodes.ModuleCall(p[1].value)

        @self.pg.production('statements : ')
        @self.pg.production('statements : statement NEWLINE statements')
        def statements(p):
            # Same cons-list shape as module_calls.
            return [p[0]] + p[2] if p else []

        @self.pg.production('statement : KEYWORD_DESIGNA id KEYWORD_UT expression')
        def statement_designa(p):
            # Variable assignment: 'designa <id> ut <expr>'.
            return ast_nodes.Designa(p[1], p[3])

        @self.pg.production('statement : expression')
        def statement_expression(p):
            return ast_nodes.ExpressionStatement(p[0])

        @self.pg.production('expressions : ')
        @self.pg.production('expressions : expression expressions')
        def expressions(p):
            # Space-separated expression list (no separator token).
            return [p[0]] + p[1] if p else []

        @self.pg.production('ids : ')
        @self.pg.production('ids : id ids')
        def ids(p):
            return [p[0]] + p[1] if p else []

        @self.pg.production('expression : id')
        def expression_id(p):
            return p[0]

        @self.pg.production('statement : KEYWORD_DEFINI id ids KEYWORD_UT SYMBOL_LCURL opt_newline statements opt_newline SYMBOL_RCURL')
        def defini(p):
            # Function definition: name, parameter ids, body statements.
            return ast_nodes.Defini(p[1], p[2], p[6])

        @self.pg.production('statement : KEYWORD_REDI expressions')
        def redi(p):
            # Return statement.
            return ast_nodes.Redi(p[1])

        @self.pg.production('expression : DATA_STRING')
        def expression_string(p):
            # Strip the surrounding quote characters from the lexeme.
            return ast_nodes.String(p[0].value[1:-1])

        @self.pg.production('expression : DATA_NUMERAL')
        def expression_numeral(p):
            return ast_nodes.Numeral(p[0].value)

        @self.pg.production('expression : KEYWORD_FALSITAS')
        @self.pg.production('expression : KEYWORD_VERITAS')
        def expression_bool(p):
            # 'veritas' -> True, 'falsitas' -> False.
            return ast_nodes.Bool(p[0].name == "KEYWORD_VERITAS")

        @self.pg.production('expression : KEYWORD_NULLUS')
        def expression_nullus(_):
            return ast_nodes.Nullus()

        @self.pg.production('expression : expression SYMBOL_MINUS expression')
        @self.pg.production('expression : expression SYMBOL_PLUS expression')
        @self.pg.production('expression : expression SYMBOL_TIMES expression')
        @self.pg.production('expression : expression SYMBOL_DIVIDE expression')
        @self.pg.production('expression : expression KEYWORD_EST expression')
        @self.pg.production('expression : expression KEYWORD_MINUS expression')
        @self.pg.production('expression : expression KEYWORD_PLUS expression')
        def binop(p):
            # One handler for every binary operator; the operator token
            # name is forwarded so the AST node can dispatch on it.
            return ast_nodes.BinOp(p[0], p[2], p[1].name)

        @self.pg.production('expression : BUILTIN expressions')
        def expression_builtin(p):
            return ast_nodes.BuiltIn(p[0].value, p[1])

        @self.pg.production("id : ID")
        def id_expression(p):
            return ast_nodes.ID(p[0].value)

        @self.pg.production('expression : KEYWORD_INVOCA id expressions')
        def invoca(p):
            # Function call: 'invoca <name> <args...>'.
            return ast_nodes.Invoca(p[1], p[2])

        @self.pg.production('statement : si_statement')
        def si_statement(p):
            return p[0]

        @self.pg.production('statement : per_statement')
        @self.pg.production('statement : dum_statement')
        @self.pg.production('statement : donicum_statement')
        def loops(p):
            return p[0]

        @self.pg.production('statement : KEYWORD_ERUMPE')
        def erumpe(_):
            # Loop break.
            return ast_nodes.Erumpe()

        @self.pg.production('si_statement : KEYWORD_SI expression KEYWORD_TUNC SYMBOL_LCURL opt_newline statements opt_newline SYMBOL_RCURL opt_newline aluid_statement')
        def si(p):
            # if: condition, then-body, optional else clause.
            return ast_nodes.SiStatement(p[1], p[5], p[9])

        @self.pg.production('dum_statement : KEYWORD_DUM expression KEYWORD_FACE SYMBOL_LCURL opt_newline statements opt_newline SYMBOL_RCURL')
        def dum(p):
            # while: condition, body.
            return ast_nodes.DumStatement(p[1], p[5])

        @self.pg.production('per_statement : KEYWORD_PER id KEYWORD_IN expression KEYWORD_FACE SYMBOL_LCURL opt_newline statements opt_newline SYMBOL_RCURL')
        def per(p):
            # for-each: iterable, loop variable, body.
            return ast_nodes.PerStatement(p[3], p[1], p[7])

        @self.pg.production('donicum_statement : KEYWORD_DONICUM id KEYWORD_UT expression KEYWORD_USQUE expression KEYWORD_FACE SYMBOL_LCURL opt_newline statements opt_newline SYMBOL_RCURL')
        def donicum(p):
            # Counted loop, desugared to a for-each over a range array.
            bounds = ast_nodes.DataRangeArray(p[3], p[5])
            return ast_nodes.PerStatement(bounds, p[1], p[9])

        @self.pg.production('aluid_statement : ')
        def aluid_empty(_):
            # No else clause.
            return None

        @self.pg.production('aluid_statement : KEYWORD_ALUID si_statement')
        def aluid_si(p):
            # else-if: wrap the nested si in a list.
            return [p[1]]

        @self.pg.production('aluid_statement : KEYWORD_ALUID SYMBOL_LCURL opt_newline statements opt_newline SYMBOL_RCURL aluid_statement')
        def aluid(p):
            # Plain else: return its statement list.
            # NOTE(review): the trailing aluid_statement (p[6]) is
            # discarded here — confirm a chained 'aluid' after a plain
            # else is meant to be ignored.
            return p[3]

        @self.pg.production('expression : SYMBOL_LPARENS expression SYMBOL_RPARENS')
        def parens(p):
            return p[1]

        @self.pg.production('expression : SYMBOL_LBRACKET expressions SYMBOL_RBRACKET')
        def array(p):
            return ast_nodes.DataArray(p[1])

        @self.pg.production('expression : SYMBOL_LBRACKET expression KEYWORD_USQUE expression SYMBOL_RBRACKET')
        def range_array(p):
            # '[a usque b]' — range literal.
            return ast_nodes.DataRangeArray(p[1], p[3])

        @self.pg.error
        def error_handle(token):
            # Surface the offending token to the caller.
            raise ValueError(token)

    def get_parser(self):
        """Build and return the rply parser from the registered grammar."""
        return self.pg.build()