# Parser for the language grammar, built with rply's ParserGenerator.
from rply import ParserGenerator

from lexer import all_tokens

import ast_nodes


# Unique token names the lexer can emit, fed to the ParserGenerator.
# ``sorted`` keeps the order deterministic across runs — ``list(set(...))``
# varied with hash randomization, making builds non-reproducible.
ALL_TOKENS = sorted({i[0] for i in all_tokens})
class Parser():
    """Builds the LR parser for the language via rply.

    Typical usage: instantiate, call :meth:`parse` once to register every
    grammar production, then :meth:`get_parser` to obtain the parser.
    """

    def __init__(self):
        # The generator needs the full set of token names up front.
        self.pg = ParserGenerator(ALL_TOKENS)

    def parse(self):
        """Register all grammar productions on the generator.

        Each decorated closure receives the matched symbols (tokens and/or
        previously-built AST nodes) and returns the corresponding AST node.
        """

        @self.pg.production('program : opt_newline module_calls statements')
        def program(p):
            # p[-2] is the module-call list, p[-1] the statement list.
            return ast_nodes.Program(p[-2], p[-1])

        @self.pg.production('opt_newline : ')
        @self.pg.production('opt_newline : NEWLINE')
        def opt_newline(_):
            # Optional newlines are purely cosmetic; nothing to build.
            return None

        @self.pg.production('module_calls : ')
        @self.pg.production('module_calls : module_call NEWLINE module_calls')
        def module_calls(p):
            # Right-recursive list: empty, or head followed by the tail.
            return [p[0]] + p[2] if p else []

        @self.pg.production('module_call : KEYWORD_VOCA MODULE')
        def module_call(p):
            return ast_nodes.ModuleCall(p[1].value)

        @self.pg.production('statements : ')
        @self.pg.production('statements : statement NEWLINE statements')
        def statements(p):
            return [p[0]] + p[2] if p else []

        @self.pg.production('statement : KEYWORD_DESIGNA id KEYWORD_UT expression')
        def statement_designa(p):
            # Assignment: designa <id> ut <expression>.
            return ast_nodes.Designa(p[1], p[3])

        @self.pg.production('statement : expression')
        def statement_expression(p):
            return ast_nodes.ExpressionStatement(p[0])

        @self.pg.production('expressions : ')
        @self.pg.production('expressions : expression expressions')
        def expressions(p):
            return [p[0]] + p[1] if p else []

        @self.pg.production('ids : ')
        @self.pg.production('ids : id ids')
        def ids(p):
            return [p[0]] + p[1] if p else []

        @self.pg.production('expression : id')
        def expression_id(p):
            return p[0]

        @self.pg.production('statement : KEYWORD_DEFINI id ids KEYWORD_UT SYMBOL_LCURL opt_newline statements opt_newline SYMBOL_RCURL')
        def defini(p):
            # Function definition: name, parameter ids, body statements.
            return ast_nodes.Defini(p[1], p[2], p[6])

        @self.pg.production('statement : KEYWORD_REDI expressions')
        def redi(p):
            # Return statement carrying zero or more expressions.
            return ast_nodes.Redi(p[1])

        @self.pg.production('expression : DATA_STRING')
        def expression_string(p):
            return ast_nodes.String(p[0].value)

        @self.pg.production('expression : DATA_NUMERAL')
        def expression_numeral(p):
            return ast_nodes.Numeral(p[0].value)

        @self.pg.production('expression : KEYWORD_FALSITAS')
        @self.pg.production('expression : KEYWORD_VERITAS')
        def expression_bool(p):
            # True iff the matched keyword is "veritas".
            return ast_nodes.Bool(p[0].name == "KEYWORD_VERITAS")

        @self.pg.production('expression : KEYWORD_NULLUS')
        def expression_nullus(_):
            return ast_nodes.Nullus()

        @self.pg.production('expression : expression SYMBOL_MINUS expression')
        @self.pg.production('expression : expression SYMBOL_PLUS expression')
        @self.pg.production('expression : expression KEYWORD_EST expression')
        @self.pg.production('expression : expression KEYWORD_MINUS expression')
        @self.pg.production('expression : expression KEYWORD_PLUS expression')
        def binop(p):
            # Both operands plus the operator's token name as the tag.
            return ast_nodes.BinOp(p[0], p[2], p[1].name)

        @self.pg.production('expression : BUILTIN expressions')
        def expression_builtin(p):
            return ast_nodes.BuiltIn(p[0].value, p[1])

        @self.pg.production("id : ID")
        def id_expression(p):
            return ast_nodes.ID(p[0].value)

        @self.pg.production('expression : KEYWORD_INVOCA id expressions')
        def invoca(p):
            # Function call: invoca <id> <arguments>.
            return ast_nodes.Invoca(p[1], p[2])

        @self.pg.production('statement : si_statement')
        def si_statement(p):
            return p[0]

        @self.pg.production('statement : dum_statement')
        def dum_statement(p):
            return p[0]

        @self.pg.production('si_statement : KEYWORD_SI expression KEYWORD_TUNC SYMBOL_LCURL opt_newline statements opt_newline SYMBOL_RCURL opt_newline aluid_statement')
        def si(p):
            # If: condition (p[1]), body (p[5]), else/else-if chain (p[9]).
            return ast_nodes.SiStatement(p[1], p[5], p[9])

        @self.pg.production('dum_statement : KEYWORD_DUM expression KEYWORD_FACE SYMBOL_LCURL opt_newline statements opt_newline SYMBOL_RCURL')
        def dum(p):
            # While loop: condition (p[1]) and body (p[5]).
            return ast_nodes.DumStatement(p[1], p[5])

        @self.pg.production('aluid_statement : ')
        def aluid_empty(_):
            return None

        @self.pg.production('aluid_statement : KEYWORD_ALUID si_statement')
        def aluid_si(p):
            # else-if: wrap the nested si statement in a single-item list.
            return [p[1]]

        @self.pg.production('aluid_statement : KEYWORD_ALUID SYMBOL_LCURL opt_newline statements opt_newline SYMBOL_RCURL aluid_statement')
        def aluid(p):
            # Plain else block: return its statements.
            # NOTE(review): the trailing aluid_statement (p[6]) is discarded;
            # presumably it is only ever the empty production here — confirm.
            return p[3]

        @self.pg.production('expression : SYMBOL_LPARENS expression SYMBOL_RPARENS')
        def parens(p):
            return p[1]

        @self.pg.error
        def error_handle(token):
            # Surface the offending token to the caller.
            raise ValueError(token)

    def get_parser(self):
        """Build and return the generated rply parser object."""
        return self.pg.build()