started work on parser
main.py (2 changed lines)
@@ -4,7 +4,7 @@ LEXER_DEBUG: bool = True
 
 
 if __name__ == "__main__":
-    with open("tests/lexer.pla") as f:
+    with open("tests/parser.pla") as f:
         code: str = f.read()
 
     if LEXER_DEBUG:
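
The only change to main.py points the test harness at the new parser fixture. For orientation, here is a minimal sketch of the full __main__ block this hunk sits in, assuming the Lexer interface used by parser.py below; the Lexer(source=...) constructor argument and the TokenType.EOF sentinel are assumptions, not shown in this diff:

from lexer import Lexer
from lexer_token import TokenType

LEXER_DEBUG: bool = True

if __name__ == "__main__":
    with open("tests/parser.pla") as f:
        code: str = f.read()

    if LEXER_DEBUG:
        # Assumed debug loop: print every token until an (assumed) EOF token appears.
        debug_lexer = Lexer(source=code)  # constructor argument is a guess
        while (token := debug_lexer.next_token()).type != TokenType.EOF:
            print(token)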

parser.py (new file, 62 lines)
@@ -0,0 +1,62 @@
from lexer import Lexer
from lexer_token import Token, TokenType
from typing import Callable
from enum import Enum, auto

class PrecedenceType(Enum):
    P_LOWEST = 0
    P_EQUALS = auto()
    P_LESSGREATER = auto()
    P_SUM = auto()
    P_PRODUCT = auto()
    P_EXPONENT = auto()
    P_PREFIX = auto()
    P_CALL = auto()
    P_INDEX = auto()

PRECEDENCES: dict[TokenType, PrecedenceType] = {
    TokenType.PLUS: PrecedenceType.P_SUM,
    TokenType.MINUS: PrecedenceType.P_SUM,
    TokenType.ASTERISK: PrecedenceType.P_PRODUCT,
    TokenType.SLASH: PrecedenceType.P_PRODUCT,
    TokenType.MODULUS: PrecedenceType.P_PRODUCT,
    TokenType.POW: PrecedenceType.P_EXPONENT
}

class Parser:
    def __init__(self, lexer: Lexer) -> None:
        self.lexer: Lexer = lexer

        self.errors: list[str] = []

        self.current_token: Token | None = None
        self.peek_token: Token | None = None

        self.prefix_parse_functions: dict[TokenType, Callable] = {}  # prefix expressions, e.g. -1
        self.infix_parse_functions: dict[TokenType, Callable] = {}   # infix expressions, e.g. 5 + 5

        self.__next_token()
        self.__next_token()

    # region Parser helpers
    def __next_token(self) -> None:
        self.current_token = self.peek_token
        self.peek_token = self.lexer.next_token()

    def __peek_token_is(self, tt: TokenType) -> bool:
        return self.peek_token.type == tt

    def __expect_peek(self, tt: TokenType) -> bool:
        if self.__peek_token_is(tt):
            self.__next_token()
            return True
        else:
            self.__peek_error(tt)
            return False

    def __peek_error(self, tt: TokenType) -> None:
        self.errors.append(f"Expected next token to be {tt}, got {self.peek_token.type} instead.")

    def __no_prefix_parse_function_error(self, tt: TokenType) -> None:
        self.errors.append(f"No Prefix Parse Function for {tt} found.")
    # endregion
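
parser.py sets up the scaffolding of a Pratt (top-down operator-precedence) parser: PRECEDENCES assigns each operator token a binding power, and the two registries map token types to parse functions. The expression loop itself is not part of this commit, so the following is only a sketch of how these pieces are typically wired together; every name in it (__peek_precedence, __parse_expression, prefix_fn, infix_fn) is a hypothetical continuation, not something this diff defines.

    # Hypothetical continuation inside class Parser -- illustration only, not part of this commit.
    def __peek_precedence(self) -> PrecedenceType:
        # Unknown operator tokens fall back to the weakest binding power.
        return PRECEDENCES.get(self.peek_token.type, PrecedenceType.P_LOWEST)

    def __parse_expression(self, precedence: PrecedenceType):
        prefix_fn = self.prefix_parse_functions.get(self.current_token.type)
        if prefix_fn is None:
            self.__no_prefix_parse_function_error(self.current_token.type)
            return None

        left = prefix_fn()
        # For "5 + 5": PLUS maps to P_SUM, which binds tighter than the caller's
        # P_LOWEST, so the registered infix function consumes the right-hand side.
        while precedence.value < self.__peek_precedence().value:
            infix_fn = self.infix_parse_functions.get(self.peek_token.type)
            if infix_fn is None:
                return left
            self.__next_token()
            left = infix_fn(left)
        return left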

tests/parser.pla (new file, 1 line)
@@ -0,0 +1 @@
5 + 5