started work on lexer
lexer.py (new file, 106 lines added)
@@ -0,0 +1,106 @@
from lexer_token import Token, TokenType
from typing import Any


class Lexer:
    def __init__(self, source: str) -> None:
        self.source = source

        # position points at current_char; read_position points at the next char to read.
        self.position: int = -1
        self.read_position: int = 0
        self.line_no: int = 1

        self.current_char: str | None = None

        self.__read_char()

    def __read_char(self) -> None:
        """Advance to the next character, or set current_char to None at end of input."""
        if self.read_position >= len(self.source):
            self.current_char = None
        else:
            self.current_char = self.source[self.read_position]

        self.position = self.read_position
        self.read_position += 1

    def __skip_whitespace(self) -> None:
        while self.current_char in [" ", "\t", "\n", "\r"]:
            if self.current_char == "\n":
                self.line_no += 1

            self.__read_char()

    def __new_token(self, tt: TokenType, literal: Any) -> Token:
        return Token(tt, literal, self.line_no, self.position)

    def __is_digit(self, char: str) -> bool:
        return "0" <= char <= "9"

    def __read_number(self) -> Token:
        """Read an INT or FLOAT literal; more than one '.' yields an ILLEGAL token."""
        start_pos: int = self.position
        dot_count: int = 0

        output: str = ""

        while self.__is_digit(self.current_char) or self.current_char == ".":
            if self.current_char == ".":
                dot_count += 1

            if dot_count > 1:
                # TODO: error message
                return self.__new_token(TokenType.ILLEGAL, self.source[start_pos:self.position])

            output += self.source[self.position]
            self.__read_char()

            if self.current_char is None:
                break

        if dot_count == 0:
            return self.__new_token(TokenType.INT, int(output))
        else:
            return self.__new_token(TokenType.FLOAT, float(output))

    def next_token(self) -> Token:
        tok: Token | None = None

        self.__skip_whitespace()

        match self.current_char:
            case "+":
                tok = self.__new_token(TokenType.PLUS, self.current_char)
            case "-":
                tok = self.__new_token(TokenType.MINUS, self.current_char)
            case "*":
                tok = self.__new_token(TokenType.ASTERISK, self.current_char)
            case "/":
                tok = self.__new_token(TokenType.SLASH, self.current_char)
            case "^":
                tok = self.__new_token(TokenType.POW, self.current_char)
            case "%":
                tok = self.__new_token(TokenType.MODULUS, self.current_char)
            case "(":
                tok = self.__new_token(TokenType.LPAREN, self.current_char)
            case ")":
                tok = self.__new_token(TokenType.RPAREN, self.current_char)
            case "[":
                tok = self.__new_token(TokenType.LBRACKET, self.current_char)
            case "]":
                tok = self.__new_token(TokenType.RBRACKET, self.current_char)
            case "{":
                tok = self.__new_token(TokenType.LCURLY, self.current_char)
            case "}":
                tok = self.__new_token(TokenType.RCURLY, self.current_char)
            case ":":
                tok = self.__new_token(TokenType.COLON, self.current_char)
            case None:
                tok = self.__new_token(TokenType.EOF, "")
            case _:
                if self.__is_digit(self.current_char):
                    # __read_number already advances past the literal, so return directly.
                    tok = self.__read_number()
                    return tok
                else:
                    tok = self.__new_token(TokenType.ILLEGAL, self.current_char)

        self.__read_char()
        return tok
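For quick manual testing of this commit, a minimal sketch is shown below. The lexer_token module is not part of this diff, so the TokenType members and the Token(type, literal, line_no, position) constructor are assumptions inferred from how lexer.py uses them; the real module may differ. Saved as lexer_token.py, the stub satisfies the import above, and the separate driver prints tokens until EOF.

# --- lexer_token.py (assumed stub; the real module is not in this diff) ---
from dataclasses import dataclass
from enum import Enum, auto
from typing import Any


class TokenType(Enum):
    # Only the members referenced by lexer.py in this commit.
    EOF = auto()
    ILLEGAL = auto()
    INT = auto()
    FLOAT = auto()
    PLUS = auto()
    MINUS = auto()
    ASTERISK = auto()
    SLASH = auto()
    POW = auto()
    MODULUS = auto()
    LPAREN = auto()
    RPAREN = auto()
    LBRACKET = auto()
    RBRACKET = auto()
    LCURLY = auto()
    RCURLY = auto()
    COLON = auto()


@dataclass
class Token:
    type: TokenType
    literal: Any
    line_no: int
    position: int


# --- main.py (example driver, separate file to avoid a circular import) ---
from lexer import Lexer
from lexer_token import TokenType

lexer = Lexer("1 + 2.5 * (3 - 4)")
tok = lexer.next_token()
while tok.type != TokenType.EOF:
    print(tok)
    tok = lexer.next_token()

The driver relies on next_token() returning an EOF token once the input is exhausted, which is what the case None branch above provides.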