Initial Commit

2025-12-26 18:32:08 +11:00
commit 7949943c17
8 changed files with 255 additions and 0 deletions

src/error.py Normal file

@@ -0,0 +1,5 @@
def error(errorType: str = 'Error', message: str = "This is a bugged error message. Please report this issue."):
    return f"\033[91m{errorType}: \033[0m{message}"

def warn(message: str):
    return f"\033[33mWarning: {message}\033[0m"

src/interpreter.py Normal file

@@ -0,0 +1,13 @@
import parser as p
import error as err
import sys

# Feel free to replace the return type
def run_program(parsed: list[p.ParsedLine]) -> None:
    for line in parsed:
        match line.keyword:
            case p.Keyword.Default:
                sys.exit(err.error('unknownError', 'This is a bugged error message that should never be reached.'))
            # Add implementation for other commands here
            case _:
                sys.exit(err.error('notImplementedError', f'{line.keyword.value} keyword not implemented yet'))
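
To sketch where implementations would go, here is one possible extra case written as a standalone helper; the name run_line and the Stdout behaviour are assumptions, not anything this commit defines:

import sys
import parser as p
import error as err

def run_line(line: p.ParsedLine) -> None:
    # Hypothetical handler for a single parsed line (assumed Stdout semantics).
    match line.keyword:
        case p.Keyword.Stdout:
            _arg_type, value = line.args[0]  # args are (ArgType, str) tuples from parser.parse
            print(value, end='')
        case _:
            sys.exit(err.error('notImplementedError', f'{line.keyword.value} keyword not implemented yet'))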

src/lexer.py Normal file

@@ -0,0 +1,39 @@
def tokenise(code: str) -> list[list[str]]:
    token: str = ''
    tokens: list[list[str]] = [[]]
    string_char: str = ''  # quote that opened the current string literal, '' when outside one
    isComment = False
    for char in code:
        if string_char:
            token += char
            # Only the same quote that opened the string can close it.
            if char == string_char:
                string_char = ''
        elif isComment and char != '\n':
            pass
        else:
            match char:
                case '\n':
                    isComment = False
                    if token != '' or tokens[-1] != []:
                        if token != '':
                            tokens[-1].append(token)
                            token = ''
                        tokens.append([])
                case ' ':
                    # Skip runs of spaces instead of emitting empty tokens.
                    if token != '':
                        tokens[-1].append(token)
                        token = ''
                case '\'' | '\"':
                    string_char = char
                    token += char
                case '#':
                    isComment = True
                case _:
                    token += char
    if token:
        tokens[-1].append(token)
    return tokens
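
As a quick illustration of the token shape (not part of the commit), using the sigil syntax shown in parser.py:

import lexer

# One inner list per source line; string literals keep their quotes, comments are dropped.
print(lexer.tokenise('println "Hello, world!"  # greet\nset &x 42'))
# [['println', '"Hello, world!"'], ['set', '&x', '42']]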

src/main.py Executable file

@@ -0,0 +1,25 @@
#!/usr/bin/env python3
import sys
import lexer
import parser
import interpreter
import error as err

def main(argc: int, argv: list[str]):
    if argc < 3:
        return f'Usage: {argv[0]} (filename) (writefile) (flags)'
    try:
        with open(argv[1], 'r') as file:
            content = file.read()
    except FileNotFoundError:
        # error() expects (errorType, message); name the type so the message is not dropped.
        return err.error('fileNotFoundError', f'File {argv[1]} not found.')
    tokens = lexer.tokenise(content)
    nodes = parser.parse(tokens)
    interpreter.run_program(nodes)
    return 0

if __name__ == "__main__":
    sys.exit(main(len(sys.argv), sys.argv))
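
How the entry point behaves when invoked (file names below are placeholders, not part of the commit):

# Hypothetical invocation from the repository root:
#   ./src/main.py program.src out.txt
# main() returns 0 on success or an error/usage string; sys.exit(0) exits cleanly,
# while sys.exit(<str>) prints the string to stderr and exits with status 1.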

src/parser.py Normal file

@@ -0,0 +1,145 @@
from enum import Enum
import error as err
import sys
class Keyword(Enum):
    Default = -1
    # All below can be replaced
    If = 'if'
    Jump = 'jump'
    End = 'end'
    Stdin = 'stdin'
    Stdout = 'stdout'
    Stdlnout = 'stdlnout'
    Set = 'set'
    Gettype = 'gettype'
    Exists = 'exists'
    Setlist = 'setlist'
    Setlistat = 'setlistat'
    Getlistat = 'getlistat'
    Getlistsize = 'getlistsize'
    Listappend = 'listappend'
    Getstrsize = 'getstrsize'
    Getstrcharat = 'getstrcharat'
    Add = 'add'
    Subtract = 'subtract'
    Multiply = 'multiply'
    Divide = 'divide'
    Equal = 'equal'
    Inequal = 'inequal'
    Not = 'not'
    Greater = 'greater'
    Lesser = 'lesser'
    Stoi = 'stoi'
    Stod = 'stod'
    Tostring = 'tostring'
    Fun = 'fun'
    Return = 'return'
    Endfun = 'endfun'
    Pusharg = 'pusharg'
    Call = 'call'
    Struct = 'struct'
    Endstruct = 'endstruct'
    Init = 'init'
    Use = 'use'
    Extern = 'extern'
class ArgType(Enum):
    Keyword = 0         # e.g. end if
    Value = 1           # e.g. println $var
    Direct = 2          # e.g. setlist &var
    Line = 3            # e.g. jump %start
    Function = 4        # e.g. call !doStuff
    Type = 5            # e.g. call -int ...
    Literal_String = 6  # e.g. println "Hello, world!"
    Literal_Int = 7     # e.g. println 97104
    Literal_Char = 8    # e.g. println 'a'
keywords: dict[str, Keyword] = {
    '': Keyword.Default,
    # All below can be replaced
    'if': Keyword.If,
    'jump': Keyword.Jump,
    'end': Keyword.End,
    'input': Keyword.Stdin,
    'stdin': Keyword.Stdin,
    'print': Keyword.Stdout,
    'stdout': Keyword.Stdout,
    'println': Keyword.Stdlnout,
    'stdlnout': Keyword.Stdlnout,
    'set': Keyword.Set,
    'gettype': Keyword.Gettype,
    'exists': Keyword.Exists,
    'setlist': Keyword.Setlist,
    'setlistat': Keyword.Setlistat,
    'getlistat': Keyword.Getlistat,
    'getlistsize': Keyword.Getlistsize,
    'listappend': Keyword.Listappend,
    'getstrsize': Keyword.Getstrsize,
    'getstrcharat': Keyword.Getstrcharat,
    'add': Keyword.Add,
    'subtract': Keyword.Subtract,
    'multiply': Keyword.Multiply,
    'divide': Keyword.Divide,
    'equal': Keyword.Equal,
    'inequal': Keyword.Inequal,
    'not': Keyword.Not,
    'greater': Keyword.Greater,
    'lesser': Keyword.Lesser,
    'stoi': Keyword.Stoi,
    'stod': Keyword.Stod,
    'tostring': Keyword.Tostring,
    'fun': Keyword.Fun,
    'return': Keyword.Return,
    'endfun': Keyword.Endfun,
}
class ParsedLine:
    def __init__(self, keyword='', args=None):
        # Avoid a mutable default argument: a shared list would leak args between lines.
        self.keyword: Keyword = keywords[keyword]
        self.args: list[tuple[ArgType, str]] = args if args is not None else []

    def __repr__(self):
        return f'({self.keyword}, {self.args})'
def parse(tokens: list[list[str]]) -> list[ParsedLine]:
    parsed_lines: list[ParsedLine] = []
    for line in tokens:
        # Skip blank lines left behind by the lexer.
        if not line:
            continue
        parsed = ParsedLine('', [])
        try:
            parsed.keyword = keywords[line[0]]
        except KeyError:
            sys.exit(err.error('syntaxError', f'Invalid keyword: {line[0]}'))
        for arg in line[1:]:
            match arg[0]:
                case '$':
                    parsed.args.append((ArgType.Value, arg[1:]))
                case '&':
                    parsed.args.append((ArgType.Direct, arg[1:]))
                case '%':
                    parsed.args.append((ArgType.Line, arg[1:]))
                case '!':
                    parsed.args.append((ArgType.Function, arg[1:]))
                case '-':
                    parsed.args.append((ArgType.Type, arg[1:]))
                case _:
                    # Todo
                    if arg[0] == arg[-1] == '\'':
                        if len(arg) != 3:
                            sys.exit(err.error('typeError', f'Invalid char: {arg}'))
                        parsed.args.append((ArgType.Literal_Char, arg[1]))
                    elif arg[0] == arg[-1] == '\"':
                        parsed.args.append((ArgType.Literal_String, arg[1:-1]))
                    else:
                        try:
                            parsed.args.append((ArgType.Literal_Int, str(int(arg))))
                        except ValueError:
                            # Bare words must be known keywords (e.g. the `if` in `end if`).
                            if arg in keywords:
                                parsed.args.append((ArgType.Keyword, arg))
                            else:
                                sys.exit(err.error('syntaxError', f'Invalid literal: {arg}'))
        parsed_lines.append(parsed)
    return parsed_lines
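
End to end, a two-line program parses to roughly the following (illustrative; the exact output shape depends on ParsedLine.__repr__ and the enum reprs):

import lexer
import parser

nodes = parser.parse(lexer.tokenise('println "Hello, world!"\njump %start'))
print(nodes)
# Roughly: [(Keyword.Stdlnout, [(<ArgType.Literal_String: 6>, 'Hello, world!')]),
#           (Keyword.Jump, [(<ArgType.Line: 3>, 'start')])]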