Added `not` instruction and escape sequences
This commit is contained in:
29
tokenizer.py
29
tokenizer.py
@@ -387,6 +387,35 @@ def tokenize(input_string: str):
|
||||
|
||||
current_char = input_string[pos]
|
||||
|
||||
if current_char == "\\":
|
||||
escape = ""
|
||||
pos += 1
|
||||
column += 1
|
||||
while pos <= len(input_string)-1:
|
||||
escape += input_string[pos]
|
||||
print(escape)
|
||||
|
||||
valid_escapes = ['"', 'n', 't', 'a', 'r']
|
||||
|
||||
if escape == '"':
|
||||
current_token += 'aaaaaaa'
|
||||
elif escape == "n":
|
||||
current_token += '\n'
|
||||
elif escape == "t":
|
||||
current_token += '\t'
|
||||
elif escape == "a":
|
||||
current_token += "\a"
|
||||
elif escape == "r":
|
||||
current_token += "\r"
|
||||
|
||||
if escape in valid_escapes:
|
||||
break
|
||||
|
||||
pos += 1
|
||||
column += 1
|
||||
|
||||
|
||||
|
||||
tokens.append(Token(
|
||||
TokenType.STRING,
|
||||
value=current_token
|
||||
|
Reference in New Issue
Block a user