Fix handling of escape sequences in string tokenization
This commit is contained in:
10
tokenizer.py
10
tokenizer.py
@@ -379,7 +379,8 @@ def tokenize(input_string: str):
|
||||
current_char = input_string[pos]
|
||||
|
||||
while current_char != '"':
|
||||
current_token += current_char
|
||||
if current_char != "\\":
|
||||
current_token += current_char
|
||||
pos += 1
|
||||
column += 1
|
||||
if pos > len(input_string)-1:
|
||||
@@ -393,12 +394,11 @@ def tokenize(input_string: str):
|
||||
column += 1
|
||||
while pos <= len(input_string)-1:
|
||||
escape += input_string[pos]
|
||||
print(escape)
|
||||
|
||||
valid_escapes = ['"', 'n', 't', 'a', 'r']
|
||||
valid_escapes = ['"', 'n', 't', 'a', 'r', '\\']
|
||||
|
||||
if escape == '"':
|
||||
current_token += 'aaaaaaa'
|
||||
current_token += '"'
|
||||
elif escape == "n":
|
||||
current_token += '\n'
|
||||
elif escape == "t":
|
||||
@@ -407,6 +407,8 @@ def tokenize(input_string: str):
|
||||
current_token += "\a"
|
||||
elif escape == "r":
|
||||
current_token += "\r"
|
||||
elif escape == "\\":
|
||||
current_token += "\\"
|
||||
|
||||
if escape in valid_escapes:
|
||||
break
|
||||
|
Reference in New Issue
Block a user