Fix escape sequences
SpookyDervish
@@ -704,7 +704,12 @@ class X86_64Generator(Generator):
 				value_type = type(value)
 				if value_type == str:
 					if not const["no_string"]:
-						final = f'db "' + value.replace("\\n", "\", 10, \"")  +  "\", 0"
+						value = value.replace("\"", "\", 34, \"")
+						value = value.replace("\r", "\", 13, \"")
+						value = value.replace("\n", "\", 10, \"")
+						value = value.replace("\a", "\", 7, \"")
 
+						final = f'db "' + value +  "\", 0"
+						final = final.replace(", \"\", ", ", ")
 						f.write(final)
 					else:
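In the generator, each special character in a string constant is now rewritten to its ASCII code inside the NASM db directive (34 for ", 13 for \r, 10 for \n, 7 for \a), and the final replace strips the empty "" fragments the substitution leaves behind (e.g. db "hi", 10, "", 0 becomes db "hi", 10, 0). A standalone sketch of the same transformation; to_db_directive is an illustrative name, the repo inlines this logic in X86_64Generator:

# Sketch of the escape-to-db conversion above (helper name is made up).
def to_db_directive(value: str) -> str:
    # Replace each special character with its ASCII code, splitting the
    # quoted string around it: 'hi\n' -> 'hi", 10, "'.
    for ch, code in (('"', 34), ('\r', 13), ('\n', 10), ('\a', 7)):
        value = value.replace(ch, f'", {code}, "')
    final = 'db "' + value + '", 0'       # NUL-terminated NASM string
    return final.replace(', "", ', ', ')  # drop empty "" fragments

print(to_db_directive("hi\n"))  # db "hi", 10, 0

The commit also adds a small example program, shown next, that exercises the new escapes.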
@@ -0,0 +1,6 @@
+@loop
+stdout "/\r"
+stdout "-\r"
+stdout "\\r"
+stdout "|\r"
+jump %loop
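This new example (its file path is not shown in this view) drives a terminal spinner: each frame ends in \r, so the cursor returns to column 0 and the next stdout overwrites the previous frame; the "\\r" frame exercises the new \\ escape for the backslash character. An equivalent in Python, with a delay added for visibility since the original loops unthrottled:

# Python equivalent of the spinner: "\r" moves the cursor back to the
# start of the line so each frame overwrites the previous one.
import sys
import time

while True:
    for frame in "/-\\|":
        sys.stdout.write(frame + "\r")
        sys.stdout.flush()
        time.sleep(0.1)  # added for visibility; the original has no delay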
							
								
								
									
tokenizer.py
@@ -379,7 +379,8 @@ def tokenize(input_string: str):
 			current_char = input_string[pos]
 
 			while current_char != '"':
-				current_token += current_char
+				if current_char != "\\":
+					current_token += current_char
 				pos += 1
 				column += 1
 				if pos > len(input_string)-1:
@@ -393,12 +394,11 @@ def tokenize(input_string: str):
 					column += 1
 					while pos <= len(input_string)-1:
 						escape += input_string[pos]
-						print(escape)
 
-						valid_escapes = ['"', 'n', 't', 'a', 'r']
+						valid_escapes = ['"', 'n', 't', 'a', 'r', '\\']
 
 						if escape == '"':
-							current_token += 'aaaaaaa'
+							current_token += '"'
 						elif escape == "n":
 							current_token += '\n'
 						elif escape == "t":
@@ -407,6 +407,8 @@ def tokenize(input_string: str):
 							current_token += "\a"
 						elif escape == "r":
 							current_token += "\r"
+						elif escape == "\\":
+							current_token += "\\"
 
 						if escape in valid_escapes:
 							break
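The tokenizer change has three parts: the backslash itself is no longer copied into the token, the leftover print(escape) debug call and the 'aaaaaaa' placeholder for \" are removed and fixed, and \\ becomes a valid escape that yields a literal backslash. A condensed sketch of the resulting behavior, simplified to single-character escapes and without the bounds checks the real tokenizer performs; scan_string and ESCAPES are illustrative names, not the repo's:

# Sketch of the escape handling the diff implements, as a standalone
# helper (the repo inlines this inside tokenize()).
ESCAPES = {'"': '"', 'n': '\n', 't': '\t', 'a': '\a', 'r': '\r', '\\': '\\'}

def scan_string(source: str, pos: int) -> tuple[str, int]:
    """Read a double-quoted string starting after the opening quote;
    return (decoded_text, index_after_closing_quote)."""
    token = ""
    while source[pos] != '"':
        if source[pos] == "\\":            # escape introducer: not copied
            pos += 1
            token += ESCAPES[source[pos]]  # map 'n' -> '\n', '\\' -> '\\', ...
        else:
            token += source[pos]
        pos += 1
    return token, pos + 1                  # skip the closing quote

text, _ = scan_string('hi\\n" tail', 0)
print(repr(text))  # 'hi\n'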