more token stuff
This commit is contained in:
38
README.md
Normal file
38
README.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# Solstice rewrite in C
|
||||
|
||||
Not self-hosting yet. Maybe once this compiler is done.
|
||||
|
||||
## Building
|
||||
|
||||
bash build.c
|
||||
|
||||
## Todo List
|
||||
|
||||
- [ ] Lexer
|
||||
- [x] Lex values
|
||||
- [x] int
|
||||
- [x] double
|
||||
- [x] string
|
||||
- [x] char
|
||||
- [x] bool
|
||||
- [x] Lex keywords
|
||||
- [ ] Ignore comments (//, /**/, #)
|
||||
- [ ] Lex delimiters
|
||||
- [x] ()
|
||||
- [x] {}
|
||||
- [x] .
|
||||
- [x] :
|
||||
- [x] ,
|
||||
- [ ] ==, =
|
||||
- [ ] >, >=
|
||||
- [ ] <, <=
|
||||
- [ ] !, !=
|
||||
- [ ] +, +=, ++
|
||||
- [ ] -, -=, --
|
||||
- [ ] *, *=
|
||||
- [ ] /, /=
|
||||
- [ ] Lex types
|
||||
- [x] Basic types (int, double, string, bool, char)
|
||||
- [ ] Advanced types (fun(...), template(...), object(...))
|
||||
- [x] Lex identifiers
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
#include <stdarg.h>
|
||||
|
||||
// All token kinds the Solstice lexer can produce.
// Layout: identifiers/literals/types, structural delimiters, arithmetic
// operators, compound-assignment and increment/decrement operators
// (ADDTO/SUBTO/MULTO/DIVTO/INCREMENT/DECREMENT), comparison operators,
// then language keywords.
typedef enum SolsTokenType {
    STT_IDENTIFIER, STT_LITERAL, STT_TYPE, STT_DOT, STT_OPEN_CURLY, STT_CLOSE_CURLY, STT_OPEN_PAREN, STT_CLOSE_PAREN, STT_OP_ADD, STT_OP_SUB, STT_OP_MUL, STT_OP_DIV, STT_OP_ADDTO, STT_OP_SUBTO, STT_OP_MULTO, STT_OP_DIVTO, STT_OP_INCREMENT, STT_OP_DECREMENT, STT_OP_SET, STT_OP_GREATER, STT_OP_LESSER, STT_OP_EQUAL, STT_OP_INEQUAL, STT_OP_EQGREATER, STT_OP_EQLESSER, STT_KW_DEF, STT_KW_STRUCT, STT_KW_PUTS, STT_KW_IF, STT_KW_WHILE, STT_KW_NEW, STT_KW_GROUND
} SolsTokenType;
|
||||
|
||||
// Single-identifier alias for char* — presumably needed so the type can be
// passed as one token to the ResultType/Success/Error macros (TODO confirm
// against the macro definitions, which are outside this view).
typedef char* charptr;
|
||||
|
||||
@@ -286,6 +286,24 @@ ResultType(SolsToken, charptr) identifyToken(const char* token) {
|
||||
// Assignment: '=' or its keyword spelling "is".
if (strcmp(token, "=") == 0 || strcmp(token, "is") == 0) {
    return Success(SolsToken, charptr, {STT_OP_SET});
}
// Compound-assignment operators.
if (strcmp(token, "+=") == 0) {
    return Success(SolsToken, charptr, {STT_OP_ADDTO});
}
if (strcmp(token, "-=") == 0) {
    return Success(SolsToken, charptr, {STT_OP_SUBTO});
}
if (strcmp(token, "*=") == 0) {
    return Success(SolsToken, charptr, {STT_OP_MULTO});
}
if (strcmp(token, "/=") == 0) {
    return Success(SolsToken, charptr, {STT_OP_DIVTO});
}
// Increment / decrement.
if (strcmp(token, "++") == 0) {
    return Success(SolsToken, charptr, {STT_OP_INCREMENT});
}
if (strcmp(token, "--") == 0) {
    return Success(SolsToken, charptr, {STT_OP_DECREMENT});
}
|
||||
|
||||
// No appropriate token found, it's an identifier (I hope)
|
||||
SolsToken id = {
|
||||
@@ -406,6 +424,48 @@ ResultType(voidptr, charptr) lex(SolsLexer* lexer) {
|
||||
break;
|
||||
}
|
||||
|
||||
// These characters may be repeated, or followed by an equals sign.
|
||||
case '+':
|
||||
case '-': {
|
||||
if (strcmp(buf.str, "") != 0) {
|
||||
ResultType(SolsToken, charptr) result = identifyToken(buf.str);
|
||||
if (result.error) {
|
||||
return Error(voidptr, charptr, createParsingError(lineNum, currentLine.str, result.as.error));
|
||||
}
|
||||
addTokenToSolsTokens(&lexer->output, result.as.success);
|
||||
DESTROY_ESTR(buf);
|
||||
buf = CREATE_ESTR("");
|
||||
}
|
||||
ResultType(char, Nothing) next = lexerPeek(lexer, 1);
|
||||
if (next.error || next.as.success != chr.as.success || next.as.success != '=') {
|
||||
char tmp[] = {chr.as.success, '\0'};
|
||||
ResultType(SolsToken, charptr) result = identifyToken(tmp);
|
||||
if (result.error) {
|
||||
return Error(voidptr, charptr, createParsingError(lineNum, currentLine.str, result.as.error));
|
||||
}
|
||||
addTokenToSolsTokens(&lexer->output, result.as.success);
|
||||
}
|
||||
if (next.as.success == '=') {
|
||||
char tmp[] = {chr.as.success, '=', '\0'};
|
||||
ResultType(SolsToken, charptr) result = identifyToken(tmp);
|
||||
if (result.error) {
|
||||
return Error(voidptr, charptr, createParsingError(lineNum, currentLine.str, result.as.error));
|
||||
}
|
||||
addTokenToSolsTokens(&lexer->output, result.as.success);
|
||||
lexerConsume(lexer);
|
||||
}
|
||||
if (next.as.success == chr.as.success) {
|
||||
char tmp[] = {chr.as.success, chr.as.success, '\0'};
|
||||
ResultType(SolsToken, charptr) result = identifyToken(tmp);
|
||||
if (result.error) {
|
||||
return Error(voidptr, charptr, createParsingError(lineNum, currentLine.str, result.as.error));
|
||||
}
|
||||
addTokenToSolsTokens(&lexer->output, result.as.success);
|
||||
lexerConsume(lexer);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// '.' requires checking whether it's a number or an identifier after
|
||||
case '.': {
|
||||
ResultType(char, Nothing) peek = lexerPeek(lexer, 1);
|
||||
|
||||
Reference in New Issue
Block a user