Continue parser work

This commit is contained in:
2026-02-12 08:46:12 +11:00
parent 1d1a34acce
commit 1dfd1b7a61
4 changed files with 191 additions and 4 deletions

View File

@@ -1,8 +1,10 @@
#include "lexer.h"
#include "SolsLiteral.h"
#include "SolsToken.h"
#include "../include/error.h"
#include "../include/estr.h"
ResultType(SolsLexer, charptr) createLexer(char* input) {
char* inputcopy = malloc(strlen(input) + 1);
if (inputcopy == NULL) {
@@ -17,18 +19,183 @@ ResultType(SolsLexer, charptr) createLexer(char* input) {
}
SolsLexer lexer = {
.input = inputcopy,
.inputsize = strlen(inputcopy),
.output = tokens.as.success,
.current = 0,
};
return Success(SolsLexer, charptr, lexer);
}
/*
 * Peek at the character `ahead` positions past the cursor without consuming it.
 *
 * Returns an error result when the lexer is uninitialised or when the
 * requested position is at or past the end of the input.
 */
ResultType(char, Nothing) lexerPeek(SolsLexer* lexer, size_t ahead) {
    if (lexer->input == NULL) {
        return Error(char, Nothing, {});
    }
    /* Written as a subtraction so `current + ahead` cannot wrap size_t, and
     * with >= semantics so peeking at the terminating NUL (index ==
     * inputsize) is an error — consistent with lexerConsume(), which refuses
     * to consume once current == inputsize. The old `current + ahead >
     * inputsize` check handed back '\0' as a "successful" peek at EOF. */
    if (lexer->current >= lexer->inputsize ||
        ahead >= lexer->inputsize - lexer->current) {
        return Error(char, Nothing, {});
    }
    return Success(char, Nothing, lexer->input[lexer->current + ahead]);
}
/*
 * Return the character under the cursor and advance the cursor past it.
 *
 * Fails when the lexer has no input buffer or when the cursor has already
 * reached the end of the input.
 */
ResultType(char, Nothing) lexerConsume(SolsLexer* lexer) {
    if (lexer->input == NULL) {
        return Error(char, Nothing, {});
    }
    /* current >= inputsize is the same test as the old current + 1 > inputsize */
    if (lexer->current >= lexer->inputsize) {
        return Error(char, Nothing, {});
    }
    char c = lexer->input[lexer->current];
    lexer->current++;
    return Success(char, Nothing, c);
}
/*
 * Classify a whitespace-delimited token string into a SolsToken.
 *
 * Recognises (in order): string literals (leading '"'), a fixed keyword
 * table, symbolic/word-form brackets and operators; anything else is an
 * identifier. Returns an error result with a heap-built message when a
 * string literal is unterminated or createSolsLiteral() fails.
 */
ResultType(SolsToken, charptr) identifyToken(const char* token) {
    if (token[0] == '"') {
        if (token[strlen(token) - 1] == '"') {
            ResultType(SolsLiteral, charptr) literal = createSolsLiteral(SLT_STRING, token);
            if (literal.error) {
                Estr str = CREATE_ESTR(literal.as.error);
                APPEND_ESTR(str, " (in identifyToken() function)");
                return Error(SolsToken, charptr, str.str);
            }
            /* NOTE(review): the parsed SolsLiteral is discarded here — the
             * returned token carries only the type tag. Presumably the
             * literal payload should travel in the token; confirm against
             * the SolsToken definition. */
            return Success(SolsToken, charptr, {STT_LITERAL});
        }
        return Error(SolsToken, charptr, "Unterminated string ()");
    }
    // FIXME do this better sometime (table-driven lookup instead of a strcmp chain)
    if (strcmp(token, "puts") == 0) {
        return Success(SolsToken, charptr, {STT_KW_PUTS});
    }
    if (strcmp(token, "if") == 0) {
        return Success(SolsToken, charptr, {STT_KW_IF});
    }
    if (strcmp(token, "while") == 0) {
        return Success(SolsToken, charptr, {STT_KW_WHILE});
    }
    if (strcmp(token, "def") == 0) {
        return Success(SolsToken, charptr, {STT_KW_DEF});
    }
    if (strcmp(token, "struct") == 0) {
        /* NOTE(review): "struct" maps to STT_KW_DEF, same as "def" — looks
         * like a copy-paste placeholder; confirm whether a dedicated
         * STT_KW_STRUCT exists in the token enum. */
        return Success(SolsToken, charptr, {STT_KW_DEF});
    }
    // Shh, this is our little secret now
    /* BUG FIX: the old check was `strcmp(token, "then")` without `== 0`,
     * which is truthy for every token that is NOT "then" — so all tokens
     * below this point were misclassified as STT_OPEN_CURLY. */
    if (strcmp(token, "{") == 0 || strcmp(token, "then") == 0) {
        return Success(SolsToken, charptr, {STT_OPEN_CURLY});
    }
    if (strcmp(token, "}") == 0 || strcmp(token, "end") == 0) {
        return Success(SolsToken, charptr, {STT_CLOSE_CURLY});
    }
    if (strcmp(token, "(") == 0) {
        return Success(SolsToken, charptr, {STT_OPEN_PAREN});
    }
    if (strcmp(token, ")") == 0) {
        return Success(SolsToken, charptr, {STT_CLOSE_PAREN});
    }
    if (strcmp(token, "+") == 0 || strcmp(token, "plus") == 0) {
        return Success(SolsToken, charptr, {STT_OP_ADD});
    }
    if (strcmp(token, "-") == 0 || strcmp(token, "minus") == 0) {
        return Success(SolsToken, charptr, {STT_OP_SUB});
    }
    if (strcmp(token, "*") == 0 || strcmp(token, "times") == 0) {
        return Success(SolsToken, charptr, {STT_OP_MUL});
    }
    if (strcmp(token, "/") == 0 || strcmp(token, "dividedby") == 0) {
        return Success(SolsToken, charptr, {STT_OP_DIV});
    }
    if (strcmp(token, "=") == 0 || strcmp(token, "is") == 0) {
        return Success(SolsToken, charptr, {STT_OP_SET});
    }
    return Success(SolsToken, charptr, {STT_IDENTIFIER});
}
/*
 * Run the lexer over its whole input, building tokens into lexer->output.
 *
 * Tracks the current line number and line text so parse errors can quote
 * the offending line. Returns NULL on success, or a heap-built error
 * message when a token fails to classify.
 *
 * NOTE(review): still work-in-progress — identified tokens are not yet
 * appended to lexer->output, and non-string characters are not yet
 * accumulated into the token buffer (see TODOs below).
 */
ResultType(voidptr, charptr) lex(SolsLexer* lexer) {
    if (lexer->input == NULL) {
        return Error(voidptr, charptr, "Lexer is not initialised");
    }
    ResultType(SolsTokens, charptr) tokens = createSolsTokens();
    if (tokens.error) {
        Estr e = CREATE_ESTR(tokens.as.error);
        APPEND_ESTR(e, " (in createSolsTokens() function)");
        return Error(voidptr, charptr, e.str);
    }
    /* NOTE(review): this replaces whatever output buffer createLexer()
     * installed — the old one is presumably leaked; confirm ownership. */
    lexer->output = tokens.as.success;
    lexer->current = 0;

    Estr tokbuf = CREATE_ESTR("");   /* accumulates the token being built */
    bool inString = false;
    size_t lineNum = 1;
    size_t lineStart = 0;            /* scan cursor for rebuilding currentLine */
    Estr currentLine = CREATE_ESTR("");

    /* Capture the text of the first line for error reporting; leaves
     * lineStart pointing at the first '\n' (or at inputsize). */
    for (; lineStart < lexer->inputsize; lineStart++) {
        if (lexer->input[lineStart] == '\n') {
            break;
        }
        char ch[2] = {lexer->input[lineStart], '\0'};
        APPEND_ESTR(currentLine, ch);
    }

    for (;;) {
        ResultType(char, Nothing) chr = lexerConsume(lexer);
        if (chr.error) {
            break;  /* end of input */
        }
        char c = chr.as.success;

        if (c == '\n') {
            /* BUG FIX: the old refill loop restarted AT the '\n' found by the
             * previous scan, so it broke out immediately and currentLine
             * stayed stuck on line 1 forever. Skip past the newline and
             * rebuild the line text from scratch. */
            lineStart++;
            currentLine = CREATE_ESTR("");  /* TODO: free the old buffer once Estr grows a destroy API */
            for (; lineStart < lexer->inputsize; lineStart++) {
                if (lexer->input[lineStart] == '\n') {
                    break;
                }
                char ch[2] = {lexer->input[lineStart], '\0'};
                APPEND_ESTR(currentLine, ch);
            }
            lineNum++;
        }

        if (inString) {
            char s[2] = { c, '\0' };
            APPEND_ESTR(tokbuf, s);
            if (c == '"') {
                inString = false;
            }
            /* BUG FIX: must not fall through to the switch while inside a
             * string — the old code let the closing '"' re-set inString and
             * append a second quote (strings never terminated), and let an
             * embedded space trigger tokenization mid-string. */
            continue;
        }

        switch (c) {
        case '"': {
            inString = true;
            APPEND_ESTR(tokbuf, "\"");
            break;
        }
        case ' ': {
            if (!tokbuf.destroyed) {
                ResultType(SolsToken, charptr) result = identifyToken(tokbuf.str);
                if (result.error) {
                    Estr error = CREATE_ESTR("Parsing Error:\n");
                    APPEND_ESTR(error, "On line ");
                    /* renamed from `buf`: no longer shadows the token buffer */
                    char numbuf[256];
                    snprintf(numbuf, sizeof(numbuf), "%zu", lineNum);
                    APPEND_ESTR(error, numbuf);
                    APPEND_ESTR(error, "\n");
                    APPEND_ESTR(error, currentLine.str);
                    APPEND_ESTR(error, "\n");
                    APPEND_ESTR(error, "Why: ");
                    APPEND_ESTR(error, result.as.error);
                    APPEND_ESTR(error, "\n");
                    return Error(voidptr, charptr, error.str);
                }
                /* TODO(WIP): append result.as.success to lexer->output and
                 * reset tokbuf — tokens are identified but never stored. */
            }
            break;
        }
        default:
            /* TODO(WIP): accumulate non-space characters into tokbuf so
             * identifiers/keywords actually reach identifyToken(); currently
             * only string-literal characters are ever buffered. */
            break;
        }
    }
    return Success(voidptr, charptr, NULL);
}