Refactor lexer for inline ground
This commit is contained in:
@@ -15,6 +15,7 @@ struct _SolsTokenTypeMap SolsTokenTypeMap[] = {
|
||||
{"return", STT_KW_RETURN},
|
||||
{"use", STT_KW_USE},
|
||||
{"struct", STT_KW_STRUCT},
|
||||
{"ground", STT_KW_GROUND},
|
||||
{"{", STT_OPEN_CURLY},
|
||||
{"}", STT_CLOSE_CURLY},
|
||||
{"(", STT_OPEN_PAREN},
|
||||
@@ -74,6 +75,107 @@ ResultType(SolsTokenType, Nothing) getTokenType(const char* input) {
|
||||
return Error(SolsTokenType, Nothing, {});
|
||||
}
|
||||
|
||||
static ResultType(Nothing, charptr) handleGround(SolsLexer* lexer, SolsToken* token, size_t* lineNum, Estr* currentLine, char currentChr, bool* skipDelimiter) {
|
||||
bool foundBrace = false;
|
||||
if (currentChr == '{') {
|
||||
foundBrace = true;
|
||||
*skipDelimiter = true;
|
||||
} else {
|
||||
while (true) {
|
||||
ResultType(char, Nothing) peek = lexerPeek(lexer, 1);
|
||||
if (peek.error) break;
|
||||
if (isspace(peek.as.success)) {
|
||||
char c = lexerConsume(lexer).as.success;
|
||||
if (c == '\n') {
|
||||
(*lineNum)++;
|
||||
DESTROY_ESTR((*currentLine));
|
||||
*currentLine = CREATE_ESTR("");
|
||||
size_t lineStart = lexer->current;
|
||||
for (size_t i = lineStart; i < lexer->inputsize; i++) {
|
||||
if (lexer->input[i] == '\n') break;
|
||||
char buf_tmp[] = {lexer->input[i], '\0'};
|
||||
APPEND_ESTR((*currentLine), buf_tmp);
|
||||
}
|
||||
}
|
||||
} else if (peek.as.success == '{') {
|
||||
lexerConsume(lexer);
|
||||
foundBrace = true;
|
||||
break;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!foundBrace) {
|
||||
return Error(Nothing, charptr, "Expected '{' after 'ground'");
|
||||
}
|
||||
|
||||
Estr groundBuf = CREATE_ESTR("");
|
||||
int depth = 1;
|
||||
while (depth > 0) {
|
||||
ResultType(char, Nothing) next = lexerConsume(lexer);
|
||||
if (next.error) {
|
||||
DESTROY_ESTR(groundBuf);
|
||||
return Error(Nothing, charptr, "Unterminated 'ground' block");
|
||||
}
|
||||
if (next.as.success == '{') depth++;
|
||||
if (next.as.success == '}') {
|
||||
depth--;
|
||||
if (depth == 0) break;
|
||||
}
|
||||
|
||||
char tmp[] = {next.as.success, '\0'};
|
||||
APPEND_ESTR(groundBuf, tmp);
|
||||
|
||||
if (next.as.success == '\n') {
|
||||
(*lineNum)++;
|
||||
DESTROY_ESTR((*currentLine));
|
||||
*currentLine = CREATE_ESTR("");
|
||||
size_t lineStart = lexer->current;
|
||||
for (size_t i = lineStart; i < lexer->inputsize; i++) {
|
||||
if (lexer->input[i] == '\n') break;
|
||||
char buf_tmp[] = {lexer->input[i], '\0'};
|
||||
APPEND_ESTR((*currentLine), buf_tmp);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
token->as.inlineGround = malloc(strlen(groundBuf.str) + 1);
|
||||
if (token->as.inlineGround == NULL) {
|
||||
DESTROY_ESTR(groundBuf);
|
||||
return Error(Nothing, charptr, "Memory allocation failed (in handleGround() function)");
|
||||
}
|
||||
strcpy(token->as.inlineGround, groundBuf.str);
|
||||
DESTROY_ESTR(groundBuf);
|
||||
return Success(Nothing, charptr, {});
|
||||
}
|
||||
|
||||
/*
 * Identify the token accumulated in *buf, attach line information (number
 * and a heap-allocated copy of the current line's text), handle the special
 * `ground` keyword via handleGround(), and append the finished token to the
 * lexer output. On success *buf is reset to an empty Estr. A currentChr of
 * '\0' means end-of-input (used by the final flush in lex()).
 *
 * On error the returned charptr is either a static string or the message
 * propagated from identifyToken()/handleGround(); *buf is left untouched so
 * the caller can clean it up and build a full lexing error.
 */
static ResultType(Nothing, charptr) identifyAndAdd(SolsLexer* lexer, Estr* buf, size_t* lineNum, Estr* currentLine, char currentChr, bool* skipDelimiter) {
    if (strcmp(buf->str, "") == 0) return Success(Nothing, charptr, {});

    ResultType(SolsToken, charptr) result = identifyToken(buf->str);
    if (result.error) {
        return Error(Nothing, charptr, result.as.error);
    }
    result.as.success.line.num = *lineNum;
    result.as.success.line.content = malloc(strlen(currentLine->str) + 1);
    if (result.as.success.line.content == NULL) {
        return Error(Nothing, charptr, "Couldn't allocate memory to store line information in token (in identifyAndAdd() function)");
    }
    strcpy(result.as.success.line.content, currentLine->str);

    if (result.as.success.type == STT_KW_GROUND) {
        ResultType(Nothing, charptr) res = handleGround(lexer, &result.as.success, lineNum, currentLine, currentChr, skipDelimiter);
        if (res.error) {
            /* Fix: the token is abandoned on this path, so its line-content
             * copy must be released here or it leaks. */
            free(result.as.success.line.content);
            return res;
        }
    }

    addTokenToSolsTokens(&lexer->output, result.as.success);
    DESTROY_ESTR((*buf));
    *buf = CREATE_ESTR("");
    return Success(Nothing, charptr, {});
}
|
||||
|
||||
|
||||
ResultType(SolsLexer, charptr) createLexer(char* input) {
|
||||
|
||||
@@ -386,6 +488,7 @@ ResultType(Nothing, charptr) lex(SolsLexer* lexer) {
|
||||
APPEND_ESTR(currentLine, tmp);
|
||||
}
|
||||
|
||||
bool skipDelimiter = false;
|
||||
for (;;) {
|
||||
ResultType(char, Nothing) chr = lexerConsume(lexer);
|
||||
|
||||
@@ -393,6 +496,8 @@ ResultType(Nothing, charptr) lex(SolsLexer* lexer) {
|
||||
break;
|
||||
}
|
||||
|
||||
skipDelimiter = false;
|
||||
|
||||
if (chr.as.success == '/' && !inString) {
|
||||
ResultType(char, Nothing) peek = lexerPeek(lexer, 1);
|
||||
if (!peek.error && peek.as.success == '/') {
|
||||
@@ -481,27 +586,16 @@ ResultType(Nothing, charptr) lex(SolsLexer* lexer) {
|
||||
case ';':
|
||||
case '\n':
|
||||
{
|
||||
if (strcmp(buf.str, "") != 0) {
|
||||
ResultType(SolsToken, charptr) result = identifyToken(buf.str);
|
||||
if (result.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, result.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
result.as.success.line.num = lineNum;
|
||||
result.as.success.line.content = malloc(strlen(currentLine.str) + 1);
|
||||
if (result.as.success.line.content == NULL) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, "Couldn't allocate memory to store line information in token (in lex() function)");
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
strcpy(result.as.success.line.content, currentLine.str);
|
||||
addTokenToSolsTokens(&lexer->output, result.as.success);
|
||||
ResultType(Nothing, charptr) res = identifyAndAdd(lexer, &buf, &lineNum, ¤tLine, chr.as.success, &skipDelimiter);
|
||||
if (res.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, res.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
buf = CREATE_ESTR("");
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
|
||||
if (skipDelimiter) break;
|
||||
|
||||
char tmp[] = {chr.as.success, '\0'};
|
||||
ResultType(SolsToken, charptr) result = identifyToken(tmp);
|
||||
if (result.error) {
|
||||
@@ -527,27 +621,16 @@ ResultType(Nothing, charptr) lex(SolsLexer* lexer) {
|
||||
// These characters may be repeated, or followed by an equals sign.
|
||||
case '+':
|
||||
case '-': {
|
||||
if (strcmp(buf.str, "") != 0) {
|
||||
ResultType(SolsToken, charptr) result = identifyToken(buf.str);
|
||||
if (result.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, result.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
result.as.success.line.num = lineNum;
|
||||
result.as.success.line.content = malloc(strlen(currentLine.str) + 1);
|
||||
if (result.as.success.line.content == NULL) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, "Couldn't allocate memory to store line information in token (in lex() function)");
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
strcpy(result.as.success.line.content, currentLine.str);
|
||||
addTokenToSolsTokens(&lexer->output, result.as.success);
|
||||
ResultType(Nothing, charptr) res = identifyAndAdd(lexer, &buf, &lineNum, ¤tLine, chr.as.success, &skipDelimiter);
|
||||
if (res.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, res.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
buf = CREATE_ESTR("");
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
// skipDelimiter is unlikely here but handled just in case
|
||||
if (skipDelimiter) break;
|
||||
|
||||
ResultType(char, Nothing) next = lexerPeek(lexer, 1);
|
||||
if (next.error || (next.as.success != chr.as.success && next.as.success != '=')) {
|
||||
char tmp[] = {chr.as.success, '\0'};
|
||||
@@ -621,27 +704,15 @@ ResultType(Nothing, charptr) lex(SolsLexer* lexer) {
|
||||
case '<':
|
||||
case '*':
|
||||
case '/': {
|
||||
if (strcmp(buf.str, "") != 0) {
|
||||
ResultType(SolsToken, charptr) result = identifyToken(buf.str);
|
||||
if (result.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, result.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
result.as.success.line.num = lineNum;
|
||||
result.as.success.line.content = malloc(strlen(currentLine.str) + 1);
|
||||
if (result.as.success.line.content == NULL) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, "Couldn't allocate memory to store line information in token (in lex() function)");
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
strcpy(result.as.success.line.content, currentLine.str);
|
||||
addTokenToSolsTokens(&lexer->output, result.as.success);
|
||||
ResultType(Nothing, charptr) res = identifyAndAdd(lexer, &buf, &lineNum, ¤tLine, chr.as.success, &skipDelimiter);
|
||||
if (res.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, res.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
buf = CREATE_ESTR("");
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
if (skipDelimiter) break;
|
||||
|
||||
ResultType(char, Nothing) next = lexerPeek(lexer, 1);
|
||||
if (next.error || next.as.success != '=') {
|
||||
char tmp[] = {chr.as.success, '\0'};
|
||||
@@ -695,28 +766,16 @@ ResultType(Nothing, charptr) lex(SolsLexer* lexer) {
|
||||
if (!peek.error && isdigit(peek.as.success)) {
|
||||
APPEND_ESTR(buf, ".");
|
||||
} else {
|
||||
if (strcmp(buf.str, "") != 0) {
|
||||
ResultType(SolsToken, charptr) result = identifyToken(buf.str);
|
||||
if (result.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, result.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
result.as.success.line.num = lineNum;
|
||||
result.as.success.line.content = malloc(strlen(currentLine.str) + 1);
|
||||
if (result.as.success.line.content == NULL) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, "Couldn't allocate memory to store line information in token (in lex() function)");
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
strcpy(result.as.success.line.content, currentLine.str);
|
||||
addTokenToSolsTokens(&lexer->output, result.as.success);
|
||||
ResultType(Nothing, charptr) res = identifyAndAdd(lexer, &buf, &lineNum, ¤tLine, chr.as.success, &skipDelimiter);
|
||||
if (res.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, res.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
buf = CREATE_ESTR("");
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
if (!skipDelimiter) {
|
||||
addTokenToSolsTokens(&lexer->output, (SolsToken) {.type = STT_DOT});
|
||||
}
|
||||
addTokenToSolsTokens(&lexer->output, (SolsToken) {.type = STT_DOT});
|
||||
}
|
||||
break;
|
||||
}
|
||||
@@ -724,26 +783,12 @@ ResultType(Nothing, charptr) lex(SolsLexer* lexer) {
|
||||
// This whitespace splits the program and does not get appended as it's own token.
|
||||
case '\t':
|
||||
case ' ': {
|
||||
if (strcmp(buf.str, "") != 0) {
|
||||
ResultType(SolsToken, charptr) result = identifyToken(buf.str);
|
||||
if (result.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, result.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
result.as.success.line.num = lineNum;
|
||||
result.as.success.line.content = malloc(strlen(currentLine.str) + 1);
|
||||
if (result.as.success.line.content == NULL) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, "Couldn't allocate memory to store line information in token (in lex() function)");
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
strcpy(result.as.success.line.content, currentLine.str);
|
||||
addTokenToSolsTokens(&lexer->output, result.as.success);
|
||||
ResultType(Nothing, charptr) res = identifyAndAdd(lexer, &buf, &lineNum, ¤tLine, chr.as.success, &skipDelimiter);
|
||||
if (res.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, res.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
buf = CREATE_ESTR("");
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
break;
|
||||
}
|
||||
@@ -774,24 +819,12 @@ ResultType(Nothing, charptr) lex(SolsLexer* lexer) {
|
||||
}
|
||||
}
|
||||
|
||||
if (strcmp(buf.str, "") != 0) {
|
||||
ResultType(SolsToken, charptr) result = identifyToken(buf.str);
|
||||
if (result.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, result.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
result.as.success.line.num = lineNum;
|
||||
result.as.success.line.content = malloc(strlen(currentLine.str) + 1);
|
||||
if (result.as.success.line.content == NULL) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, "Couldn't allocate memory to store line information in token (in lex() function)");
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
strcpy(result.as.success.line.content, currentLine.str);
|
||||
addTokenToSolsTokens(&lexer->output, result.as.success);
|
||||
ResultType(Nothing, charptr) res = identifyAndAdd(lexer, &buf, &lineNum, ¤tLine, '\0', &skipDelimiter);
|
||||
if (res.error) {
|
||||
char* err = createLexingError(lineNum, currentLine.str, res.as.error);
|
||||
DESTROY_ESTR(buf);
|
||||
DESTROY_ESTR(currentLine);
|
||||
return Error(Nothing, charptr, err);
|
||||
}
|
||||
|
||||
if (inString) {
|
||||
|
||||
Reference in New Issue
Block a user