Commit: Good things happening
PsychedelicPalimpsest committed Apr 19, 2024
1 parent 6d3c111, commit e769204
Showing 4 changed files with 98 additions and 61 deletions.
2 changes: 1 addition & 1 deletion source/parsing/tokenizer/tokens.d
@@ -70,7 +70,7 @@ bool isSingleLineComment(dchar first, dchar secound)
{
static foreach (const dchar[] style; validSingleLineCommentStyles)
{
if (style[0] == first || style[0] == secound)
if (style[0] == first && style[0] == secound)
return true;
}
return false;
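The change above tightens isSingleLineComment: with ||, a match on either character alone was enough to flag a comment. Below is a minimal standalone sketch of the difference; the "//" style is an assumed example, not necessarily what validSingleLineCommentStyles contains.

// Standalone illustration of the || -> && change; not the repository's code.
bool looksLikeLineComment(dchar first, dchar secound)
{
    static immutable dstring[] styles = ["//"]; // assumed example style
    foreach (style; styles)
    {
        // Old check: style[0] == first || style[0] == secound, so a lone '/'
        // in either position matched and "a / b" looked like a comment start.
        // New check: both characters must equal the style's opening character.
        if (style[0] == first && style[0] == secound)
            return true;
    }
    return false;
}

unittest
{
    assert(looksLikeLineComment('/', '/'));  // "//" is still detected
    assert(!looksLikeLineComment('/', '8')); // a single '/' no longer is
}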
86 changes: 86 additions & 0 deletions source/parsing/treegen/expressionParser.d
@@ -0,0 +1,86 @@
module parsing.treegen.expressionParser;

import parsing.treegen.astTypes;
import parsing.tokenizer.tokens;
import parsing.treegen.tokenRelationships;
import errors;
import std.stdio;

// First step of the AST gen process. Puts the tokens into
// AstNode objects and extracts parentheses into deeper
// levels of nesting so that later they can be recursively parsed
AstNode[] parenthesisExtract(Token[] tokens)
{
AstNode[] ret;
AstNode[] parenthesisStack;
foreach (Token token; tokens)
{
if (token.tokenVariety == TokenType.OpenBraces)
{
AstNode newExpression;
newExpression.action = AstAction.Expression;
newExpression.expressionNodeData = ExpressionNodeData(
token.value[0],
braceOpenToBraceClose[token.value[0]],
[]
);
parenthesisStack ~= newExpression;
continue;
}
if (token.tokenVariety == TokenType.CloseBraces)
{

if (parenthesisStack.length == 0)
throw new SyntaxError("Parenthesis closed but never opened");

AstNode node = parenthesisStack[$ - 1];

if (node.expressionNodeData.closer != token.value[0])
throw new SyntaxError("Parenthesis not closed with correct token");

parenthesisStack.length--;

if (parenthesisStack.length == 0)
ret ~= node;
else
parenthesisStack[$ - 1].expressionNodeData.components ~= node;
continue;
}

AstNode tokenToBeParsedLater;
tokenToBeParsedLater.action = AstAction.TokenHolder;
tokenToBeParsedLater.tokenBeingHeld = token;
if (parenthesisStack.length == 0)
ret ~= tokenToBeParsedLater;
else
parenthesisStack[$ - 1].expressionNodeData.components ~= tokenToBeParsedLater;
}
return ret;
}
void parseExpression(AstNode[] nodes){
for(size_t index = 0; index < nodes.length; index++){
AstNode node = nodes[index];
if (index != 0 && node.action == AstAction.Expression && node.expressionNodeData.opener == '('
&& nodes[index-1].action == AstAction.TokenHolder
&& nodes[index-1].tokenBeingHeld.tokenVariety == TokenType.Letter){
AstNode functionCall;
functionCall.action = AstAction.Call;
}
}
}

void parseExpression(Token[] tokens)
{
parseExpression(parenthesisExtract(tokens));
// tokens[0].tokenVariety


}


unittest
{

import parsing.tokenizer.make_tokens;
parseExpression("sqrt(8*9+5*2 / (6+10*2))".tokenizeText);
}
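For orientation, the sketch below shows roughly the nesting parenthesisExtract is meant to build before parseExpression runs. It is an illustration, not part of the commit, and it assumes tokenizeText and the AstNode/AstAction definitions behave as the module above expects.

unittest
{
    import parsing.tokenizer.make_tokens;
    import std.stdio;

    // Hypothetical inspection of the first AST-gen step.
    AstNode[] nodes = parenthesisExtract("a * (b + c)".tokenizeText);

    // Expected shape, roughly (whitespace tokens omitted):
    //   TokenHolder(a)  TokenHolder(*)  Expression '(' .. ')'
    //       components: TokenHolder(b)  TokenHolder(+)  TokenHolder(c)
    nodes.writeln;
}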
20 changes: 11 additions & 9 deletions source/parsing/treegen/scopeParser.d
@@ -17,9 +17,9 @@ struct LineVarietyAndLength
size_t length;
}

LineVarietyAndLength getLineVarietyAndLength(Token[] tokens)
LineVarietyAndLength getLineVarietyAndLength(Token[] tokens, size_t index)
{
size_t length;
size_t temp_index = index;

static foreach (i, func; [
IfStatementWithScope,
@@ -28,24 +28,24 @@ LineVarietyAndLength getLineVarietyAndLength(Token[] tokens)
DeclarationAndAssignment
])
{
if (func.matchesToken(tokens, length))
if (func.matchesToken(tokens, temp_index))
return LineVarietyAndLength(
[
LineVariety.IfStatementWithScope,
LineVariety.IfStatementWithoutScope,
LineVariety.DeclarationLine,
LineVariety.DeclarationAndAssignment
][i], length
][i], temp_index - index
);
length = 0;
temp_index = index;
}

return LineVarietyAndLength(LineVariety.SimpleExpression, -1);
}

void parseLine(Token[] tokens)
import std.stdio;
void parseLine(Token[] tokens, ref size_t index)
{

LineVarietyAndLength lineVariety = tokens.getLineVarietyAndLength(index);
}

unittest
@@ -55,7 +55,9 @@ unittest

// assert(LineVariety.IfStatementWithoutScope == getLineVariety("if (hello) world;".tokenizeText));
// assert(LineVariety.IfStatementWithScope == getLineVariety("if (hello) {wo\n rl\nd};".tokenizeText));
getLineVarietyAndLength("int x = 4;".tokenizeText).writeln;
size_t i = 0;
// getLineVarietyAndLength("int x = 4;".tokenizeText, 0).writeln;
// parseLine("int x = 4;".tokenizeText, i);
// DeclarationLine.matchesToken()

}
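The refactor above threads an explicit token index through the matcher instead of always starting at zero, so a single cursor can be reused line after line. A hypothetical call pattern follows; the names are taken from the diff, and the advancing step is an assumption about how parseLine will eventually consume the result.

unittest
{
    import parsing.tokenizer.make_tokens;

    Token[] tokens = "int x = 4; float y = 3;".tokenizeText;
    size_t index = 0;

    // Classify the line starting at `index`; `length` is how far the matcher
    // advanced (temp_index - index in the code above).
    LineVarietyAndLength first = tokens.getLineVarietyAndLength(index);

    // A caller could then step to the next statement with:
    index += first.length;
}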
51 changes: 0 additions & 51 deletions source/parsing/treegen/treeGenUtils.d
@@ -35,57 +35,6 @@ NameUnit genNameUnit(Token[] tokens, ref size_t index)

}

// First step of the AST gen process. Puts the tokens into
// AstNode objects and extracts parenthesis into deeper
// levels of nesting so that later they can be recursivly parsed
AstNode[] parenthesisExtract(Token[] tokens)
{
AstNode[] ret;
AstNode[] parenthesisStack;
foreach (Token token; tokens)
{
if (token.tokenVariety == TokenType.OpenBraces)
{
AstNode newExpression;
newExpression.action = AstAction.Expression;
newExpression.expressionNodeData = ExpressionNodeData(
token.value[0],
braceOpenToBraceClose[token.value[0]],
[]
);
parenthesisStack ~= newExpression;
continue;
}
if (token.tokenVariety == TokenType.CloseBraces)
{
if (parenthesisStack.length == 0)
throw new SyntaxError("Parenthesis closed but never opened");

AstNode node = parenthesisStack[$ - 1];

if (node.expressionNodeData.closer != token.value[0])
throw new SyntaxError("Parenthesis not closed with correct token");

parenthesisStack.length--;

if (parenthesisStack.length == 0)
ret ~= node;
else
parenthesisStack[$ - 1].expressionNodeData.components ~= node;
continue;
}

AstNode tokenToBeParsedLater;
tokenToBeParsedLater.action = AstAction.TokenHolder;
tokenToBeParsedLater.tokenBeingHeld = token;
if (parenthesisStack.length == 0)
ret ~= tokenToBeParsedLater;
else
parenthesisStack[$ - 1].expressionNodeData.components ~= tokenToBeParsedLater;
}
return ret;
}

unittest
{
import parsing.tokenizer.make_tokens;
