Dfmt code
HeronErin committed Apr 22, 2024
1 parent bc1259c commit 33dd850
Showing 9 changed files with 109 additions and 78 deletions.
10 changes: 6 additions & 4 deletions source/parsing/tokenizer/tokens.d
@@ -28,8 +28,8 @@ const dchar[] validBraceVarieties = ['{', '}', '(', ')', '[', ']'];
const dchar[] validOpenBraceVarieties = ['{', '(', '['];
const dchar[] validCloseBraceVarieties = ['}', ')', ']'];
const dchar[dchar] braceOpenToBraceClose = [
'{' : '}',
'(' : ')',
'{': '}',
'(': ')',
'[': ']'
];

@@ -152,13 +152,15 @@ import tern.typecons.common : Nullable, nullable;
Nullable!Token nextToken(Token[] tokens, ref size_t index)
{
Nullable!Token found = null;
if (tokens.length <= index+1){
if (tokens.length <= index + 1)
{
import std.stdio;

"boinc".writeln;
found.ptr.writeln;
return found;
}

found = tokens[++index];
return found;
}
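For reference, a minimal sketch of driving nextToken from calling code (not part of this diff; module paths and the null-ptr check are assumptions modeled on how genNameUnit uses the same helpers):

import parsing.tokenizer.tokens;
import tern.typecons.common : Nullable;

// Hypothetical helper: collect every token nextToken yields until it runs out.
Token[] drainTokens(Token[] tokens)
{
    Token[] collected;
    size_t index = 0;
    while (true)
    {
        Nullable!Token next = tokens.nextToken(index);
        if (next.ptr == null) // a null Nullable signals no further tokens
            break;
        Token token = next; // same unwrap pattern as genNameUnit
        collected ~= token;
    }
    return collected;
}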
4 changes: 3 additions & 1 deletion source/parsing/treegen/astTypes.d
@@ -213,7 +213,8 @@ class AstNode
doubleArgumentOperationNodeData.right.tree(tabCount + 1);
break;
case AstAction.Expression:
writeln("Result of expression with " ~ expressionNodeData.components.length.to!string ~ " components:");
writeln(
"Result of expression with " ~ expressionNodeData.components.length.to!string ~ " components:");
foreach (subnode; expressionNodeData.components)
{
subnode.tree(tabCount + 1);
@@ -253,6 +254,7 @@ Nullable!AstNode nextNonWhiteNode(Array!AstNode nodes, ref size_t index)
}
return found;
}

Nullable!AstNode nextNonWhiteNode(AstNode[] nodes, ref size_t index)
{
Nullable!AstNode found;
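For reference, a sketch of walking a node array with nextNonWhiteNode and the tree printer shown above (not part of this diff; that nextNonWhiteNode skips whitespace TokenHolder nodes is an assumption based on its use in testAndJoin):

import std.container.array;
import parsing.treegen.astTypes;
import tern.typecons.common : Nullable;

// Hypothetical walk: dump every non-whitespace node in the array.
void dumpNonWhiteNodes(Array!AstNode nodes)
{
    size_t index = 0;
    while (true)
    {
        Nullable!AstNode next = nodes.nextNonWhiteNode(index);
        if (next.ptr == null) // no meaningful nodes left
            break;
        AstNode node = next;
        node.tree(0); // same printer used for AstAction.Expression above
    }
}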
61 changes: 40 additions & 21 deletions source/parsing/treegen/expressionParser.d
@@ -33,7 +33,7 @@ public AstNode[] phaseOne(Token[] tokens)
}
if (token.tokenVariety == TokenType.CloseBraces)
{

if (parenthesisStack.length == 0)
throw new SyntaxError("Parenthesis closed but never opened");

@@ -51,23 +51,29 @@ public AstNode[] phaseOne(Token[] tokens)
continue;
}
AstNode tokenToBeParsedLater = new AstNode();
if (token.tokenVariety == TokenType.Letter){
if (token.tokenVariety == TokenType.Letter)
{
tokenToBeParsedLater.action = AstAction.NamedUnit;
size_t old_index = index;
tokenToBeParsedLater.namedUnit = tokens.genNameUnit(index);
if (old_index != index) index--;
}else if(token.tokenVariety == TokenType.Number){
if (old_index != index)
index--;
}
else if (token.tokenVariety == TokenType.Number)
{
tokenToBeParsedLater.action = AstAction.LiteralUnit;
tokenToBeParsedLater.literalUnitCompenents = [token];
}
else if(token.tokenVariety != TokenType.Comment){
else if (token.tokenVariety != TokenType.Comment)
{
bool isWhite = token.tokenVariety == TokenType.WhiteSpace;
if (isWhite && isLastTokenWhite) continue;
if (isWhite && isLastTokenWhite)
continue;
isLastTokenWhite = isWhite;

tokenToBeParsedLater.action = AstAction.TokenHolder;
tokenToBeParsedLater.tokenBeingHeld = token;
}
}

if (parenthesisStack.length == 0)
ret ~= tokenToBeParsedLater;
@@ -77,38 +83,51 @@
return ret;
}


// Handle function calls and operators
public void phaseTwo(Array!AstNode nodes){
for (size_t index = 0; index < nodes.length; index++){
public void phaseTwo(Array!AstNode nodes)
{
for (size_t index = 0; index < nodes.length; index++)
{
AstNode node = nodes[index];
if (node.action == AstAction.NamedUnit && index+1 < nodes.length && nodes[index+1].action == AstAction.Expression){
if (node.action == AstAction.NamedUnit && index + 1 < nodes.length && nodes[index + 1].action == AstAction
.Expression)
{
AstNode functionCall = new AstNode();
AstNode args = nodes[index+1];
AstNode args = nodes[index + 1];

Array!AstNode components;
components~=args.expressionNodeData.components;
components ~= args.expressionNodeData.components;
phaseTwo(components);
scanAndMergeOperators(components);
args.expressionNodeData.components.length = components.data.length;
args.expressionNodeData.components[0..$] = components.data[0..$];


args.expressionNodeData.components[0 .. $] = components.data[0 .. $];

functionCall.action = AstAction.Call;
functionCall.callNodeData = CallNodeData(
node.namedUnit,
args
);
nodes[index] = functionCall;
nodes.linearRemove(nodes[index+1..index+2]);
nodes.linearRemove(nodes[index + 1 .. index + 2]);
}
else if (node.action == AstAction.Expression){
else if (node.action == AstAction.Expression)
{
Array!AstNode components;
components~=node.expressionNodeData.components;
components ~= node.expressionNodeData.components;
phaseTwo(components);
scanAndMergeOperators(components);
node.expressionNodeData.components.length = components.data.length;
node.expressionNodeData.components[0..$] = components.data[0..$];
node.expressionNodeData.components[0 .. $] = components.data[0 .. $];
}
}
}

Array!AstNode expressionNodeFromTokens(Token[] tokens)
{
AstNode[] phaseOneNodes = phaseOne(tokens);
Array!AstNode nodes;
nodes ~= phaseOneNodes;
phaseTwo(nodes);
scanAndMergeOperators(nodes);
return nodes;
}
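For reference, a sketch of the new expressionNodeFromTokens helper in use (not part of this diff; the input string and assertions mirror the unit test in source/tests/treegen.d):

import std.container.array;
import parsing.tokenizer.make_tokens;
import parsing.treegen.astTypes;
import parsing.treegen.expressionParser;

void demoExpressionPipeline()
{
    Array!AstNode nodes = expressionNodeFromTokens(
        "math.sqrt(3 * 5 + 6 * 7 / 2)*(x+3)/2+4".tokenizeText);
    // phaseOne groups parenthesised spans, phaseTwo folds calls, and
    // scanAndMergeOperators leaves a single root operation node.
    assert(nodes.length == 1);
    assert(nodes[0].action == AstAction.DoubleArgumentOperation);
    nodes[0].tree(0); // print the resulting tree
}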
2 changes: 0 additions & 2 deletions source/parsing/treegen/gentree.d
Expand Up @@ -6,8 +6,6 @@ import std.stdio;

import tern.typecons.common : Nullable, nullable;



void generateGlobalScopeForCompilationUnit(Token[] tokens)
{
size_t index = 0;
1 change: 0 additions & 1 deletion source/parsing/treegen/keywords.d
@@ -75,4 +75,3 @@ dchar[][] skipAndExtractKeywords(ref Token[] tokens, ref size_t index)
}
return keywords;
}
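For reference, a sketch of the skipAndExtractKeywords helper whose trailing blank line this hunk trims (not part of this diff; the call pattern follows the unit test at the bottom of this commit, and the module path is assumed from the source/ layout):

import parsing.tokenizer.make_tokens;
import parsing.tokenizer.tokens;
import parsing.treegen.keywords;

void demoKeywords()
{
    Token[] tokens = "abstract pure int x = 4;".tokenizeText;
    size_t index = 0;
    // Consumes the leading keywords and leaves index on the first non-keyword token.
    dchar[][] keywords = skipAndExtractKeywords(tokens, index);
    assert(keywords == [
            "abstract".makeUnicodeString,
            "pure".makeUnicodeString
        ]);
    assert(tokens[index].value == "int".makeUnicodeString);
}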

3 changes: 2 additions & 1 deletion source/parsing/treegen/scopeParser.d
@@ -42,9 +42,10 @@ LineVarietyAndLength getLineVarietyAndLength(Token[] tokens, size_t index)

return LineVarietyAndLength(LineVariety.SimpleExpression, -1);
}

import std.stdio;

void parseLine(Token[] tokens, ref size_t index)
{
LineVarietyAndLength lineVariety = tokens.getLineVarietyAndLength(index);
}

54 changes: 28 additions & 26 deletions source/parsing/treegen/tokenRelationships.d
@@ -218,7 +218,6 @@ bool matchesToken(in TokenGrepPacket[] testWith, Token[] tokens, ref size_t inde
return true;
}


enum OperatorOrder
{
LeftToRight,
@@ -412,38 +411,38 @@ private bool testAndJoin(const(OperationPrecedenceEntry) entry, ref Array!AstNod
return false;
size_t nodeIndex = startIndex;
AstNode[] operands;

for (size_t index = 0; index < entry.tokens.length; index++)
{
Nullable!AstNode nodeNullable = nodes.nextNonWhiteNode(nodeIndex);
if (nodeNullable.ptr == null)
if (nodeNullable.ptr == null)
return false;
AstNode node = nodeNullable;
switch (entry.tokens[index].tokenVariety)
{
case TokenType.Filler:
if (node.action == AstAction.TokenHolder || node.action == AstAction.Keyword || node.action == AstAction
.Scope)
return false;
operands ~= node;
break;
case TokenType.Operator:
if (node.action != AstAction.TokenHolder)
return false;
Token token = node.tokenBeingHeld;
if (token.tokenVariety != TokenType.Equals && token.tokenVariety != TokenType.Operator)
return false;
if (token.value != entry.tokens[index].value)
return false;
break;
default:
assert(0);

case TokenType.Filler:

if (node.action == AstAction.TokenHolder || node.action == AstAction.Keyword || node.action == AstAction
.Scope)
return false;
operands ~= node;
break;
case TokenType.Operator:
if (node.action != AstAction.TokenHolder)
return false;
Token token = node.tokenBeingHeld;
if (token.tokenVariety != TokenType.Equals && token.tokenVariety != TokenType.Operator)
return false;
if (token.value != entry.tokens[index].value)
return false;
break;
default:
assert(0);

}
}

AstNode oprNode = new AstNode();
oprNode.action = AstAction.DoubleArgumentOperation;
if (operands.length == 0)
@@ -525,8 +524,12 @@ void scanAndMergeOperators(Array!AstNode nodes)
{
foreach (entry; layer.layer)
{
if (entry.testAndJoin(nodes, index)){index--; continue;}

if (entry.testAndJoin(nodes, index))
{
index--;
continue;
}

}

}
@@ -541,4 +544,3 @@
// }
}
}
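For reference, a sketch of scanAndMergeOperators applied directly to a phase-two node array (not part of this diff; the input deliberately has no whitespace so only operand and operator nodes reach the merge step, matching the tested pipeline):

import std.container.array;
import parsing.tokenizer.make_tokens;
import parsing.treegen.astTypes;
import parsing.treegen.expressionParser;
import parsing.treegen.tokenRelationships;

void demoOperatorMerge()
{
    Array!AstNode nodes;
    nodes ~= phaseOne("x+y*z".tokenizeText);
    phaseTwo(nodes);
    // Folds operator TokenHolder nodes into DoubleArgumentOperation nodes,
    // walking the precedence layers defined in this file.
    scanAndMergeOperators(nodes);
    assert(nodes.length == 1);
    assert(nodes[0].action == AstAction.DoubleArgumentOperation);
}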

15 changes: 7 additions & 8 deletions source/parsing/treegen/treeGenUtils.d
@@ -8,7 +8,7 @@ NameUnit genNameUnit(Token[] tokens, ref size_t index)
{
NameUnit ret;
Nullable!Token tokenNullable = tokens.nextNonWhiteToken(index);

Token token;

// An attempt to generate a name at an EOF
@@ -17,22 +17,21 @@ NameUnit genNameUnit(Token[] tokens, ref size_t index)
index--;
token = tokenNullable;

while (token.tokenVariety == TokenType.Letter || token.tokenVariety == TokenType.Number || token.tokenVariety == TokenType.Period)
while (token.tokenVariety == TokenType.Letter || token.tokenVariety == TokenType.Number || token.tokenVariety == TokenType
.Period)
{

if (token.tokenVariety != TokenType.Period)
ret.names ~= token.value;

Nullable!Token tokenNullable2 = tokens.nextToken(index);

// We hit an EOF
if (!tokenNullable2.ptr)
return ret;
token = tokenNullable2;



}
return ret;

}
}
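For reference, a sketch of genNameUnit splitting a dotted name chain (not part of this diff; behavior inferred from the loop above and the unit tests below):

import parsing.tokenizer.make_tokens;
import parsing.treegen.treeGenUtils;

void demoNameUnit()
{
    size_t index = 0;
    // Letter and Number tokens are collected; the separating periods are dropped.
    auto name = "std.math.sqrt(4);".tokenizeText.genNameUnit(index);
    assert(name.names == [
            "std".makeUnicodeString,
            "math".makeUnicodeString,
            "sqrt".makeUnicodeString
        ]);
}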
37 changes: 23 additions & 14 deletions source/tests/treegen.d
@@ -1,40 +1,48 @@
module tests.parser;


import std.container.array;
import parsing.tokenizer.tokens;
import parsing.treegen.astTypes;
import parsing.treegen.expressionParser;
import parsing.treegen.treeGenUtils;

import parsing.treegen.tokenRelationships;

unittest
{
import parsing.tokenizer.make_tokens;
AstNode[] phaseOneNodes = phaseOne("math.sqrt(3 * 5 + 6 * 7 / 2)*(x+3)/2+4".tokenizeText);

AstNode[] phaseOneNodes = phaseOne("math.sqrt(3 * 5 + 6 * 7 / 2)*(x+3)/2+4".tokenizeText);
Array!AstNode nodes;
nodes~=phaseOneNodes;
nodes ~= phaseOneNodes;
phaseTwo(nodes);
scanAndMergeOperators(nodes);
assert(nodes.length == 1);
assert(nodes[0].action == AstAction.DoubleArgumentOperation);
assert(nodes[0].doubleArgumentOperationNodeData.operationVariety == OperationVariety.Add );
assert(nodes[0].doubleArgumentOperationNodeData.operationVariety == OperationVariety.Add);
assert(nodes[0].doubleArgumentOperationNodeData.right.action == AstAction.LiteralUnit);
assert(nodes[0].doubleArgumentOperationNodeData.right.literalUnitCompenents == [Token(TokenType.Number, ['4'], 37)]);
assert(nodes[0].doubleArgumentOperationNodeData.right.literalUnitCompenents == [
Token(TokenType.Number, ['4'], 37)
]);

}

unittest
{
import parsing.tokenizer.make_tokens;

size_t s = 0;
assert("int x = 4;".tokenizeText.genNameUnit(s).names == ["int".makeUnicodeString]);
s = 0;
assert("std.int x = 4;".tokenizeText.genNameUnit(s).names == [
"std".makeUnicodeString,
"int".makeUnicodeString
assert("int x = 4;".tokenizeText.genNameUnit(s).names == [
"int".makeUnicodeString
]);
s = 0;
assert("std.int x = 4;".tokenizeText.genNameUnit(s)
.names == [
"std".makeUnicodeString,
"int".makeUnicodeString
]);
}

unittest
{
import parsing.tokenizer.make_tokens;
@@ -63,6 +71,7 @@ unittest
DeclarationAndAssignment.matchesToken(tokenizeText("int x = 4;"))
);
}

unittest
{
import parsing.tokenizer.make_tokens;
@@ -73,8 +82,8 @@ unittest
size_t index = 0;
assert(
[
"align(an invalid alignment)".makeUnicodeString,
"abstract".makeUnicodeString, "pure".makeUnicodeString
] == skipAndExtractKeywords(tokens, index));
"align(an invalid alignment)".makeUnicodeString,
"abstract".makeUnicodeString, "pure".makeUnicodeString
] == skipAndExtractKeywords(tokens, index));
assert(tokens[index].value == "int".makeUnicodeString);
}
}
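Assuming the project builds with dub (no build manifest appears in this diff), the unittest blocks above would normally be exercised by running dub test from the repository root.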
