grand evil

cetio committed May 9, 2024
1 parent 3ecb2e4 commit 981dc41
Showing 23 changed files with 102 additions and 86 deletions.
File renamed without changes.
File renamed without changes.
7 changes: 4 additions & 3 deletions source/errors.d → source/fnc/errors.d
@@ -1,7 +1,8 @@
-module errors;
+module fnc.errors;
+
 import std.exception;
-import parsing.tokenizer.tokens : Token;
-import parsing.treegen.astTypes : AstNode, getMinMax;
+import fnc.tokenizer.tokens : Token;
+import fnc.treegen.ast_types : AstNode, getMinMax;
 
 public static string GLOBAL_ERROR_STATE = null;
8 changes: 8 additions & 0 deletions source/fnc/package.d
@@ -0,0 +1,8 @@
+module fnc;
+
+public import fnc.tokenizer;
+public import fnc.treegen;
+public import fnc.errors;
+public import fnc.emission;
+public import fnc.propagation;
+public import fnc.symbols;
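
Aside: the new source/fnc/package.d above turns fnc into an umbrella module, so downstream code can replace the old per-module parsing.* imports with a single import. A minimal sketch of a consumer follows; the module is hypothetical (not part of this commit), and it assumes the re-exported fnc.emission, fnc.propagation, and fnc.symbols modules exist at this revision. parseMultilineScope and FUNCTION_SCOPE_PARSE are used the same way as in source/tests/scopeParser.d below.

// Hypothetical consumer module, shown only to illustrate the new import surface.
module example;

// One import now pulls in the tokenizer, treegen, and errors modules via the
// public imports in source/fnc/package.d.
import fnc;

void demo()
{
    // parseMultilineScope(const(VarietyTestPair[]), string) and
    // FUNCTION_SCOPE_PARSE are both reachable through the fnc.treegen re-exports.
    auto newScope = parseMultilineScope(FUNCTION_SCOPE_PARSE, "let x = 4;");
}
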
File renamed without changes.
File renamed without changes.
4 changes: 2 additions & 2 deletions source/parsing/tokenizer/make_tokens.d → source/fnc/tokenizer/make_tokens.d
@@ -1,12 +1,12 @@
-module parsing.tokenizer.make_tokens;
+module fnc.tokenizer.make_tokens;
 
 import std.algorithm : find, min;
 import std.string : indexOf;
 
 import std.utf : decode;
 import tern.typecons.common : Nullable, nullable;
 
-import parsing.tokenizer.tokens;
+import fnc.tokenizer.tokens;
 
 dchar[] handleMultilineCommentsAtIndex(dchar[] input, ref size_t index)
 {
4 changes: 4 additions & 0 deletions source/fnc/tokenizer/package.d
@@ -0,0 +1,4 @@
+module fnc.tokenizer;
+
+public import fnc.tokenizer.make_tokens;
+public import fnc.tokenizer.tokens;
2 changes: 1 addition & 1 deletion source/parsing/tokenizer/tokens.d → source/fnc/tokenizer/tokens.d
@@ -1,4 +1,4 @@
-module parsing.tokenizer.tokens;
+module fnc.tokenizer.tokens;
 
 import std.ascii : isASCII, isDigit, isAlpha, isAlphaNum, isWhite;
 import std.algorithm : find, min;
13 changes: 7 additions & 6 deletions source/parsing/treegen/astTypes.d → source/fnc/treegen/ast_types.d
@@ -1,5 +1,6 @@
-module parsing.treegen.astTypes;
-import parsing.tokenizer.tokens : Token;
+module fnc.treegen.ast_types;
+
+import fnc.tokenizer.tokens : Token;
 import tern.typecons.common : Nullable, nullable;
 
 struct NamedUnit
@@ -136,7 +137,7 @@ enum OperationVariety
     Range, // x..y OR 0..99
 }
 
-import parsing.treegen.scopeParser : ScopeData;
+import fnc.treegen.scope_parser : ScopeData;
 
 struct ConditionNodeData
 {
@@ -436,7 +437,7 @@ class AstNode
             conditionNodeData.condition.tree(tabCount + 1);
             if (conditionNodeData.isScope)
             {
-                import parsing.treegen.scopeParser : tree;
+                import fnc.treegen.scope_parser : tree;
 
                 conditionNodeData.conditionScope.tree(tabCount + 1);
             }
@@ -451,7 +452,7 @@ class AstNode
                 .precedingKeywords.to!string);
             if (elseNodeData.isScope)
             {
-                import parsing.treegen.scopeParser : tree;
+                import fnc.treegen.scope_parser : tree;
 
                 elseNodeData.elseScope.tree(tabCount + 1);
             }
@@ -514,7 +515,7 @@ import std.container.array;

 bool isWhite(const AstNode node)
 {
-    import parsing.tokenizer.tokens : TokenType;
+    import fnc.tokenizer.tokens : TokenType;
 
     return node.action == AstAction.TokenHolder &&
         (node.tokenBeingHeld.tokenVariety == TokenType.WhiteSpace
12 changes: 6 additions & 6 deletions source/parsing/treegen/expressionParser.d → source/fnc/treegen/expression_parser.d
@@ -1,11 +1,11 @@
-module parsing.treegen.expressionParser;
+module fnc.treegen.expression_parser;
 
 import tern.typecons.common : Nullable, nullable;
-import parsing.treegen.astTypes;
-import parsing.tokenizer.tokens;
-import parsing.treegen.tokenRelationships;
-import parsing.treegen.treeGenUtils;
-import errors;
+import fnc.treegen.ast_types;
+import fnc.tokenizer.tokens;
+import fnc.treegen.relationships;
+import fnc.treegen.utils;
+import fnc.errors;
 import std.stdio;
 import std.container.array;

7 changes: 4 additions & 3 deletions source/parsing/treegen/keywords.d → source/fnc/treegen/keywords.d
@@ -1,7 +1,8 @@
-module parsing.treegen.keywords;
-import parsing.tokenizer.tokens;
+module fnc.treegen.keywords;
+
+import fnc.tokenizer.tokens;
 import std.string : indexOf;
-import errors;
+import fnc.errors;
 
 const dchar[][] FUNC_STYLE_KEYWORD = [
     "align".makeUnicodeString
9 changes: 9 additions & 0 deletions source/fnc/treegen/package.d
@@ -0,0 +1,9 @@
+module fnc.treegen;
+
+public import fnc.treegen.ast_types;
+public import fnc.treegen.expression_parser;
+public import fnc.treegen.keywords;
+public import fnc.treegen.scope_parser;
+public import fnc.treegen.relationships;
+public import fnc.treegen.utils;
+public import fnc.treegen.type_parser;
11 changes: 6 additions & 5 deletions source/parsing/treegen/tokenRelationships.d → source/fnc/treegen/relationships.d
@@ -1,8 +1,9 @@
-module parsing.treegen.tokenRelationships;
-import parsing.tokenizer.tokens;
-import parsing.treegen.astTypes;
-import parsing.treegen.treeGenUtils;
-import parsing.treegen.typeParser;
+module fnc.treegen.relationships;
+
+import fnc.tokenizer.tokens;
+import fnc.treegen.ast_types;
+import fnc.treegen.utils;
+import fnc.treegen.type_parser;
 import tern.typecons.common : Nullable, nullable;
 
 /+
19 changes: 10 additions & 9 deletions source/parsing/treegen/scopeParser.d → source/fnc/treegen/scope_parser.d
@@ -1,14 +1,15 @@
-module parsing.treegen.scopeParser;
-import parsing.tokenizer.tokens;
-import parsing.treegen.astTypes;
-import parsing.treegen.expressionParser;
-import parsing.treegen.treeGenUtils;
-import parsing.treegen.tokenRelationships;
-import parsing.treegen.keywords;
+module fnc.treegen.scope_parser;
+
+import fnc.tokenizer.tokens;
+import fnc.treegen.ast_types;
+import fnc.treegen.expression_parser;
+import fnc.treegen.utils;
+import fnc.treegen.relationships;
+import fnc.treegen.keywords;
 
 import tern.typecons.common : Nullable, nullable;
 import std.container.array;
-import errors;
+import fnc.errors;
 
 struct ImportStatement
 {
@@ -380,7 +381,7 @@ ScopeData parseMultilineScope(const(VarietyTestPair[]) scopeParseMethod, Token[]

 ScopeData parseMultilineScope(const(VarietyTestPair[]) scopeParseMethod, string data)
 {
-    import parsing.tokenizer.make_tokens;
+    import fnc.tokenizer.make_tokens;
 
     size_t index;
     GLOBAL_ERROR_STATE = data;
10 changes: 5 additions & 5 deletions source/parsing/treegen/typeParser.d → source/fnc/treegen/type_parser.d
@@ -1,10 +1,10 @@
-module parsing.treegen.typeParser;
+module fnc.treegen.type_parser;
 
-import parsing.tokenizer.tokens;
-import parsing.treegen.astTypes;
+import fnc.tokenizer.tokens;
+import fnc.treegen.ast_types;
 
 import tern.typecons.common : Nullable, nullable;
-import errors;
+import fnc.errors;
 
 import std.array;
 import std.stdio;
@@ -196,7 +196,7 @@ size_t prematureTypeLength(Token[] tokens, size_t index)

 Nullable!AstNode typeFromTokens(Token[] tokens, ref size_t index)
 {
-    import parsing.treegen.expressionParser : phaseOne;
+    import fnc.treegen.expression_parser : phaseOne;
 
     size_t length = tokens.prematureTypeLength(index);
     if (length == 0)
9 changes: 5 additions & 4 deletions source/parsing/treegen/treeGenUtils.d → source/fnc/treegen/utils.d
@@ -1,7 +1,8 @@
-module parsing.treegen.treeGenUtils;
-import errors;
-import parsing.treegen.astTypes;
-import parsing.tokenizer.tokens;
+module fnc.treegen.utils;
+
+import fnc.errors;
+import fnc.treegen.ast_types;
+import fnc.tokenizer.tokens;
 import tern.typecons.common : Nullable, nullable;
 
 NamedUnit genNamedUnit(Token[] tokens, ref size_t index)
12 changes: 6 additions & 6 deletions source/main.d
@@ -1,11 +1,11 @@
 module main;
 
-import parsing.tokenizer.tokens;
-import parsing.tokenizer.make_tokens;
+import fnc.tokenizer.tokens;
+import fnc.tokenizer.make_tokens;
 import tern.typecons.common : Nullable, nullable;
-import parsing.treegen.scopeParser;
-import parsing.treegen.tokenRelationships;
-import parsing.treegen.typeParser;
+import fnc.treegen.scope_parser;
+import fnc.treegen.relationships;
+import fnc.treegen.type_parser;
 
 import std.stdio;

@@ -66,7 +66,7 @@ void main()
     size_t min = -1;
     size_t max = 0;
 
-    import parsing.treegen.astTypes : getMinMax;
+    import fnc.treegen.ast_types : getMinMax;
     getMinMax(newScope.instructions[0], min, max);
     min.writeln;
     max.writeln;
5 changes: 0 additions & 5 deletions source/package.d

This file was deleted.

6 changes: 0 additions & 6 deletions source/parsing/compolation.d

This file was deleted.

16 changes: 8 additions & 8 deletions source/tests/scopeParser.d
@@ -1,15 +1,15 @@
-module tests.scopeParser;
+module tests.scope_parser;
 
-import parsing.treegen.astTypes;
-import parsing.treegen.tokenRelationships;
-import parsing.treegen.scopeParser;
-import parsing.tokenizer.tokens;
+import fnc.treegen.ast_types;
+import fnc.treegen.relationships;
+import fnc.treegen.scope_parser;
+import fnc.tokenizer.tokens;
 import tern.typecons.common : Nullable, nullable;
 
 unittest
 {
-    import parsing.tokenizer.make_tokens;
-    import parsing.treegen.scopeParser;
+    import fnc.tokenizer.make_tokens;
+    import fnc.treegen.scope_parser;
 
     size_t index = 0;
     auto newScope = parseMultilineScope(FUNCTION_SCOPE_PARSE, "
@@ -55,7 +55,7 @@ unittest

 unittest
 {
-    import parsing.tokenizer.make_tokens;
+    import fnc.tokenizer.make_tokens;
 
     size_t index;
     auto t = "let x = 4/*asdadasd*/;".tokenizeText;
4 changes: 2 additions & 2 deletions source/tests/tokenizer.d
@@ -1,6 +1,6 @@
 module tests.tokenizer;
-import parsing.tokenizer.tokens;
-import parsing.tokenizer.make_tokens;
+import fnc.tokenizer.tokens;
+import fnc.tokenizer.make_tokens;
 
 unittest
 {
30 changes: 15 additions & 15 deletions source/tests/treegen.d
@@ -1,16 +1,16 @@
 module tests.parser;
 
 import std.container.array;
-import parsing.tokenizer.tokens;
-import parsing.treegen.astTypes;
-import parsing.treegen.expressionParser;
-import parsing.treegen.treeGenUtils;
+import fnc.tokenizer.tokens;
+import fnc.treegen.ast_types;
+import fnc.treegen.expression_parser;
+import fnc.treegen.utils;
 
-import parsing.treegen.tokenRelationships;
+import fnc.treegen.relationships;
 
 // unittest
 // {
-//     import parsing.tokenizer.make_tokens;
+//     import fnc.tokenizer.make_tokens;
 
 //     AstNode[] phaseOneNodes = phaseOne("math.sqrt(3 * 5 + 6 * 7 / 2)*(x+3)/2+4".tokenizeText);
 //     Array!AstNode nodes;
@@ -29,7 +29,7 @@ import parsing.treegen.tokenRelationships;

 unittest
 {
-    import parsing.tokenizer.make_tokens;
+    import fnc.tokenizer.make_tokens;
 
     size_t s = 0;
     assert("int x = 4;".tokenizeText.genNamedUnit(s).names == [
@@ -46,7 +46,7 @@ unittest
 unittest
 {
     import std.stdio;
-    import parsing.tokenizer.make_tokens;
+    import fnc.tokenizer.make_tokens;
 
     assert(DeclarationLine.matchesToken(
         tokenizeText("mod.type.submod x,r,q,a, A_variable \n\r\t ;")
@@ -76,8 +76,8 @@ unittest

 unittest
 {
-    import parsing.tokenizer.make_tokens;
-    import parsing.treegen.keywords;
+    import fnc.tokenizer.make_tokens;
+    import fnc.treegen.keywords;
 
     Token[] tokens = tokenizeText("align(an invalid alignment) abstract pure int x();");

@@ -92,7 +92,7 @@ unittest

 unittest
 {
-    import parsing.tokenizer.make_tokens;
+    import fnc.tokenizer.make_tokens;
 
     auto nodes = expressionNodeFromTokens("(p[t++]<<<=1) + 10 / x[9]++".tokenizeText);
     // assert(nodes.length == 1);
@@ -103,8 +103,8 @@

 unittest
 {
-    import parsing.tokenizer.make_tokens;
-    import parsing.treegen.scopeParser;
+    import fnc.tokenizer.make_tokens;
+    import fnc.treegen.scope_parser;
 
     size_t index = 0;
     auto scopeData = new ScopeData;
@@ -121,8 +121,8 @@

 unittest
 {
-    import parsing.tokenizer.make_tokens;
-    import parsing.treegen.scopeParser;
+    import fnc.tokenizer.make_tokens;
+    import fnc.treegen.scope_parser;
     import std.stdio;
 
     size_t index = 0;
