diff --git a/source/emission/package.d b/source/fnc/emission/package.d
similarity index 100%
rename from source/emission/package.d
rename to source/fnc/emission/package.d
diff --git a/source/emission/x86.d b/source/fnc/emission/x86.d
similarity index 100%
rename from source/emission/x86.d
rename to source/fnc/emission/x86.d
diff --git a/source/errors.d b/source/fnc/errors.d
similarity index 97%
rename from source/errors.d
rename to source/fnc/errors.d
index 311b27e..d4fa47e 100644
--- a/source/errors.d
+++ b/source/fnc/errors.d
@@ -1,7 +1,8 @@
-module errors;
+module fnc.errors;
+
 import std.exception;
-import parsing.tokenizer.tokens : Token;
-import parsing.treegen.astTypes : AstNode, getMinMax;
+import fnc.tokenizer.tokens : Token;
+import fnc.treegen.ast_types : AstNode, getMinMax;
 
 public static string GLOBAL_ERROR_STATE = null;
 
diff --git a/source/fnc/package.d b/source/fnc/package.d
new file mode 100644
index 0000000..b2e0d77
--- /dev/null
+++ b/source/fnc/package.d
@@ -0,0 +1,8 @@
+module fnc;
+
+public import fnc.tokenizer;
+public import fnc.treegen;
+public import fnc.errors;
+public import fnc.emission;
+public import fnc.propagation;
+public import fnc.symbols;
\ No newline at end of file
diff --git a/source/propagation.d b/source/fnc/propagation.d
similarity index 100%
rename from source/propagation.d
rename to source/fnc/propagation.d
diff --git a/source/symbols.d b/source/fnc/symbols.d
similarity index 100%
rename from source/symbols.d
rename to source/fnc/symbols.d
diff --git a/source/parsing/tokenizer/make_tokens.d b/source/fnc/tokenizer/make_tokens.d
similarity index 98%
rename from source/parsing/tokenizer/make_tokens.d
rename to source/fnc/tokenizer/make_tokens.d
index 15db108..4f586c8 100644
--- a/source/parsing/tokenizer/make_tokens.d
+++ b/source/fnc/tokenizer/make_tokens.d
@@ -1,4 +1,4 @@
-module parsing.tokenizer.make_tokens;
+module fnc.tokenizer.make_tokens;
 
 import std.algorithm : find, min;
 import std.string : indexOf;
@@ -6,7 +6,7 @@ import std.string : indexOf;
 import std.utf : decode;
 
 import tern.typecons.common : Nullable, nullable;
-import parsing.tokenizer.tokens;
+import fnc.tokenizer.tokens;
 
 dchar[] handleMultilineCommentsAtIndex(dchar[] input, ref size_t index)
 {
diff --git a/source/fnc/tokenizer/package.d b/source/fnc/tokenizer/package.d
new file mode 100644
index 0000000..6a63af2
--- /dev/null
+++ b/source/fnc/tokenizer/package.d
@@ -0,0 +1,4 @@
+module fnc.tokenizer;
+
+public import fnc.tokenizer.make_tokens;
+public import fnc.tokenizer.tokens;
\ No newline at end of file
diff --git a/source/parsing/tokenizer/tokens.d b/source/fnc/tokenizer/tokens.d
similarity index 99%
rename from source/parsing/tokenizer/tokens.d
rename to source/fnc/tokenizer/tokens.d
index 328d758..8e7dea5 100644
--- a/source/parsing/tokenizer/tokens.d
+++ b/source/fnc/tokenizer/tokens.d
@@ -1,4 +1,4 @@
-module parsing.tokenizer.tokens;
+module fnc.tokenizer.tokens;
 
 import std.ascii : isASCII, isDigit, isAlpha, isAlphaNum, isWhite;
 import std.algorithm : find, min;
diff --git a/source/parsing/treegen/astTypes.d b/source/fnc/treegen/ast_types.d
similarity index 98%
rename from source/parsing/treegen/astTypes.d
rename to source/fnc/treegen/ast_types.d
index 8ec9514..2d60e25 100644
--- a/source/parsing/treegen/astTypes.d
+++ b/source/fnc/treegen/ast_types.d
@@ -1,5 +1,6 @@
-module parsing.treegen.astTypes;
-import parsing.tokenizer.tokens : Token;
+module fnc.treegen.ast_types;
+
+import fnc.tokenizer.tokens : Token;
 import tern.typecons.common : Nullable, nullable;
 
 struct NamedUnit
@@ -136,7 +137,7 @@ enum OperationVariety
     Range, // x..y OR 0..99
 }
 
-import parsing.treegen.scopeParser : ScopeData;
+import fnc.treegen.scope_parser : ScopeData;
 
 struct ConditionNodeData
 {
@@ -436,7 +437,7 @@ class AstNode
                 conditionNodeData.condition.tree(tabCount + 1);
                 if (conditionNodeData.isScope)
                 {
-                    import parsing.treegen.scopeParser : tree;
+                    import fnc.treegen.scope_parser : tree;
 
                     conditionNodeData.conditionScope.tree(tabCount + 1);
                 }
@@ -451,7 +452,7 @@ class AstNode
                     .precedingKeywords.to!string);
                 if (elseNodeData.isScope)
                 {
-                    import parsing.treegen.scopeParser : tree;
+                    import fnc.treegen.scope_parser : tree;
 
                     elseNodeData.elseScope.tree(tabCount + 1);
                 }
@@ -514,7 +515,7 @@ import std.container.array;
 
 bool isWhite(const AstNode node)
 {
-    import parsing.tokenizer.tokens : TokenType;
+    import fnc.tokenizer.tokens : TokenType;
 
     return node.action == AstAction.TokenHolder
         && (node.tokenBeingHeld.tokenVariety == TokenType.WhiteSpace
diff --git a/source/parsing/treegen/expressionParser.d b/source/fnc/treegen/expression_parser.d
similarity index 98%
rename from source/parsing/treegen/expressionParser.d
rename to source/fnc/treegen/expression_parser.d
index 3e03c26..a15b454 100644
--- a/source/parsing/treegen/expressionParser.d
+++ b/source/fnc/treegen/expression_parser.d
@@ -1,11 +1,11 @@
-module parsing.treegen.expressionParser;
+module fnc.treegen.expression_parser;
 
 import tern.typecons.common : Nullable, nullable;
-import parsing.treegen.astTypes;
-import parsing.tokenizer.tokens;
-import parsing.treegen.tokenRelationships;
-import parsing.treegen.treeGenUtils;
-import errors;
+import fnc.treegen.ast_types;
+import fnc.tokenizer.tokens;
+import fnc.treegen.relationships;
+import fnc.treegen.utils;
+import fnc.errors;
 
 import std.stdio;
 import std.container.array;
diff --git a/source/parsing/treegen/keywords.d b/source/fnc/treegen/keywords.d
similarity index 96%
rename from source/parsing/treegen/keywords.d
rename to source/fnc/treegen/keywords.d
index b22ebf3..4a8319f 100644
--- a/source/parsing/treegen/keywords.d
+++ b/source/fnc/treegen/keywords.d
@@ -1,7 +1,8 @@
-module parsing.treegen.keywords;
-import parsing.tokenizer.tokens;
+module fnc.treegen.keywords;
+
+import fnc.tokenizer.tokens;
 import std.string : indexOf;
-import errors;
+import fnc.errors;
 
 const dchar[][] FUNC_STYLE_KEYWORD = [
     "align".makeUnicodeString
diff --git a/source/fnc/treegen/package.d b/source/fnc/treegen/package.d
new file mode 100644
index 0000000..9ed8f6a
--- /dev/null
+++ b/source/fnc/treegen/package.d
@@ -0,0 +1,9 @@
+module fnc.treegen;
+
+public import fnc.treegen.ast_types;
+public import fnc.treegen.expression_parser;
+public import fnc.treegen.keywords;
+public import fnc.treegen.scope_parser;
+public import fnc.treegen.relationships;
+public import fnc.treegen.utils;
+public import fnc.treegen.type_parser;
\ No newline at end of file
diff --git a/source/parsing/treegen/tokenRelationships.d b/source/fnc/treegen/relationships.d
similarity index 99%
rename from source/parsing/treegen/tokenRelationships.d
rename to source/fnc/treegen/relationships.d
index 4357448..a83637b 100644
--- a/source/parsing/treegen/tokenRelationships.d
+++ b/source/fnc/treegen/relationships.d
@@ -1,8 +1,9 @@
-module parsing.treegen.tokenRelationships;
-import parsing.tokenizer.tokens;
-import parsing.treegen.astTypes;
-import parsing.treegen.treeGenUtils;
-import parsing.treegen.typeParser;
+module fnc.treegen.relationships;
+
+import fnc.tokenizer.tokens;
+import fnc.treegen.ast_types;
+import fnc.treegen.utils;
+import fnc.treegen.type_parser;
 import tern.typecons.common : Nullable, nullable;
 
 /+
diff --git a/source/parsing/treegen/scopeParser.d b/source/fnc/treegen/scope_parser.d
similarity index 97%
rename from source/parsing/treegen/scopeParser.d
rename to source/fnc/treegen/scope_parser.d
index 58ec96e..6cc1744 100644
--- a/source/parsing/treegen/scopeParser.d
+++ b/source/fnc/treegen/scope_parser.d
@@ -1,14 +1,15 @@
-module parsing.treegen.scopeParser;
-import parsing.tokenizer.tokens;
-import parsing.treegen.astTypes;
-import parsing.treegen.expressionParser;
-import parsing.treegen.treeGenUtils;
-import parsing.treegen.tokenRelationships;
-import parsing.treegen.keywords;
+module fnc.treegen.scope_parser;
+
+import fnc.tokenizer.tokens;
+import fnc.treegen.ast_types;
+import fnc.treegen.expression_parser;
+import fnc.treegen.utils;
+import fnc.treegen.relationships;
+import fnc.treegen.keywords;
 import tern.typecons.common : Nullable, nullable;
 import std.container.array;
 
-import errors;
+import fnc.errors;
 
 struct ImportStatement
 {
@@ -380,7 +381,7 @@ ScopeData parseMultilineScope(const(VarietyTestPair[]) scopeParseMethod, Token[]
 
 ScopeData parseMultilineScope(const(VarietyTestPair[]) scopeParseMethod, string data)
 {
-    import parsing.tokenizer.make_tokens;
+    import fnc.tokenizer.make_tokens;
 
     size_t index;
     GLOBAL_ERROR_STATE = data;
diff --git a/source/parsing/treegen/typeParser.d b/source/fnc/treegen/type_parser.d
similarity index 97%
rename from source/parsing/treegen/typeParser.d
rename to source/fnc/treegen/type_parser.d
index e67150f..65f84f0 100644
--- a/source/parsing/treegen/typeParser.d
+++ b/source/fnc/treegen/type_parser.d
@@ -1,10 +1,10 @@
-module parsing.treegen.typeParser;
+module fnc.treegen.type_parser;
 
-import parsing.tokenizer.tokens;
-import parsing.treegen.astTypes;
+import fnc.tokenizer.tokens;
+import fnc.treegen.ast_types;
 import tern.typecons.common : Nullable, nullable;
 
-import errors;
+import fnc.errors;
 
 import std.array;
 import std.stdio;
@@ -196,7 +196,7 @@ size_t prematureTypeLength(Token[] tokens, size_t index)
 
 Nullable!AstNode typeFromTokens(Token[] tokens, ref size_t index)
 {
-    import parsing.treegen.expressionParser : phaseOne;
+    import fnc.treegen.expression_parser : phaseOne;
 
     size_t length = tokens.prematureTypeLength(index);
     if (length == 0)
diff --git a/source/parsing/treegen/treeGenUtils.d b/source/fnc/treegen/utils.d
similarity index 88%
rename from source/parsing/treegen/treeGenUtils.d
rename to source/fnc/treegen/utils.d
index dec602e..21cf500 100644
--- a/source/parsing/treegen/treeGenUtils.d
+++ b/source/fnc/treegen/utils.d
@@ -1,7 +1,8 @@
-module parsing.treegen.treeGenUtils;
-import errors;
-import parsing.treegen.astTypes;
-import parsing.tokenizer.tokens;
+module fnc.treegen.utils;
+
+import fnc.errors;
+import fnc.treegen.ast_types;
+import fnc.tokenizer.tokens;
 import tern.typecons.common : Nullable, nullable;
 
 NamedUnit genNamedUnit(Token[] tokens, ref size_t index)
diff --git a/source/main.d b/source/main.d
index 72ce34c..98e9d7b 100644
--- a/source/main.d
+++ b/source/main.d
@@ -1,11 +1,11 @@
 module main;
 
-import parsing.tokenizer.tokens;
-import parsing.tokenizer.make_tokens;
+import fnc.tokenizer.tokens;
+import fnc.tokenizer.make_tokens;
 import tern.typecons.common : Nullable, nullable;
-import parsing.treegen.scopeParser;
-import parsing.treegen.tokenRelationships;
-import parsing.treegen.typeParser;
+import fnc.treegen.scope_parser;
+import fnc.treegen.relationships;
+import fnc.treegen.type_parser;
 
 import std.stdio;
 
@@ -66,7 +66,7 @@ void main()
     size_t min = -1;
     size_t max = 0;
-    import parsing.treegen.astTypes : getMinMax;
+    import fnc.treegen.ast_types : getMinMax;
 
     getMinMax(newScope.instructions[0], min, max);
     min.writeln;
     max.writeln;
diff --git a/source/package.d b/source/package.d
deleted file mode 100644
index 0d4071e..0000000
--- a/source/package.d
+++ /dev/null
@@ -1,5 +0,0 @@
-module source;
-
-public import fnc.emission;
-public import fnc.propagation;
-public import fnc.symbols;
\ No newline at end of file
diff --git a/source/parsing/compolation.d b/source/parsing/compolation.d
deleted file mode 100644
index c23138b..0000000
--- a/source/parsing/compolation.d
+++ /dev/null
@@ -1,6 +0,0 @@
-module parsing.compolation;
-
-struct CompalationUnit
-{
-
-}
diff --git a/source/tests/scopeParser.d b/source/tests/scopeParser.d
index c105f34..93a92c9 100644
--- a/source/tests/scopeParser.d
+++ b/source/tests/scopeParser.d
@@ -1,15 +1,15 @@
-module tests.scopeParser;
+module tests.scope_parser;
 
-import parsing.treegen.astTypes;
-import parsing.treegen.tokenRelationships;
-import parsing.treegen.scopeParser;
-import parsing.tokenizer.tokens;
+import fnc.treegen.ast_types;
+import fnc.treegen.relationships;
+import fnc.treegen.scope_parser;
+import fnc.tokenizer.tokens;
 import tern.typecons.common : Nullable, nullable;
 
 unittest
 {
-    import parsing.tokenizer.make_tokens;
-    import parsing.treegen.scopeParser;
+    import fnc.tokenizer.make_tokens;
+    import fnc.treegen.scope_parser;
 
     size_t index = 0;
     auto newScope = parseMultilineScope(FUNCTION_SCOPE_PARSE, "
@@ -55,7 +55,7 @@ unittest
 
 unittest
 {
-    import parsing.tokenizer.make_tokens;
+    import fnc.tokenizer.make_tokens;
 
     size_t index;
     auto t = "let x = 4/*asdadasd*/;".tokenizeText;
diff --git a/source/tests/tokenizer.d b/source/tests/tokenizer.d
index e526fed..14af044 100644
--- a/source/tests/tokenizer.d
+++ b/source/tests/tokenizer.d
@@ -1,6 +1,6 @@
 module tests.tokenizer;
-import parsing.tokenizer.tokens;
-import parsing.tokenizer.make_tokens;
+import fnc.tokenizer.tokens;
+import fnc.tokenizer.make_tokens;
 
 unittest
 {
diff --git a/source/tests/treegen.d b/source/tests/treegen.d
index 694c6ed..20ff51d 100644
--- a/source/tests/treegen.d
+++ b/source/tests/treegen.d
@@ -1,16 +1,16 @@
 module tests.parser;
 
 import std.container.array;
-import parsing.tokenizer.tokens;
-import parsing.treegen.astTypes;
-import parsing.treegen.expressionParser;
-import parsing.treegen.treeGenUtils;
+import fnc.tokenizer.tokens;
+import fnc.treegen.ast_types;
+import fnc.treegen.expression_parser;
+import fnc.treegen.utils;
 
-import parsing.treegen.tokenRelationships;
+import fnc.treegen.relationships;
 
 // unittest
 // {
-//     import parsing.tokenizer.make_tokens;
+//     import fnc.tokenizer.make_tokens;
 
 //     AstNode[] phaseOneNodes = phaseOne("math.sqrt(3 * 5 + 6 * 7 / 2)*(x+3)/2+4".tokenizeText);
 //     Array!AstNode nodes;
@@ -29,7 +29,7 @@ import parsing.treegen.tokenRelationships;
 
 unittest
 {
-    import parsing.tokenizer.make_tokens;
+    import fnc.tokenizer.make_tokens;
 
     size_t s = 0;
     assert("int x = 4;".tokenizeText.genNamedUnit(s).names == [
@@ -46,7 +46,7 @@ unittest
 unittest
 {
     import std.stdio;
-    import parsing.tokenizer.make_tokens;
+    import fnc.tokenizer.make_tokens;
 
     assert(DeclarationLine.matchesToken(
         tokenizeText("mod.type.submod x,r,q,a, A_variable \n\r\t ;")
@@ -76,8 +76,8 @@ unittest
 
 unittest
 {
-    import parsing.tokenizer.make_tokens;
-    import parsing.treegen.keywords;
+    import fnc.tokenizer.make_tokens;
+    import fnc.treegen.keywords;
 
     Token[] tokens = tokenizeText("align(an invalid alignment) abstract pure int x();");
 
@@ -92,7 +92,7 @@ unittest
 
 unittest
 {
-    import parsing.tokenizer.make_tokens;
+    import fnc.tokenizer.make_tokens;
 
     auto nodes = expressionNodeFromTokens("(p[t++]<<<=1) + 10 / x[9]++".tokenizeText);
     // assert(nodes.length == 1);
@@ -103,8 +103,8 @@ unittest
 
 unittest
 {
-    import parsing.tokenizer.make_tokens;
-    import parsing.treegen.scopeParser;
+    import fnc.tokenizer.make_tokens;
+    import fnc.treegen.scope_parser;
 
     size_t index = 0;
     auto scopeData = new ScopeData;
@@ -121,8 +121,8 @@ unittest
 
 unittest
 {
-    import parsing.tokenizer.make_tokens;
-    import parsing.treegen.scopeParser;
+    import fnc.tokenizer.make_tokens;
+    import fnc.treegen.scope_parser;
     import std.stdio;
 
     size_t index = 0;