Make tags use #+ syntax instead of //+ syntax so it no longer looks like a comment.

The old style still works but is deprecated with a warning. Using unknown tags is now an error instead of a warning. There is a new token for #+ which consumes the whole line; the tags themselves are parsed as before. A 'package_line_seen' bool has been added to the tokenizer to make sure #+ only eats the whole line if it appears before the package line.
karl-zylinski committed Sep 6, 2024
1 parent a99e57c commit 0f5aabf
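For illustration, a minimal sketch of what this change means for an Odin source file. The tag names are the ones handled in the diff below; the concrete build targets and package name are made up for the example.

Old style (still accepted, but now warns "//+ is deprecated: Use #+ instead"):

    //+build linux, darwin
    //+private
    package example

New style (a #+ file tag line, which must appear before the package line):

    #+build linux, darwin
    #+private
    package example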
Showing 2 changed files with 93 additions and 46 deletions.
128 changes: 82 additions & 46 deletions src/parser.cpp
@@ -6091,7 +6091,7 @@ gb_internal String build_tag_get_token(String s, String *out) {
}

gb_internal bool parse_build_tag(Token token_for_pos, String s) {
String const prefix = str_lit("+build");
String const prefix = str_lit("build");
GB_ASSERT(string_starts_with(s, prefix));
s = string_trim_whitespace(substring(s, prefix.len, s.len));

@@ -6176,7 +6176,7 @@ gb_internal String vet_tag_get_token(String s, String *out) {


gb_internal u64 parse_vet_tag(Token token_for_pos, String s) {
String const prefix = str_lit("+vet");
String const prefix = str_lit("vet");
GB_ASSERT(string_starts_with(s, prefix));
s = string_trim_whitespace(substring(s, prefix.len, s.len));

@@ -6281,7 +6281,7 @@ gb_internal isize calc_decl_count(Ast *decl) {
}

gb_internal bool parse_build_project_directory_tag(Token token_for_pos, String s) {
String const prefix = str_lit("+build-project-name");
String const prefix = str_lit("build-project-name");
GB_ASSERT(string_starts_with(s, prefix));
s = string_trim_whitespace(substring(s, prefix.len, s.len));
if (s.len == 0) {
@@ -6325,6 +6325,48 @@ gb_internal bool parse_build_project_directory_tag(Token token_for_pos, String s
return any_correct;
}

gb_internal bool process_file_tag(const String &lc, const Token &tok, AstFile *f) {
if (string_starts_with(lc, str_lit("build-project-name"))) {
if (!parse_build_project_directory_tag(tok, lc)) {
return false;
}
} else if (string_starts_with(lc, str_lit("build"))) {
if (!parse_build_tag(tok, lc)) {
return false;
}
} else if (string_starts_with(lc, str_lit("vet"))) {
f->vet_flags = parse_vet_tag(tok, lc);
f->vet_flags_set = true;
} else if (string_starts_with(lc, str_lit("ignore"))) {
return false;
} else if (string_starts_with(lc, str_lit("private"))) {
f->flags |= AstFile_IsPrivatePkg;
String command = string_trim_starts_with(lc, str_lit("private "));
command = string_trim_whitespace(command);
if (lc == "private") {
f->flags |= AstFile_IsPrivatePkg;
} else if (command == "package") {
f->flags |= AstFile_IsPrivatePkg;
} else if (command == "file") {
f->flags |= AstFile_IsPrivateFile;
}
} else if (lc == "lazy") {
if (build_context.ignore_lazy) {
// Ignore
} else if (f->pkg->kind == Package_Init && build_context.command_kind == Command_doc) {
// Ignore
} else {
f->flags |= AstFile_IsLazy;
}
} else if (lc == "no-instrumentation") {
f->flags |= AstFile_NoInstrumentation;
} else {
error(tok, "Unknown tag '%.*s'", LIT(lc));
}

return true;
}

gb_internal bool parse_file(Parser *p, AstFile *f) {
if (f->tokens.count == 0) {
return true;
@@ -6337,8 +6379,18 @@ gb_internal bool parse_file(Parser *p, AstFile *f) {

String filepath = f->tokenizer.fullpath;
String base_dir = dir_from_path(filepath);
if (f->curr_token.kind == Token_Comment) {
consume_comment_groups(f, f->prev_token);

Array<Token> tags = array_make<Token>(ast_allocator(f));

while (f->curr_token.kind != Token_package) {
if (f->curr_token.kind == Token_Comment) {
consume_comment_groups(f, f->prev_token);
} else if (f->curr_token.kind == Token_FileTag) {
array_add(&tags, f->curr_token);
advance_token(f);
} else {
syntax_error(f->curr_token, "There can only be lines starting with #+ or // before package line");
}
}

CommentGroup *docs = f->lead_comment;
@@ -6379,55 +6431,39 @@ gb_internal bool parse_file(Parser *p, AstFile *f) {
}
f->package_name = package_name.string;

if (!f->pkg->is_single_file && docs != nullptr && docs->list.count > 0) {
for (Token const &tok : docs->list) {
GB_ASSERT(tok.kind == Token_Comment);
String str = tok.string;
if (string_starts_with(str, str_lit("//"))) {
String lc = string_trim_whitespace(substring(str, 2, str.len));
if (lc.len > 0 && lc[0] == '+') {
if (string_starts_with(lc, str_lit("+build-project-name"))) {
if (!parse_build_project_directory_tag(tok, lc)) {
return false;
}
} else if (string_starts_with(lc, str_lit("+build"))) {
if (!parse_build_tag(tok, lc)) {
if (!f->pkg->is_single_file) {
if (docs != nullptr && docs->list.count > 0) {
for (Token const &tok : docs->list) {
GB_ASSERT(tok.kind == Token_Comment);
String str = tok.string;
if (string_starts_with(str, str_lit("//"))) {
String lc = string_trim_whitespace(substring(str, 2, str.len));
if (string_starts_with(lc, str_lit("+"))) {
syntax_warning(tok, "//+ is deprecated: Use #+ instead");
String lt = substring(lc, 1, lc.len);
if (process_file_tag(lt, tok, f) == false) {
return false;
}
} else if (string_starts_with(lc, str_lit("+vet"))) {
f->vet_flags = parse_vet_tag(tok, lc);
f->vet_flags_set = true;
} else if (string_starts_with(lc, str_lit("+ignore"))) {
return false;
} else if (string_starts_with(lc, str_lit("+private"))) {
f->flags |= AstFile_IsPrivatePkg;
String command = string_trim_starts_with(lc, str_lit("+private "));
command = string_trim_whitespace(command);
if (lc == "+private") {
f->flags |= AstFile_IsPrivatePkg;
} else if (command == "package") {
f->flags |= AstFile_IsPrivatePkg;
} else if (command == "file") {
f->flags |= AstFile_IsPrivateFile;
}
} else if (lc == "+lazy") {
if (build_context.ignore_lazy) {
// Ignore
} else if (f->pkg->kind == Package_Init && build_context.command_kind == Command_doc) {
// Ignore
} else {
f->flags |= AstFile_IsLazy;
}
} else if (lc == "+no-instrumentation") {
f->flags |= AstFile_NoInstrumentation;
} else {
warning(tok, "Ignoring unknown tag '%.*s'", LIT(lc));
}
}
}
}

for (Token const &tok : tags) {
GB_ASSERT(tok.kind == Token_FileTag);
String str = tok.string;

if (string_starts_with(str, str_lit("#+"))) {
String lt = string_trim_whitespace(substring(str, 2, str.len));
if (process_file_tag(lt, tok, f) == false) {
return false;
}
}
}
}

array_free(&tags);

Ast *pd = ast_package_decl(f, f->package_token, package_name, docs, f->line_comment);
expect_semicolon(f);
f->pkg_decl = pd;
11 changes: 11 additions & 0 deletions src/tokenizer.cpp
@@ -2,6 +2,7 @@
TOKEN_KIND(Token_Invalid, "Invalid"), \
TOKEN_KIND(Token_EOF, "EOF"), \
TOKEN_KIND(Token_Comment, "Comment"), \
TOKEN_KIND(Token_FileTag, "FileTag"), \
\
TOKEN_KIND(Token__LiteralBegin, ""), \
TOKEN_KIND(Token_Ident, "identifier"), \
@@ -308,6 +309,7 @@ struct Tokenizer {
i32 error_count;

bool insert_semicolon;
bool package_line_seen;

LoadedFile loaded_file;
};
@@ -712,6 +714,10 @@ gb_internal void tokenizer_get_token(Tokenizer *t, Token *token, int repeat=0) {
if (token->kind == Token_not_in && entry->text.len == 5) {
syntax_error(*token, "Did you mean 'not_in'?");
}

if (token->kind == Token_package) {
t->package_line_seen = true;
}
}
}
}
@@ -939,6 +945,11 @@ gb_internal void tokenizer_get_token(Tokenizer *t, Token *token, int repeat=0) {
if (t->curr_rune == '!') {
token->kind = Token_Comment;
tokenizer_skip_line(t);
} else if (t->curr_rune == '+') {
if (!t->package_line_seen) {
token->kind = Token_FileTag;
tokenizer_skip_line(t);
}
}
break;
case '/':

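For reference, a rough summary of the new diagnostics, using the message strings from the parser changes above (the example tag and declaration names are made up, and real compiler output also includes position information not shown here):

    //+lazy        ->  warning: //+ is deprecated: Use #+ instead
    #+frobnicate   ->  error: Unknown tag 'frobnicate'
    x := 1         ->  error: There can only be lines starting with #+ or // before package line

All three apply to lines before the package line; once the package line has been seen, the tokenizer no longer consumes #+ lines as file-tag tokens (that is what the new package_line_seen flag guards).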