From 4f65d66590b64e1e685b2a48ccaeeeb1b36246d3 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Sun, 12 Mar 2023 17:31:17 +0100 Subject: [PATCH 1/2] refactor semantic tokens --- src/analysis.zig | 10 ++ src/semantic_tokens.zig | 245 +++++++++++++++++++--------------------- src/zls.zig | 1 + 3 files changed, 126 insertions(+), 130 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index c4d9fb159..3084019bb 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -614,6 +614,16 @@ fn allDigits(str: []const u8) bool { return true; } +pub fn isValueIdent(text: []const u8) bool { + const PrimitiveTypes = std.ComptimeStringMap(void, .{ + .{"true"}, + .{"false"}, + .{"null"}, + .{"undefined"}, + }); + return PrimitiveTypes.has(text); +} + pub fn isTypeIdent(text: []const u8) bool { const PrimitiveTypes = std.ComptimeStringMap(void, .{ .{"isize"}, .{"usize"}, diff --git a/src/semantic_tokens.zig b/src/semantic_tokens.zig index 0d65e0002..84c0c6a3c 100644 --- a/src/semantic_tokens.zig +++ b/src/semantic_tokens.zig @@ -25,7 +25,7 @@ pub const TokenType = enum(u32) { keywordLiteral, }; -pub const TokenModifiers = packed struct { +pub const TokenModifiers = packed struct(u16) { namespace: bool = false, @"struct": bool = false, @"enum": bool = false, @@ -35,73 +35,49 @@ pub const TokenModifiers = packed struct { @"async": bool = false, documentation: bool = false, generic: bool = false, - - fn toInt(self: TokenModifiers) u32 { - var res: u32 = 0; - inline for (std.meta.fields(TokenModifiers), 0..) |field, i| { - if (@field(self, field.name)) { - res |= 1 << i; - } - } - return res; - } - - inline fn set(self: *TokenModifiers, comptime field: []const u8) void { - @field(self, field) = true; - } + _: u7 = 0, }; const Builder = struct { arena: std.mem.Allocator, store: *DocumentStore, handle: *const DocumentStore.Handle, - previous_position: usize = 0, + previous_source_index: usize = 0, previous_token: ?Ast.TokenIndex = null, - arr: std.ArrayListUnmanaged(u32), + token_buffer: std.ArrayListUnmanaged(u32) = .{}, encoding: offsets.Encoding, - fn init(arena: std.mem.Allocator, store: *DocumentStore, handle: *const DocumentStore.Handle, encoding: offsets.Encoding) Builder { - return Builder{ - .arena = arena, - .store = store, - .handle = handle, - .arr = std.ArrayListUnmanaged(u32){}, - .encoding = encoding, - }; - } - fn add(self: *Builder, token: Ast.TokenIndex, token_type: TokenType, token_modifiers: TokenModifiers) !void { const tree = self.handle.tree; const starts = tree.tokens.items(.start); - const next_start = starts[token]; - if (next_start < self.previous_position) return; + if (starts[token] < self.previous_source_index) return; if (self.previous_token) |prev| { // Highlight gaps between AST nodes. These can contain comments or malformed code. 
var i = prev + 1; while (i < token) : (i += 1) { try handleComments(self, starts[i - 1], starts[i]); - try handleToken(self, i); } } self.previous_token = token; - try self.handleComments(if (token > 0) starts[token - 1] else 0, next_start); + try self.handleComments(starts[token -| 1], starts[token]); const length = offsets.tokenLength(tree, token, self.encoding); - try self.addDirect(token_type, token_modifiers, next_start, length); + try self.addDirect(token_type, token_modifiers, starts[token], length); } - fn finish(self: *Builder) !void { + fn finish(self: *Builder) error{OutOfMemory}!types.SemanticTokens { const starts = self.handle.tree.tokens.items(.start); const last_token = self.previous_token orelse 0; var i = last_token + 1; while (i < starts.len) : (i += 1) { try handleComments(self, starts[i - 1], starts[i]); - try handleToken(self, i); } try self.handleComments(starts[starts.len - 1], self.handle.tree.source.len); + + return .{ .data = try self.token_buffer.toOwnedSlice(self.arena) }; } /// Highlight a token without semantic context. @@ -181,23 +157,19 @@ const Builder = struct { } fn addDirect(self: *Builder, tok_type: TokenType, tok_mod: TokenModifiers, start: usize, length: usize) !void { - if (start < self.previous_position) return; + if (start < self.previous_source_index) return; - const text = self.handle.tree.source[self.previous_position..start]; + const text = self.handle.tree.source[self.previous_source_index..start]; const delta = offsets.indexToPosition(text, text.len, self.encoding); - try self.arr.appendSlice(self.arena, &.{ + try self.token_buffer.appendSlice(self.arena, &.{ @truncate(u32, delta.line), @truncate(u32, delta.character), @truncate(u32, length), @enumToInt(tok_type), - tok_mod.toInt(), + @bitCast(u16, tok_mod), }); - self.previous_position = start; - } - - fn toOwnedSlice(self: *Builder) error{OutOfMemory}![]u32 { - return self.arr.toOwnedSlice(self.arena); + self.previous_source_index = start; } }; @@ -217,10 +189,7 @@ fn writeDocComments(builder: *Builder, tree: Ast, doc: Ast.TokenIndex) !void { while (token_tags[tok_idx] == .doc_comment or token_tags[tok_idx] == .container_doc_comment) : (tok_idx += 1) { - var tok_mod = TokenModifiers{}; - tok_mod.set("documentation"); - - try builder.add(tok_idx, .comment, tok_mod); + try builder.add(tok_idx, .comment, .{ .documentation = true }); } } @@ -238,15 +207,15 @@ fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHan if (type_node.type.is_type_val) { var new_tok_mod = tok_mod; if (type_node.isNamespace()) - new_tok_mod.set("namespace") + new_tok_mod.namespace = true else if (type_node.isStructType()) - new_tok_mod.set("struct") + new_tok_mod.@"struct" = true else if (type_node.isEnumType()) - new_tok_mod.set("enum") + new_tok_mod.@"enum" = true else if (type_node.isUnionType()) - new_tok_mod.set("union") + new_tok_mod.@"union" = true else if (type_node.isOpaqueType()) - new_tok_mod.set("opaque"); + new_tok_mod.@"opaque" = true; try writeTokenMod(builder, target_tok, .type, new_tok_mod); } else if (type_node.isTypeFunc()) { @@ -254,7 +223,7 @@ fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHan } else if (type_node.isFunc()) { var new_tok_mod = tok_mod; if (type_node.isGenericFunc()) { - new_tok_mod.set("generic"); + new_tok_mod.generic = true; } try writeTokenMod(builder, target_tok, .function, new_tok_mod); } else { @@ -263,7 +232,7 @@ fn colorIdentifierBasedOnType(builder: *Builder, type_node: analysis.TypeWithHan } /// HACK self-hosted has 
not implemented async yet -fn callWriteNodeTokens(allocator: std.mem.Allocator, args: anytype) error{OutOfMemory}!void { +inline fn callWriteNodeTokens(allocator: std.mem.Allocator, args: anytype) error{OutOfMemory}!void { if (zig_builtin.zig_backend == .other or zig_builtin.zig_backend == .stage1) { const FrameSize = @sizeOf(@Frame(writeNodeTokens)); var child_frame = try allocator.alignedAlloc(u8, std.Target.stack_align, FrameSize); @@ -276,9 +245,7 @@ fn callWriteNodeTokens(allocator: std.mem.Allocator, args: anytype) error{OutOfM } } -fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMemory}!void { - const node = maybe_node orelse return; - +fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!void { const handle = builder.handle; const tree = handle.tree; const node_tags = tree.nodes.items(.tag); @@ -302,10 +269,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try writeToken(builder, main_token, .keyword); if (node_data[node].lhs != 0) { - const payload_tok = node_data[node].lhs; - try writeToken(builder, payload_tok - 1, .operator); - try writeToken(builder, payload_tok, .variable); - try writeToken(builder, payload_tok + 1, .operator); + try writeTokenMod(builder, node_data[node].lhs, .variable, .{ .declaration = true }); } try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); @@ -316,7 +280,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe .block_two_semicolon, => { if (token_tags[main_token - 1] == .colon and token_tags[main_token - 2] == .identifier) { - try writeToken(builder, main_token - 2, .label); + try writeTokenMod(builder, main_token - 2, .label, .{ .declaration = true }); } var buffer: [2]Ast.Node.Index = undefined; @@ -350,14 +314,18 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe } else { try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true }); } - if (token_tags[var_decl.ast.mut_token + 2] == .equal) { - try writeToken(builder, var_decl.ast.mut_token + 2, .operator); - } try callWriteNodeTokens(allocator, .{ builder, var_decl.ast.type_node }); try callWriteNodeTokens(allocator, .{ builder, var_decl.ast.align_node }); try callWriteNodeTokens(allocator, .{ builder, var_decl.ast.section_node }); + if (var_decl.ast.init_node != 0) { + const equal_token = tree.firstToken(var_decl.ast.init_node) - 1; + if (token_tags[equal_token] == .equal) { + try writeToken(builder, equal_token, .operator); + } + } + try callWriteNodeTokens(allocator, .{ builder, var_decl.ast.init_node }); }, .@"usingnamespace" => { @@ -415,7 +383,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe } }, .error_value => { - if (node_data[node].lhs > 0) { + if (node_data[node].lhs != 0) { try writeToken(builder, node_data[node].lhs - 1, .keyword); } try writeToken(builder, node_data[node].rhs, .errorTag); @@ -423,7 +391,9 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe .identifier => { const name = offsets.nodeToSlice(tree, node); - if (std.mem.eql(u8, name, "undefined")) { + if (std.mem.eql(u8, name, "_")) { + return; + } else if (analysis.isValueIdent(name)) { return try writeToken(builder, main_token, .keywordLiteral); } else if (analysis.isTypeIdent(name)) { return try writeToken(builder, main_token, .type); @@ -446,7 +416,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe return try 
colorIdentifierBasedOnType(builder, decl_type, main_token, .{}); } } - return try writeToken(builder, main_token, .variable); + try writeTokenMod(builder, main_token, .variable, .{}); }, .fn_proto, .fn_proto_one, @@ -469,10 +439,10 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe else .function; - const tok_mod = if (analysis.isGenericFunction(tree, fn_proto)) - TokenModifiers{ .generic = true } - else - TokenModifiers{}; + const tok_mod = TokenModifiers{ + .declaration = true, + .generic = analysis.isGenericFunction(tree, fn_proto), + }; try writeTokenMod(builder, fn_proto.name_token, func_name_tok_type, tok_mod); @@ -497,11 +467,8 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); }, .anyframe_type => { - try writeToken(builder, main_token, .type); - if (node_data[node].rhs != 0) { - try writeToken(builder, node_data[node].lhs, .type); - try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); - } + try writeToken(builder, main_token, .keyword); + try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); }, .@"defer" => { try writeToken(builder, main_token, .keyword); @@ -537,10 +504,9 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe for (switch_case.ast.values) |item_node| try callWriteNodeTokens(allocator, .{ builder, item_node }); // check it it's 'else' if (switch_case.ast.values.len == 0) try writeToken(builder, switch_case.ast.arrow_token - 1, .keyword); - try writeToken(builder, switch_case.ast.arrow_token, .operator); if (switch_case.payload_token) |payload_token| { const actual_payload = payload_token + @boolToInt(token_tags[payload_token] == .asterisk); - try writeToken(builder, actual_payload, .variable); + try writeTokenMod(builder, actual_payload, .variable, .{ .declaration = true }); } try callWriteNodeTokens(allocator, .{ builder, switch_case.ast.target_expr }); }, @@ -554,15 +520,9 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try writeToken(builder, while_node.ast.while_token, .keyword); try callWriteNodeTokens(allocator, .{ builder, while_node.ast.cond_expr }); if (while_node.payload_token) |payload| { - try writeToken(builder, payload - 1, .operator); - try writeToken(builder, payload, .variable); - var r_pipe = payload + 1; - if (token_tags[r_pipe] == .comma) { - r_pipe += 1; - try writeToken(builder, r_pipe, .variable); - r_pipe += 1; - } - try writeToken(builder, r_pipe, .operator); + const capture_is_ref = token_tags[payload] == .asterisk; + const name_token = payload + @boolToInt(capture_is_ref); + try writeTokenMod(builder, name_token, .variable, .{ .declaration = true }); } try callWriteNodeTokens(allocator, .{ builder, while_node.ast.cont_expr }); @@ -572,9 +532,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try writeToken(builder, while_node.else_token, .keyword); if (while_node.error_token) |err_token| { - try writeToken(builder, err_token - 1, .operator); - try writeToken(builder, err_token, .variable); - try writeToken(builder, err_token + 1, .operator); + try writeTokenMod(builder, err_token, .variable, .{ .declaration = true }); } try callWriteNodeTokens(allocator, .{ builder, while_node.ast.else_expr }); } @@ -597,13 +555,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe const name_token = capture_token + @boolToInt(capture_is_ref); capture_token 
= name_token + 2; - if (capture_is_ref) { - try writeToken(builder, capture_token, .operator); - } - try writeToken(builder, name_token, .variable); - if (token_tags[name_token + 1] == .pipe) { - try writeToken(builder, name_token + 1, .operator); - } + try writeTokenMod(builder, name_token, .variable, .{ .declaration = true }); } try callWriteNodeTokens(allocator, .{ builder, for_node.ast.then_expr }); @@ -621,20 +573,14 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try callWriteNodeTokens(allocator, .{ builder, if_node.ast.cond_expr }); if (if_node.payload_token) |payload| { - // if (?x) |x| - try writeToken(builder, payload - 1, .operator); // | - try writeToken(builder, payload, .variable); // x - try writeToken(builder, payload + 1, .operator); // | + try writeTokenMod(builder, payload, .variable, .{ .declaration = true }); } try callWriteNodeTokens(allocator, .{ builder, if_node.ast.then_expr }); if (if_node.ast.else_expr != 0) { try writeToken(builder, if_node.else_token, .keyword); if (if_node.error_token) |err_token| { - // else |err| - try writeToken(builder, err_token - 1, .operator); // | - try writeToken(builder, err_token, .variable); // err - try writeToken(builder, err_token + 1, .operator); // | + try writeTokenMod(builder, err_token, .variable, .{ .declaration = true }); } try callWriteNodeTokens(allocator, .{ builder, if_node.ast.else_expr }); } @@ -722,15 +668,9 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try callWriteNodeTokens(allocator, .{ builder, slice.ast.sliced }); try callWriteNodeTokens(allocator, .{ builder, slice.ast.start }); - try writeToken(builder, ast.lastToken(tree, slice.ast.start) + 1, .operator); - try callWriteNodeTokens(allocator, .{ builder, slice.ast.end }); try callWriteNodeTokens(allocator, .{ builder, slice.ast.sentinel }); }, - .array_access => { - try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs }); - try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); - }, .deref => { try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs }); try writeToken(builder, main_token, .operator); @@ -758,7 +698,6 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try writeToken(builder, main_token, .number); }, .enum_literal => { - try writeToken(builder, main_token - 1, .enumMember); try writeToken(builder, main_token, .enumMember); }, .builtin_call, @@ -788,8 +727,6 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try writeToken(builder, main_token, .keywordLiteral); }, .@"asm", - .asm_output, - .asm_input, .asm_simple, => { const asm_node: Ast.full.Asm = tree.fullAsm(node).?; @@ -797,15 +734,56 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try writeToken(builder, main_token, .keyword); try writeToken(builder, asm_node.volatile_token, .keyword); try callWriteNodeTokens(allocator, .{ builder, asm_node.ast.template }); - // TODO Inputs, outputs. 
+ + for (asm_node.outputs) |output_node| { + try writeToken(builder, main_tokens[output_node], .variable); + try writeToken(builder, main_tokens[output_node] + 2, .string); + const has_arrow = token_tags[main_tokens[output_node] + 4] == .arrow; + if (has_arrow) { + try callWriteNodeTokens(allocator, .{ builder, node_data[output_node].lhs }); + } else { + try writeToken(builder, main_tokens[output_node] + 4, .variable); + } + } + + for (asm_node.inputs) |input_node| { + try writeToken(builder, main_tokens[input_node], .variable); + try writeToken(builder, main_tokens[input_node] + 2, .string); + try callWriteNodeTokens(allocator, .{ builder, node_data[input_node].lhs }); + } + + if (asm_node.first_clobber) |first_clobber| clobbers: { + var tok_i = first_clobber; + while (true) : (tok_i += 1) { + try writeToken(builder, tok_i, .string); + tok_i += 1; + switch (token_tags[tok_i]) { + .r_paren => break :clobbers, + .comma => { + if (token_tags[tok_i + 1] == .r_paren) { + break :clobbers; + } else { + continue; + } + }, + else => break :clobbers, + } + } + } }, + .asm_output, + .asm_input, + => unreachable, .test_decl => { if (analysis.getDocCommentTokenIndex(token_tags, main_token)) |doc| try writeDocComments(builder, tree, doc); try writeToken(builder, main_token, .keyword); - if (token_tags[main_token + 1] == .string_literal) - try writeToken(builder, main_token + 1, .string); + switch (token_tags[node_data[node].lhs]) { + .string_literal => try writeToken(builder, node_data[node].lhs, .string), + .identifier => try writeToken(builder, node_data[node].lhs, .variable), + else => {}, + } try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); }, @@ -813,9 +791,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs }); try writeToken(builder, main_token, .keyword); if (token_tags[main_token + 1] == .pipe) { - try writeTokenMod(builder, main_token + 2, .variable, .{ - .declaration = true, - }); + try writeTokenMod(builder, main_token + 2, .variable, .{ .declaration = true }); } try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); }, @@ -853,7 +829,6 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe .bool_or, .div, .equal_equal, - .error_union, .greater_or_equal, .greater_than, .less_or_equal, @@ -863,12 +838,10 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe .mul, .mul_wrap, .mul_sat, - .switch_range, .sub, .sub_wrap, .sub_sat, .@"orelse", - .for_range, => { try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs }); const token_type: TokenType = switch (tag) { @@ -879,6 +852,14 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try writeToken(builder, main_token, token_type); try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); }, + .array_access, + .error_union, + .switch_range, + .for_range, + => { + try callWriteNodeTokens(allocator, .{ builder, node_data[node].lhs }); + try callWriteNodeTokens(allocator, .{ builder, node_data[node].rhs }); + }, .field_access => { const data = node_data[node]; if (data.rhs == 0) return; @@ -887,7 +868,7 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe // TODO This is basically exactly the same as what is done in analysis.resolveTypeOfNode, with the added // writeToken code. - // Maybe we can hook into it insead? 
Also applies to Identifier and VarDecl + // Maybe we can hook into it instead? Also applies to Identifier and VarDecl var bound_type_params = analysis.BoundTypeParams{}; defer bound_type_params.deinit(builder.store.allocator); @@ -980,7 +961,6 @@ fn writeNodeTokens(builder: *Builder, maybe_node: ?Ast.Node.Index) error{OutOfMe try callWriteNodeTokens(allocator, .{ builder, array_type.ast.elem_count }); try callWriteNodeTokens(allocator, .{ builder, array_type.ast.sentinel }); - try callWriteNodeTokens(allocator, .{ builder, array_type.ast.elem_type }); }, .address_of, @@ -1050,8 +1030,13 @@ pub fn writeSemanticTokens( handle: *const DocumentStore.Handle, loc: ?offsets.Loc, encoding: offsets.Encoding, -) !types.SemanticTokens { - var builder = Builder.init(arena, store, handle, encoding); +) error{OutOfMemory}!types.SemanticTokens { + var builder = Builder{ + .arena = arena, + .store = store, + .handle = handle, + .encoding = encoding, + }; const nodes = if (loc) |l| try ast.nodesAtLoc(arena, handle.tree, l) else handle.tree.rootDecls(); @@ -1059,6 +1044,6 @@ pub fn writeSemanticTokens( for (nodes) |child| { try writeNodeTokens(&builder, child); } - try builder.finish(); - return .{ .data = try builder.toOwnedSlice() }; + + return try builder.finish(); } diff --git a/src/zls.zig b/src/zls.zig index c1b1970b8..dee0b2aeb 100644 --- a/src/zls.zig +++ b/src/zls.zig @@ -17,6 +17,7 @@ pub const diff = @import("diff.zig"); pub const analyser = @import("analyser/analyser.zig"); pub const configuration = @import("configuration.zig"); pub const references = @import("references.zig"); +pub const semantic_tokens = @import("semantic_tokens.zig"); pub const ZigVersionWrapper = @import("ZigVersionWrapper.zig"); From a52db981bbfa1c7e481eb4672a2f05089a317efa Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Sun, 12 Mar 2023 17:31:55 +0100 Subject: [PATCH 2/2] expand semantic token test coverage --- tests/lsp_features/semantic_tokens.zig | 1027 +++++++++++++++++++++++- 1 file changed, 992 insertions(+), 35 deletions(-) diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index 149c2754c..84a3ec956 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -3,8 +3,10 @@ const zls = @import("zls"); const builtin = @import("builtin"); const Context = @import("../context.zig").Context; +const ErrorBuilder = @import("../ErrorBuilder.zig"); const types = zls.types; +const offsets = zls.offsets; const allocator: std.mem.Allocator = std.testing.allocator; @@ -12,57 +14,1012 @@ test "semantic tokens - empty" { try testSemanticTokens("", &.{}); } -test "semantic tokens" { +test "semantic tokens - comment" { + if (true) return error.SkipZigTest; // TODO + try testSemanticTokens( + \\// hello world + , &.{ + .{ "// hello world", .comment, .{} }, + }); + try testSemanticTokens( + \\//! hello world + \\ + , &.{ + .{ "//! 
hello world", .comment, .{ .documentation = true } }, + }); + try testSemanticTokens( + \\/// hello world + \\const a; + , &.{ + .{ "/// hello world", .comment, .{ .documentation = true } }, + .{ "const", .keyword, .{} }, + .{ "a", .variable, .{ .declaration = true } }, + }); +} + +test "semantic tokens - string literals" { + try testSemanticTokens( + \\const alpha = ""; + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "\"\"", .string, .{} }, + }); + try testSemanticTokens( + \\const beta = "hello"; + , &.{ + .{ "const", .keyword, .{} }, + .{ "beta", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "\"hello\"", .string, .{} }, + }); + try testSemanticTokens( + \\const gamma = + \\ \\hello + \\ \\world + \\ \\ + \\; + , &.{ + .{ "const", .keyword, .{} }, + .{ "gamma", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + // TODO remove the newline + .{ "\\\\hello\n", .string, .{} }, + .{ "\\\\world\n", .string, .{} }, + .{ "\\\\\n", .string, .{} }, + }); +} + +test "semantic tokens - char literals" { + try testSemanticTokens( + \\var alpha = ' '; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "' '", .string, .{} }, + }); +} + +test "semantic tokens - var decl" { + try testSemanticTokens( + \\var alpha = 3; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "3", .number, .{} }, + }); + try testSemanticTokens( + \\threadlocal var alpha = 3; + , &.{ + .{ "threadlocal", .keyword, .{} }, + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "3", .number, .{} }, + }); + try testSemanticTokens( + \\extern var alpha: u32; + , &.{ + .{ "extern", .keyword, .{} }, + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "u32", .type, .{} }, + }); + try testSemanticTokens( + \\pub extern var alpha = 3; + , &.{ + .{ "pub", .keyword, .{} }, + .{ "extern", .keyword, .{} }, + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "3", .number, .{} }, + }); + try testSemanticTokens( + \\var alpha; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + }); +} + +test "semantic tokens - local var decl" { + try testSemanticTokens( + \\const alpha = { + \\ comptime var beta: u32 = 3; + \\}; + \\ + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "comptime", .keyword, .{} }, + .{ "var", .keyword, .{} }, + .{ "beta", .variable, .{ .declaration = true } }, + .{ "u32", .type, .{} }, + .{ "=", .operator, .{} }, + .{ "3", .number, .{} }, + }); +} + +test "semantic tokens - escaped identifier" { + try testSemanticTokens( + \\var @"@" = 3; + , &.{ + .{ "var", .keyword, .{} }, + .{ "@\"@\"", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "3", .number, .{} }, + }); +} + +test "semantic tokens - operators" { + try testSemanticTokens( + \\var alpha = 3 + 3; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "3", .number, .{} }, + .{ "+", .operator, .{} }, + .{ "3", .number, .{} }, + }); + try testSemanticTokens( + \\var alpha = 3 orelse 3; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration 
= true } }, + .{ "=", .operator, .{} }, + .{ "3", .number, .{} }, + .{ "orelse", .keyword, .{} }, + .{ "3", .number, .{} }, + }); + try testSemanticTokens( + \\var alpha = true and false; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "true", .keywordLiteral, .{} }, + .{ "and", .keyword, .{} }, + .{ "false", .keywordLiteral, .{} }, + }); +} + +test "semantic tokens - field access" { + // this will make sure that the std module can be resolved try testSemanticTokens( \\const std = @import("std"); - , - &.{ 0, 0, 5, 7, 0, 0, 6, 3, 0, 33, 0, 4, 1, 11, 0, 0, 2, 7, 12, 0, 0, 8, 5, 9, 0 }, - ); + , &.{ + .{ "const", .keyword, .{} }, + .{ "std", .type, .{ .namespace = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "@import", .builtin, .{} }, + .{ "\"std\"", .string, .{} }, + }); + try testSemanticTokens( + \\const std = @import("std"); + \\const Ast = std.zig.Ast; + , &.{ + .{ "const", .keyword, .{} }, + .{ "std", .type, .{ .namespace = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "@import", .builtin, .{} }, + .{ "\"std\"", .string, .{} }, - // TODO more tests + .{ "const", .keyword, .{} }, + .{ "Ast", .type, .{ .@"struct" = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "std", .type, .{ .namespace = true } }, + .{ "zig", .type, .{ .namespace = true } }, + .{ "Ast", .type, .{ .@"struct" = true } }, + }); } -test "semantic tokens - comments" { +test "semantic tokens - catch" { try testSemanticTokens( - \\//!─ - , - &.{ 0, 0, 4, 8, 128 }, - ); + \\var alpha = a catch b; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "a", .variable, .{} }, + .{ "catch", .keyword, .{} }, + .{ "b", .variable, .{} }, + }); + try testSemanticTokens( + \\var alpha = a catch |err| b; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "a", .variable, .{} }, + .{ "catch", .keyword, .{} }, + .{ "err", .variable, .{ .declaration = true } }, + .{ "b", .variable, .{} }, + }); +} - // TODO more tests +test "semantic tokens - slicing" { + try testSemanticTokens( + \\var alpha = a[0..1]; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "a", .variable, .{} }, + .{ "0", .number, .{} }, + .{ "1", .number, .{} }, + }); + try testSemanticTokens( + \\var alpha = a[0..1: 2]; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "a", .variable, .{} }, + .{ "0", .number, .{} }, + .{ "1", .number, .{} }, + .{ "2", .number, .{} }, + }); } -test "semantic tokens - string literals" { - // https://github.com/zigtools/zls/issues/921 - try testSemanticTokens( - \\" - \\"",// - \\"": - , - // no idea if this output is correct but at least it doesn't crash - &.{ 1, 3, 3, 8, 0, 1, 0, 2, 9, 0 }, - ); +test "semantic tokens - enum literal" { + try testSemanticTokens( + \\var alpha = .beta; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "beta", .enumMember, .{} }, + }); } -fn testSemanticTokens(source: []const u8, expected: []const u32) !void { - var ctx = try Context.init(); - defer ctx.deinit(); +test "semantic tokens - error literal" { + try testSemanticTokens( + \\var alpha = error.OutOfMemory; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ 
.declaration = true } }, + .{ "=", .operator, .{} }, + .{ "error", .keyword, .{} }, + .{ "OutOfMemory", .errorTag, .{} }, + }); +} + +test "semantic tokens - array literal" { + try testSemanticTokens( + \\var alpha = [_]u32{ 1, 2 }; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "u32", .type, .{} }, + .{ "1", .number, .{} }, + .{ "2", .number, .{} }, + }); + try testSemanticTokens( + \\var alpha = [_:3]u32{}; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "3", .number, .{} }, + .{ "u32", .type, .{} }, + }); +} + +test "semantic tokens - struct literal" { + try testSemanticTokens( + \\var alpha = .{}; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + }); + try testSemanticTokens( + \\var alpha = .{1,2}; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "1", .number, .{} }, + .{ "2", .number, .{} }, + }); + try testSemanticTokens( + \\var alpha = Unknown{1,2}; + , &.{ + .{ "var", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "Unknown", .variable, .{} }, + .{ "1", .number, .{} }, + .{ "2", .number, .{} }, + }); +} + +test "semantic tokens - optional types" { + try testSemanticTokens( + \\const alpha = ?u32; + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .type, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "?", .operator, .{} }, + .{ "u32", .type, .{} }, + }); +} + +test "semantic tokens - array types" { + try testSemanticTokens( + \\const alpha = [1]u32; + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .type, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "1", .number, .{} }, + .{ "u32", .type, .{} }, + }); + try testSemanticTokens( + \\const alpha = [1:0]u32; + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .type, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "1", .number, .{} }, + .{ "0", .number, .{} }, + .{ "u32", .type, .{} }, + }); +} + +test "semantic tokens - pointer types" { + try testSemanticTokens( + \\const alpha = *u32; + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .type, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "*", .operator, .{} }, + .{ "u32", .type, .{} }, + }); + try testSemanticTokens( + \\const alpha = *allowzero u32; + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .type, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "*", .operator, .{} }, + .{ "allowzero", .keyword, .{} }, + .{ "u32", .type, .{} }, + }); + try testSemanticTokens( + \\const alpha = [:0]const u32; + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .type, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "0", .number, .{} }, + .{ "const", .keyword, .{} }, + .{ "u32", .type, .{} }, + }); + try testSemanticTokens( + \\const alpha = *align(1:2:3) u32; + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .type, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "*", .operator, .{} }, + .{ "align", .keyword, .{} }, + .{ "1", .number, .{} }, + .{ "2", .number, .{} }, + .{ "3", .number, .{} }, + .{ "u32", .type, .{} }, + }); +} + +test "semantic tokens - anyframe type" { + try testSemanticTokens( + \\const alpha = anyframe->u32; + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .variable, .{ .declaration = true } }, // TODO this should be 
.type + .{ "=", .operator, .{} }, + .{ "anyframe", .keyword, .{} }, + .{ "u32", .type, .{} }, + }); +} + +test "semantic tokens - error union types" { + try testSemanticTokens( + \\const alpha = u32!u32; + , &.{ + .{ "const", .keyword, .{} }, + .{ "alpha", .type, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "u32", .type, .{} }, + .{ "u32", .type, .{} }, + }); +} + +test "semantic tokens - struct" { + try testSemanticTokens( + \\const Foo = struct {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "Foo", .type, .{ .namespace = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "struct", .keyword, .{} }, + }); + try testSemanticTokens( + \\const Foo = packed struct(u32) {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "Foo", .type, .{ .namespace = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "packed", .keyword, .{} }, + .{ "struct", .keyword, .{} }, + .{ "u32", .type, .{} }, + }); + try testSemanticTokens( + \\const Foo = struct { + \\ alpha: u32, + \\ beta: void, + \\}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "struct", .keyword, .{} }, + .{ "alpha", .field, .{} }, + .{ "u32", .type, .{} }, + .{ "beta", .field, .{} }, + .{ "void", .type, .{} }, + }); + try testSemanticTokens( + \\const Foo = struct { + \\ alpha: u32 = 3, + \\ comptime beta: void = {}, + \\}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "struct", .keyword, .{} }, + .{ "alpha", .field, .{} }, + .{ "u32", .type, .{} }, + .{ "=", .operator, .{} }, + .{ "3", .number, .{} }, + .{ "comptime", .keyword, .{} }, + .{ "beta", .field, .{} }, + .{ "void", .type, .{} }, + .{ "=", .operator, .{} }, + }); + try testSemanticTokens( + \\const T = u32; + \\const Foo = struct { + \\ u32, + \\ T align(4), + \\}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "T", .type, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "u32", .type, .{} }, + .{ "const", .keyword, .{} }, + .{ "Foo", .type, .{ .@"struct" = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "struct", .keyword, .{} }, + .{ "u32", .type, .{} }, + .{ "T", .type, .{} }, + .{ "align", .keyword, .{} }, + .{ "4", .number, .{} }, + }); +} + +test "semantic tokens - union" { + try testSemanticTokens( + \\const Foo = union {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "Foo", .type, .{ .@"union" = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "union", .keyword, .{} }, + }); + try testSemanticTokens( + \\const Foo = packed union(enum) {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "Foo", .type, .{ .@"union" = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "packed", .keyword, .{} }, + .{ "union", .keyword, .{} }, + .{ "enum", .keyword, .{} }, + }); + if (true) return error.SkipZigTest; // TODO + try testSemanticTokens( + \\const Foo = union(E) { + \\ alpha, + \\}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "Foo", .type, .{ .@"union" = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "union", .keyword, .{} }, + .{ "E", .variable, .{} }, + .{ "alpha", .field, .{} }, + }); + try testSemanticTokens( + \\const Foo = union(E) { + \\ alpha, + \\ beta: void, + \\}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "Foo", .type, .{ .@"union" = true, .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "union", .keyword, .{} }, + .{ "E", .variable, .{} }, + .{ "alpha", .field, .{} }, + .{ "beta", 
.field, .{} },
+ .{ "void", .keyword, .{} },
+ });
+ try testSemanticTokens(
+ \\const Foo = union(E) {
+ \\ alpha: void align(2),
+ \\};
+ , &.{
+ .{ "const", .keyword, .{} },
+ .{ "Foo", .type, .{ .@"union" = true, .declaration = true } },
+ .{ "=", .operator, .{} },
+ .{ "union", .keyword, .{} },
+ .{ "E", .variable, .{} },
+ .{ "alpha", .field, .{} },
+ .{ "void", .keyword, .{} },
+ .{ "align", .keyword, .{} },
+ .{ "2", .number, .{} },
+ });
+}
+
+test "semantic tokens - enum" {
+ if (true) return error.SkipZigTest; // TODO
+ try testSemanticTokens(
+ \\const Foo = enum {};
+ , &.{
+ .{ "const", .keyword, .{} },
+ .{ "Foo", .type, .{ .@"enum" = true, .declaration = true } },
+ .{ "=", .operator, .{} },
+ .{ "enum", .keyword, .{} },
+ });
+ try testSemanticTokens(
+ \\const Foo = enum {
+ \\ alpha,
+ \\ beta,
+ \\};
+ , &.{
+ .{ "const", .keyword, .{} },
+ .{ "Foo", .type, .{ .@"enum" = true, .declaration = true } },
+ .{ "=", .operator, .{} },
+ .{ "enum", .keyword, .{} },
+ .{ "alpha", .enumMember, .{} },
+ .{ "beta", .enumMember, .{} },
+ });
+ try testSemanticTokens(
+ \\const Foo = enum(u4) {};
+ , &.{
+ .{ "const", .keyword, .{} },
+ .{ "Foo", .type, .{ .@"enum" = true, .declaration = true } },
+ .{ "=", .operator, .{} },
+ .{ "enum", .keyword, .{} },
+ .{ "u4", .type, .{} },
+ });
+}
+
+test "semantic tokens - error set" {
+ try testSemanticTokens(
+ \\const Foo = error {};
+ , &.{
+ .{ "const", .keyword, .{} },
+ .{ "Foo", .type, .{ .declaration = true } },
+ .{ "=", .operator, .{} },
+ .{ "error", .keyword, .{} },
+ });
+ try testSemanticTokens(
+ \\const Foo = error {
+ \\ OutOfMemory,
+ \\};
+ , &.{
+ .{ "const", .keyword, .{} },
+ .{ "Foo", .type, .{ .declaration = true } },
+ .{ "=", .operator, .{} },
+ .{ "error", .keyword, .{} },
+ .{ "OutOfMemory", .errorTag, .{} },
+ });
+}
+
+test "semantic tokens - opaque" {
+ try testSemanticTokens(
+ \\const Foo = opaque {};
+ , &.{
+ .{ "const", .keyword, .{} },
+ .{ "Foo", .type, .{ .@"opaque" = true, .declaration = true } },
+ .{ "=", .operator, .{} },
+ .{ "opaque", .keyword, .{} },
+ });
+}
 
test "semantic tokens - function" {
+ try testSemanticTokens(
+ \\fn foo() void {}
+ , &.{
+ .{ "fn", .keyword, .{} },
+ .{ "foo", .function, .{ .declaration = true } },
+ .{ "void", .type, .{} },
+ });
+ try testSemanticTokens(
+ \\pub fn foo(alpha: u32) void {}
+ , &.{
+ .{ "pub", .keyword, .{} },
+ .{ "fn", .keyword, .{} },
+ .{ "foo", .function, .{ .declaration = true } },
+ .{ "alpha", .parameter, .{ .declaration = true } },
+ .{ "u32", .type, .{} },
+ .{ "void", .type, .{} },
+ });
+ try testSemanticTokens(
+ \\extern fn foo() align(4) callconv(.C) void;
+ , &.{
+ .{ "extern", .keyword, .{} },
+ .{ "fn", .keyword, .{} },
+ .{ "foo", .function, .{ .declaration = true } },
+ // TODO .{ "align", .keyword, .{} },
+ .{ "4", .number, .{} },
+ // TODO .{ "callconv", .keyword, .{} },
+ .{ "C", .enumMember, .{} },
+ .{ "void", .type, .{} },
+ });
+ try testSemanticTokens(
+ \\fn foo(comptime T: type) void {}
+ , &.{
+ .{ "fn", .keyword, .{} },
+ .{ "foo", .function, .{ .declaration = true, .generic = true } },
+ .{ "comptime", .keyword, .{} },
+ .{ "T", .parameter, .{ .declaration = true } },
+ .{ "type", .type, .{} },
+ .{ "void", .type, .{} },
+ });
+}
 
test "semantic tokens - builtin functions" {
 try testSemanticTokens(
+ \\const foo = @as(type, u32);
+ , &.{
+ .{ "const", .keyword, .{} },
+ .{ "foo", .variable, .{ .declaration = true }
}, + .{ "=", .operator, .{} }, + .{ "@as", .builtin, .{} }, + .{ "type", .type, .{} }, + .{ "u32", .type, .{} }, + }); +} - const expected_bytes = try std.json.stringifyAlloc(allocator, Response{ .data = expected }, .{}); - defer allocator.free(expected_bytes); +test "semantic tokens - block" { + try testSemanticTokens( + \\const foo = blk: {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "blk", .label, .{ .declaration = true } }, + }); + try testSemanticTokens( + \\const foo = blk: { + \\ break :blk 5; + \\}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "blk", .label, .{ .declaration = true } }, + .{ "break", .keyword, .{} }, + .{ "blk", .label, .{} }, + .{ "5", .number, .{} }, + }); +} - const params = try std.json.stringifyAlloc(allocator, .{ .textDocument = .{ .uri = file_uri } }, .{}); - defer allocator.free(params); +test "semantic tokens - if" { + try testSemanticTokens( + \\const foo = if (false) {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "if", .keyword, .{} }, + .{ "false", .keywordLiteral, .{} }, + }); + try testSemanticTokens( + \\const foo = if (false) 1 else 2; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "if", .keyword, .{} }, + .{ "false", .keywordLiteral, .{} }, + .{ "1", .number, .{} }, + .{ "else", .keyword, .{} }, + .{ "2", .number, .{} }, + }); + try testSemanticTokens( + \\const foo = if (false) |val| val else |err| err; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "if", .keyword, .{} }, + .{ "false", .keywordLiteral, .{} }, + .{ "val", .variable, .{ .declaration = true } }, + .{ "val", .variable, .{} }, + .{ "else", .keyword, .{} }, + .{ "err", .variable, .{ .declaration = true } }, + .{ "err", .variable, .{} }, + }); +} - try ctx.request( +test "semantic tokens - while" { + try testSemanticTokens( + \\const foo = while (false) {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "while", .keyword, .{} }, + .{ "false", .keywordLiteral, .{} }, + }); + try testSemanticTokens( + \\const foo = while (false) |*val| {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "while", .keyword, .{} }, + .{ "false", .keywordLiteral, .{} }, + .{ "val", .variable, .{ .declaration = true } }, + }); + try testSemanticTokens( + \\const foo = while (false) false else true; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "while", .keyword, .{} }, + .{ "false", .keywordLiteral, .{} }, + .{ "false", .keywordLiteral, .{} }, + .{ "else", .keyword, .{} }, + .{ "true", .keywordLiteral, .{} }, + }); +} + +test "semantic tokens - for" { + try testSemanticTokens( + \\const foo = for ("") {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "for", .keyword, .{} }, + .{ "\"\"", .string, .{} }, + }); + try testSemanticTokens( + \\const foo = for ("") |val| {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "for", .keyword, .{} }, + .{ "\"\"", 
.string, .{} }, + .{ "val", .variable, .{ .declaration = true } }, + }); +} + +test "semantic tokens - switch" { + try testSemanticTokens( + \\const foo = switch (3) {}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "switch", .keyword, .{} }, + .{ "3", .number, .{} }, + }); + try testSemanticTokens( + \\const foo = switch (3) { + \\ 0 => true, + \\ else => false, + \\}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "switch", .keyword, .{} }, + .{ "3", .number, .{} }, + .{ "0", .number, .{} }, + .{ "true", .keywordLiteral, .{} }, + .{ "else", .keyword, .{} }, + .{ "false", .keywordLiteral, .{} }, + }); + try testSemanticTokens( + \\const foo = switch (3) { + \\ inline else => |*val| val, + \\}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .variable, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "switch", .keyword, .{} }, + .{ "3", .number, .{} }, + .{ "inline", .keyword, .{} }, + .{ "else", .keyword, .{} }, + .{ "val", .variable, .{ .declaration = true } }, + .{ "val", .variable, .{} }, + }); +} + +test "semantic tokens - defer" { + try testSemanticTokens( + \\fn foo() void { + \\ defer {}; + \\} + , &.{ + .{ "fn", .keyword, .{} }, + .{ "foo", .function, .{ .declaration = true } }, + .{ "void", .type, .{} }, + .{ "defer", .keyword, .{} }, + }); +} + +test "semantic tokens - errdefer" { + try testSemanticTokens( + \\fn foo() void { + \\ errdefer {}; + \\} + , &.{ + .{ "fn", .keyword, .{} }, + .{ "foo", .function, .{ .declaration = true } }, + .{ "void", .type, .{} }, + .{ "errdefer", .keyword, .{} }, + }); + try testSemanticTokens( + \\fn foo() void { + \\ errdefer |err| {}; + \\} + , &.{ + .{ "fn", .keyword, .{} }, + .{ "foo", .function, .{ .declaration = true } }, + .{ "void", .type, .{} }, + .{ "errdefer", .keyword, .{} }, + .{ "err", .variable, .{ .declaration = true } }, + }); +} + +test "semantic tokens - test decl" { + try testSemanticTokens( + \\test "test inside a test" {} + , &.{ + .{ "test", .keyword, .{} }, + .{ "\"test inside a test\"", .string, .{} }, + }); + try testSemanticTokens( + \\test foo {} + , &.{ + .{ "test", .keyword, .{} }, + .{ "foo", .variable, .{} }, + }); +} + +test "semantic tokens - assembly" { + try testSemanticTokens( + \\fn syscall1(number: usize, arg1: usize) usize { + \\ return asm volatile ("syscall" + \\ : [ret] "={rax}" (-> usize), + \\ : [number] "{rax}" (number), + \\ [arg1] "{rdi}" (arg1), + \\ : "rcx", "r11" + \\ ); + \\} + , &.{ + .{ "fn", .keyword, .{} }, + .{ "syscall1", .function, .{ .declaration = true } }, + .{ "number", .parameter, .{ .declaration = true } }, + .{ "usize", .type, .{} }, + .{ "arg1", .parameter, .{ .declaration = true } }, + .{ "usize", .type, .{} }, + .{ "usize", .type, .{} }, + .{ "return", .keyword, .{} }, + .{ "asm", .keyword, .{} }, + .{ "volatile", .keyword, .{} }, + .{ "\"syscall\"", .string, .{} }, + .{ "ret", .variable, .{} }, + .{ "\"={rax}\"", .string, .{} }, + .{ "usize", .type, .{} }, + .{ "number", .variable, .{} }, + .{ "\"{rax}\"", .string, .{} }, + .{ "number", .parameter, .{} }, + .{ "arg1", .variable, .{} }, + .{ "\"{rdi}\"", .string, .{} }, + .{ "arg1", .parameter, .{} }, + .{ "\"rcx\"", .string, .{} }, + .{ "\"r11\"", .string, .{} }, + }); +} + +const TokenData = struct { + []const u8, + zls.semantic_tokens.TokenType, + zls.semantic_tokens.TokenModifiers, +}; + +fn testSemanticTokens(source: [:0]const u8, expected_tokens: []const 
TokenData) !void { + var ctx = try Context.init(); + defer ctx.deinit(); + + const uri = try ctx.addDocument(source); + + const response = try ctx.requestGetResponse( + types.SemanticTokens, "textDocument/semanticTokens/full", - params, - expected_bytes, + types.SemanticTokensParams{ .textDocument = .{ .uri = uri } }, ); + + const actual = response.result.data; + try std.testing.expect(actual.len % 5 == 0); // every token is represented by 5 integers + + var error_builder = ErrorBuilder.init(allocator, source); + defer error_builder.deinit(); + errdefer error_builder.writeDebug(); + + var token_it = std.mem.window(u32, actual, 5, 5); + var position: types.Position = .{ .line = 0, .character = 0 }; + var last_token_end: usize = 0; + + for (expected_tokens) |expected_token| { + const token_data = token_it.next() orelse { + try error_builder.msgAtIndex("expected a `{s}` token here", last_token_end, .err, .{expected_token.@"0"}); + return error.ExpectedToken; + }; + + const delta_line = token_data[0]; + const delta_start = token_data[1]; + const length = token_data[2]; + const token_type = @intToEnum(zls.semantic_tokens.TokenType, token_data[3]); + const token_modifiers = @bitCast(zls.semantic_tokens.TokenModifiers, @intCast(u16, token_data[4])); + + position.line += delta_line; + position.character = delta_start + if (delta_line == 0) position.character else 0; + + const source_index = offsets.positionToIndex(source, position, .@"utf-8"); + const token_loc: offsets.Loc = .{ + .start = source_index, + .end = source_index + length, + }; + last_token_end = token_loc.end; + + const token_source = offsets.locToSlice(source, token_loc); + + const expected_token_source = expected_token.@"0"; + const expected_token_type = expected_token.@"1"; + const expected_token_modifiers = expected_token.@"2"; + + if (!std.mem.eql(u8, expected_token_source, token_source)) { + try error_builder.msgAtLoc("expected `{s}` as the next token but got `{s}` here", token_loc, .err, .{ expected_token_source, token_source }); + return error.UnexpectedTokenContent; + } else if (expected_token_type != token_type) { + try error_builder.msgAtLoc("expected token type `{s}` but got `{s}`", token_loc, .err, .{ @tagName(expected_token_type), @tagName(token_type) }); + return error.UnexpectedTokenType; + } else if (!std.meta.eql(expected_token_modifiers, token_modifiers)) { + try error_builder.msgAtLoc("expected token modifiers `{}` but got `{}`", token_loc, .err, .{ expected_token_modifiers, token_modifiers }); + return error.UnexpectedTokenModifiers; + } + } }