From 8e7673630f8fca4c62e3a0927d545719fd34f65c Mon Sep 17 00:00:00 2001 From: Radek Simko Date: Tue, 15 Mar 2022 19:51:04 +0000 Subject: [PATCH 1/3] internal/lsp: Provide (opt-in) custom semantic tokens & modifier --- go.mod | 4 +- go.sum | 11 +- internal/lsp/semantic_tokens.go | 56 ++++++++ internal/lsp/semtok/lsp_token_modifiers.go | 15 ++ internal/lsp/semtok/lsp_token_types.go | 27 ++++ internal/lsp/semtok/token_modifier.go | 37 +++++ internal/lsp/semtok/token_types.go | 23 +++ internal/lsp/token_encoder.go | 147 ++++++++++++-------- internal/lsp/token_encoder_test.go | 72 +++++----- internal/lsp/token_types.go | 154 --------------------- 10 files changed, 288 insertions(+), 258 deletions(-) create mode 100644 internal/lsp/semtok/lsp_token_modifiers.go create mode 100644 internal/lsp/semtok/lsp_token_types.go create mode 100644 internal/lsp/semtok/token_modifier.go create mode 100644 internal/lsp/semtok/token_types.go delete mode 100644 internal/lsp/token_types.go diff --git a/go.mod b/go.mod index 6f98b7df9..bb2c4729a 100644 --- a/go.mod +++ b/go.mod @@ -14,12 +14,12 @@ require ( github.com/hashicorp/go-uuid v1.0.2 github.com/hashicorp/go-version v1.4.0 github.com/hashicorp/hc-install v0.3.1 - github.com/hashicorp/hcl-lang v0.0.0-20220314150337-d770b425fb22 + github.com/hashicorp/hcl-lang v0.0.0-20220316204834-49ffde67ce68 github.com/hashicorp/hcl/v2 v2.11.1 github.com/hashicorp/terraform-exec v0.16.0 github.com/hashicorp/terraform-json v0.13.0 github.com/hashicorp/terraform-registry-address v0.0.0-20210816115301-cb2034eba045 - github.com/hashicorp/terraform-schema v0.0.0-20220225085753-faadc57bd40a + github.com/hashicorp/terraform-schema v0.0.0-20220316204916-c6585b866d6d github.com/kylelemons/godebug v1.1.0 // indirect github.com/mh-cbon/go-fmt-fail v0.0.0-20160815164508-67765b3fbcb5 github.com/mitchellh/cli v1.1.2 diff --git a/go.sum b/go.sum index b83512873..4db2408a5 100644 --- a/go.sum +++ b/go.sum @@ -295,10 +295,8 @@ github.com/hashicorp/hc-install v0.3.1 h1:VIjllE6KyAI1A244G8kTaHXy+TL5/XYzvrtFi8 github.com/hashicorp/hc-install v0.3.1/go.mod h1:3LCdWcCDS1gaHC9mhHCGbkYfoY6vdsKohGjugbZdZak= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/hcl-lang v0.0.0-20211118124824-da3a292c5d7a/go.mod h1:0W3+VP07azoS+fCX5hWk1KxwHnqf1s9J7oBg2cFXm1c= -github.com/hashicorp/hcl-lang v0.0.0-20220314150337-d770b425fb22 h1:u++Zu5hfJPSNuHh7cV1QfTItINnEGRMdOvT9KjaDQUQ= -github.com/hashicorp/hcl-lang v0.0.0-20220314150337-d770b425fb22/go.mod h1:vyszbX6YNHCKIaVUhbh3LIZljxwYOtgWCIkhT5zKfjc= -github.com/hashicorp/hcl/v2 v2.10.1/go.mod h1:FwWsfWEjyV/CMj8s/gqAuiviY72rJ1/oayI9WftqcKg= +github.com/hashicorp/hcl-lang v0.0.0-20220316204834-49ffde67ce68 h1:CdUL7gJYGdJheCfAmCWNE65wimdo9YWJSqB/+NtfWPc= +github.com/hashicorp/hcl-lang v0.0.0-20220316204834-49ffde67ce68/go.mod h1:oQgcOV8OizFyZfZh3FbQSsQtvtTv8hD23MLAxfn3E+E= github.com/hashicorp/hcl/v2 v2.11.1 h1:yTyWcXcm9XB0TEkyU/JCRU6rYy4K+mgLtzn2wlrJbcc= github.com/hashicorp/hcl/v2 v2.11.1/go.mod h1:FwWsfWEjyV/CMj8s/gqAuiviY72rJ1/oayI9WftqcKg= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= @@ -315,8 +313,8 @@ github.com/hashicorp/terraform-json v0.13.0/go.mod h1:y5OdLBCT+rxbwnpxZs9kGL7R9E github.com/hashicorp/terraform-registry-address v0.0.0-20210412075316-9b2996cce896/go.mod h1:bzBPnUIkI0RxauU8Dqo+2KrZZ28Cf48s8V6IHt3p4co= github.com/hashicorp/terraform-registry-address 
v0.0.0-20210816115301-cb2034eba045 h1:R/I8ofvXuPcTNoc//N4ruvaHGZcShI/VuU2iXo875Lo= github.com/hashicorp/terraform-registry-address v0.0.0-20210816115301-cb2034eba045/go.mod h1:anRyJbe12BZscpFgaeGu9gH12qfdBP094LYFtuAFzd4= -github.com/hashicorp/terraform-schema v0.0.0-20220225085753-faadc57bd40a h1:zmKoQsY/7OzNElKxg8Y+GGWzWCnQsE0JBdPfY7tMqPo= -github.com/hashicorp/terraform-schema v0.0.0-20220225085753-faadc57bd40a/go.mod h1:Y6ag6iaW+d2PwoWSLFrt+azKn4CryA+7bKUlqxD9ogQ= +github.com/hashicorp/terraform-schema v0.0.0-20220316204916-c6585b866d6d h1:XLelo71INyUNDHQAWnGwsfAA5Ccj9LqtbaejBUzYKhc= +github.com/hashicorp/terraform-schema v0.0.0-20220316204916-c6585b866d6d/go.mod h1:i0M64K9OfxlLRuFOThK1KRi9+20Y9XbyWpgPaEycbec= github.com/hashicorp/terraform-svchost v0.0.0-20200729002733-f050f53b9734 h1:HKLsbzeOsfXmKNpr3GiT18XAblV0BjCbzL8KQAMZGa0= github.com/hashicorp/terraform-svchost v0.0.0-20200729002733-f050f53b9734/go.mod h1:kNDNcF7sN4DocDLBkQYz73HGKwN1ANB1blq4lIYLYvg= github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= @@ -811,7 +809,6 @@ golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= -golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20= golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/internal/lsp/semantic_tokens.go b/internal/lsp/semantic_tokens.go index 2d9d68e0a..8a1a03166 100644 --- a/internal/lsp/semantic_tokens.go +++ b/internal/lsp/semantic_tokens.go @@ -1,9 +1,65 @@ package lsp import ( + "github.com/hashicorp/hcl-lang/lang" + "github.com/hashicorp/terraform-ls/internal/lsp/semtok" lsp "github.com/hashicorp/terraform-ls/internal/protocol" + tfschema "github.com/hashicorp/terraform-schema/schema" ) +// Registering types which are actually in use +var ( + serverTokenTypes = semtok.TokenTypes{ + semtok.TokenTypeEnumMember, + semtok.TokenTypeFunction, + semtok.TokenTypeKeyword, + semtok.TokenTypeNumber, + semtok.TokenTypeParameter, + semtok.TokenTypeProperty, + semtok.TokenTypeString, + semtok.TokenTypeType, + semtok.TokenTypeVariable, + } + serverTokenModifiers = semtok.TokenModifiers{ + semtok.TokenModifierDefaultLibrary, + } +) + +func init() { + for _, tokType := range lang.SupportedSemanticTokenTypes { + serverTokenTypes = append(serverTokenTypes, semtok.TokenType(tokType)) + } + for _, tokModifier := range tfschema.SemanticTokenModifiers { + serverTokenModifiers = append(serverTokenModifiers, semtok.TokenModifier(tokModifier)) + } +} + +func TokenTypesLegend(clientSupported []string) semtok.TokenTypes { + legend := make(semtok.TokenTypes, 0) + + // Filter only supported token types + for _, tokenType := range serverTokenTypes { + if sliceContains(clientSupported, string(tokenType)) { + legend = append(legend, semtok.TokenType(tokenType)) + } + } + + return legend +} + +func TokenModifiersLegend(clientSupported []string) semtok.TokenModifiers { + legend := make(semtok.TokenModifiers, 0) + + // Filter only supported token modifiers + for _, modifier := range serverTokenModifiers { + if sliceContains(clientSupported, string(modifier)) 
{ + legend = append(legend, semtok.TokenModifier(modifier)) + } + } + + return legend +} + type SemanticTokensClientCapabilities struct { lsp.SemanticTokensClientCapabilities } diff --git a/internal/lsp/semtok/lsp_token_modifiers.go b/internal/lsp/semtok/lsp_token_modifiers.go new file mode 100644 index 000000000..ee34db7dd --- /dev/null +++ b/internal/lsp/semtok/lsp_token_modifiers.go @@ -0,0 +1,15 @@ +package semtok + +var ( + // Modifiers predefined in LSP spec + TokenModifierDeclaration TokenModifier = "declaration" + TokenModifierDefinition TokenModifier = "definition" + TokenModifierReadonly TokenModifier = "readonly" + TokenModifierStatic TokenModifier = "static" + TokenModifierDeprecated TokenModifier = "deprecated" + TokenModifierAbstract TokenModifier = "abstract" + TokenModifierAsync TokenModifier = "async" + TokenModifierModification TokenModifier = "modification" + TokenModifierDocumentation TokenModifier = "documentation" + TokenModifierDefaultLibrary TokenModifier = "defaultLibrary" +) diff --git a/internal/lsp/semtok/lsp_token_types.go b/internal/lsp/semtok/lsp_token_types.go new file mode 100644 index 000000000..9ab34e272 --- /dev/null +++ b/internal/lsp/semtok/lsp_token_types.go @@ -0,0 +1,27 @@ +package semtok + +const ( + // Types predefined in LSP spec + TokenTypeClass TokenType = "class" + TokenTypeComment TokenType = "comment" + TokenTypeEnum TokenType = "enum" + TokenTypeEnumMember TokenType = "enumMember" + TokenTypeEvent TokenType = "event" + TokenTypeFunction TokenType = "function" + TokenTypeInterface TokenType = "interface" + TokenTypeKeyword TokenType = "keyword" + TokenTypeMacro TokenType = "macro" + TokenTypeMethod TokenType = "method" + TokenTypeModifier TokenType = "modifier" + TokenTypeNamespace TokenType = "namespace" + TokenTypeNumber TokenType = "number" + TokenTypeOperator TokenType = "operator" + TokenTypeParameter TokenType = "parameter" + TokenTypeProperty TokenType = "property" + TokenTypeRegexp TokenType = "regexp" + TokenTypeString TokenType = "string" + TokenTypeStruct TokenType = "struct" + TokenTypeType TokenType = "type" + TokenTypeTypeParameter TokenType = "typeParameter" + TokenTypeVariable TokenType = "variable" +) diff --git a/internal/lsp/semtok/token_modifier.go b/internal/lsp/semtok/token_modifier.go new file mode 100644 index 000000000..4cc4cd3b2 --- /dev/null +++ b/internal/lsp/semtok/token_modifier.go @@ -0,0 +1,37 @@ +package semtok + +import "math" + +type TokenModifier string +type TokenModifiers []TokenModifier + +func (tm TokenModifiers) AsStrings() []string { + modifiers := make([]string, len(tm)) + + for i, tokenModifier := range tm { + modifiers[i] = string(tokenModifier) + } + + return modifiers +} + +func (tm TokenModifiers) BitMask(declaredModifiers TokenModifiers) int { + bitMask := 0b0 + + for i, modifier := range tm { + if isDeclared(modifier, declaredModifiers) { + bitMask |= int(math.Pow(2, float64(i))) + } + } + + return bitMask +} + +func isDeclared(mod TokenModifier, declaredModifiers TokenModifiers) bool { + for _, dm := range declaredModifiers { + if mod == dm { + return true + } + } + return false +} diff --git a/internal/lsp/semtok/token_types.go b/internal/lsp/semtok/token_types.go new file mode 100644 index 000000000..f12d9f08a --- /dev/null +++ b/internal/lsp/semtok/token_types.go @@ -0,0 +1,23 @@ +package semtok + +type TokenType string +type TokenTypes []TokenType + +func (tt TokenTypes) AsStrings() []string { + types := make([]string, len(tt)) + + for i, tokenType := range tt { + types[i] = 
string(tokenType) + } + + return types +} + +func (tt TokenTypes) Index(tokenType TokenType) int { + for i, t := range tt { + if t == tokenType { + return i + } + } + return -1 +} diff --git a/internal/lsp/token_encoder.go b/internal/lsp/token_encoder.go index a39f64be0..6db11d4b2 100644 --- a/internal/lsp/token_encoder.go +++ b/internal/lsp/token_encoder.go @@ -4,6 +4,7 @@ import ( "bytes" "github.com/hashicorp/hcl-lang/lang" + "github.com/hashicorp/terraform-ls/internal/lsp/semtok" lsp "github.com/hashicorp/terraform-ls/internal/protocol" "github.com/hashicorp/terraform-ls/internal/source" ) @@ -31,60 +32,13 @@ func (te *TokenEncoder) Encode() []uint32 { func (te *TokenEncoder) encodeTokenOfIndex(i int) []uint32 { token := te.Tokens[i] - var tokenType TokenType - modifiers := make([]TokenModifier, 0) - - switch token.Type { - case lang.TokenBlockType: - tokenType = TokenTypeType - case lang.TokenBlockLabel: - tokenType = TokenTypeEnumMember - case lang.TokenAttrName: - tokenType = TokenTypeProperty - case lang.TokenBool: - tokenType = TokenTypeKeyword - case lang.TokenNumber: - tokenType = TokenTypeNumber - case lang.TokenString: - tokenType = TokenTypeString - case lang.TokenObjectKey: - tokenType = TokenTypeParameter - case lang.TokenMapKey: - tokenType = TokenTypeParameter - case lang.TokenKeyword: - tokenType = TokenTypeVariable - case lang.TokenTraversalStep: - tokenType = TokenTypeVariable - case lang.TokenTypeCapsule: - tokenType = TokenTypeFunction - case lang.TokenTypePrimitive: - tokenType = TokenTypeKeyword - - default: - return []uint32{} - } - - if !te.tokenTypeSupported(tokenType) { + tokenType, ok := te.resolveTokenType(token) + if !ok { return []uint32{} } - tokenTypeIdx := TokenTypesLegend(te.ClientCaps.TokenTypes).Index(tokenType) - for _, m := range token.Modifiers { - switch m { - case lang.TokenModifierDependent: - if !te.tokenModifierSupported(TokenModifierDefaultLibrary) { - continue - } - modifiers = append(modifiers, TokenModifierDefaultLibrary) - case lang.TokenModifierDeprecated: - if !te.tokenModifierSupported(TokenModifierDeprecated) { - continue - } - modifiers = append(modifiers, TokenModifierDeprecated) - } - } - + modifiers := te.resolveTokenModifiers(token.Modifiers) modifierBitMask := TokenModifiersLegend(te.ClientCaps.TokenModifiers).BitMask(modifiers) data := make([]uint32, 0) @@ -153,10 +107,95 @@ func (te *TokenEncoder) encodeTokenOfIndex(i int) []uint32 { return data } -func (te *TokenEncoder) tokenTypeSupported(tokenType TokenType) bool { - return sliceContains(te.ClientCaps.TokenTypes, string(tokenType)) +func (te *TokenEncoder) resolveTokenType(token lang.SemanticToken) (semtok.TokenType, bool) { + switch token.Type { + case lang.TokenBlockType: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenBlockType), semtok.TokenTypeType) + case lang.TokenBlockLabel: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenBlockLabel), semtok.TokenTypeEnumMember) + case lang.TokenAttrName: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenAttrName), semtok.TokenTypeProperty) + case lang.TokenBool: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenBool), semtok.TokenTypeKeyword) + case lang.TokenNumber: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenNumber), semtok.TokenTypeNumber) + case lang.TokenString: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenString), semtok.TokenTypeString) + case lang.TokenObjectKey: + return te.firstSupportedTokenType( + 
semtok.TokenType(lang.TokenObjectKey), semtok.TokenTypeParameter) + case lang.TokenMapKey: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenMapKey), semtok.TokenTypeParameter) + case lang.TokenKeyword: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenKeyword), semtok.TokenTypeVariable) + case lang.TokenTraversalStep: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenTraversalStep), semtok.TokenTypeVariable) + case lang.TokenTypeCapsule: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenTypeCapsule), semtok.TokenTypeFunction) + case lang.TokenTypePrimitive: + return te.firstSupportedTokenType( + semtok.TokenType(lang.TokenTypePrimitive), semtok.TokenTypeKeyword) + } + + return "", false } -func (te *TokenEncoder) tokenModifierSupported(tokenModifier TokenModifier) bool { - return sliceContains(te.ClientCaps.TokenModifiers, string(tokenModifier)) +func (te *TokenEncoder) resolveTokenModifiers(tokModifiers []lang.SemanticTokenModifier) semtok.TokenModifiers { + modifiers := make(semtok.TokenModifiers, 0) + + for _, modifier := range tokModifiers { + if modifier == lang.TokenModifierDependent { + if te.tokenModifierSupported(string(lang.TokenModifierDependent)) { + modifiers = append(modifiers, semtok.TokenModifier(lang.TokenModifierDependent)) + continue + } + if te.tokenModifierSupported(string(semtok.TokenModifierDefaultLibrary)) { + modifiers = append(modifiers, semtok.TokenModifierDefaultLibrary) + continue + } + continue + } + + if te.tokenModifierSupported(string(modifier)) { + modifiers = append(modifiers, semtok.TokenModifier(modifier)) + } + } + + return modifiers +} + +func (te *TokenEncoder) firstSupportedTokenType(tokenTypes ...semtok.TokenType) (semtok.TokenType, bool) { + for _, tokenType := range tokenTypes { + if te.tokenTypeSupported(string(tokenType)) { + return tokenType, true + } + } + return "", false +} + +func (te *TokenEncoder) tokenTypeSupported(tokenType string) bool { + return sliceContains(te.ClientCaps.TokenTypes, tokenType) +} + +func (te *TokenEncoder) tokenModifierSupported(tokenModifier string) bool { + return sliceContains(te.ClientCaps.TokenModifiers, tokenModifier) +} + +func sliceContains(slice []string, value string) bool { + for _, val := range slice { + if val == value { + return true + } + } + return false } diff --git a/internal/lsp/token_encoder_test.go b/internal/lsp/token_encoder_test.go index 426c2c336..42eec417f 100644 --- a/internal/lsp/token_encoder_test.go +++ b/internal/lsp/token_encoder_test.go @@ -67,11 +67,11 @@ func TestTokenEncoder_singleLineTokens(t *testing.T) { } data := te.Encode() expectedData := []uint32{ - 0, 0, 7, 7, 0, - 0, 8, 8, 0, 0, - 1, 2, 8, 5, 0, - 1, 2, 8, 5, 0, - 1, 2, 9, 5, 0, + 0, 0, 7, 10, 0, + 0, 8, 8, 11, 0, + 1, 2, 8, 9, 0, + 1, 2, 8, 9, 0, + 1, 2, 9, 9, 0, } if diff := cmp.Diff(expectedData, data); diff != "" { @@ -90,7 +90,7 @@ func TestTokenEncoder_unknownTokenType(t *testing.T) { Lines: source.MakeSourceLines("test.tf", bytes), Tokens: []lang.SemanticToken{ { - Type: lang.SemanticTokenType(999), + Type: lang.SemanticTokenType("unknown"), Modifiers: []lang.SemanticTokenModifier{}, Range: hcl.Range{ Filename: "main.tf", @@ -99,7 +99,7 @@ func TestTokenEncoder_unknownTokenType(t *testing.T) { }, }, { - Type: lang.SemanticTokenType(1000), + Type: lang.SemanticTokenType("another-unknown"), Modifiers: []lang.SemanticTokenModifier{}, Range: hcl.Range{ Filename: "main.tf", @@ -124,7 +124,7 @@ func TestTokenEncoder_unknownTokenType(t *testing.T) { } data 
:= te.Encode() expectedData := []uint32{ - 2, 2, 7, 5, 0, + 2, 2, 7, 9, 0, } if diff := cmp.Diff(expectedData, data); diff != "" { @@ -161,9 +161,9 @@ func TestTokenEncoder_multiLineTokens(t *testing.T) { } data := te.Encode() expectedData := []uint32{ - 1, 2, 24, 5, 0, - 1, 0, 15, 5, 0, - 1, 0, 11, 5, 0, + 1, 2, 24, 9, 0, + 1, 0, 15, 9, 0, + 1, 0, 11, 9, 0, } if diff := cmp.Diff(expectedData, data); diff != "" { @@ -214,9 +214,9 @@ func TestTokenEncoder_deltaStartCharBug(t *testing.T) { } data := te.Encode() expectedData := []uint32{ - 0, 0, 8, 7, 0, - 0, 9, 21, 0, 1, - 0, 22, 20, 0, 0, + 0, 0, 8, 10, 0, + 0, 9, 21, 11, 1, + 0, 22, 20, 11, 0, } if diff := cmp.Diff(expectedData, data); diff != "" { @@ -243,10 +243,8 @@ func TestTokenEncoder_tokenModifiers(t *testing.T) { }, }, { - Type: lang.TokenBlockLabel, - Modifiers: []lang.SemanticTokenModifier{ - lang.TokenModifierDeprecated, - }, + Type: lang.TokenBlockLabel, + Modifiers: []lang.SemanticTokenModifier{}, Range: hcl.Range{ Filename: "test.tf", Start: hcl.Pos{Line: 1, Column: 9, Byte: 8}, @@ -254,10 +252,8 @@ func TestTokenEncoder_tokenModifiers(t *testing.T) { }, }, { - Type: lang.TokenAttrName, - Modifiers: []lang.SemanticTokenModifier{ - lang.TokenModifierDeprecated, - }, + Type: lang.TokenAttrName, + Modifiers: []lang.SemanticTokenModifier{}, Range: hcl.Range{ Filename: "test.tf", Start: hcl.Pos{Line: 2, Column: 3, Byte: 21}, @@ -278,7 +274,6 @@ func TestTokenEncoder_tokenModifiers(t *testing.T) { { Type: lang.TokenAttrName, Modifiers: []lang.SemanticTokenModifier{ - lang.TokenModifierDeprecated, lang.TokenModifierDependent, }, Range: hcl.Range{ @@ -295,11 +290,11 @@ func TestTokenEncoder_tokenModifiers(t *testing.T) { } data := te.Encode() expectedData := []uint32{ - 0, 0, 7, 7, 0, - 0, 8, 8, 0, 2, - 1, 2, 8, 5, 2, - 1, 2, 8, 5, 1, - 1, 2, 9, 5, 3, + 0, 0, 7, 10, 0, + 0, 8, 8, 11, 0, + 1, 2, 8, 9, 0, + 1, 2, 8, 9, 1, + 1, 2, 9, 9, 1, } if diff := cmp.Diff(expectedData, data); diff != "" { @@ -326,10 +321,8 @@ func TestTokenEncoder_unsupported(t *testing.T) { }, }, { - Type: lang.TokenBlockLabel, - Modifiers: []lang.SemanticTokenModifier{ - lang.TokenModifierDeprecated, - }, + Type: lang.TokenBlockLabel, + Modifiers: []lang.SemanticTokenModifier{}, Range: hcl.Range{ Filename: "test.tf", Start: hcl.Pos{Line: 1, Column: 9, Byte: 8}, @@ -337,10 +330,8 @@ func TestTokenEncoder_unsupported(t *testing.T) { }, }, { - Type: lang.TokenAttrName, - Modifiers: []lang.SemanticTokenModifier{ - lang.TokenModifierDeprecated, - }, + Type: lang.TokenAttrName, + Modifiers: []lang.SemanticTokenModifier{}, Range: hcl.Range{ Filename: "test.tf", Start: hcl.Pos{Line: 2, Column: 3, Byte: 21}, @@ -361,7 +352,6 @@ func TestTokenEncoder_unsupported(t *testing.T) { { Type: lang.TokenAttrName, Modifiers: []lang.SemanticTokenModifier{ - lang.TokenModifierDeprecated, lang.TokenModifierDependent, }, Range: hcl.Range{ @@ -372,16 +362,16 @@ func TestTokenEncoder_unsupported(t *testing.T) { }, }, ClientCaps: protocol.SemanticTokensClientCapabilities{ - TokenTypes: []string{"type", "property"}, - TokenModifiers: []string{"deprecated"}, + TokenTypes: []string{"hcl-blockType", "hcl-attrName"}, + TokenModifiers: []string{}, }, } data := te.Encode() expectedData := []uint32{ 0, 0, 7, 1, 0, - 1, 2, 8, 0, 1, 1, 2, 8, 0, 0, - 1, 2, 9, 0, 1, + 1, 2, 8, 0, 0, + 1, 2, 9, 0, 0, } if diff := cmp.Diff(expectedData, data); diff != "" { diff --git a/internal/lsp/token_types.go b/internal/lsp/token_types.go deleted file mode 100644 index 023b6c9c2..000000000 --- 
a/internal/lsp/token_types.go +++ /dev/null @@ -1,154 +0,0 @@ -package lsp - -import ( - "math" -) - -type TokenType string -type TokenTypes []TokenType - -func (tt TokenTypes) AsStrings() []string { - types := make([]string, len(tt)) - - for i, tokenType := range tt { - types[i] = string(tokenType) - } - - return types -} - -func (tt TokenTypes) Index(tokenType TokenType) int { - for i, t := range tt { - if t == tokenType { - return i - } - } - return -1 -} - -type TokenModifier string -type TokenModifiers []TokenModifier - -func (tm TokenModifiers) AsStrings() []string { - modifiers := make([]string, len(tm)) - - for i, tokenModifier := range tm { - modifiers[i] = string(tokenModifier) - } - - return modifiers -} - -func (tm TokenModifiers) BitMask(declaredModifiers TokenModifiers) int { - bitMask := 0b0 - - for i, modifier := range tm { - if isDeclared(modifier, declaredModifiers) { - bitMask |= int(math.Pow(2, float64(i))) - } - } - - return bitMask -} - -func isDeclared(mod TokenModifier, declaredModifiers TokenModifiers) bool { - for _, dm := range declaredModifiers { - if mod == dm { - return true - } - } - return false -} - -const ( - // Types predefined in LSP spec - TokenTypeClass TokenType = "class" - TokenTypeComment TokenType = "comment" - TokenTypeEnum TokenType = "enum" - TokenTypeEnumMember TokenType = "enumMember" - TokenTypeEvent TokenType = "event" - TokenTypeFunction TokenType = "function" - TokenTypeInterface TokenType = "interface" - TokenTypeKeyword TokenType = "keyword" - TokenTypeMacro TokenType = "macro" - TokenTypeMethod TokenType = "method" - TokenTypeModifier TokenType = "modifier" - TokenTypeNamespace TokenType = "namespace" - TokenTypeNumber TokenType = "number" - TokenTypeOperator TokenType = "operator" - TokenTypeParameter TokenType = "parameter" - TokenTypeProperty TokenType = "property" - TokenTypeRegexp TokenType = "regexp" - TokenTypeString TokenType = "string" - TokenTypeStruct TokenType = "struct" - TokenTypeType TokenType = "type" - TokenTypeTypeParameter TokenType = "typeParameter" - TokenTypeVariable TokenType = "variable" - - // Modifiers predefined in LSP spec - TokenModifierDeclaration TokenModifier = "declaration" - TokenModifierDefinition TokenModifier = "definition" - TokenModifierReadonly TokenModifier = "readonly" - TokenModifierStatic TokenModifier = "static" - TokenModifierDeprecated TokenModifier = "deprecated" - TokenModifierAbstract TokenModifier = "abstract" - TokenModifierAsync TokenModifier = "async" - TokenModifierModification TokenModifier = "modification" - TokenModifierDocumentation TokenModifier = "documentation" - TokenModifierDefaultLibrary TokenModifier = "defaultLibrary" -) - -// Registering types which are actually in use and known -// to be registered by VS Code by default, see https://git.io/JIeuV -var ( - serverTokenTypes = TokenTypes{ - TokenTypeEnumMember, - TokenTypeFunction, - TokenTypeKeyword, - TokenTypeNumber, - TokenTypeParameter, - TokenTypeProperty, - TokenTypeString, - TokenTypeType, - TokenTypeVariable, - } - serverTokenModifiers = TokenModifiers{ - TokenModifierDefaultLibrary, - TokenModifierDeprecated, - } -) - -func TokenTypesLegend(clientSupported []string) TokenTypes { - legend := make(TokenTypes, 0) - - // Filter only supported token types - for _, tokenType := range serverTokenTypes { - if sliceContains(clientSupported, string(tokenType)) { - legend = append(legend, TokenType(tokenType)) - } - } - - return legend -} - -func TokenModifiersLegend(clientSupported []string) TokenModifiers { - legend := 
make(TokenModifiers, 0) - - // Filter only supported token modifiers - for _, modifier := range serverTokenModifiers { - if sliceContains(clientSupported, string(modifier)) { - legend = append(legend, TokenModifier(modifier)) - } - } - - return legend -} - -func sliceContains(slice []string, value string) bool { - for _, val := range slice { - if val == value { - return true - } - } - return false -} From c46b3a35076b81cf8cd4d1473aab9b21d60122fb Mon Sep 17 00:00:00 2001 From: Radek Simko Date: Thu, 17 Mar 2022 10:51:16 +0000 Subject: [PATCH 2/3] add docs --- docs/language-clients.md | 4 ++ docs/syntax-highlighting.md | 80 +++++++++++++++++++++++++++++++++++++ 2 files changed, 84 insertions(+) create mode 100644 docs/syntax-highlighting.md diff --git a/docs/language-clients.md b/docs/language-clients.md index 23a14727c..0d0944d2c 100- a/docs/language-clients.md +++ b/docs/language-clients.md @@ -16,6 +16,10 @@ Clients specifically should **not** send `*.tf.json`, `*.tfvars.json` nor Packer HCL config nor any other HCL config files as the server is not equipped to handle these file types. +## Syntax Highlighting + +Read more about how we recommend highlighting Terraform files in [syntax-highlighting.md](./syntax-highlighting.md). + ### Internal parser The server expects clients to use standard text synchronization LSP methods diff --git a/docs/syntax-highlighting.md b/docs/syntax-highlighting.md new file mode 100644 index 000000000..f847069c7 --- /dev/null +++ b/docs/syntax-highlighting.md @@ -0,0 +1,80 @@ +# Syntax Highlighting + +Highlighting syntax is one of the key features expected of any editor. Editors typically have a few different solutions to choose from. Below is our view on how we expect editors to highlight Terraform code while using this language server. + +## Static Grammar + +Highlighting Terraform language syntax _accurately_ via a static grammar (such as TextMate) may be challenging, but it brings more immediate value to the end user, since starting the language server may take time. Also, not all language clients may implement semantic-token-based highlighting. + +HashiCorp maintains a set of grammars in https://github.com/hashicorp/syntax and we encourage you to use the available Terraform grammar as the *primary* way of highlighting the Terraform language. + +## Semantic Tokens + +[LSP (Language Server Protocol) 3.16](https://microsoft.github.io/language-server-protocol/specifications/specification-3-16/) introduced language server-driven highlighting. This language server is better equipped to provide more contextual and accurate highlighting, as it can parse the whole AST, unlike a TextMate grammar operating on a regex basis. + +LSP 3.17 does support use cases where semantic highlighting is the only way to highlight a file (through the [`augmentsSyntaxTokens` client capability](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#semanticTokensClientCapabilities)). However, in the context of the Terraform language we recommend semantic highlighting be used in *addition* to a static grammar - i.e. this server does _not_ support `augmentsSyntaxTokens: false` mode and is not expected to be used in isolation to highlight configuration. + +There are two main use cases we're targeting with semantic tokens. 
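Before describing those use cases, it may help to see what semantic token data looks like on the wire. LSP 3.16 encodes each token as five unsigned integers: `deltaLine`, `deltaStartChar` (relative to the previous token, and relative only within the same line), `length`, an index into the negotiated token type legend, and a bitmask over the negotiated modifier legend. Below is a minimal client-side decoder sketch in Go; the legend values are hypothetical and for illustration only.

```go
package main

import "fmt"

// Token is a decoded semantic token with an absolute position and
// legend entries resolved back to their names.
type Token struct {
	Line, StartChar, Length uint32
	Type                    string
	Modifiers               []string
}

// decode reverses the LSP 3.16 wire format: five uint32 values per token
// (deltaLine, deltaStartChar, length, token type index, modifier bitmask).
func decode(data []uint32, types, modifiers []string) []Token {
	tokens := make([]Token, 0, len(data)/5)
	var line, char uint32
	for i := 0; i+4 < len(data); i += 5 {
		if data[i] == 0 {
			char += data[i+1] // deltaStartChar is relative only within the same line
		} else {
			line += data[i]
			char = data[i+1]
		}
		var mods []string
		for bit := 0; bit < len(modifiers); bit++ {
			if data[i+4]&(1<<bit) != 0 {
				mods = append(mods, modifiers[bit])
			}
		}
		tokens = append(tokens, Token{line, char, data[i+2], types[data[i+3]], mods})
	}
	return tokens
}

func main() {
	// Hypothetical legends, for illustration only.
	types := []string{"hcl-blockType", "hcl-blockLabel", "hcl-attrName"}
	modifiers := []string{"defaultLibrary", "hcl-dependent"}

	// Two tokens on one line: a block type at 0:0 (length 7, no modifiers)
	// and a block label at 0:8 (length 8, bitmask 2 = "hcl-dependent").
	for _, tok := range decode([]uint32{0, 0, 7, 0, 0, 0, 8, 8, 1, 2}, types, modifiers) {
		fmt.Printf("%d:%d len=%d %s %v\n", tok.Line, tok.StartChar, tok.Length, tok.Type, tok.Modifiers)
	}
}
```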
+ +### Improving Accuracy + +Regex-based grammars (like TextMate) operate on a line-by-line basis, which makes it difficult to accurately highlight certain parts of the syntax, for example nested blocks occurring in the Terraform language (as below). + +```hcl +terraform { + required_providers { + + } +} +``` + +The language server can use the AST and other important context (such as the Terraform version or provider schema) to fully understand the whole configuration and provide more accurate highlighting. + +### Custom Theme Support + +Many _default_ IDE themes are intended as general-purpose themes, highlighting token types, modifiers and scopes mappable to most languages. We recognize that theme authors would benefit from token types & modifiers which more accurately reflect the Terraform language. + +The LSP spec doesn't _explicitly_ encourage defining custom token types or modifiers; however, the default token types and modifiers which are part of the spec are not well suited to express all the different constructs of a DSL (Domain Specific Language) such as the Terraform language. With that in mind, we use the LSP client/server capability negotiation mechanism to provide the following custom token types & modifiers, with fallback to the predefined ones. + +#### Token Types + +Primary token types are preferred if deemed supported by the client per `SemanticTokensClientCapabilities.TokenTypes`; fallbacks are likewise only reported if the client claims support (using the same capability). + +Fallback types are chosen based on meaningful semantic mapping and default themes in VS Code. + +| Primary | Fallback | | ------- | -------- | | `hcl-blockType` | `type` | | `hcl-blockLabel` | `enumMember` | | `hcl-attrName` | `property` | | `hcl-bool` | `keyword` | | `hcl-number` | `number` | | `hcl-string` | `string` | | `hcl-objectKey` | `parameter` | | `hcl-mapKey` | `parameter` | | `hcl-keyword` | `variable` | | `hcl-traversalStep` | `variable` | | `hcl-typeCapsule` | `function` | | `hcl-typePrimitive` | `keyword` | + +#### Token Modifiers + +Modifiers which do not have a fallback are not reported at all unless the client declares support for them via `SemanticTokensClientCapabilities.TokenModifiers` (just as a fallback modifier that isn't supported is not reported). + +| Primary | Fallback | | ------- | -------- | | `hcl-dependent` | `defaultLibrary` | | `terraform-data` | | | `terraform-locals` | | | `terraform-module` | | | `terraform-output` | | | `terraform-provider` | | | `terraform-resource` | | | `terraform-provisioner` | | | `terraform-connection` | | | `terraform-variable` | | | `terraform-terraform` | | | `terraform-backend` | | | `terraform-name` | | | `terraform-type` | | | `terraform-requiredProviders` | | From cde2c2b884623085e32951629f48f9c168de4429 Mon Sep 17 00:00:00 2001 From: Radek Simko Date: Fri, 18 Mar 2022 09:00:46 +0000 Subject: [PATCH 3/3] add hcl-dependent back to modifier legend This was just lost in the reshuffle of previous commits. 
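Because the modifier bitmask is built from positions in the negotiated legend, restoring `hcl-dependent` to the legend at index 1 (after `defaultLibrary`) changes how a dependent token is encoded: it is now reported as `hcl-dependent` (bit 1, value 2) rather than falling back to `defaultLibrary` (bit 0, value 1), which is why the expected values in the tests below change from 1 to 2. A standalone sketch of the `BitMask` logic from `semtok/token_modifier.go` demonstrates the shift (simplified here: `1 << i` stands in for the original `math.Pow(2, i)`).

```go
package main

import "fmt"

type TokenModifier string
type TokenModifiers []TokenModifier

// BitMask mirrors semtok/token_modifier.go: the receiver is the negotiated
// modifier legend, and every legend entry present on the token sets the bit
// matching its legend position.
func (legend TokenModifiers) BitMask(tokenModifiers TokenModifiers) int {
	mask := 0
	for i, mod := range legend {
		for _, tm := range tokenModifiers {
			if mod == tm {
				mask |= 1 << i
			}
		}
	}
	return mask
}

func main() {
	// Before this commit the legend lacked "hcl-dependent", so the encoder
	// fell back to "defaultLibrary" at index 0 -> bitmask 1.
	oldLegend := TokenModifiers{"defaultLibrary"}
	fmt.Println(oldLegend.BitMask(TokenModifiers{"defaultLibrary"})) // 1

	// With "hcl-dependent" restored at index 1, the same dependent token
	// now encodes as bitmask 2, hence the 1 -> 2 changes in the tests below.
	newLegend := TokenModifiers{"defaultLibrary", "hcl-dependent"}
	fmt.Println(newLegend.BitMask(TokenModifiers{"hcl-dependent"})) // 2
}
```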
--- internal/lsp/semantic_tokens.go | 1 + internal/lsp/token_encoder_test.go | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/internal/lsp/semantic_tokens.go b/internal/lsp/semantic_tokens.go index 8a1a03166..3a189ab73 100644 --- a/internal/lsp/semantic_tokens.go +++ b/internal/lsp/semantic_tokens.go @@ -29,6 +29,7 @@ func init() { for _, tokType := range lang.SupportedSemanticTokenTypes { serverTokenTypes = append(serverTokenTypes, semtok.TokenType(tokType)) } + serverTokenModifiers = append(serverTokenModifiers, semtok.TokenModifier(lang.TokenModifierDependent)) for _, tokModifier := range tfschema.SemanticTokenModifiers { serverTokenModifiers = append(serverTokenModifiers, semtok.TokenModifier(tokModifier)) } diff --git a/internal/lsp/token_encoder_test.go b/internal/lsp/token_encoder_test.go index 42eec417f..0711e694e 100644 --- a/internal/lsp/token_encoder_test.go +++ b/internal/lsp/token_encoder_test.go @@ -215,7 +215,7 @@ func TestTokenEncoder_deltaStartCharBug(t *testing.T) { data := te.Encode() expectedData := []uint32{ 0, 0, 8, 10, 0, - 0, 9, 21, 11, 1, + 0, 9, 21, 11, 2, 0, 22, 20, 11, 0, } @@ -293,8 +293,8 @@ func TestTokenEncoder_tokenModifiers(t *testing.T) { 0, 0, 7, 10, 0, 0, 8, 8, 11, 0, 1, 2, 8, 9, 0, - 1, 2, 8, 9, 1, - 1, 2, 9, 9, 1, + 1, 2, 8, 9, 2, + 1, 2, 9, 9, 2, } if diff := cmp.Diff(expectedData, data); diff != "" {
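As a closing illustration of the negotiation used throughout this series, the token type index that each encoded token carries refers to a position in the legend that survives capability filtering, mirroring `TokenTypesLegend` in `internal/lsp/semantic_tokens.go`. Below is a self-contained sketch; the server-side registration order is abbreviated and the client-supported list is hypothetical.

```go
package main

import "fmt"

type TokenType string
type TokenTypes []TokenType

// legend mirrors TokenTypesLegend: keep only the token types the client
// claims to support, preserving server-side registration order.
func legend(server TokenTypes, clientSupported []string) TokenTypes {
	out := make(TokenTypes, 0, len(server))
	for _, t := range server {
		for _, c := range clientSupported {
			if string(t) == c {
				out = append(out, t)
				break
			}
		}
	}
	return out
}

func main() {
	// Abbreviated server-side order; the hypothetical client below only
	// advertises support for the two custom HCL types.
	server := TokenTypes{"keyword", "string", "hcl-blockType", "hcl-attrName"}
	l := legend(server, []string{"hcl-blockType", "hcl-attrName"})

	// Encoded tokens refer to positions in this filtered legend, so with
	// this client "hcl-attrName" is reported as token type index 1.
	for i, t := range l {
		fmt.Println(i, t)
	}
}
```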