format all the files with the new vfmt (#120)
yuyi98 authored Sep 14, 2024
1 parent d63bc6c commit 83b7286
Showing 15 changed files with 62 additions and 63 deletions.
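
Every hunk below applies the same rule from the new vfmt: a module-level constant referenced inside its own module loses the redundant module qualifier, so psi.non_stubbed_element becomes non_stubbed_element, jsonrpc.version becomes version, and so on. A minimal sketch of the before/after, using the first hunk's constant (the value and the helper function are illustrative assumptions, not taken from the repository):

module psi

// Value assumed for illustration only.
const non_stubbed_element = -1

// Old style, as the previous vfmt wrote it:
//     id := psi.non_stubbed_element
// New style, as the new vfmt writes it inside module psi itself:
fn default_parent_id() int {
	return non_stubbed_element
}
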
2 changes: 1 addition & 1 deletion src/analyzer/psi/StubBase.v
@@ -36,7 +36,7 @@ pub fn new_stub_base(parent &StubElement, stub_type StubType, name string, ident
} else {
&StubList{}
}
- parent_id := if !isnil(parent) { parent.id() } else { psi.non_stubbed_element }
+ parent_id := if !isnil(parent) { parent.id() } else { non_stubbed_element }
mut stub := &StubBase{
name: name
text: data.text
18 changes: 9 additions & 9 deletions src/analyzer/psi/StubIndex.v
@@ -31,23 +31,23 @@ pub mut:
file_to_module map[string]string
// data defines the index data that allows you to get the description of the element
// in 2 accesses to the array elements and one lookup by key.
- data [psi.count_stub_index_location_keys][psi.count_index_keys]map[string]StubResult
+ data [count_stub_index_location_keys][count_index_keys]map[string]StubResult
// all_elements_by_modules contains all top-level elements in the module.
- all_elements_by_modules [psi.count_stub_index_location_keys]map[string][]PsiElement
+ all_elements_by_modules [count_stub_index_location_keys]map[string][]PsiElement
// types_by_modules contains all top-level types in the module.
- types_by_modules [psi.count_stub_index_location_keys]map[string][]PsiElement
+ types_by_modules [count_stub_index_location_keys]map[string][]PsiElement
}

pub fn new_stubs_index(sinks []StubIndexSink) &StubIndex {
mut index := &StubIndex{
sinks: sinks
module_to_files: map[string][]StubIndexSink{}
- all_elements_by_modules: unsafe { [psi.count_stub_index_location_keys]map[string][]PsiElement{} }
- types_by_modules: unsafe { [psi.count_stub_index_location_keys]map[string][]PsiElement{} }
+ all_elements_by_modules: unsafe { [count_stub_index_location_keys]map[string][]PsiElement{} }
+ types_by_modules: unsafe { [count_stub_index_location_keys]map[string][]PsiElement{} }
}

- for i in 0 .. psi.count_stub_index_location_keys {
- for j in 0 .. psi.count_index_keys {
+ for i in 0 .. count_stub_index_location_keys {
+ for j in 0 .. count_index_keys {
index.data[i][j] = map[string]StubResult{}
}
}
@@ -144,8 +144,8 @@ pub fn (mut s StubIndex) update_stubs_index(changed_sinks []StubIndexSink, all_s
s.file_to_module = map[string]string{}

// clear all workspace index
- s.data[StubIndexLocationKind.workspace] = [psi.count_index_keys]map[string]StubResult{}
- for i in 0 .. psi.count_index_keys {
+ s.data[StubIndexLocationKind.workspace] = [count_index_keys]map[string]StubResult{}
+ for i in 0 .. count_index_keys {
s.data[StubIndexLocationKind.workspace][i] = map[string]StubResult{}
}

2 changes: 1 addition & 1 deletion src/analyzer/psi/StubIndexSink.v
@@ -16,7 +16,7 @@ const non_fqn_keys = [StubIndexKey.global_variables, .methods_fingerprint, .fiel

fn (mut s StubIndexSink) occurrence(key StubIndexKey, value string) {
module_fqn := s.module_fqn()
- resulting_value := if module_fqn != '' && key !in psi.non_fqn_keys {
+ resulting_value := if module_fqn != '' && key !in non_fqn_keys {
'${module_fqn}.${value}'
} else {
value
26 changes: 13 additions & 13 deletions src/jsonrpc/jsonrpc.v
@@ -51,7 +51,7 @@ pub const null = Null{}
// https://www.jsonrpc.org/specification#request_object
pub struct Request {
pub mut:
- jsonrpc string = jsonrpc.version
+ jsonrpc string = version
id string @[raw]
method string
params string @[raw]
@@ -61,7 +61,7 @@ pub mut:
pub fn (req Request) json() string {
// NOTE: make request act as a notification for server_test_utils
id_payload := if req.id.len != 0 { ',"id":${req.id},' } else { ',' }
- return '{"jsonrpc":"${jsonrpc.version}"${id_payload}"method":"${req.method}","params":${req.params}}'
+ return '{"jsonrpc":"${version}"${id_payload}"method":"${req.method}","params":${req.params}}'
}

// decode_params decodes the parameters of a Request.
@@ -73,7 +73,7 @@ pub fn (req Request) decode_params[T]() !T {
// https://www.jsonrpc.org/specification#response_object
pub struct Response[T] {
pub:
- jsonrpc string = jsonrpc.version
+ jsonrpc string = version
id string
// error ResponseError
result T
@@ -97,20 +97,20 @@ const error_field_in_u8 = ',"error":'.bytes()
const result_field_in_u8 = ',"result":'.bytes()

fn encode_response[T](resp Response[T], mut writer io.Writer) {
- writer.write('{"jsonrpc":"${jsonrpc.version}","id":'.bytes()) or {}
+ writer.write('{"jsonrpc":"${version}","id":'.bytes()) or {}
if resp.id.len == 0 {
- writer.write(jsonrpc.null_in_u8) or {}
+ writer.write(null_in_u8) or {}
} else {
writer.write(resp.id.bytes()) or {}
}
if resp.error.code != 0 {
err := json.encode(resp.error)
- writer.write(jsonrpc.error_field_in_u8) or {}
+ writer.write(error_field_in_u8) or {}
writer.write(err.bytes()) or {}
} else {
- writer.write(jsonrpc.result_field_in_u8) or {}
+ writer.write(result_field_in_u8) or {}
$if T is Null {
- writer.write(jsonrpc.null_in_u8) or {}
+ writer.write(null_in_u8) or {}
} $else {
res := json.encode(resp.result)
writer.write(res.bytes()) or {}
@@ -129,7 +129,7 @@ fn encode_response[T](resp Response[T], mut writer io.Writer) {
// https://www.jsonrpc.org/specification#notification
pub struct NotificationMessage[T] {
pub:
- jsonrpc string = jsonrpc.version
+ jsonrpc string = version
method string
params T
}
@@ -145,9 +145,9 @@ pub fn (notif NotificationMessage[T]) json() string {
}

fn encode_notification[T](notif NotificationMessage[T], mut writer io.Writer) {
- writer.write('{"jsonrpc":"${jsonrpc.version}","method":"${notif.method}","params":'.bytes()) or {}
+ writer.write('{"jsonrpc":"${version}","method":"${notif.method}","params":'.bytes()) or {}
$if T is Null {
- writer.write(jsonrpc.null_in_u8) or {}
+ writer.write(null_in_u8) or {}
} $else {
res := json.encode(notif.params)
writer.write(res.bytes()) or {}
@@ -156,9 +156,9 @@ fn encode_notification[T](notif NotificationMessage[T], mut writer io.Writer) {
}

fn encode_request[T](notif NotificationMessage[T], mut writer io.Writer) {
- writer.write('{"jsonrpc":"${jsonrpc.version}","id": 1, "method":"${notif.method}","params":'.bytes()) or {}
+ writer.write('{"jsonrpc":"${version}","id": 1, "method":"${notif.method}","params":'.bytes()) or {}
$if T is Null {
- writer.write(jsonrpc.null_in_u8) or {}
+ writer.write(null_in_u8) or {}
} $else {
res := json.encode(notif.params)
writer.write(res.bytes()) or {}
6 changes: 3 additions & 3 deletions src/lsp/log/log.v
@@ -138,11 +138,11 @@ pub const close_event = '${event_prefix}/close'
pub const state_event = '${event_prefix}/state'

pub fn (mut l LogRecorder) on_event(name string, data jsonrpc.InterceptorData) ! {
- if name == log.set_logpath_event && data is string {
+ if name == set_logpath_event && data is string {
l.set_logpath(data)!
- } else if name == log.close_event {
+ } else if name == close_event {
l.close()
- } else if name == log.state_event && data is bool {
+ } else if name == state_event && data is bool {
if data {
l.enable()
} else {
src/server/completion/providers/AttributesCompletionProvider.v
@@ -43,15 +43,15 @@ fn (k &AttributesCompletionProvider) is_available(ctx &completion.CompletionCont
}

fn (mut k AttributesCompletionProvider) add_completion(ctx &completion.CompletionContext, mut result completion.CompletionResultSet) {
- for attribute in providers.attributes {
+ for attribute in attributes {
result.add_element(lsp.CompletionItem{
label: attribute
kind: .struct_
insert_text: attribute
})
}

- for attribute in providers.attributes_with_colon {
+ for attribute in attributes_with_colon {
result.add_element(lsp.CompletionItem{
label: "${attribute}: 'value'"
kind: .struct_
src/server/completion/providers/CompileTimeConstantCompletionProvider.v
@@ -26,7 +26,7 @@ fn (_ &CompileTimeConstantCompletionProvider) is_available(ctx &completion.Compl
}

fn (mut _ CompileTimeConstantCompletionProvider) add_completion(ctx &completion.CompletionContext, mut result completion.CompletionResultSet) {
- for constant, description in providers.compile_time_constant {
+ for constant, description in compile_time_constant {
result.add_element(lsp.CompletionItem{
label: '@${constant}'
kind: .constant
src/server/completion/providers/FunctionLikeCompletionProvider.v
@@ -18,7 +18,7 @@ fn (k &FunctionLikeCompletionProvider) is_available(ctx &completion.CompletionCo
}

fn (mut k FunctionLikeCompletionProvider) add_completion(ctx &completion.CompletionContext, mut result completion.CompletionResultSet) {
- for keyword in providers.function_like_keywords {
+ for keyword in function_like_keywords {
result.add_element(lsp.CompletionItem{
label: '${keyword}()'
kind: .keyword
4 changes: 2 additions & 2 deletions src/server/completion/providers/TopLevelCompletionProvider.v
@@ -28,7 +28,7 @@ fn (mut k TopLevelCompletionProvider) add_completion(ctx &completion.CompletionC
}

fn (mut k TopLevelCompletionProvider) pub_keyword(mut result completion.CompletionResultSet) {
- for label, variant in providers.top_level_map {
+ for label, variant in top_level_map {
result.add_element(lsp.CompletionItem{
label: label
kind: .keyword
@@ -38,7 +38,7 @@ fn (mut k TopLevelCompletionProvider) pub_keyword(mut result completion.Completi
})
}

- for label, variant in providers.top_level_map {
+ for label, variant in top_level_map {
result.add_element(lsp.CompletionItem{
label: 'pub ${label}'
kind: .keyword
4 changes: 2 additions & 2 deletions src/server/features_formatting.v
@@ -10,11 +10,11 @@ pub fn (mut ls LanguageServer) formatting(params lsp.DocumentFormattingParams) !
uri := params.text_document.uri.normalize()
file := ls.get_file(uri) or { return error('Cannot format not opened file') }

- os.write_file(server.temp_formatting_file_path, file.psi_file.source_text) or {
+ os.write_file(temp_formatting_file_path, file.psi_file.source_text) or {
return error('Cannot write temp file for formatting: ${err}')
}

- mut fmt_proc := ls.launch_tool('fmt', server.temp_formatting_file_path)!
+ mut fmt_proc := ls.launch_tool('fmt', temp_formatting_file_path)!
defer {
fmt_proc.close()
}
5 changes: 2 additions & 3 deletions src/server/features_semantic_tokens.v
@@ -17,13 +17,12 @@ pub fn (mut ls LanguageServer) semantic_tokens(text_document lsp.TextDocumentIde

lines := file.psi_file.source_text.count('\n')

- if lines > server.max_line_for_any_semantic_tokens {
+ if lines > max_line_for_any_semantic_tokens {
// File too large, don't compute any tokens.
return lsp.SemanticTokens{}
}

- if lines > server.max_line_for_resolve_semantic_tokens
- || ls.cfg.enable_semantic_tokens == .syntax {
+ if lines > max_line_for_resolve_semantic_tokens || ls.cfg.enable_semantic_tokens == .syntax {
// We don't want to send too many tokens (and compute it), so we just
// send dumb-aware tokens for large files.
dumb_aware_visitor := semantic.new_dumb_aware_semantic_visitor(range, file.psi_file)
12 changes: 6 additions & 6 deletions src/server/setup_test.v
@@ -12,10 +12,10 @@ const default_vmodules_root = os.vmodules_dir()
fn test_setup_default_vpaths() {
mut ls := LanguageServer{}
ls.setup()
- assert ls.paths.vexe == server.default_vexe
- assert ls.paths.vroot == server.default_vroot
- assert ls.paths.vlib_root == server.default_vlib_root
- assert ls.paths.vmodules_root == server.default_vmodules_root
+ assert ls.paths.vexe == default_vexe
+ assert ls.paths.vroot == default_vroot
+ assert ls.paths.vlib_root == default_vlib_root
+ assert ls.paths.vmodules_root == default_vmodules_root
}

fn test_setup_custom_vpaths() {
@@ -53,7 +53,7 @@ fn test_setup_custom_vpaths() {
assert log_out.contains('Failed to find V standard library')

// Test custom_vroot with existing toolchain =================================
- cfg_toml = "custom_vroot = '${server.default_vroot}'"
+ cfg_toml = "custom_vroot = '${default_vroot}'"
os.write_file(cfg_path, cfg_toml)!
os.write_file(log_file_path, '')!
log_file = os.open_append(os.join_path(custom_root, 'log'))!
@@ -68,6 +68,6 @@ fn test_setup_custom_vpaths() {
println('Testlog custom_vroot existing toolchain:')
println(log_out.trim_space())
assert log_out.contains('Find custom VROOT path')
- assert log_out.contains('Using "${server.default_vroot}" as toolchain')
+ assert log_out.contains('Using "${default_vroot}" as toolchain')
assert !log_out.contains('Failed to find standard library path')
}
14 changes: 7 additions & 7 deletions src/streams/streams.v
@@ -49,8 +49,8 @@ pub fn (mut stream StdioStream) read(mut buf []u8) !int {
if len == 0 {
// encounter empty line ('\r\n') in header, header end
break
- } else if line.starts_with(streams.content_length) {
- conlen = line.all_after(streams.content_length).int()
+ } else if line.starts_with(content_length) {
+ conlen = line.all_after(content_length).int()
}
}

@@ -97,7 +97,7 @@ const base_ip = '127.0.0.1'
pub fn new_socket_stream_server(port int, log bool) !io.ReaderWriter {
server_label := 'v-analyzer-server'

- address := '${streams.base_ip}:${port}'
+ address := '${base_ip}:${port}'
mut listener := net.listen_tcp(.ip, address)!

if log {
@@ -125,7 +125,7 @@ pub fn new_socket_stream_server(port int, log bool) !io.ReaderWriter {
}

fn new_socket_stream_client(port int) !io.ReaderWriter {
- address := '${streams.base_ip}:${port}'
+ address := '${base_ip}:${port}'
mut conn := net.dial_tcp(address)!
mut reader := io.new_buffered_reader(reader: conn, cap: 1024 * 1024)
conn.set_blocking(true) or {}
@@ -173,14 +173,14 @@ pub fn (mut stream SocketStream) read(mut buf []u8) !int {
// read header line
got_header := stream.reader.read_line() or { return IError(io.Eof{}) }
buf << got_header.bytes()
- buf << streams.newlines
+ buf << newlines
header_len = got_header.len + 2

if got_header.len == 0 {
// encounter empty line ('\r\n') in header, header end
break
- } else if got_header.starts_with(streams.content_length) {
- conlen = got_header.all_after(streams.content_length).int()
+ } else if got_header.starts_with(content_length) {
+ conlen = got_header.all_after(content_length).int()
}
}

10 changes: 5 additions & 5 deletions src/testing/TestFixture.v
@@ -55,7 +55,7 @@ pub fn new_fixture() &Fixture {
}

pub fn (mut t Fixture) initialize(with_stdlib bool) !lsp.InitializeResult {
- os.mkdir_all(testing.temp_path)!
+ os.mkdir_all(temp_path)!

mut options := ['no-index-save', 'no-diagnostics']
if !with_stdlib {
@@ -69,8 +69,8 @@ pub fn (mut t Fixture) initialize(with_stdlib bool) !lsp.InitializeResult {
name: 'Testing'
version: '0.0.1'
}
- root_uri: lsp.document_uri_from_path(testing.temp_path)
- root_path: testing.temp_path
+ root_uri: lsp.document_uri_from_path(temp_path)
+ root_path: temp_path
initialization_options: options.join(' ')
capabilities: lsp.ClientCapabilities{}
trace: ''
@@ -89,7 +89,7 @@ pub fn (mut t Fixture) configure_by_file(path string) ! {
content := os.read_file(rel_path)!
prepared_text := content + '\n\n' // add extra lines to make sure the caret is not at the end of the file
prepared_content := prepared_text.replace('/*caret*/', '')
- abs_path := os.join_path(testing.temp_path, path)
+ abs_path := os.join_path(temp_path, path)
dir_path := os.dir(abs_path)
os.mkdir_all(dir_path)!
os.write_file(abs_path, prepared_content)!
@@ -110,7 +110,7 @@ pub fn (mut t Fixture) configure_by_file(path string) ! {
pub fn (mut t Fixture) configure_by_text(filename string, text string) ! {
prepared_text := text + '\n\n' // add extra lines to make sure the caret is not at the end of the file
content := prepared_text.replace('/*caret*/', '')
- abs_path := os.join_path(testing.temp_path, filename)
+ abs_path := os.join_path(temp_path, filename)
abs_path_without_name := os.dir(abs_path)
os.mkdir_all(abs_path_without_name)!
os.write_file(abs_path, content)!