
Commit 446048d

MatthewMckee4 authored and BurntSushi committed
[ty] Don't provide completions when in `from <module> i` statement
This works principally by copying `from_import_tokens` and tweaking it a bit to look for `from module <CURSOR>` token sequences. We actually wind up needing to be a little more careful here to parse `<module>` correctly, so that something like `from os.i<CURSOR>` isn't treated as if the `i` were the beginning of the `import` keyword. Fixes astral-sh/ty#1494
1 parent: f44598d · commit: 446048d
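For readers skimming the diff below, here is a minimal standalone sketch of the approach described above: walk the tokens before the cursor in reverse and only suggest `import` when they spell out `from <module> <partial word>`. The `Tok` enum, `suggest_import` function, and `main` demo are hypothetical stand-ins for illustration only; they collapse ty's real `TokenKind` (which also handles `Ellipsis`, soft keywords like `match`/`type`, and a token limit) down to just the kinds needed to show the idea.

// A toy stand-in for ty's `TokenKind`; not the real type.
#[derive(Clone, Copy, Debug)]
enum Tok {
    From,
    Name,
    Dot,
}

// Loosely mirrors `is_import_from_incomplete` below: scan the tokens
// before the cursor in reverse and decide whether the only sensible
// completion is the `import` keyword. `typed` says whether the user
// has already started typing a word at the cursor.
fn suggest_import(tokens: &[Tok], typed: bool) -> bool {
    enum State {
        Start,
        ImportKeyword,
        ModulePossiblyDotted,
        ModuleOnlyDotted,
    }
    let mut state = if typed { State::Start } else { State::ImportKeyword };
    for &tok in tokens.iter().rev() {
        state = match (state, tok) {
            // The partially typed word (e.g. the `i` in `from typing i`)
            // is consumed first.
            (State::Start, Tok::Name) => State::ImportKeyword,
            // A module that is, so far, only dots (relative imports).
            (State::ImportKeyword, Tok::Dot) => State::ModuleOnlyDotted,
            (State::ImportKeyword, Tok::Name) => State::ModulePossiblyDotted,
            (State::ModuleOnlyDotted, Tok::Dot) => State::ModuleOnlyDotted,
            (State::ModulePossiblyDotted, Tok::Name | Tok::Dot) => State::ModulePossiblyDotted,
            // Reaching `from` with a plausible module: suggest `import`.
            (State::ModulePossiblyDotted | State::ModuleOnlyDotted, Tok::From) => return true,
            _ => return false,
        };
    }
    false
}

fn main() {
    use Tok::*;
    // `from os.path i<CURSOR>` -> suggest `import`.
    assert!(suggest_import(&[From, Name, Dot, Name, Name], true));
    // `from typing <CURSOR>` (nothing typed yet) -> suggest `import`.
    assert!(suggest_import(&[From, Name], false));
    // `from os.i<CURSOR>` -> the `i` is part of a dotted module name,
    // not a partial `import`, so nothing is suggested.
    assert!(!suggest_import(&[From, Name, Dot, Name], true));
}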

1 file changed: +204 −4 lines changed

crates/ty_ide/src/completion.rs

Lines changed: 204 additions & 4 deletions
@@ -160,6 +160,20 @@ impl<'db> Completion<'db> {
                 .and_then(|ty| imp(db, ty, &CompletionKindVisitor::default()))
         })
     }
+
+    fn keyword(name: &str) -> Self {
+        Completion {
+            name: name.into(),
+            insert: None,
+            ty: None,
+            kind: Some(CompletionKind::Keyword),
+            module_name: None,
+            import: None,
+            builtin: false,
+            is_type_check_only: false,
+            documentation: None,
+        }
+    }
 }
 
 /// The "kind" of a completion.
@@ -212,14 +226,16 @@ pub fn completion<'db>(
     offset: TextSize,
 ) -> Vec<Completion<'db>> {
     let parsed = parsed_module(db, file).load(db);
-
     let tokens = tokens_start_before(parsed.tokens(), offset);
+    let typed = find_typed_text(db, file, &parsed, offset);
 
-    if is_in_comment(tokens) || is_in_string(tokens) || is_in_definition_place(db, tokens, file) {
+    if is_in_no_completions_place(db, tokens, file) {
         return vec![];
     }
+    if let Some(completions) = only_keyword_completion(tokens, typed.as_deref()) {
+        return vec![completions];
+    }
 
-    let typed = find_typed_text(db, file, &parsed, offset);
     let typed_query = typed
         .as_deref()
         .map(QueryPattern::new)
@@ -309,6 +325,17 @@ fn add_keyword_value_completions<'db>(
     }
 }
 
+/// When the tokens indicate that the last token should be precisely one
+/// possible keyword, we provide a single completion for it.
+///
+/// `typed` should be the text that we think the user has typed so far.
+fn only_keyword_completion<'db>(tokens: &[Token], typed: Option<&str>) -> Option<Completion<'db>> {
+    if is_import_from_incomplete(tokens, typed) {
+        return Some(Completion::keyword("import"));
+    }
+    None
+}
+
 /// Adds completions not in scope.
 ///
 /// `scoped` should be information about the identified scope
@@ -801,6 +828,67 @@ fn import_tokens(tokens: &[Token]) -> Option<(&Token, &Token)> {
     None
 }
 
+/// Looks for the start of a `from module <CURSOR>` statement.
+///
+/// If found, `true` is returned.
+///
+/// `typed` should be the text that we think the user has typed so far.
+fn is_import_from_incomplete(tokens: &[Token], typed: Option<&str>) -> bool {
+    // N.B. The implementation here is very similar to
+    // `from_import_tokens`. The main difference is that
+    // we're just looking for whether we should suggest
+    // the `import` keyword. So this is a little simpler.
+
+    use TokenKind as TK;
+
+    const LIMIT: usize = 1_000;
+
+    /// A state used to "parse" the tokens preceding the user's cursor,
+    /// in reverse, to detect a "from import" statement.
+    enum S {
+        Start,
+        ImportKeyword,
+        ModulePossiblyDotted,
+        ModuleOnlyDotted,
+    }
+
+    let mut state = S::Start;
+    if typed.is_none() {
+        state = S::ImportKeyword;
+    }
+    // Move backward through the tokens until we get to
+    // the `from` token.
+    for token in tokens.iter().rev().take(LIMIT) {
+        state = match (state, token.kind()) {
+            // Match an incomplete `import` keyword.
+            //
+            // It's okay to pop off a newline token here initially,
+            // since it may occur before the user starts typing
+            // `import` but after the module name.
+            (S::Start, TK::Newline | TK::Name | TK::Import) => S::ImportKeyword,
+            // We are a bit more careful with how we parse the module
+            // here than in `from_import_tokens`. In particular, we
+            // want to make sure we don't incorrectly suggest `import`
+            // for `from os.i<CURSOR>`. If we aren't careful, then
+            // `i` could be considered an incomplete `import` keyword
+            // and `os.` is the module. But of course, ending with a
+            // `.` (unless the entire module is dots) is invalid.
+            (S::ImportKeyword, TK::Dot | TK::Ellipsis) => S::ModuleOnlyDotted,
+            (S::ImportKeyword, TK::Name | TK::Case | TK::Match | TK::Type | TK::Unknown) => {
+                S::ModulePossiblyDotted
+            }
+            (S::ModuleOnlyDotted, TK::Dot | TK::Ellipsis) => S::ModuleOnlyDotted,
+            (
+                S::ModulePossiblyDotted,
+                TK::Name | TK::Dot | TK::Ellipsis | TK::Case | TK::Match | TK::Type | TK::Unknown,
+            ) => S::ModulePossiblyDotted,
+            (S::ModulePossiblyDotted | S::ModuleOnlyDotted, TK::From) => return true,
+            _ => return false,
+        };
+    }
+    false
+}
+
 /// Looks for the text typed immediately before the cursor offset
 /// given.
 ///
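To make the dotted-module states concrete, here is a hand trace (an illustration, not part of the commit) of the reverse scan for `from ..foo i<CURSOR>`, where `typed` is `Some("i")` and the reversed tokens are `Name("i")`, `Name("foo")`, `Dot`, `Dot`, `From`:

    Name("i")    S::Start                 -> S::ImportKeyword         (the partial word at the cursor)
    Name("foo")  S::ImportKeyword         -> S::ModulePossiblyDotted
    Dot          S::ModulePossiblyDotted  -> S::ModulePossiblyDotted
    Dot          S::ModulePossiblyDotted  -> S::ModulePossiblyDotted
    From         S::ModulePossiblyDotted  -> return true              (suggest `import`)

For `from os.i<CURSOR>`, by contrast, the `Dot` immediately after the partial word moves the scan to `S::ModuleOnlyDotted`, and the `Name("os")` that follows matches no arm, so no `import` completion is offered.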
@@ -815,7 +903,10 @@ fn find_typed_text(
     let source = source_text(db, file);
     let tokens = tokens_start_before(parsed.tokens(), offset);
     let last = tokens.last()?;
-    if !matches!(last.kind(), TokenKind::Name) {
+    // It's odd to include `TokenKind::Import` here, but it
+    // indicates that the user has typed `import`. This is
+    // useful to know in some contexts.
+    if !matches!(last.kind(), TokenKind::Name | TokenKind::Import) {
         return None;
     }
     // This one's weird, but if the cursor is beyond
@@ -830,6 +921,11 @@
     Some(source[last.range()].to_string())
 }
 
+/// Whether the last token is in a place where we should not provide completions.
+fn is_in_no_completions_place(db: &dyn Db, tokens: &[Token], file: File) -> bool {
+    is_in_comment(tokens) || is_in_string(tokens) || is_in_definition_place(db, tokens, file)
+}
+
 /// Whether the last token is within a comment or not.
 fn is_in_comment(tokens: &[Token]) -> bool {
     tokens.last().is_some_and(|t| t.kind().is_comment())
@@ -4216,6 +4312,110 @@ type <CURSOR>
         ");
     }
 
+    #[test]
+    fn from_import_i_suggests_import() {
+        let builder = completion_test_builder("from typing i<CURSOR>");
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    #[test]
+    fn from_import_import_suggests_nothing() {
+        let builder = completion_test_builder("from typing import<CURSOR>");
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    #[test]
+    fn from_import_importt_suggests_import() {
+        let builder = completion_test_builder("from typing importt<CURSOR>");
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    #[test]
+    fn from_import_space_suggests_import() {
+        let builder = completion_test_builder("from typing <CURSOR>");
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    #[test]
+    fn from_import_no_space_not_suggests_import() {
+        let builder = completion_test_builder("from typing<CURSOR>");
+        assert_snapshot!(builder.build().snapshot(), @r"
+        typing
+        typing_extensions
+        ");
+    }
+
+    #[test]
+    fn from_import_two_imports_suggests_import() {
+        let builder = completion_test_builder(
+            "from collections.abc import Sequence
+from typing i<CURSOR>",
+        );
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    /// The following behaviour may not be reflected in editors, since LSP
+    /// clients may do their own filtering of completion suggestions.
+    #[test]
+    fn from_import_random_name_suggests_import() {
+        let builder = completion_test_builder("from typing aa<CURSOR>");
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    #[test]
+    fn from_import_dotted_name_suggests_import() {
+        let builder = completion_test_builder("from collections.abc i<CURSOR>");
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    #[test]
+    fn from_import_relative_import_suggests_import() {
+        let builder = CursorTest::builder()
+            .source("main.py", "from .foo i<CURSOR>")
+            .source("foo.py", "")
+            .completion_test_builder();
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    #[test]
+    fn from_import_dotted_name_relative_import_suggests_import() {
+        let builder = CursorTest::builder()
+            .source("main.py", "from .foo.bar i<CURSOR>")
+            .source("foo/bar.py", "")
+            .completion_test_builder();
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    #[test]
+    fn from_import_nested_dotted_name_relative_import_suggests_import() {
+        let builder = CursorTest::builder()
+            .source("src/main.py", "from ..foo i<CURSOR>")
+            .source("foo.py", "")
+            .completion_test_builder();
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    #[test]
+    fn from_import_nested_very_dotted_name_relative_import_suggests_import() {
+        let builder = CursorTest::builder()
+            // N.B. the `...` tokenizes as `TokenKind::Ellipsis`
+            .source("src/main.py", "from ...foo i<CURSOR>")
+            .source("foo.py", "")
+            .completion_test_builder();
+        assert_snapshot!(builder.build().snapshot(), @"import");
+    }
+
+    #[test]
+    fn from_import_incomplete() {
+        let builder = completion_test_builder(
+            "from collections.abc i
+
+ZQZQZQ = 1
+ZQ<CURSOR>",
+        );
+        assert_snapshot!(builder.build().snapshot(), @"ZQZQZQ");
+    }
+
     /// A way to create a simple single-file (named `main.py`) completion test
     /// builder.
     ///

0 commit comments