@@ -11,7 +11,7 @@ use ruff_python_ast::{
     AnyNodeRef, BytesLiteral, Expr, FString, InterpolatedStringElement, Stmt, StringLiteral,
     TypeParam,
 };
-use ruff_text_size::{Ranged, TextLen, TextRange};
+use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
 use std::ops::Deref;
 use ty_python_semantic::{
     HasType, SemanticModel, semantic_index::definition::DefinitionKind, types::Type,
@@ -226,7 +226,12 @@ impl<'db> SemanticTokenVisitor<'db> {
         let range = ranged.range();
         // Only emit tokens that intersect with the range filter, if one is specified
         if let Some(range_filter) = self.range_filter {
-            if range.intersect(range_filter).is_none() {
+            // Only include ranges that have a non-empty overlap. Adjacent ranges
+            // should be excluded.
+            if range
+                .intersect(range_filter)
+                .is_none_or(TextRange::is_empty)
+            {
                 return;
             }
         }
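Below is a minimal sketch (not part of the diff, offsets are illustrative) of the `ruff_text_size` behavior the added check accounts for: `TextRange::intersect` returns `Some` of an empty range when two ranges merely touch, so checking `.is_none()` alone would still admit tokens that only abut the requested range.

```rust
use ruff_text_size::{TextRange, TextSize};

fn main() {
    // A token at [0, 5) and a filter at [5, 10) share only the boundary offset 5.
    let token = TextRange::new(TextSize::from(0), TextSize::from(5));
    let filter = TextRange::new(TextSize::from(5), TextSize::from(10));

    // `intersect` yields Some(5..5): an empty range, not None.
    let overlap = token.intersect(filter);
    assert_eq!(overlap, Some(TextRange::empty(TextSize::from(5))));

    // Treating an empty intersection like no intersection excludes tokens
    // that only touch the boundary of the requested range.
    assert!(overlap.is_none_or(TextRange::is_empty));
}
```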
@@ -446,11 +451,11 @@ impl<'db> SemanticTokenVisitor<'db> {
         let name_start = name.start();

         // Split the dotted name and calculate positions for each part
-        let mut current_offset = ruff_text_size::TextSize::default();
+        let mut current_offset = TextSize::default();
         for part in name_str.split('.') {
             if !part.is_empty() {
                 self.add_token(
-                    ruff_text_size::TextRange::at(name_start + current_offset, part.text_len()),
+                    TextRange::at(name_start + current_offset, part.text_len()),
                     token_type,
                     SemanticTokenModifier::empty(),
                 );
@@ -926,6 +931,7 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> {
 mod tests {
     use super::*;
     use crate::tests::cursor_test;
+
     use insta::assert_snapshot;

     /// Helper function to get semantic tokens for full file (for testing)
@@ -1231,10 +1237,7 @@ def function2():

         // Get the range that covers only the second function
         // Hardcoded offsets: function2 starts at position 42, source ends at position 108
-        let range = ruff_text_size::TextRange::new(
-            ruff_text_size::TextSize::from(42u32),
-            ruff_text_size::TextSize::from(108u32),
-        );
+        let range = TextRange::new(TextSize::from(42u32), TextSize::from(108u32));

         let range_tokens = semantic_tokens(&test.db, test.cursor.file, Some(range));

@@ -1278,6 +1281,31 @@ def function2():
         }
     }

+    /// When a token starts right at where the requested range ends,
+    /// don't include it in the semantic tokens.
+    #[test]
+    fn test_semantic_tokens_range_excludes_boundary_tokens() {
+        let test = cursor_test(
+            "
+x = 1
+y = 2
+z = 3<CURSOR>
+",
+        );
+
+        // Range [6..13) starts where "1" ends and ends where "z" starts.
+        // Expected: only "y" @ 7..8 and "2" @ 11..12 (non-empty overlap with the target range).
+        // Not included: "1" @ 5..6 and "z" @ 13..14 (adjacent at offsets 6 and 13, but not overlapping).
+        let range = TextRange::new(TextSize::from(6), TextSize::from(13));
+
+        let range_tokens = semantic_tokens(&test.db, test.cursor.file, Some(range));
+
+        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &range_tokens), @r#"
+    "y" @ 7..8: Variable
+    "2" @ 11..12: Number
+    "#);
+    }
+
     #[test]
     fn test_dotted_module_names() {
         let test = cursor_test(