diff --git a/crates/ruff/src/directives.rs b/crates/ruff/src/directives.rs
index d4c59a9a756d64..42f09bdc45b6fe 100644
--- a/crates/ruff/src/directives.rs
+++ b/crates/ruff/src/directives.rs
@@ -95,31 +95,6 @@ fn extract_noqa_line_for(lxr: &[LexResult], locator: &Locator, indexer: &Indexer
                 break;
             }
 
-            // F-strings can be multi-line even without triple quotes. For example,
-            //
-            // ```python
-            // f"foo {
-            //     x
-            //         *
-            //             y
-            // }"
-            // ```
-            //
-            // We expect `noqa` directives on the last line of the f-string.
-            Tok::FStringEnd => {
-                // SAFETY: If we're at the end of a f-string, the indexer must have a
-                // corresponding f-string range for it.
-                let fstring_start = indexer
-                    .fstring_ranges()
-                    .outermost(range.start())
-                    .unwrap()
-                    .start();
-                string_mappings.push(TextRange::new(
-                    locator.line_start(fstring_start),
-                    range.end(),
-                ));
-            }
-
             // For multi-line strings, we expect `noqa` directives on the last line of the
             // string.
             Tok::String {
@@ -138,6 +113,26 @@ fn extract_noqa_line_for(lxr: &[LexResult], locator: &Locator, indexer: &Indexer
         }
     }
 
+    // The capacity allocated here might be more than we need if there are
+    // nested f-strings.
+    let mut fstring_mappings = Vec::with_capacity(indexer.fstring_ranges().len());
+
+    // For nested f-strings, we expect `noqa` directives on the last line of the
+    // outermost f-string. The last f-string range will be used to skip over
+    // the inner f-strings.
+    let mut last_fstring_range: TextRange = TextRange::default();
+    for fstring_range in indexer.fstring_ranges().values() {
+        if last_fstring_range.contains_range(*fstring_range) {
+            continue;
+        }
+        let new_range = TextRange::new(
+            locator.line_start(fstring_range.start()),
+            fstring_range.end(),
+        );
+        fstring_mappings.push(new_range);
+        last_fstring_range = new_range;
+    }
+
     let mut continuation_mappings = Vec::new();
 
     // For continuations, we expect `noqa` directives on the last line of the
@@ -162,19 +157,25 @@ fn extract_noqa_line_for(lxr: &[LexResult], locator: &Locator, indexer: &Indexer
     }
 
     // Merge the mappings in sorted order
-    let mut mappings =
-        NoqaMapping::with_capacity(continuation_mappings.len() + string_mappings.len());
+    let mut mappings = NoqaMapping::with_capacity(
+        continuation_mappings.len() + string_mappings.len() + fstring_mappings.len(),
+    );
 
     let mut continuation_mappings = continuation_mappings.into_iter().peekable();
     let mut string_mappings = string_mappings.into_iter().peekable();
-
-    while let (Some(continuation), Some(string)) =
-        (continuation_mappings.peek(), string_mappings.peek())
-    {
-        if continuation.start() <= string.start() {
+    let mut fstring_mappings = fstring_mappings.into_iter().peekable();
+
+    while let (Some(continuation), Some(string), Some(fstring)) = (
+        continuation_mappings.peek(),
+        string_mappings.peek(),
+        fstring_mappings.peek(),
+    ) {
+        if continuation.start() <= string.start() && continuation.start() <= fstring.start() {
             mappings.push_mapping(continuation_mappings.next().unwrap());
-        } else {
+        } else if string.start() <= continuation.start() && string.start() <= fstring.start() {
             mappings.push_mapping(string_mappings.next().unwrap());
+        } else {
+            mappings.push_mapping(fstring_mappings.next().unwrap());
         }
     }
 
@@ -186,6 +187,10 @@ fn extract_noqa_line_for(lxr: &[LexResult], locator: &Locator, indexer: &Indexer
         mappings.push_mapping(mapping);
     }
 
+    for mapping in fstring_mappings {
+        mappings.push_mapping(mapping);
+    }
+
     mappings
 }
 
@@ -498,6 +503,17 @@ ghi
            NoqaMapping::from_iter([TextRange::new(TextSize::from(6), TextSize::from(29))])
        );
 
+        let contents = "x = 1
+y = f'''abc
+def {f'''nested
+fstring''' f'another nested'}
+end'''
+";
+        assert_eq!(
+            noqa_mappings(contents),
+            NoqaMapping::from_iter([TextRange::new(TextSize::from(6), TextSize::from(70))])
+        );
+
         let contents = r"x = \
         1";
         assert_eq!(
diff --git a/crates/ruff_python_index/src/fstring_ranges.rs b/crates/ruff_python_index/src/fstring_ranges.rs
index d2d13802a5cf43..b95c9b665083ba 100644
--- a/crates/ruff_python_index/src/fstring_ranges.rs
+++ b/crates/ruff_python_index/src/fstring_ranges.rs
@@ -50,9 +50,20 @@ impl FStringRanges {
             .map(|(_, range)| *range)
     }
 
-    #[cfg(test)]
-    pub(crate) fn ranges(&self) -> impl Iterator<Item = TextRange> + '_ {
-        self.raw.values().copied()
+    /// Returns an iterator over all f-string [`TextRange`] sorted by their
+    /// start location.
+    ///
+    /// For nested f-strings, the outermost f-string is yielded first, moving
+    /// inwards with each iteration.
+    #[inline]
+    pub fn values(&self) -> impl Iterator<Item = &TextRange> + '_ {
+        self.raw.values()
+    }
+
+    /// Returns the number of f-string ranges stored.
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.raw.len()
     }
 }
 
diff --git a/crates/ruff_python_index/src/indexer.rs b/crates/ruff_python_index/src/indexer.rs
index a48eab6cb8c2d6..78bf2606b20338 100644
--- a/crates/ruff_python_index/src/indexer.rs
+++ b/crates/ruff_python_index/src/indexer.rs
@@ -353,7 +353,11 @@ f"implicit " f"concatenation"
         let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let indexer = Indexer::from_tokens(lxr.as_slice(), &Locator::new(contents));
         assert_eq!(
-            indexer.fstring_ranges().ranges().collect::<Vec<_>>(),
+            indexer
+                .fstring_ranges()
+                .values()
+                .copied()
+                .collect::<Vec<_>>(),
             &[
                 TextRange::new(TextSize::from(0), TextSize::from(18)),
                 TextRange::new(TextSize::from(19), TextSize::from(55)),
@@ -385,7 +389,11 @@ f-string"""}
         let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let indexer = Indexer::from_tokens(lxr.as_slice(), &Locator::new(contents));
         assert_eq!(
-            indexer.fstring_ranges().ranges().collect::<Vec<_>>(),
+            indexer
+                .fstring_ranges()
+                .values()
+                .copied()
+                .collect::<Vec<_>>(),
             &[
                 TextRange::new(TextSize::from(0), TextSize::from(39)),
                 TextRange::new(TextSize::from(40), TextSize::from(68)),
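The core idea in the `fstring_mappings` loop above is that `indexer.fstring_ranges().values()` yields ranges sorted by start offset with outer f-strings before the ones nested inside them, so any range fully contained in the last range that was kept can be skipped. The following is a standalone sketch of that walk, outside the patch: `outermost_ranges`, `std::ops::Range<usize>` (standing in for `TextRange::contains_range`), and the sample offsets are all illustrative assumptions, not ruff's API.

```rust
use std::ops::Range;

/// Keep only the outermost ranges from a list sorted by start offset,
/// skipping any range fully contained in the last range that was kept.
/// (Illustrative stand-in for the `fstring_mappings` loop in the patch.)
fn outermost_ranges(sorted: &[Range<usize>]) -> Vec<Range<usize>> {
    // Like the patch, this may over-allocate when nested ranges get skipped.
    let mut outermost = Vec::with_capacity(sorted.len());
    let mut last: Range<usize> = 0..0;
    for range in sorted {
        // A nested f-string starts and ends inside the last outermost range.
        if last.start <= range.start && range.end <= last.end {
            continue;
        }
        outermost.push(range.clone());
        last = range.clone();
    }
    outermost
}

fn main() {
    // One outer range containing a nested one, followed by a separate range
    // (hypothetical offsets): the nested range is dropped, the rest are kept.
    let ranges = [0..23, 9..16, 24..30];
    assert_eq!(outermost_ranges(&ranges), vec![0..23, 24..30]);
}
```

In the patch itself the kept ranges are additionally widened to start at the beginning of their first line (`locator.line_start(...)`), so a `noqa` on the last line of the outermost f-string suppresses diagnostics anywhere inside it, including inside nested f-strings.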