perf(formatter): Improve is_expression_parenthesized performance (#…
MichaReiser authored and konstin committed Jul 19, 2023
1 parent 0c6c503 commit a9ce793
Showing 2 changed files with 27 additions and 9 deletions.
crates/ruff_python_formatter/src/expression/parentheses.rs (17 additions, 9 deletions)
@@ -1,6 +1,6 @@
 use crate::context::NodeLevel;
 use crate::prelude::*;
-use crate::trivia::{first_non_trivia_token, first_non_trivia_token_rev, Token, TokenKind};
+use crate::trivia::{first_non_trivia_token, SimpleTokenizer, Token, TokenKind};
 use ruff_formatter::prelude::tag::Condition;
 use ruff_formatter::{format_args, write, Argument, Arguments};
 use ruff_python_ast::node::AnyNodeRef;
@@ -72,19 +72,27 @@ pub enum Parentheses {
 }
 
 pub(crate) fn is_expression_parenthesized(expr: AnyNodeRef, contents: &str) -> bool {
-    matches!(
+    // First test if there's a closing parentheses because it tends to be cheaper.
+    if matches!(
         first_non_trivia_token(expr.end(), contents),
         Some(Token {
             kind: TokenKind::RParen,
             ..
         })
-    ) && matches!(
-        first_non_trivia_token_rev(expr.start(), contents),
-        Some(Token {
-            kind: TokenKind::LParen,
-            ..
-        })
-    )
+    ) {
+        let mut tokenizer =
+            SimpleTokenizer::up_to_without_back_comment(expr.start(), contents).skip_trivia();
+
+        matches!(
+            tokenizer.next_back(),
+            Some(Token {
+                kind: TokenKind::LParen,
+                ..
+            })
+        )
+    } else {
+        false
+    }
 }
 
 /// Formats `content` enclosed by the `left` and `right` parentheses. The implementation also ensures
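The guard order here is the whole optimization: the forward probe for `)` after the expression is cheap, while the backwards scan for `(` is the expensive half, so it now only runs once a closing parenthesis has actually been found. A minimal, self-contained sketch of that short-circuit shape, using character scans as hypothetical stand-ins for ruff's token-level checks (none of these names are ruff's API):

// Hypothetical stand-ins for ruff's trivia utilities, for illustration only.
fn next_token_is_rparen(source: &str, end: usize) -> bool {
    // Cheap: skip whitespace forwards and inspect the next character.
    source[end..].chars().find(|c| !c.is_whitespace()) == Some(')')
}

fn prev_token_is_lparen(source: &str, start: usize) -> bool {
    // Costlier in the real lexer: a backwards scan must also rule out that
    // the candidate `(` sits inside a comment on the same line.
    source[..start].chars().rev().find(|c| !c.is_whitespace()) == Some('(')
}

fn is_parenthesized(source: &str, range: std::ops::Range<usize>) -> bool {
    // Cheap forward probe first; the backwards scan only runs when a
    // closing parenthesis was actually found.
    next_token_is_rparen(source, range.end) && prev_token_is_lparen(source, range.start)
}

fn main() {
    let source = "x = (a + b)";
    assert!(is_parenthesized(source, 5..10)); // the `a + b` node
    assert!(!is_parenthesized(source, 0..1)); // the `x` node
}

In the actual change the backwards half also gets cheaper, because `SimpleTokenizer::up_to_without_back_comment` skips comment detection entirely (see the trivia.rs change below).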
crates/ruff_python_formatter/src/trivia.rs (10 additions, 0 deletions)
@@ -274,10 +274,20 @@ impl<'a> SimpleTokenizer<'a> {
         Self::new(source, range)
     }
 
+    /// Creates a tokenizer that lexes tokens from the start of `source` up to `offset`.
+    pub(crate) fn up_to(offset: TextSize, source: &'a str) -> Self {
+        Self::new(source, TextRange::up_to(offset))
+    }
+
+    /// Creates a tokenizer that lexes tokens from the start of `source` up to `offset`, and informs
+    /// the lexer that the line at `offset` contains no comments. This can significantly speed up backwards lexing
+    /// because the lexer doesn't need to scan for comments.
+    pub(crate) fn up_to_without_back_comment(offset: TextSize, source: &'a str) -> Self {
+        let mut tokenizer = Self::up_to(offset, source);
+        tokenizer.back_line_has_no_comment = true;
+        tokenizer
+    }
+
     fn to_keyword_or_other(&self, range: TextRange) -> TokenKind {
         let source = &self.source[range];
         match source {
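Why `back_line_has_no_comment` helps: when lexing backwards, the tokenizer cannot trust a character like `(` until it has ruled out that the character sits inside a `#` comment, which normally means rescanning the line from its start. The new constructor lets a caller assert up front that the line ending at `offset` is comment-free; `is_expression_parenthesized` can do so because any `#` earlier on that line would have commented out the expression itself. A hypothetical illustration of the ambiguity a naive backwards scan runs into (not ruff's implementation):

// A naive backwards scan for `(` that ignores comments (illustration only).
fn prev_non_ws_is_lparen(source: &str, offset: usize) -> bool {
    source[..offset].chars().rev().find(|c| !c.is_whitespace()) == Some('(')
}

fn main() {
    let real = "call(\n    value)";
    let trap = "x # not a call(\nvalue";

    // Both scans find a `(`...
    assert!(prev_non_ws_is_lparen(real, real.find("value").unwrap()));
    assert!(prev_non_ws_is_lparen(trap, trap.find("value").unwrap()));
    // ...but in `trap` the `(` is part of a comment, so a correct backwards
    // lexer must rescan the line for `#` unless the caller guarantees the
    // line has no comment, which is what up_to_without_back_comment asserts.
}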
