diff --git a/crates/ruff/src/autofix/edits.rs b/crates/ruff/src/autofix/edits.rs
index 05f0d2c320c7f7..142dc2d6a12bd4 100644
--- a/crates/ruff/src/autofix/edits.rs
+++ b/crates/ruff/src/autofix/edits.rs
@@ -1,7 +1,7 @@
 //! Interface for generating autofix edits from higher-level actions (e.g., "remove an argument").
 use anyhow::{bail, Result};
 use ruff_python_ast::{self as ast, ExceptHandler, Expr, Keyword, Ranged, Stmt};
-use ruff_python_parser::{lexer, Mode};
+use ruff_python_parser::lexer;
 use ruff_text_size::{TextLen, TextRange, TextSize};
 use ruff_diagnostics::Edit;
@@ -11,6 +11,7 @@ use ruff_python_trivia::{has_leading_content, is_python_whitespace, PythonWhites
 use ruff_source_file::{Locator, NewlineWithTrailingNewline};
 use crate::autofix::codemods;
+use crate::source_kind::PySourceType;
 /// Return the `Fix` to use when deleting a `Stmt`.
 ///
@@ -81,15 +82,10 @@ pub(crate) fn remove_argument(
     args: &[Expr],
     keywords: &[Keyword],
     remove_parentheses: bool,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> Result<Edit> {
     // TODO(sbrugman): Preserve trailing comments.
     let contents = locator.after(call_at);
-    let mode = if is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
     let mut fix_start = None;
     let mut fix_end = None;
@@ -102,7 +98,8 @@ pub(crate) fn remove_argument(
     if n_arguments == 1 {
         // Case 1: there is only one argument.
         let mut count = 0u32;
-        for (tok, range) in lexer::lex_starts_at(contents, mode, call_at).flatten() {
+        for (tok, range) in lexer::lex_starts_at(contents, source_type.as_mode(), call_at).flatten()
+        {
             if tok.is_lpar() {
                 if count == 0 {
                     fix_start = Some(if remove_parentheses {
@@ -134,7 +131,8 @@ pub(crate) fn remove_argument(
     {
         // Case 2: argument or keyword is _not_ the last node.
         let mut seen_comma = false;
-        for (tok, range) in lexer::lex_starts_at(contents, mode, call_at).flatten() {
+        for (tok, range) in lexer::lex_starts_at(contents, source_type.as_mode(), call_at).flatten()
+        {
             if seen_comma {
                 if tok.is_non_logical_newline() {
                     // Also delete any non-logical newlines after the comma.
@@ -157,7 +155,8 @@ pub(crate) fn remove_argument(
     } else {
         // Case 3: argument or keyword is the last node, so we have to find the last
         // comma in the stmt.
-        for (tok, range) in lexer::lex_starts_at(contents, mode, call_at).flatten() {
+        for (tok, range) in lexer::lex_starts_at(contents, source_type.as_mode(), call_at).flatten()
+        {
             if range.start() == expr_range.start() {
                 fix_end = Some(expr_range.end());
                 break;
diff --git a/crates/ruff/src/checkers/ast/analyze/arguments.rs b/crates/ruff/src/checkers/ast/analyze/arguments.rs
index 36bf02a7b8e014..6711a1e35ff474 100644
--- a/crates/ruff/src/checkers/ast/analyze/arguments.rs
+++ b/crates/ruff/src/checkers/ast/analyze/arguments.rs
@@ -15,7 +15,7 @@ pub(crate) fn arguments(arguments: &Arguments, checker: &mut Checker) {
     if checker.settings.rules.enabled(Rule::ImplicitOptional) {
         ruff::rules::implicit_optional(checker, arguments);
     }
-    if checker.is_stub {
+    if checker.source_type.is_stub() {
         if checker.enabled(Rule::TypedArgumentDefaultInStub) {
             flake8_pyi::rules::typed_argument_simple_defaults(checker, arguments);
         }
diff --git a/crates/ruff/src/checkers/ast/analyze/bindings.rs b/crates/ruff/src/checkers/ast/analyze/bindings.rs
index 1c8804272ebefb..a41c0331740b48 100644
--- a/crates/ruff/src/checkers/ast/analyze/bindings.rs
+++ b/crates/ruff/src/checkers/ast/analyze/bindings.rs
@@ -57,7 +57,7 @@ pub(crate) fn bindings(checker: &mut Checker) {
                 checker.diagnostics.push(diagnostic);
             }
         }
-        if checker.is_stub {
+        if checker.source_type.is_stub() {
             if checker.enabled(Rule::UnaliasedCollectionsAbcSetImport) {
                 if let Some(diagnostic) =
                     flake8_pyi::rules::unaliased_collections_abc_set_import(checker, binding)
diff --git a/crates/ruff/src/checkers/ast/analyze/deferred_scopes.rs b/crates/ruff/src/checkers/ast/analyze/deferred_scopes.rs
index 1d0f2a5ba0bb91..3ed7e082a8213a 100644
--- a/crates/ruff/src/checkers/ast/analyze/deferred_scopes.rs
+++ b/crates/ruff/src/checkers/ast/analyze/deferred_scopes.rs
@@ -33,7 +33,7 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
     // Identify any valid runtime imports. If a module is imported at runtime, and
     // used at runtime, then by default, we avoid flagging any other
     // imports from that module as typing-only.
-    let enforce_typing_imports = !checker.is_stub
+    let enforce_typing_imports = !checker.source_type.is_stub()
         && checker.any_enabled(&[
             Rule::RuntimeImportInTypeCheckingBlock,
             Rule::TypingOnlyFirstPartyImport,
@@ -226,7 +226,7 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
             pyflakes::rules::unused_annotation(checker, scope, &mut diagnostics);
         }
-        if !checker.is_stub {
+        if !checker.source_type.is_stub() {
             if checker.any_enabled(&[
                 Rule::UnusedClassMethodArgument,
                 Rule::UnusedFunctionArgument,
diff --git a/crates/ruff/src/checkers/ast/analyze/definitions.rs b/crates/ruff/src/checkers/ast/analyze/definitions.rs
index d46e261413910d..5111be5e1b431c 100644
--- a/crates/ruff/src/checkers/ast/analyze/definitions.rs
+++ b/crates/ruff/src/checkers/ast/analyze/definitions.rs
@@ -30,7 +30,7 @@ pub(crate) fn definitions(checker: &mut Checker) {
         Rule::MissingTypeKwargs,
         Rule::MissingTypeSelf,
     ]);
-    let enforce_stubs = checker.is_stub
+    let enforce_stubs = checker.source_type.is_stub()
         && checker.any_enabled(&[Rule::DocstringInStub, Rule::IterMethodReturnIterable]);
     let enforce_docstrings = checker.any_enabled(&[
         Rule::BlankLineAfterLastSection,
diff --git a/crates/ruff/src/checkers/ast/analyze/expression.rs b/crates/ruff/src/checkers/ast/analyze/expression.rs
index fb4501fe45f5dc..12da9eaa4cc561 100644
--- a/crates/ruff/src/checkers/ast/analyze/expression.rs
+++ b/crates/ruff/src/checkers/ast/analyze/expression.rs
@@ -31,7 +31,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
             if let Some(operator) = typing::to_pep604_operator(value, slice, &checker.semantic) {
                 if checker.enabled(Rule::FutureRewritableTypeAnnotation) {
-                    if !checker.is_stub
+                    if !checker.source_type.is_stub()
                         && checker.settings.target_version < PythonVersion::Py310
                         && checker.settings.target_version >= PythonVersion::Py37
                         && !checker.semantic.future_annotations()
@@ -44,7 +44,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
                 }
             }
             if checker.enabled(Rule::NonPEP604Annotation) {
-                if checker.is_stub
+                if checker.source_type.is_stub()
                     || checker.settings.target_version >= PythonVersion::Py310
                     || (checker.settings.target_version >= PythonVersion::Py37
                         && checker.semantic.future_annotations()
@@ -59,7 +59,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
         // Ex) list[...]
         if checker.enabled(Rule::FutureRequiredTypeAnnotation) {
-            if !checker.is_stub
+            if !checker.source_type.is_stub()
                 && checker.settings.target_version < PythonVersion::Py39
                 && !checker.semantic.future_annotations()
                 && checker.semantic.in_annotation()
@@ -152,7 +152,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
             if checker.enabled(Rule::NumpyDeprecatedFunction) {
                 numpy::rules::deprecated_function(checker, expr);
             }
-            if checker.is_stub {
+            if checker.source_type.is_stub() {
                 if checker.enabled(Rule::CollectionsNamedTuple) {
                     flake8_pyi::rules::collections_named_tuple(checker, expr);
                 }
@@ -167,7 +167,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
                 typing::to_pep585_generic(expr, &checker.semantic)
             {
                 if checker.enabled(Rule::FutureRewritableTypeAnnotation) {
-                    if !checker.is_stub
+                    if !checker.source_type.is_stub()
                         && checker.settings.target_version < PythonVersion::Py39
                         && checker.settings.target_version >= PythonVersion::Py37
                         && !checker.semantic.future_annotations()
@@ -180,7 +180,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
                 }
             }
             if checker.enabled(Rule::NonPEP585Annotation) {
-                if checker.is_stub
+                if checker.source_type.is_stub()
                     || checker.settings.target_version >= PythonVersion::Py39
                     || (checker.settings.target_version >= PythonVersion::Py37
                         && checker.semantic.future_annotations()
@@ -267,7 +267,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
         ]) {
             if let Some(replacement) = typing::to_pep585_generic(expr, &checker.semantic) {
                 if checker.enabled(Rule::FutureRewritableTypeAnnotation) {
-                    if !checker.is_stub
+                    if !checker.source_type.is_stub()
                         && checker.settings.target_version < PythonVersion::Py39
                         && checker.settings.target_version >= PythonVersion::Py37
                         && !checker.semantic.future_annotations()
@@ -280,7 +280,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
                 }
             }
             if checker.enabled(Rule::NonPEP585Annotation) {
-                if checker.is_stub
+                if checker.source_type.is_stub()
                     || checker.settings.target_version >= PythonVersion::Py39
                     || (checker.settings.target_version >= PythonVersion::Py37
                         && checker.semantic.future_annotations()
@@ -318,7 +318,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
         if checker.enabled(Rule::PrivateMemberAccess) {
             flake8_self::rules::private_member_access(checker, expr);
         }
-        if checker.is_stub {
+        if checker.source_type.is_stub() {
             if checker.enabled(Rule::CollectionsNamedTuple) {
                 flake8_pyi::rules::collections_named_tuple(checker, expr);
             }
@@ -880,7 +880,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
             if checker.enabled(Rule::DjangoLocalsInRenderFunction) {
                 flake8_django::rules::locals_in_render_function(checker, func, args, keywords);
             }
-            if checker.is_stub && checker.enabled(Rule::UnsupportedMethodCallOnAll) {
+            if checker.source_type.is_stub() && checker.enabled(Rule::UnsupportedMethodCallOnAll) {
                 flake8_pyi::rules::unsupported_method_call_on_all(checker, func);
             }
         }
@@ -1076,7 +1076,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
         }) => {
             // Ex) `str | None`
             if checker.enabled(Rule::FutureRequiredTypeAnnotation) {
-                if !checker.is_stub
+                if !checker.source_type.is_stub()
                     && checker.settings.target_version < PythonVersion::Py310
                     && !checker.semantic.future_annotations()
                     && checker.semantic.in_annotation()
@@ -1088,7 +1088,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
                 );
             }
         }
-        if checker.is_stub {
+        if checker.source_type.is_stub() {
            if checker.enabled(Rule::DuplicateUnionMember)
                && checker.semantic.in_type_definition()
                // Avoid duplicate checks if the parent is an `|`
@@ -1216,7 +1216,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
             kind: _,
             range: _,
         }) => {
-            if checker.is_stub && checker.enabled(Rule::NumericLiteralTooLong) {
+            if checker.source_type.is_stub() && checker.enabled(Rule::NumericLiteralTooLong) {
                 flake8_pyi::rules::numeric_literal_too_long(checker, expr);
             }
         }
@@ -1225,7 +1225,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
             kind: _,
             range: _,
         }) => {
-            if checker.is_stub && checker.enabled(Rule::StringOrBytesTooLong) {
+            if checker.source_type.is_stub() && checker.enabled(Rule::StringOrBytesTooLong) {
                 flake8_pyi::rules::string_or_bytes_too_long(checker, expr);
             }
         }
@@ -1253,7 +1253,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
             if checker.enabled(Rule::UnicodeKindPrefix) {
                 pyupgrade::rules::unicode_kind_prefix(checker, expr, kind.as_deref());
             }
-            if checker.is_stub {
+            if checker.source_type.is_stub() {
                 if checker.enabled(Rule::StringOrBytesTooLong) {
                     flake8_pyi::rules::string_or_bytes_too_long(checker, expr);
                 }
diff --git a/crates/ruff/src/checkers/ast/analyze/statement.rs b/crates/ruff/src/checkers/ast/analyze/statement.rs
index 0f169d6969a011..1ce76d4a28e46a 100644
--- a/crates/ruff/src/checkers/ast/analyze/statement.rs
+++ b/crates/ruff/src/checkers/ast/analyze/statement.rs
@@ -131,7 +131,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                 checker.diagnostics.push(diagnostic);
             }
         }
-        if checker.is_stub {
+        if checker.source_type.is_stub() {
             if checker.enabled(Rule::PassStatementStubBody) {
                 flake8_pyi::rules::pass_statement_stub_body(checker, body);
             }
@@ -391,7 +391,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
             if checker.enabled(Rule::DjangoUnorderedBodyContentInModel) {
                 flake8_django::rules::unordered_body_content_in_model(checker, bases, body);
             }
-            if !checker.is_stub {
+            if !checker.source_type.is_stub() {
                 if checker.enabled(Rule::DjangoModelWithoutDunderStr) {
                     flake8_django::rules::model_without_dunder_str(checker, class_def);
                 }
@@ -429,7 +429,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                     checker.diagnostics.push(diagnostic);
                 }
             }
-            if !checker.is_stub {
+            if !checker.source_type.is_stub() {
                 if checker.any_enabled(&[
                     Rule::AbstractBaseClassWithoutAbstractMethod,
                     Rule::EmptyMethodWithoutAbstractDecorator,
@@ -439,7 +439,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                 );
             }
         }
-        if checker.is_stub {
+        if checker.source_type.is_stub() {
             if checker.enabled(Rule::PassStatementStubBody) {
                 flake8_pyi::rules::pass_statement_stub_body(checker, body);
             }
@@ -541,7 +541,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                     alias,
                 );
             }
-            if !checker.is_stub {
+            if !checker.source_type.is_stub() {
                 if checker.enabled(Rule::UselessImportAlias) {
                     pylint::rules::useless_import_alias(checker, alias);
                 }
@@ -716,7 +716,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                 checker.diagnostics.push(diagnostic);
             }
         }
-        if checker.is_stub {
+        if checker.source_type.is_stub() {
            if checker.enabled(Rule::FutureAnnotationsInStub) {
                flake8_pyi::rules::from_future_import(checker, import_from);
            }
@@ -861,7 +861,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                 checker.diagnostics.push(diagnostic);
             }
         }
-            if !checker.is_stub {
+            if !checker.source_type.is_stub() {
                 if checker.enabled(Rule::UselessImportAlias) {
                     pylint::rules::useless_import_alias(checker, alias);
                 }
@@ -985,7 +985,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                 checker.diagnostics.push(diagnostic);
             }
         }
-        if checker.is_stub {
+        if checker.source_type.is_stub() {
             if checker.any_enabled(&[
                 Rule::UnrecognizedVersionInfoCheck,
                 Rule::PatchVersionComparison,
@@ -1297,7 +1297,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
             if checker.settings.rules.enabled(Rule::TypeBivariance) {
                 pylint::rules::type_bivariance(checker, value);
             }
-            if checker.is_stub {
+            if checker.source_type.is_stub() {
                 if checker.any_enabled(&[
                     Rule::UnprefixedTypeParam,
                     Rule::AssignmentDefaultInStub,
@@ -1362,7 +1362,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                     stmt,
                 );
             }
-            if checker.is_stub {
+            if checker.source_type.is_stub() {
                 if let Some(value) = value {
                     if checker.enabled(Rule::AssignmentDefaultInStub) {
                         // Ignore assignments in function bodies; those are covered by other rules.
diff --git a/crates/ruff/src/checkers/ast/mod.rs b/crates/ruff/src/checkers/ast/mod.rs
index 719757e126054a..7e061b013b81fc 100644
--- a/crates/ruff/src/checkers/ast/mod.rs
+++ b/crates/ruff/src/checkers/ast/mod.rs
@@ -52,7 +52,6 @@ use ruff_python_semantic::{
     ModuleKind, ScopeId, ScopeKind, SemanticModel, SemanticModelFlags, StarImport, SubmoduleImport,
 };
 use ruff_python_stdlib::builtins::{BUILTINS, MAGIC_GLOBALS};
-use ruff_python_stdlib::path::{is_jupyter_notebook, is_python_stub_file};
 use ruff_source_file::Locator;
 use crate::checkers::ast::deferred::Deferred;
@@ -62,6 +61,7 @@ use crate::noqa::NoqaMapping;
 use crate::registry::Rule;
 use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade};
 use crate::settings::{flags, Settings};
+use crate::source_kind::PySourceType;
 use crate::{docstrings, noqa};
 mod analyze;
@@ -74,10 +74,8 @@ pub(crate) struct Checker<'a> {
     package: Option<&'a Path>,
     /// The module representation of the current file (e.g., `foo.bar`).
     module_path: Option<&'a [String]>,
-    /// Whether the current file is a stub (`.pyi`) file.
-    is_stub: bool,
-    /// Whether the current file is a Jupyter notebook (`.ipynb`) file.
-    pub(crate) is_jupyter_notebook: bool,
+    /// The [`PySourceType`] of the current file.
+    pub(crate) source_type: PySourceType,
     /// The [`flags::Noqa`] for the current analysis (i.e., whether to respect suppression
     /// comments).
     noqa: flags::Noqa,
@@ -119,6 +117,7 @@ impl<'a> Checker<'a> {
         stylist: &'a Stylist,
         indexer: &'a Indexer,
         importer: Importer<'a>,
+        source_type: PySourceType,
     ) -> Checker<'a> {
         Checker {
             settings,
@@ -127,8 +126,7 @@ impl<'a> Checker<'a> {
             path,
             package,
             module_path: module.path(),
-            is_stub: is_python_stub_file(path),
-            is_jupyter_notebook: is_jupyter_notebook(path),
+            source_type,
             locator,
             stylist,
             indexer,
@@ -1712,7 +1710,7 @@ impl<'a> Checker<'a> {
                 pyupgrade::rules::quoted_annotation(self, value, range);
             }
         }
-        if self.is_stub {
+        if self.source_type.is_stub() {
             if self.enabled(Rule::QuotedAnnotationInStub) {
                 flake8_pyi::rules::quoted_annotation_in_stub(self, value, range);
             }
@@ -1841,6 +1839,7 @@ pub(crate) fn check_ast(
     noqa: flags::Noqa,
     path: &Path,
     package: Option<&Path>,
+    source_type: PySourceType,
 ) -> Vec<Diagnostic> {
     let module_path = package.and_then(|package| to_module_path(package, path));
     let module = Module {
@@ -1868,6 +1867,7 @@ pub(crate) fn check_ast(
         stylist,
         indexer,
         Importer::new(python_ast, locator, stylist),
+        source_type,
     );
     checker.bind_builtins();
diff --git a/crates/ruff/src/checkers/imports.rs b/crates/ruff/src/checkers/imports.rs
index 7a16036445a517..e263cb55b6754b 100644
--- a/crates/ruff/src/checkers/imports.rs
+++ b/crates/ruff/src/checkers/imports.rs
@@ -10,7 +10,7 @@
 use ruff_python_ast::imports::{ImportMap, ModuleImport};
 use ruff_python_ast::statement_visitor::StatementVisitor;
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
-use ruff_python_stdlib::path::is_python_stub_file;
+
 use ruff_source_file::Locator;
 use crate::directives::IsortDirectives;
@@ -18,7 +18,7 @@
 use crate::registry::Rule;
 use crate::rules::isort;
 use crate::rules::isort::block::{Block, BlockBuilder};
 use crate::settings::Settings;
-use crate::source_kind::SourceKind;
+use crate::source_kind::{PySourceType, SourceKind};
 fn extract_import_map(path: &Path, package: Option<&Path>, blocks: &[&Block]) -> Option<ImportMap> {
     let Some(package) = package else {
@@ -87,12 +87,12 @@ pub(crate) fn check_imports(
     path: &Path,
     package: Option<&Path>,
     source_kind: Option<&SourceKind>,
+    source_type: PySourceType,
 ) -> (Vec<Diagnostic>, Option<ImportMap>) {
-    let is_stub = is_python_stub_file(path);
-
     // Extract all import blocks from the AST.
     let tracker = {
-        let mut tracker = BlockBuilder::new(locator, directives, is_stub, source_kind);
+        let mut tracker =
+            BlockBuilder::new(locator, directives, source_type.is_stub(), source_kind);
         tracker.visit_body(python_ast);
         tracker
     };
@@ -110,7 +110,7 @@ pub(crate) fn check_imports(
                 indexer,
                 settings,
                 package,
-                source_kind.map_or(false, SourceKind::is_jupyter),
+                source_type,
             ) {
                 diagnostics.push(diagnostic);
             }
@@ -119,7 +119,11 @@ pub(crate) fn check_imports(
     }
     if settings.rules.enabled(Rule::MissingRequiredImport) {
         diagnostics.extend(isort::rules::add_required_imports(
-            python_ast, locator, stylist, settings, is_stub,
+            python_ast,
+            locator,
+            stylist,
+            settings,
+            source_type,
         ));
     }
diff --git a/crates/ruff/src/importer/insertion.rs b/crates/ruff/src/importer/insertion.rs
index 73e1957381417b..dd1e7dd344d827 100644
--- a/crates/ruff/src/importer/insertion.rs
+++ b/crates/ruff/src/importer/insertion.rs
@@ -2,7 +2,7 @@
 use std::ops::Add;
 use ruff_python_ast::{Ranged, Stmt};
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, Tok};
 use ruff_text_size::TextSize;
 use ruff_diagnostics::Edit;
@@ -11,6 +11,8 @@
 use ruff_python_codegen::Stylist;
 use ruff_python_trivia::{textwrap::indent, PythonWhitespace};
 use ruff_source_file::{Locator, UniversalNewlineIterator};
+use crate::source_kind::PySourceType;
+
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub(super) enum Placement<'a> {
     /// The content will be inserted inline with the existing code (i.e., within semicolon-delimited
@@ -137,7 +139,7 @@ impl<'a> Insertion<'a> {
         mut location: TextSize,
         locator: &Locator<'a>,
         stylist: &Stylist,
-        is_jupyter_notebook: bool,
+        source_type: PySourceType,
     ) -> Insertion<'a> {
         enum Awaiting {
             Colon(u32),
@@ -145,14 +147,9 @@ impl<'a> Insertion<'a> {
             Indent,
         }
-        let mode = if is_jupyter_notebook {
-            Mode::Jupyter
-        } else {
-            Mode::Module
-        };
-
         let mut state = Awaiting::Colon(0);
-        for (tok, range) in lexer::lex_starts_at(locator.after(location), mode, location).flatten()
+        for (tok, range) in
+            lexer::lex_starts_at(locator.after(location), source_type.as_mode(), location).flatten()
         {
             match state {
                 // Iterate until we find the colon indicating the start of the block body.
@@ -313,6 +310,8 @@ mod tests {
     use ruff_python_codegen::Stylist;
     use ruff_source_file::{LineEnding, Locator};
+    use crate::source_kind::PySourceType;
+
     use super::Insertion;
     #[test]
@@ -433,7 +432,7 @@ x = 1
         let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, Mode::Module);
         let locator = Locator::new(contents);
         let stylist = Stylist::from_tokens(&tokens, &locator);
-        Insertion::start_of_block(offset, &locator, &stylist, false)
+        Insertion::start_of_block(offset, &locator, &stylist, PySourceType::default())
     }
     let contents = "if True: pass";
diff --git a/crates/ruff/src/importer/mod.rs b/crates/ruff/src/importer/mod.rs
index 4c7446d9ec1750..46d42f0899c69e 100644
--- a/crates/ruff/src/importer/mod.rs
+++ b/crates/ruff/src/importer/mod.rs
@@ -21,6 +21,7 @@
 use crate::autofix;
 use crate::autofix::codemods::CodegenStylist;
 use crate::cst::matchers::{match_aliases, match_import_from, match_statement};
 use crate::importer::insertion::Insertion;
+use crate::source_kind::PySourceType;
 mod insertion;
@@ -121,7 +122,7 @@ impl<'a> Importer<'a> {
         import: &StmtImports,
         at: TextSize,
         semantic: &SemanticModel,
-        is_jupyter_notebook: bool,
+        source_type: PySourceType,
     ) -> Result<TypingImportEdit> {
         // Generate the modified import statement.
         let content = autofix::codemods::retain_imports(
@@ -141,7 +142,7 @@ impl<'a> Importer<'a> {
         // Add the import to a `TYPE_CHECKING` block.
         let add_import_edit = if let Some(block) = self.preceding_type_checking_block(at) {
             // Add the import to the `TYPE_CHECKING` block.
-            self.add_to_type_checking_block(&content, block.start(), is_jupyter_notebook)
+            self.add_to_type_checking_block(&content, block.start(), source_type)
         } else {
             // Add the import to a new `TYPE_CHECKING` block.
             self.add_type_checking_block(
@@ -358,10 +359,9 @@ impl<'a> Importer<'a> {
         &self,
         content: &str,
         at: TextSize,
-        is_jupyter_notebook: bool,
+        source_type: PySourceType,
     ) -> Edit {
-        Insertion::start_of_block(at, self.locator, self.stylist, is_jupyter_notebook)
-            .into_edit(content)
+        Insertion::start_of_block(at, self.locator, self.stylist, source_type).into_edit(content)
     }
     /// Return the import statement that precedes the given position, if any.
diff --git a/crates/ruff/src/linter.rs b/crates/ruff/src/linter.rs
index e4a12032e81217..6f9f20bdd40c8b 100644
--- a/crates/ruff/src/linter.rs
+++ b/crates/ruff/src/linter.rs
@@ -7,14 +7,14 @@
 use colored::Colorize;
 use itertools::Itertools;
 use log::error;
 use ruff_python_parser::lexer::LexResult;
-use ruff_python_parser::{Mode, ParseError};
+use ruff_python_parser::ParseError;
 use rustc_hash::FxHashMap;
 use ruff_diagnostics::Diagnostic;
 use ruff_python_ast::imports::ImportMap;
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
-use ruff_python_stdlib::path::is_python_stub_file;
+
 use ruff_source_file::{Locator, SourceFileBuilder};
 use crate::autofix::{fix_file, FixResult};
@@ -32,7 +32,7 @@
 use crate::noqa::add_noqa;
 use crate::registry::{AsRule, Rule};
 use crate::rules::pycodestyle;
 use crate::settings::{flags, Settings};
-use crate::source_kind::SourceKind;
+use crate::source_kind::{PySourceType, SourceKind};
 use crate::{directives, fs};
 const CARGO_PKG_NAME: &str = env!("CARGO_PKG_NAME");
@@ -81,6 +81,7 @@ pub fn check_path(
     settings: &Settings,
     noqa: flags::Noqa,
     source_kind: Option<&SourceKind>,
+    source_type: PySourceType,
 ) -> LinterResult<(Vec<Diagnostic>, Option<ImportMap>)> {
     // Aggregate all diagnostics.
     let mut diagnostics = vec![];
@@ -101,9 +102,13 @@ pub fn check_path(
         .iter_enabled()
         .any(|rule_code| rule_code.lint_source().is_tokens())
     {
-        let is_stub = is_python_stub_file(path);
         diagnostics.extend(check_tokens(
-            &tokens, path, locator, indexer, settings, is_stub,
+            &tokens,
+            path,
+            locator,
+            indexer,
+            settings,
+            source_type.is_stub(),
         ));
     }
@@ -141,7 +146,7 @@ pub fn check_path(
         match ruff_python_parser::parse_program_tokens(
             tokens,
            &path.to_string_lossy(),
-            source_kind.map_or(false, SourceKind::is_jupyter),
+            source_type.is_jupyter(),
         ) {
             Ok(python_ast) => {
                 if use_ast {
@@ -155,6 +160,7 @@ pub fn check_path(
                         noqa,
                         path,
                         package,
+                        source_type,
                     ));
                 }
                 if use_imports {
@@ -168,6 +174,7 @@ pub fn check_path(
                         path,
                         package,
                         source_kind,
+                        source_type,
                     );
                     imports = module_imports;
                     diagnostics.extend(import_diagnostics);
@@ -260,11 +267,13 @@
 const MAX_ITERATIONS: usize = 100;
 /// Add any missing `# noqa` pragmas to the source code at the given `Path`.
 pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings) -> Result<usize> {
+    let source_type = PySourceType::from(path);
+
     // Read the file from disk.
     let contents = std::fs::read_to_string(path)?;
     // Tokenize once.
-    let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, Mode::Module);
+    let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, source_type.as_mode());
     // Map row and column locations to byte slices (lazily).
     let locator = Locator::new(&contents);
@@ -298,6 +307,7 @@ pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings
         settings,
         flags::Noqa::Disabled,
         None,
+        source_type,
     );
     // Log any parse errors.
@@ -330,15 +340,10 @@ pub fn lint_only(
     settings: &Settings,
     noqa: flags::Noqa,
     source_kind: Option<&SourceKind>,
+    source_type: PySourceType,
 ) -> LinterResult<(Vec<Message>, Option<ImportMap>)> {
-    let mode = if source_kind.map_or(false, SourceKind::is_jupyter) {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-
     // Tokenize once.
-    let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, mode);
+    let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, source_type.as_mode());
     // Map row and column locations to byte slices (lazily).
     let locator = Locator::new(contents);
@@ -369,6 +374,7 @@ pub fn lint_only(
         settings,
         noqa,
         source_kind,
+        source_type,
     );
     result.map(|(diagnostics, imports)| {
@@ -415,6 +421,7 @@ pub fn lint_fix<'a>(
     noqa: flags::Noqa,
     settings: &Settings,
     source_kind: &mut SourceKind,
+    source_type: PySourceType,
 ) -> Result<FixerResult<'a>> {
     let mut transformed = Cow::Borrowed(contents);
@@ -427,16 +434,11 @@ pub fn lint_fix<'a>(
     // Track whether the _initial_ source code was parseable.
     let mut parseable = false;
-    let mode = if source_kind.is_jupyter() {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-
     // Continuously autofix until the source code stabilizes.
     loop {
         // Tokenize once.
-        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&transformed, mode);
+        let tokens: Vec<LexResult> =
+            ruff_python_parser::tokenize(&transformed, source_type.as_mode());
         // Map row and column locations to byte slices (lazily).
         let locator = Locator::new(&transformed);
@@ -467,6 +469,7 @@ pub fn lint_fix<'a>(
             settings,
             noqa,
             Some(source_kind),
+            source_type,
         );
         if iterations == 0 {
diff --git a/crates/ruff/src/rules/flake8_annotations/fixes.rs b/crates/ruff/src/rules/flake8_annotations/fixes.rs
index 2481c795126e89..db90d128770a97 100644
--- a/crates/ruff/src/rules/flake8_annotations/fixes.rs
+++ b/crates/ruff/src/rules/flake8_annotations/fixes.rs
@@ -1,29 +1,28 @@
 use anyhow::{bail, Result};
 use ruff_python_ast::{Ranged, Stmt};
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, Tok};
 use ruff_diagnostics::Edit;
 use ruff_source_file::Locator;
+use crate::source_kind::PySourceType;
+
 /// ANN204
 pub(crate) fn add_return_annotation(
     locator: &Locator,
     stmt: &Stmt,
     annotation: &str,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> Result<Edit> {
     let contents = &locator.contents()[stmt.range()];
-    let mode = if is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
     // Find the colon (following the `def` keyword).
     let mut seen_lpar = false;
     let mut seen_rpar = false;
     let mut count = 0u32;
-    for (tok, range) in lexer::lex_starts_at(contents, mode, stmt.start()).flatten() {
+    for (tok, range) in
+        lexer::lex_starts_at(contents, source_type.as_mode(), stmt.start()).flatten()
+    {
         if seen_lpar && seen_rpar {
             if matches!(tok, Tok::Colon) {
                 return Ok(Edit::insertion(format!(" -> {annotation}"), range.start()));
diff --git a/crates/ruff/src/rules/flake8_annotations/rules/definition.rs b/crates/ruff/src/rules/flake8_annotations/rules/definition.rs
index f3745ad30f58b2..9f106bc235bdb8 100644
--- a/crates/ruff/src/rules/flake8_annotations/rules/definition.rs
+++ b/crates/ruff/src/rules/flake8_annotations/rules/definition.rs
@@ -710,7 +710,7 @@ pub(crate) fn definition(
                         checker.locator(),
                         stmt,
                         "None",
-                        checker.is_jupyter_notebook,
+                        checker.source_type,
                     )
                     .map(Fix::suggested)
                 });
@@ -733,7 +733,7 @@ pub(crate) fn definition(
                         checker.locator(),
                         stmt,
                         return_type,
-                        checker.is_jupyter_notebook,
+                        checker.source_type,
                     )
                     .map(Fix::suggested)
                 });
diff --git a/crates/ruff/src/rules/flake8_pytest_style/rules/fixture.rs b/crates/ruff/src/rules/flake8_pytest_style/rules/fixture.rs
index a91a92841fd300..c640b22b7ba946 100644
--- a/crates/ruff/src/rules/flake8_pytest_style/rules/fixture.rs
+++ b/crates/ruff/src/rules/flake8_pytest_style/rules/fixture.rs
@@ -405,7 +405,7 @@ fn check_fixture_decorator(checker: &mut Checker, func_name: &str, decorator: &D
                     args,
                     keywords,
                     false,
-                    checker.is_jupyter_notebook,
+                    checker.source_type,
                 )
                 .map(Fix::suggested)
             });
diff --git a/crates/ruff/src/rules/flake8_pytest_style/rules/parametrize.rs b/crates/ruff/src/rules/flake8_pytest_style/rules/parametrize.rs
index 01696bb3ecc852..81c8c329fed040 100644
--- a/crates/ruff/src/rules/flake8_pytest_style/rules/parametrize.rs
+++ b/crates/ruff/src/rules/flake8_pytest_style/rules/parametrize.rs
@@ -1,5 +1,5 @@
 use ruff_python_ast::{self as ast, Constant, Decorator, Expr, ExprContext, Ranged};
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, Tok};
 use ruff_text_size::TextRange;
 use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
@@ -9,6 +9,7 @@
 use ruff_source_file::Locator;
 use crate::checkers::ast::Checker;
 use crate::registry::{AsRule, Rule};
+use crate::source_kind::PySourceType;
 use super::super::types;
 use super::helpers::{is_pytest_parametrize, split_names};
@@ -99,21 +100,19 @@ fn get_parametrize_name_range(
     decorator: &Decorator,
     expr: &Expr,
     locator: &Locator,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> TextRange {
     let mut locations = Vec::new();
     let mut implicit_concat = None;
-    let mode = if is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-
     // The parentheses are not part of the AST, so we need to tokenize the
     // decorator to find them.
-    for (tok, range) in
-        lexer::lex_starts_at(locator.slice(decorator.range()), mode, decorator.start()).flatten()
+    for (tok, range) in lexer::lex_starts_at(
+        locator.slice(decorator.range()),
+        source_type.as_mode(),
+        decorator.start(),
+    )
+    .flatten()
     {
         match tok {
             Tok::Lpar => locations.push(range.start()),
@@ -152,7 +151,7 @@ fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) {
                     decorator,
                     expr,
                     checker.locator(),
-                    checker.is_jupyter_notebook,
+                    checker.source_type,
                 );
                 let mut diagnostic = Diagnostic::new(
                     PytestParametrizeNamesWrongType {
@@ -187,7 +186,7 @@ fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) {
                     decorator,
                     expr,
                     checker.locator(),
-                    checker.is_jupyter_notebook,
+                    checker.source_type,
                 );
                 let mut diagnostic = Diagnostic::new(
                     PytestParametrizeNamesWrongType {
diff --git a/crates/ruff/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs b/crates/ruff/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs
index 5963029b80b874..f8b0c3498fff09 100644
--- a/crates/ruff/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs
+++ b/crates/ruff/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs
@@ -1,5 +1,5 @@
 use ruff_python_ast::{self as ast, Expr, Ranged};
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, Tok};
 use ruff_text_size::{TextRange, TextSize};
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
@@ -8,6 +8,7 @@
 use ruff_source_file::Locator;
 use crate::checkers::ast::Checker;
 use crate::registry::AsRule;
+use crate::source_kind::PySourceType;
 /// ## What it does
 /// Checks for unnecessary parentheses on raised exceptions.
@@ -66,7 +67,7 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr:
         return;
     }
-    let range = match_parens(func.end(), checker.locator(), checker.is_jupyter_notebook)
+    let range = match_parens(func.end(), checker.locator(), checker.source_type)
         .expect("Expected call to include parentheses");
     let mut diagnostic = Diagnostic::new(UnnecessaryParenOnRaiseException, range);
     if checker.patch(diagnostic.kind.rule()) {
@@ -81,7 +82,7 @@
 fn match_parens(
     start: TextSize,
     locator: &Locator,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> Option<TextRange> {
     let contents = &locator.contents()[usize::from(start)..];
     let mut fix_end = None;
     let mut count = 0u32;
-    let mode = if is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-
-    for (tok, range) in lexer::lex_starts_at(contents, mode, start).flatten() {
+    for (tok, range) in lexer::lex_starts_at(contents, source_type.as_mode(), start).flatten() {
         match tok {
             Tok::Lpar => {
                 if count == 0 {
diff --git a/crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs b/crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs
index 0df7d46ece7fa5..c526dd3f50d46f 100644
--- a/crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs
+++ b/crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs
@@ -374,7 +374,7 @@ pub(crate) fn nested_if_statements(checker: &mut Checker, stmt_if: &StmtIf, pare
     let colon = first_colon_range(
         TextRange::new(test.end(), first_stmt.start()),
         checker.locator().contents(),
-        checker.is_jupyter_notebook,
+        checker.source_type.is_jupyter(),
     );
     // Check if the parent is already emitting a larger diagnostic including this if statement
diff --git a/crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs b/crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs
index 26f409ab1d4407..55c5d0d78105a3 100644
--- a/crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs
+++ b/crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs
@@ -119,7 +119,7 @@ pub(crate) fn multiple_with_statements(
             body.first().expect("Expected body to be non-empty").start(),
         ),
         checker.locator().contents(),
-        checker.is_jupyter_notebook,
+        checker.source_type.is_jupyter(),
     );
     let mut diagnostic = Diagnostic::new(
diff --git a/crates/ruff/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs b/crates/ruff/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs
index a787e32e547be7..a40301ec61703a 100644
--- a/crates/ruff/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs
+++ b/crates/ruff/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs
@@ -447,7 +447,7 @@ fn fix_imports(checker: &Checker, stmt_id: NodeId, imports: &[Import]) -> Result
         },
         at,
         checker.semantic(),
-        checker.is_jupyter_notebook,
+        checker.source_type,
     )?;
     Ok(
diff --git a/crates/ruff/src/rules/isort/annotate.rs b/crates/ruff/src/rules/isort/annotate.rs
index 89f1ac7f9be250..fed4d3b5885419 100644
--- a/crates/ruff/src/rules/isort/annotate.rs
+++ b/crates/ruff/src/rules/isort/annotate.rs
@@ -3,6 +3,8 @@
 use ruff_text_size::TextRange;
 use ruff_source_file::Locator;
+use crate::source_kind::PySourceType;
+
 use super::comments::Comment;
 use super::helpers::trailing_comma;
 use super::types::{AliasData, TrailingComma};
@@ -13,7 +15,7 @@ pub(crate) fn annotate_imports<'a>(
     comments: Vec<Comment<'a>>,
     locator: &Locator,
     split_on_trailing_comma: bool,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> Vec<AnnotatedImport<'a>> {
     let mut comments_iter = comments.into_iter().peekable();
@@ -120,7 +122,7 @@ pub(crate) fn annotate_imports<'a>(
                 names: aliases,
                 level: level.map(|level| level.to_u32()),
                 trailing_comma: if split_on_trailing_comma {
-                    trailing_comma(import, locator, is_jupyter_notebook)
+                    trailing_comma(import, locator, source_type)
                 } else {
                     TrailingComma::default()
                 },
diff --git a/crates/ruff/src/rules/isort/comments.rs b/crates/ruff/src/rules/isort/comments.rs
index d0b1564f864977..bd26e5fdb2db1b 100644
--- a/crates/ruff/src/rules/isort/comments.rs
+++ b/crates/ruff/src/rules/isort/comments.rs
@@ -1,10 +1,12 @@
 use std::borrow::Cow;
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, Tok};
 use ruff_text_size::{TextRange, TextSize};
 use ruff_source_file::Locator;
+use crate::source_kind::PySourceType;
+
 #[derive(Debug)]
 pub(crate) struct Comment<'a> {
     pub(crate) value: Cow<'a, str>,
@@ -25,15 +27,10 @@ impl Comment<'_> {
 pub(crate) fn collect_comments<'a>(
     range: TextRange,
     locator: &'a Locator,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> Vec<Comment<'a>> {
     let contents = locator.slice(range);
-    let mode = if is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-    lexer::lex_starts_at(contents, mode, range.start())
+    lexer::lex_starts_at(contents, source_type.as_mode(), range.start())
         .flatten()
         .filter_map(|(tok, range)| {
             if let Tok::Comment(value) = tok {
diff --git a/crates/ruff/src/rules/isort/helpers.rs b/crates/ruff/src/rules/isort/helpers.rs
index 89038f7070f28f..1e4225c7963ea9 100644
--- a/crates/ruff/src/rules/isort/helpers.rs
+++ b/crates/ruff/src/rules/isort/helpers.rs
@@ -1,27 +1,23 @@
 use ruff_python_ast::{Ranged, Stmt};
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, Tok};
 use ruff_python_trivia::PythonWhitespace;
 use ruff_source_file::{Locator, UniversalNewlines};
 use crate::rules::isort::types::TrailingComma;
+use crate::source_kind::PySourceType;
 /// Return `true` if a `Stmt::ImportFrom` statement ends with a magic
 /// trailing comma.
 pub(super) fn trailing_comma(
     stmt: &Stmt,
     locator: &Locator,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> TrailingComma {
     let contents = locator.slice(stmt.range());
     let mut count = 0u32;
     let mut trailing_comma = TrailingComma::Absent;
-    let mode = if is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-    for (tok, _) in lexer::lex_starts_at(contents, mode, stmt.start()).flatten() {
+    for (tok, _) in lexer::lex_starts_at(contents, source_type.as_mode(), stmt.start()).flatten() {
         if matches!(tok, Tok::Lpar) {
             count = count.saturating_add(1);
         }
diff --git a/crates/ruff/src/rules/isort/mod.rs b/crates/ruff/src/rules/isort/mod.rs
index 5587180636e61e..f32a46de70927e 100644
--- a/crates/ruff/src/rules/isort/mod.rs
+++ b/crates/ruff/src/rules/isort/mod.rs
@@ -22,6 +22,7 @@
 use crate::line_width::{LineLength, LineWidth};
 use crate::rules::isort::categorize::KnownModules;
 use crate::rules::isort::types::ImportBlock;
 use crate::settings::types::PythonVersion;
+use crate::source_kind::PySourceType;
 mod annotate;
 pub(crate) mod block;
@@ -72,7 +73,7 @@ pub(crate) fn format_imports(
     stylist: &Stylist,
     src: &[PathBuf],
     package: Option<&Path>,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
     combine_as_imports: bool,
     force_single_line: bool,
     force_sort_within_sections: bool,
@@ -100,7 +101,7 @@ pub(crate) fn format_imports(
         comments,
         locator,
         split_on_trailing_comma,
-        is_jupyter_notebook,
+        source_type,
     );
     // Normalize imports (i.e., deduplicate, aggregate `from` imports).
diff --git a/crates/ruff/src/rules/isort/rules/add_required_imports.rs b/crates/ruff/src/rules/isort/rules/add_required_imports.rs
index 9bcdc477e4342f..a50dd57070dc7b 100644
--- a/crates/ruff/src/rules/isort/rules/add_required_imports.rs
+++ b/crates/ruff/src/rules/isort/rules/add_required_imports.rs
@@ -13,6 +13,7 @@
 use ruff_source_file::Locator;
 use crate::importer::Importer;
 use crate::registry::Rule;
 use crate::settings::Settings;
+use crate::source_kind::PySourceType;
 /// ## What it does
 /// Adds any required imports, as specified by the user, to the top of the
@@ -91,7 +92,7 @@ fn add_required_import(
     locator: &Locator,
     stylist: &Stylist,
     settings: &Settings,
-    is_stub: bool,
+    source_type: PySourceType,
 ) -> Option<Diagnostic> {
     // Don't add imports to semantically-empty files.
     if python_ast.iter().all(is_docstring_stmt) {
     }
     // We don't need to add `__future__` imports to stubs.
-    if is_stub && required_import.is_future_import() {
+    if source_type.is_stub() && required_import.is_future_import() {
         return None;
     }
@@ -131,7 +132,7 @@ pub(crate) fn add_required_imports(
     locator: &Locator,
     stylist: &Stylist,
     settings: &Settings,
-    is_stub: bool,
+    source_type: PySourceType,
 ) -> Vec<Diagnostic> {
     settings
         .isort
@@ -172,7 +173,7 @@ pub(crate) fn add_required_imports(
                         locator,
                         stylist,
                         settings,
-                        is_stub,
+                        source_type,
                     )
                 })
                 .collect(),
@@ -190,7 +191,7 @@ pub(crate) fn add_required_imports(
                         locator,
                         stylist,
                         settings,
-                        is_stub,
+                        source_type,
                     )
                 })
                 .collect(),
diff --git a/crates/ruff/src/rules/isort/rules/organize_imports.rs b/crates/ruff/src/rules/isort/rules/organize_imports.rs
index 1cb8182f966e8e..175dda8f404c21 100644
--- a/crates/ruff/src/rules/isort/rules/organize_imports.rs
+++ b/crates/ruff/src/rules/isort/rules/organize_imports.rs
@@ -15,6 +15,7 @@
 use ruff_source_file::{Locator, UniversalNewlines};
 use crate::line_width::LineWidth;
 use crate::registry::AsRule;
 use crate::settings::Settings;
+use crate::source_kind::PySourceType;
 use super::super::block::Block;
 use super::super::{comments, format_imports};
@@ -87,7 +88,7 @@ pub(crate) fn organize_imports(
     indexer: &Indexer,
     settings: &Settings,
     package: Option<&Path>,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> Option<Diagnostic> {
     let indentation = locator.slice(extract_indentation_range(&block.imports, locator));
     let indentation = leading_indentation(indentation);
@@ -106,7 +107,7 @@ pub(crate) fn organize_imports(
     let comments = comments::collect_comments(
         TextRange::new(range.start(), locator.full_line_end(range.end())),
         locator,
-        is_jupyter_notebook,
+        source_type,
     );
     let trailing_line_end = if block.trailer.is_none() {
@@ -125,7 +126,7 @@ pub(crate) fn organize_imports(
         stylist,
         &settings.src,
         package,
-        is_jupyter_notebook,
+        source_type,
         settings.isort.combine_as_imports,
         settings.isort.force_single_line,
         settings.isort.force_sort_within_sections,
diff --git a/crates/ruff/src/rules/pandas_vet/rules/inplace_argument.rs b/crates/ruff/src/rules/pandas_vet/rules/inplace_argument.rs
index ed224051a319c2..4f5903a88dcb05 100644
--- a/crates/ruff/src/rules/pandas_vet/rules/inplace_argument.rs
+++ b/crates/ruff/src/rules/pandas_vet/rules/inplace_argument.rs
@@ -10,6 +10,7 @@
 use ruff_source_file::Locator;
 use crate::autofix::edits::remove_argument;
 use crate::checkers::ast::Checker;
 use crate::registry::AsRule;
+use crate::source_kind::PySourceType;
 /// ## What it does
 /// Checks for `inplace=True` usages in `pandas` function and method
@@ -107,7 +108,7 @@ pub(crate) fn inplace_argument(
                         keyword.range(),
                         args,
                         keywords,
-                        checker.is_jupyter_notebook,
+                        checker.source_type,
                     ) {
                         diagnostic.set_fix(fix);
                     }
@@ -131,7 +132,7 @@ fn convert_inplace_argument_to_assignment(
     expr_range: TextRange,
     args: &[Expr],
     keywords: &[Keyword],
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> Option<Fix> {
     // Add the assignment.
     let call = expr.as_call_expr()?;
@@ -149,7 +150,7 @@ fn convert_inplace_argument_to_assignment(
         args,
         keywords,
         false,
-        is_jupyter_notebook,
+        source_type,
     )
     .ok()?;
diff --git a/crates/ruff/src/rules/pyflakes/mod.rs b/crates/ruff/src/rules/pyflakes/mod.rs
index 38af4a57454751..e51f6cc8316829 100644
--- a/crates/ruff/src/rules/pyflakes/mod.rs
+++ b/crates/ruff/src/rules/pyflakes/mod.rs
@@ -12,7 +12,7 @@ mod tests {
     use anyhow::Result;
     use regex::Regex;
     use ruff_python_parser::lexer::LexResult;
-    use ruff_python_parser::Mode;
+
     use test_case::test_case;
     use ruff_diagnostics::Diagnostic;
@@ -25,6 +25,7 @@ mod tests {
     use crate::registry::{AsRule, Linter, Rule};
     use crate::rules::pyflakes;
     use crate::settings::{flags, Settings};
+    use crate::source_kind::PySourceType;
     use crate::test::{test_path, test_snippet};
     use crate::{assert_messages, directives};
@@ -504,8 +505,9 @@ mod tests {
     /// Note that all tests marked with `#[ignore]` should be considered TODOs.
     fn flakes(contents: &str, expected: &[Rule]) {
         let contents = dedent(contents);
+        let source_type = PySourceType::default();
         let settings = Settings::for_rules(Linter::Pyflakes.rules());
-        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, Mode::Module);
+        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, source_type.as_mode());
         let locator = Locator::new(&contents);
         let stylist = Stylist::from_tokens(&tokens, &locator);
         let indexer = Indexer::from_tokens(&tokens, &locator);
@@ -529,6 +531,7 @@ mod tests {
             &settings,
             flags::Noqa::Enabled,
             None,
+            source_type,
         );
         diagnostics.sort_by_key(Diagnostic::start);
         let actual = diagnostics
diff --git a/crates/ruff/src/rules/pyflakes/rules/f_string_missing_placeholders.rs b/crates/ruff/src/rules/pyflakes/rules/f_string_missing_placeholders.rs
index 3391e3c245ef1b..42e9efe2453069 100644
--- a/crates/ruff/src/rules/pyflakes/rules/f_string_missing_placeholders.rs
+++ b/crates/ruff/src/rules/pyflakes/rules/f_string_missing_placeholders.rs
@@ -1,5 +1,5 @@
 use ruff_python_ast::{Expr, Ranged};
-use ruff_python_parser::{lexer, Mode, StringKind, Tok};
+use ruff_python_parser::{lexer, StringKind, Tok};
 use ruff_text_size::{TextRange, TextSize};
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
@@ -8,6 +8,7 @@
 use ruff_source_file::Locator;
 use crate::checkers::ast::Checker;
 use crate::registry::AsRule;
+use crate::source_kind::PySourceType;
 /// ## What it does
 /// Checks for f-strings that do not contain any placeholder expressions.
@@ -52,10 +53,10 @@ impl AlwaysAutofixableViolation for FStringMissingPlaceholders {
 fn find_useless_f_strings<'a>(
     expr: &'a Expr,
     locator: &'a Locator,
-    mode: Mode,
+    source_type: PySourceType,
 ) -> impl Iterator<Item = (TextRange, TextRange)> + 'a {
     let contents = locator.slice(expr.range());
-    lexer::lex_starts_at(contents, mode, expr.start())
+    lexer::lex_starts_at(contents, source_type.as_mode(), expr.start())
         .flatten()
         .filter_map(|(tok, range)| match tok {
             Tok::String {
@@ -82,16 +83,13 @@
 /// F541
 pub(crate) fn f_string_missing_placeholders(expr: &Expr, values: &[Expr], checker: &mut Checker) {
-    let mode = if checker.is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
     if !values
         .iter()
         .any(|value| matches!(value, Expr::FormattedValue(_)))
     {
-        for (prefix_range, tok_range) in find_useless_f_strings(expr, checker.locator(), mode) {
+        for (prefix_range, tok_range) in
+            find_useless_f_strings(expr, checker.locator(), checker.source_type)
+        {
             let mut diagnostic = Diagnostic::new(FStringMissingPlaceholders, tok_range);
             if checker.patch(diagnostic.kind.rule()) {
                 diagnostic.set_fix(convert_f_string_to_regular_string(
diff --git a/crates/ruff/src/rules/pyflakes/rules/unused_variable.rs b/crates/ruff/src/rules/pyflakes/rules/unused_variable.rs
index 20544044641b81..15d81589a70a9f 100644
--- a/crates/ruff/src/rules/pyflakes/rules/unused_variable.rs
+++ b/crates/ruff/src/rules/pyflakes/rules/unused_variable.rs
@@ -1,6 +1,6 @@
 use itertools::Itertools;
 use ruff_python_ast::{self as ast, Ranged, Stmt};
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, Tok};
 use ruff_text_size::{TextRange, TextSize};
 use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
@@ -12,6 +12,7 @@
 use ruff_source_file::Locator;
 use crate::autofix::edits::delete_stmt;
 use crate::checkers::ast::Checker;
 use crate::registry::AsRule;
+use crate::source_kind::PySourceType;
 /// ## What it does
 /// Checks for the presence of unused variables in function scopes.
@@ -65,14 +66,14 @@ impl Violation for UnusedVariable {
 fn match_token_before<F>(
     location: TextSize,
     locator: &Locator,
-    mode: Mode,
+    source_type: PySourceType,
     f: F,
 ) -> Option<TextRange>
 where
     F: Fn(Tok) -> bool,
 {
     let contents = locator.after(location);
-    for ((_, range), (tok, _)) in lexer::lex_starts_at(contents, mode, location)
+    for ((_, range), (tok, _)) in lexer::lex_starts_at(contents, source_type.as_mode(), location)
         .flatten()
         .tuple_windows()
     {
@@ -88,7 +89,7 @@
 fn match_token_after<F>(
     location: TextSize,
     locator: &Locator,
-    mode: Mode,
+    source_type: PySourceType,
     f: F,
 ) -> Option<TextRange>
 where
@@ -101,7 +102,7 @@ where
     let mut sqb_count = 0u32;
     let mut brace_count = 0u32;
-    for ((tok, _), (_, range)) in lexer::lex_starts_at(contents, mode, location)
+    for ((tok, _), (_, range)) in lexer::lex_starts_at(contents, source_type.as_mode(), location)
         .flatten()
         .tuple_windows()
     {
@@ -144,7 +145,7 @@ where
 fn match_token_or_closing_brace<F>(
     location: TextSize,
     locator: &Locator,
-    mode: Mode,
+    source_type: PySourceType,
     f: F,
 ) -> Option<TextRange>
 where
@@ -157,7 +158,7 @@ where
     let mut sqb_count = 0u32;
     let mut brace_count = 0u32;
-    for (tok, range) in lexer::lex_starts_at(contents, mode, location).flatten() {
+    for (tok, range) in lexer::lex_starts_at(contents, source_type.as_mode(), location).flatten() {
         match tok {
             Tok::Lpar => {
                 par_count = par_count.saturating_add(1);
@@ -209,12 +210,6 @@ fn remove_unused_variable(
     range: TextRange,
     checker: &Checker,
 ) -> Option<Fix> {
-    let mode = if checker.is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-
     // First case: simple assignment (`x = 1`)
     if let Stmt::Assign(ast::StmtAssign { targets, value, .. }) = stmt {
         if let Some(target) = targets.iter().find(|target| range == target.range()) {
@@ -226,8 +221,10 @@ fn remove_unused_variable(
                 // but preserve the right-hand side.
                 let start = target.start();
                 let end =
-                    match_token_after(start, checker.locator(), mode, |tok| tok == Tok::Equal)?
-                        .start();
+                    match_token_after(start, checker.locator(), checker.source_type, |tok| {
+                        tok == Tok::Equal
+                    })?
+                    .start();
                 let edit = Edit::deletion(start, end);
                 Some(Fix::suggested(edit))
             } else {
@@ -252,8 +249,10 @@ fn remove_unused_variable(
                 // but preserve the right-hand side.
                 let start = stmt.start();
                 let end =
-                    match_token_after(start, checker.locator(), mode, |tok| tok == Tok::Equal)?
-                        .start();
+                    match_token_after(start, checker.locator(), checker.source_type, |tok| {
+                        tok == Tok::Equal
+                    })?
+                    .start();
                 let edit = Edit::deletion(start, end);
                 Some(Fix::suggested(edit))
             } else {
@@ -275,17 +274,19 @@ fn remove_unused_variable(
                 let start = match_token_before(
                     item.context_expr.start(),
                     checker.locator(),
-                    mode,
+                    checker.source_type,
                     |tok| tok == Tok::As,
                 )?
                 .end();
                 // Find the first colon, comma, or closing bracket after the `as` keyword.
-                let end =
-                    match_token_or_closing_brace(start, checker.locator(), mode, |tok| {
-                        tok == Tok::Colon || tok == Tok::Comma
-                    })?
-                    .start();
+                let end = match_token_or_closing_brace(
+                    start,
+                    checker.locator(),
+                    checker.source_type,
+                    |tok| tok == Tok::Colon || tok == Tok::Comma,
+                )?
+                .start();
                 let edit = Edit::deletion(start, end);
                 return Some(Fix::suggested(edit));
diff --git a/crates/ruff/src/rules/pylint/rules/bad_string_format_type.rs b/crates/ruff/src/rules/pylint/rules/bad_string_format_type.rs
index f49cf2aef3f7ba..5c5c844bd31cf0 100644
--- a/crates/ruff/src/rules/pylint/rules/bad_string_format_type.rs
+++ b/crates/ruff/src/rules/pylint/rules/bad_string_format_type.rs
@@ -2,7 +2,7 @@
 use std::str::FromStr;
 use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
 use ruff_python_literal::cformat::{CFormatPart, CFormatSpec, CFormatStrOrBytes, CFormatString};
-use ruff_python_parser::{lexer, Mode};
+use ruff_python_parser::lexer;
 use ruff_text_size::TextRange;
 use rustc_hash::FxHashMap;
@@ -203,12 +203,9 @@ pub(crate) fn bad_string_format_type(checker: &mut Checker, expr: &Expr, right:
     // Grab each string segment (in case there's an implicit concatenation).
     let content = checker.locator().slice(expr.range());
     let mut strings: Vec<TextRange> = vec![];
-    let mode = if checker.is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-    for (tok, range) in lexer::lex_starts_at(content, mode, expr.start()).flatten() {
+    for (tok, range) in
+        lexer::lex_starts_at(content, checker.source_type.as_mode(), expr.start()).flatten()
+    {
         if tok.is_string() {
             strings.push(range);
         } else if tok.is_percent() {
diff --git a/crates/ruff/src/rules/pyupgrade/rules/printf_string_formatting.rs b/crates/ruff/src/rules/pyupgrade/rules/printf_string_formatting.rs
index 3ae653a1011adf..595b0153c12515 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/printf_string_formatting.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/printf_string_formatting.rs
@@ -4,7 +4,7 @@
 use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
 use ruff_python_literal::cformat::{
     CConversionFlags, CFormatPart, CFormatPrecision, CFormatQuantity, CFormatString,
 };
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, Tok};
 use ruff_text_size::TextRange;
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
@@ -337,13 +337,12 @@ pub(crate) fn printf_string_formatting(
     // Grab each string segment (in case there's an implicit concatenation).
     let mut strings: Vec<TextRange> = vec![];
     let mut extension = None;
-    let mode = if checker.is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-    for (tok, range) in
-        lexer::lex_starts_at(checker.locator().slice(expr.range()), mode, expr.start()).flatten()
+    for (tok, range) in lexer::lex_starts_at(
+        checker.locator().slice(expr.range()),
+        checker.source_type.as_mode(),
+        expr.start(),
+    )
+    .flatten()
     {
         if tok.is_string() {
             strings.push(range);
diff --git a/crates/ruff/src/rules/pyupgrade/rules/redundant_open_modes.rs b/crates/ruff/src/rules/pyupgrade/rules/redundant_open_modes.rs
index bb7ba78a743675..642bbf910bfecd 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/redundant_open_modes.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/redundant_open_modes.rs
@@ -2,7 +2,7 @@
 use std::str::FromStr;
 use anyhow::{anyhow, Result};
 use ruff_python_ast::{self as ast, Constant, Expr, Keyword, Ranged};
-use ruff_python_parser::{lexer, Mode};
+use ruff_python_parser::lexer;
 use ruff_text_size::TextSize;
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
@@ -13,6 +13,7 @@
 use ruff_source_file::Locator;
 use crate::checkers::ast::Checker;
 use crate::registry::Rule;
+use crate::source_kind::PySourceType;
 /// ## What it does
 /// Checks for redundant `open` mode parameters.
@@ -84,7 +85,7 @@ pub(crate) fn redundant_open_modes(checker: &mut Checker, expr: &Expr) {
                 mode.replacement_value(),
                 checker.locator(),
                 checker.patch(Rule::RedundantOpenModes),
-                checker.is_jupyter_notebook,
+                checker.source_type,
             ));
         }
     }
@@ -104,7 +105,7 @@ pub(crate) fn redundant_open_modes(checker: &mut Checker, expr: &Expr) {
                     mode.replacement_value(),
                     checker.locator(),
                     checker.patch(Rule::RedundantOpenModes),
-                    checker.is_jupyter_notebook,
+                    checker.source_type,
                 ));
             }
         }
@@ -184,7 +185,7 @@ fn create_check(
     replacement_value: Option<&str>,
     locator: &Locator,
     patch: bool,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> Diagnostic {
     let mut diagnostic = Diagnostic::new(
         RedundantOpenModes {
@@ -200,8 +201,7 @@ fn create_check(
             )));
         } else {
             diagnostic.try_set_fix(|| {
-                create_remove_param_fix(locator, expr, mode_param, is_jupyter_notebook)
-                    .map(Fix::automatic)
+                create_remove_param_fix(locator, expr, mode_param, source_type).map(Fix::automatic)
             });
         }
    }
@@ -212,7 +212,7 @@ fn create_remove_param_fix(
     locator: &Locator,
     expr: &Expr,
     mode_param: &Expr,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> Result<Edit> {
     let content = locator.slice(expr.range());
     // Find the last comma before mode_param and create a deletion fix
     let mut fix_start: Option<TextSize> = None;
     let mut fix_end: Option<TextSize> = None;
     let mut is_first_arg: bool = false;
     let mut delete_first_arg: bool = false;
-    let mode = if is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-    for (tok, range) in lexer::lex_starts_at(content, mode, expr.start()).flatten() {
+    for (tok, range) in lexer::lex_starts_at(content, source_type.as_mode(), expr.start()).flatten()
+    {
         if range.start() == mode_param.start() {
             if is_first_arg {
                 delete_first_arg = true;
diff --git a/crates/ruff/src/rules/pyupgrade/rules/replace_stdout_stderr.rs b/crates/ruff/src/rules/pyupgrade/rules/replace_stdout_stderr.rs
index 4991e2b2d9742c..79f0adde6b861c 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/replace_stdout_stderr.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/replace_stdout_stderr.rs
@@ -9,6 +9,7 @@
 use ruff_source_file::Locator;
 use crate::autofix::edits::remove_argument;
 use crate::checkers::ast::Checker;
 use crate::registry::AsRule;
+use crate::source_kind::PySourceType;
 /// ## What it does
 /// Checks for uses of `subprocess.run` that send `stdout` and `stderr` to a
@@ -59,7 +60,7 @@ fn generate_fix(
     keywords: &[Keyword],
     stdout: &Keyword,
     stderr: &Keyword,
-    is_jupyter_notebook: bool,
+    source_type: PySourceType,
 ) -> Result<Fix> {
     let (first, second) = if stdout.start() < stderr.start() {
         (stdout, stderr)
@@ -75,7 +76,7 @@ fn generate_fix(
             args,
             keywords,
             false,
-            is_jupyter_notebook,
+            source_type,
         )?],
     ))
 }
@@ -130,7 +131,7 @@ pub(crate) fn replace_stdout_stderr(
                     keywords,
                     stdout,
                     stderr,
-                    checker.is_jupyter_notebook,
+                    checker.source_type,
                 )
             });
         }
diff --git a/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs b/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs
index fe2f06fcec1198..adcc0b3e4fa1c6 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs
@@ -1,5 +1,5 @@
 use ruff_python_ast::{self as ast, Constant, Expr, Keyword, Ranged};
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, Tok};
 use ruff_text_size::TextRange;
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
@@ -9,6 +9,7 @@
 use ruff_source_file::Locator;
diff --git a/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs b/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs
index fe2f06fcec1198..adcc0b3e4fa1c6 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs
@@ -1,5 +1,5 @@
 use ruff_python_ast::{self as ast, Constant, Expr, Keyword, Ranged};
-use ruff_python_parser::{lexer, Mode, Tok};
+use ruff_python_parser::{lexer, Tok};
 use ruff_text_size::TextRange;
 
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
@@ -9,6 +9,7 @@ use ruff_source_file::Locator;
 use crate::autofix::edits::remove_argument;
 use crate::checkers::ast::Checker;
 use crate::registry::Rule;
+use crate::source_kind::PySourceType;
 
 #[derive(Debug, PartialEq, Eq)]
 pub(crate) enum Reason {
@@ -122,17 +123,14 @@ fn match_encoding_arg<'a>(args: &'a [Expr], kwargs: &'a [Keyword]) -> Option<EncodingArg<'a>> {
-fn replace_with_bytes_literal(locator: &Locator, expr: &Expr, is_jupyter_notebook: bool) -> Fix {
+fn replace_with_bytes_literal(locator: &Locator, expr: &Expr, source_type: PySourceType) -> Fix {
     // Build up a replacement string by prefixing all string tokens with `b`.
     let contents = locator.slice(expr.range());
     let mut replacement = String::with_capacity(contents.len() + 1);
     let mut prev = expr.start();
-    let mode = if is_jupyter_notebook {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
-    for (tok, range) in lexer::lex_starts_at(contents, mode, expr.start()).flatten() {
+    for (tok, range) in
+        lexer::lex_starts_at(contents, source_type.as_mode(), expr.start()).flatten()
+    {
         match tok {
             Tok::Dot => break,
             Tok::String { .. } => {
@@ -183,7 +181,7 @@ pub(crate) fn unnecessary_encode_utf8(
                 diagnostic.set_fix(replace_with_bytes_literal(
                     checker.locator(),
                     expr,
-                    checker.is_jupyter_notebook,
+                    checker.source_type,
                 ));
             }
             checker.diagnostics.push(diagnostic);
@@ -205,7 +203,7 @@ pub(crate) fn unnecessary_encode_utf8(
                         args,
                         kwargs,
                         false,
-                        checker.is_jupyter_notebook,
+                        checker.source_type,
                     )
                     .map(Fix::automatic)
                 });
@@ -228,7 +226,7 @@ pub(crate) fn unnecessary_encode_utf8(
                        args,
                        kwargs,
                        false,
-                       checker.is_jupyter_notebook,
+                       checker.source_type,
                    )
                    .map(Fix::automatic)
                });
@@ -258,7 +256,7 @@ pub(crate) fn unnecessary_encode_utf8(
                        args,
                        kwargs,
                        false,
-                       checker.is_jupyter_notebook,
+                       checker.source_type,
                    )
                    .map(Fix::automatic)
                });
@@ -281,7 +279,7 @@ pub(crate) fn unnecessary_encode_utf8(
                        args,
                        kwargs,
                        false,
-                       checker.is_jupyter_notebook,
+                       checker.source_type,
                    )
                    .map(Fix::automatic)
                });
diff --git a/crates/ruff/src/rules/pyupgrade/rules/useless_object_inheritance.rs b/crates/ruff/src/rules/pyupgrade/rules/useless_object_inheritance.rs
index 82a0e609853621..9e76f846b25414 100644
--- a/crates/ruff/src/rules/pyupgrade/rules/useless_object_inheritance.rs
+++ b/crates/ruff/src/rules/pyupgrade/rules/useless_object_inheritance.rs
@@ -73,7 +73,7 @@ pub(crate) fn useless_object_inheritance(checker: &mut Checker, class_def: &ast::StmtClassDef) {
                 &class_def.bases,
                 &class_def.keywords,
                 true,
-                checker.is_jupyter_notebook,
+                checker.source_type,
             )?;
             Ok(Fix::automatic(edit))
         });
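`replace_with_bytes_literal` copies the expression's source text verbatim, except that each string token gains a `b` prefix, and it stops at the `.` of `.encode(...)`. A rough standalone sketch of that copy-and-prefix loop, with a hypothetical pre-lexed token list standing in for the lexer:

// A hypothetical pre-lexed view of `"foo" "bar".encode()`: each entry is
// (kind, start, end) in byte offsets of `contents`.
#[derive(Clone, Copy, PartialEq)]
enum Kind {
    String,
    Dot,
    Other,
}

fn prefix_strings_with_b(contents: &str, tokens: &[(Kind, usize, usize)]) -> String {
    let mut replacement = String::with_capacity(contents.len() + 1);
    let mut prev = 0;
    for &(kind, start, end) in tokens {
        match kind {
            // Stop before `.encode(...)`; the caller removes that part.
            Kind::Dot => break,
            Kind::String => {
                // Copy what came before the token, then the token with a `b` prefix.
                replacement.push_str(&contents[prev..start]);
                replacement.push('b');
                replacement.push_str(&contents[start..end]);
            }
            Kind::Other => replacement.push_str(&contents[prev..end]),
        }
        prev = end;
    }
    replacement
}

fn main() {
    let contents = r#""foo" "bar".encode()"#;
    let tokens = [
        (Kind::String, 0, 5),  // "foo"
        (Kind::String, 6, 11), // "bar"
        (Kind::Dot, 11, 12),
    ];
    assert_eq!(prefix_strings_with_b(contents, &tokens), r#"b"foo" b"bar""#);
}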
diff --git a/crates/ruff/src/source_kind.rs b/crates/ruff/src/source_kind.rs
index ab63e89c28f2ff..3625b32f4e5573 100644
--- a/crates/ruff/src/source_kind.rs
+++ b/crates/ruff/src/source_kind.rs
@@ -1,3 +1,7 @@
+use std::path::Path;
+
+use ruff_python_parser::Mode;
+
 use crate::jupyter::Notebook;
 
 #[derive(Clone, Debug, PartialEq, is_macro::Is)]
@@ -24,3 +28,42 @@ impl SourceKind {
         }
     }
 }
+
+#[derive(Clone, Copy, Debug, Default, PartialEq)]
+pub enum PySourceType {
+    #[default]
+    Python,
+    Stub,
+    Jupyter,
+}
+
+impl PySourceType {
+    pub fn as_mode(&self) -> Mode {
+        match self {
+            PySourceType::Python | PySourceType::Stub => Mode::Module,
+            PySourceType::Jupyter => Mode::Jupyter,
+        }
+    }
+
+    pub const fn is_python(&self) -> bool {
+        matches!(self, PySourceType::Python)
+    }
+
+    pub const fn is_stub(&self) -> bool {
+        matches!(self, PySourceType::Stub)
+    }
+
+    pub const fn is_jupyter(&self) -> bool {
+        matches!(self, PySourceType::Jupyter)
+    }
+}
+
+impl From<&Path> for PySourceType {
+    fn from(path: &Path) -> Self {
+        match path.extension() {
+            Some(ext) if ext == "pyi" => PySourceType::Stub,
+            Some(ext) if ext == "ipynb" => PySourceType::Jupyter,
+            _ => PySourceType::Python,
+        }
+    }
+}
diff --git a/crates/ruff/src/test.rs b/crates/ruff/src/test.rs
index 1c4724edd38905..e4842ce0a4770f 100644
--- a/crates/ruff/src/test.rs
+++ b/crates/ruff/src/test.rs
@@ -7,7 +7,7 @@ use std::path::Path;
 use anyhow::Result;
 use itertools::Itertools;
 use ruff_python_parser::lexer::LexResult;
-use ruff_python_parser::Mode;
+
 use rustc_hash::FxHashMap;
 
 use ruff_diagnostics::{AutofixKind, Diagnostic};
@@ -26,7 +26,7 @@ use crate::packaging::detect_package_root;
 use crate::registry::AsRule;
 use crate::rules::pycodestyle::rules::syntax_error;
 use crate::settings::{flags, Settings};
-use crate::source_kind::SourceKind;
+use crate::source_kind::{PySourceType, SourceKind};
 
 #[cfg(not(fuzzing))]
 pub(crate) fn read_jupyter_notebook(path: &Path) -> Result<Notebook> {
@@ -101,13 +101,9 @@ pub(crate) fn max_iterations() -> usize {
 /// A convenient wrapper around [`check_path`], that additionally
 /// asserts that autofixes converge after a fixed number of iterations.
 fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings) -> Vec<Message> {
-    let mode = if source_kind.is_jupyter() {
-        Mode::Jupyter
-    } else {
-        Mode::Module
-    };
     let contents = source_kind.content().to_string();
-    let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, mode);
+    let source_type = PySourceType::from(path);
+    let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, source_type.as_mode());
     let locator = Locator::new(&contents);
     let stylist = Stylist::from_tokens(&tokens, &locator);
     let indexer = Indexer::from_tokens(&tokens, &locator);
@@ -132,6 +128,7 @@ fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings)
         settings,
         flags::Noqa::Enabled,
         Some(source_kind),
+        source_type,
     );
 
     let source_has_errors = error.is_some();
@@ -169,7 +166,8 @@ fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings)
             notebook.update(&source_map, &fixed_contents);
         };
 
-        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&fixed_contents, mode);
+        let tokens: Vec<LexResult> =
+            ruff_python_parser::tokenize(&fixed_contents, source_type.as_mode());
         let locator = Locator::new(&fixed_contents);
         let stylist = Stylist::from_tokens(&tokens, &locator);
         let indexer = Indexer::from_tokens(&tokens, &locator);
@@ -194,6 +192,7 @@ fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings)
             settings,
             flags::Noqa::Enabled,
             Some(source_kind),
+            source_type,
         );
 
         if let Some(fixed_error) = fixed_error {
diff --git a/crates/ruff_benchmark/benches/linter.rs b/crates/ruff_benchmark/benches/linter.rs
index 7abaa4fdafa251..e7f14ccedbc014 100644
--- a/crates/ruff_benchmark/benches/linter.rs
+++ b/crates/ruff_benchmark/benches/linter.rs
@@ -7,6 +7,7 @@ use criterion::{
 
 use ruff::linter::lint_only;
 use ruff::settings::{flags, Settings};
+use ruff::source_kind::PySourceType;
 use ruff::RuleSelector;
 use ruff_benchmark::{TestCase, TestCaseSpeed, TestFile, TestFileDownloadError};
 
@@ -57,13 +58,15 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &Settings) {
             &case,
             |b, case| {
                 b.iter(|| {
+                    let path = case.path();
                     let result = lint_only(
                         case.code(),
-                        &case.path(),
+                        &path,
                         None,
                         settings,
                         flags::Noqa::Enabled,
                         None,
+                        PySourceType::from(path.as_path()),
                     );
 
                     // Assert that file contains no parse errors
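With the `From<&Path>` impl above, call sites such as `test_contents` and the benchmark derive the source type once from the file's extension. A small self-contained check of that dispatch (re-declaring the enum and impl from the diff so the snippet compiles on its own):

use std::path::Path;

#[derive(Clone, Copy, Debug, Default, PartialEq)]
enum PySourceType {
    #[default]
    Python,
    Stub,
    Jupyter,
}

// Mirrors the From<&Path> impl introduced in source_kind.rs above.
impl From<&Path> for PySourceType {
    fn from(path: &Path) -> Self {
        match path.extension() {
            Some(ext) if ext == "pyi" => PySourceType::Stub,
            Some(ext) if ext == "ipynb" => PySourceType::Jupyter,
            _ => PySourceType::Python,
        }
    }
}

fn main() {
    assert_eq!(PySourceType::from(Path::new("foo.py")), PySourceType::Python);
    assert_eq!(PySourceType::from(Path::new("foo.pyi")), PySourceType::Stub);
    assert_eq!(PySourceType::from(Path::new("nb.ipynb")), PySourceType::Jupyter);
    // No extension (e.g. extensionless scripts) falls back to the default, Python.
    assert_eq!(PySourceType::from(Path::new("Makefile")), PySourceType::Python);
}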
diff --git a/crates/ruff_cli/src/diagnostics.rs b/crates/ruff_cli/src/diagnostics.rs
index 5e494a04827133..b8980ba16c823d 100644
--- a/crates/ruff_cli/src/diagnostics.rs
+++ b/crates/ruff_cli/src/diagnostics.rs
@@ -24,12 +24,12 @@
 use ruff::message::Message;
 use ruff::pyproject_toml::lint_pyproject_toml;
 use ruff::registry::Rule;
 use ruff::settings::{flags, AllSettings, Settings};
-use ruff::source_kind::SourceKind;
+use ruff::source_kind::{PySourceType, SourceKind};
 use ruff::{fs, IOError};
 use ruff_diagnostics::Diagnostic;
 use ruff_macros::CacheKey;
 use ruff_python_ast::imports::ImportMap;
-use ruff_python_stdlib::path::{is_jupyter_notebook, is_project_toml};
+use ruff_python_stdlib::path::is_project_toml;
 use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};
 
 #[derive(CacheKey)]
@@ -211,8 +211,10 @@ pub(crate) fn lint_path(
         });
     }
 
+    let source_type = PySourceType::from(path);
+
     // Read the file from disk
-    let mut source_kind = if is_jupyter_notebook(path) {
+    let mut source_kind = if source_type.is_jupyter() {
         match load_jupyter_notebook(path) {
             Ok(notebook) => SourceKind::Jupyter(notebook),
             Err(diagnostic) => return Ok(*diagnostic),
@@ -249,6 +251,7 @@ pub(crate) fn lint_path(
             noqa,
             &settings.lib,
             &mut source_kind,
+            source_type,
         ) {
             if !fixed.is_empty() {
                 match autofix {
@@ -282,6 +285,7 @@ pub(crate) fn lint_path(
             &settings.lib,
             noqa,
             Some(&source_kind),
+            source_type,
         );
         let fixed = FxHashMap::default();
         (result, fixed)
@@ -294,6 +298,7 @@ pub(crate) fn lint_path(
             &settings.lib,
             noqa,
             Some(&source_kind),
+            source_type,
         );
         let fixed = FxHashMap::default();
         (result, fixed)
@@ -343,6 +348,8 @@ pub(crate) fn lint_stdin(
     autofix: flags::FixMode,
 ) -> Result<Diagnostics> {
     let mut source_kind = SourceKind::Python(contents.to_string());
+    let source_type = PySourceType::default();
+
     // Lint the inputs.
     let (
         LinterResult {
@@ -362,6 +369,7 @@ pub(crate) fn lint_stdin(
             noqa,
             settings,
             &mut source_kind,
+            source_type,
         ) {
             match autofix {
                 flags::FixMode::Apply => {
@@ -397,6 +405,7 @@ pub(crate) fn lint_stdin(
             settings,
             noqa,
             Some(&source_kind),
+            source_type,
         );
 
         let fixed = FxHashMap::default();
@@ -415,6 +424,7 @@ pub(crate) fn lint_stdin(
             settings,
             noqa,
             Some(&source_kind),
+            source_type,
         );
         let fixed = FxHashMap::default();
         (result, fixed)
diff --git a/crates/ruff_wasm/src/lib.rs b/crates/ruff_wasm/src/lib.rs
index 7998cae0d1e4dd..4414bca8384eb2 100644
--- a/crates/ruff_wasm/src/lib.rs
+++ b/crates/ruff_wasm/src/lib.rs
@@ -21,6 +21,7 @@ use ruff::rules::{
 use ruff::settings::configuration::Configuration;
 use ruff::settings::options::Options;
 use ruff::settings::{defaults, flags, Settings};
+use ruff::source_kind::PySourceType;
 use ruff_python_codegen::Stylist;
 use ruff_python_formatter::{format_module, format_node, PyFormatOptions};
 use ruff_python_index::{CommentRangesBuilder, Indexer};
@@ -196,8 +197,10 @@ impl Workspace {
     }
 
     pub fn check(&self, contents: &str) -> Result {
+        let source_type = PySourceType::default();
+
         // Tokenize once.
-        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, Mode::Module);
+        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, source_type.as_mode());
 
         // Map row and column locations to byte slices (lazily).
         let locator = Locator::new(contents);
@@ -227,6 +230,7 @@ impl Workspace {
             &self.settings,
             flags::Noqa::Enabled,
             None,
+            source_type,
         );
 
         let source_code = locator.to_source_code();
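Taken together, the call-site changes follow one pattern: each entry point determines the source type once, from the path when there is one (`lint_path`, the tests, the benchmark) or via `PySourceType::default()` when there is none (stdin, the WASM playground), and threads the `Copy` value down to the lexer. A schematic sketch of that flow, with invented stage names (`lint`, `tokenize` are stand-ins here, not the crate's real signatures):

use std::path::Path;

#[derive(Clone, Copy, Debug, Default, PartialEq)]
enum PySourceType {
    #[default]
    Python,
    Stub,
    Jupyter,
}

impl From<&Path> for PySourceType {
    fn from(path: &Path) -> Self {
        match path.extension() {
            Some(ext) if ext == "pyi" => PySourceType::Stub,
            Some(ext) if ext == "ipynb" => PySourceType::Jupyter,
            _ => PySourceType::Python,
        }
    }
}

// Stand-ins for the real pipeline stages; each stage receives the source
// type by value (it is Copy) rather than re-deriving it from a bool.
fn tokenize(_contents: &str, source_type: PySourceType) {
    println!("lexing as {source_type:?}");
}

fn lint(contents: &str, source_type: PySourceType) {
    tokenize(contents, source_type);
}

fn main() {
    // File entry point: derive from the path, once.
    lint("x = 1\n", PySourceType::from(Path::new("example.ipynb")));

    // Stdin / playground entry point: no path, so use the default.
    lint("x = 1\n", PySourceType::default());
}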