Skip to content

Commit

Permalink
Rename Program to Parsed, shorten tokens_in_range (#11627)
Browse files Browse the repository at this point in the history
## Summary

This PR renames `Program` to `Parsed` and shortens `tokens_in_range`
to `in_range`.

The `Program` terminology is already used in `red_knot`, so this rename
avoids the naming conflict. The `tokens_in_range` method is usually
called as `tokens.tokens_in_range(...)`, which makes the word "tokens"
redundant, so it is shortened to `in_range`.
  • Loading branch information
dhruvmanila committed Jun 3, 2024
1 parent bc63bf9 commit 83e61d4
Show file tree
Hide file tree
Showing 91 changed files with 433 additions and 470 deletions.
4 changes: 2 additions & 2 deletions crates/ruff_benchmark/benches/formatter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,13 +51,13 @@ fn benchmark_formatter(criterion: &mut Criterion) {
&case,
|b, case| {
// Parse the source.
let program =
let parsed =
parse(case.code(), Mode::Module).expect("Input should be a valid Python code");

b.iter(|| {
let options = PyFormatOptions::from_extension(Path::new(case.name()))
.with_preview(PreviewMode::Enabled);
let formatted = format_module_ast(&program, case.code(), options)
let formatted = format_module_ast(&parsed, case.code(), options)
.expect("Formatting to succeed");

formatted.print().expect("Printing to succeed")
Expand Down
2 changes: 1 addition & 1 deletion crates/ruff_benchmark/benches/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ fn benchmark_lexer(criterion: &mut Criterion<WallTime>) {
let token = lexer.next_token();
match token {
TokenKind::EndOfFile => break,
TokenKind::Unknown => panic!("Input to be a valid Python program"),
TokenKind::Unknown => panic!("Input to be a valid Python source code"),
_ => {}
}
}
Expand Down
8 changes: 4 additions & 4 deletions crates/ruff_benchmark/benches/linter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,12 +55,12 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
&case,
|b, case| {
// Parse the source.
let program =
let parsed =
parse_module(case.code()).expect("Input should be a valid Python code");

b.iter_batched(
|| program.clone(),
|program| {
|| parsed.clone(),
|parsed| {
let path = case.path();
let result = lint_only(
&path,
Expand All @@ -69,7 +69,7 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
flags::Noqa::Enabled,
&SourceKind::Python(case.code().to_string()),
PySourceType::from(path.as_path()),
ParseSource::Precomputed(program),
ParseSource::Precomputed(parsed),
);

// Assert that file contains no parse errors
Expand Down
4 changes: 2 additions & 2 deletions crates/ruff_dev/src/print_tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ pub(crate) fn main(args: &Args) -> Result<()> {
args.file.display()
)
})?;
let program = parse_unchecked_source(source_kind.source_code(), source_type);
for token in program.tokens() {
let parsed = parse_unchecked_source(source_kind.source_code(), source_type);
for token in parsed.tokens() {
println!(
"{start:#?} {kind:#?} {end:#?}",
start = token.start(),
Expand Down
28 changes: 14 additions & 14 deletions crates/ruff_linter/src/checkers/ast/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ use ruff_python_ast::{
FStringElement, Keyword, MatchCase, ModModule, Parameter, Parameters, Pattern, Stmt, Suite,
UnaryOp,
};
use ruff_python_parser::Program;
use ruff_python_parser::Parsed;
use ruff_text_size::{Ranged, TextRange, TextSize};

use ruff_diagnostics::{Diagnostic, IsolationLevel};
Expand Down Expand Up @@ -176,8 +176,8 @@ impl ExpectedDocstringKind {
}

pub(crate) struct Checker<'a> {
/// The parsed [`Program`].
program: &'a Program<ModModule>,
/// The parsed [`Parsed`].
parsed: &'a Parsed<ModModule>,
/// The [`Path`] to the file under analysis.
path: &'a Path,
/// The [`Path`] to the package containing the current file.
Expand Down Expand Up @@ -227,7 +227,7 @@ pub(crate) struct Checker<'a> {
impl<'a> Checker<'a> {
#[allow(clippy::too_many_arguments)]
pub(crate) fn new(
program: &'a Program<ModModule>,
parsed: &'a Parsed<ModModule>,
settings: &'a LinterSettings,
noqa_line_for: &'a NoqaMapping,
noqa: flags::Noqa,
Expand All @@ -242,7 +242,7 @@ impl<'a> Checker<'a> {
notebook_index: Option<&'a NotebookIndex>,
) -> Checker<'a> {
Checker {
program,
parsed,
settings,
noqa_line_for,
noqa,
Expand All @@ -253,7 +253,7 @@ impl<'a> Checker<'a> {
locator,
stylist,
indexer,
importer: Importer::new(program, locator, stylist),
importer: Importer::new(parsed, locator, stylist),
semantic: SemanticModel::new(&settings.typing_modules, path, module),
visit: deferred::Visit::default(),
analyze: deferred::Analyze::default(),
Expand Down Expand Up @@ -323,9 +323,9 @@ impl<'a> Checker<'a> {
}
}

/// The [`Program`] for the current file, which contains the tokens, AST, and more.
pub(crate) const fn program(&self) -> &'a Program<ModModule> {
self.program
/// The [`Parsed`] output for the current file, which contains the tokens, AST, and more.
pub(crate) const fn parsed(&self) -> &'a Parsed<ModModule> {
self.parsed
}

/// The [`Locator`] for the current file, which enables extraction of source code from byte
Expand Down Expand Up @@ -2336,7 +2336,7 @@ impl<'a> Checker<'a> {

#[allow(clippy::too_many_arguments)]
pub(crate) fn check_ast(
program: &Program<ModModule>,
parsed: &Parsed<ModModule>,
locator: &Locator,
stylist: &Stylist,
indexer: &Indexer,
Expand Down Expand Up @@ -2366,11 +2366,11 @@ pub(crate) fn check_ast(
} else {
ModuleSource::File(path)
},
python_ast: program.suite(),
python_ast: parsed.suite(),
};

let mut checker = Checker::new(
program,
parsed,
settings,
noqa_line_for,
noqa,
Expand All @@ -2387,8 +2387,8 @@ pub(crate) fn check_ast(
checker.bind_builtins();

// Iterate over the AST.
checker.visit_module(program.suite());
checker.visit_body(program.suite());
checker.visit_module(parsed.suite());
checker.visit_body(parsed.suite());

// Visit any deferred syntax nodes. Take care to visit in order, such that we avoid adding
// new deferred nodes after visiting nodes of that kind. For example, visiting a deferred
Expand Down
10 changes: 5 additions & 5 deletions crates/ruff_linter/src/checkers/imports.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use ruff_python_ast::statement_visitor::StatementVisitor;
use ruff_python_ast::{ModModule, PySourceType};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_parser::Program;
use ruff_python_parser::Parsed;
use ruff_source_file::Locator;

use crate::directives::IsortDirectives;
Expand All @@ -18,7 +18,7 @@ use crate::settings::LinterSettings;

#[allow(clippy::too_many_arguments)]
pub(crate) fn check_imports(
program: &Program<ModModule>,
parsed: &Parsed<ModModule>,
locator: &Locator,
indexer: &Indexer,
directives: &IsortDirectives,
Expand All @@ -32,7 +32,7 @@ pub(crate) fn check_imports(
let tracker = {
let mut tracker =
BlockBuilder::new(locator, directives, source_type.is_stub(), cell_offsets);
tracker.visit_body(program.suite());
tracker.visit_body(parsed.suite());
tracker
};

Expand All @@ -51,7 +51,7 @@ pub(crate) fn check_imports(
settings,
package,
source_type,
program,
parsed,
) {
diagnostics.push(diagnostic);
}
Expand All @@ -60,7 +60,7 @@ pub(crate) fn check_imports(
}
if settings.rules.enabled(Rule::MissingRequiredImport) {
diagnostics.extend(isort::rules::add_required_imports(
program,
parsed,
locator,
stylist,
settings,
Expand Down
8 changes: 4 additions & 4 deletions crates/ruff_linter/src/checkers/physical_lines.rs
Original file line number Diff line number Diff line change
Expand Up @@ -106,16 +106,16 @@ mod tests {
fn e501_non_ascii_char() {
let line = "'\u{4e9c}' * 2"; // 7 in UTF-32, 9 in UTF-8.
let locator = Locator::new(line);
let program = parse_module(line).unwrap();
let indexer = Indexer::from_tokens(program.tokens(), &locator);
let stylist = Stylist::from_tokens(program.tokens(), &locator);
let parsed = parse_module(line).unwrap();
let indexer = Indexer::from_tokens(parsed.tokens(), &locator);
let stylist = Stylist::from_tokens(parsed.tokens(), &locator);

let check_with_max_line_length = |line_length: LineLength| {
check_physical_lines(
&locator,
&stylist,
&indexer,
program.comment_ranges(),
parsed.comment_ranges(),
&[],
&LinterSettings {
pycodestyle: pycodestyle::settings::Settings {
Expand Down
8 changes: 4 additions & 4 deletions crates/ruff_linter/src/checkers/tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use ruff_python_codegen::Stylist;

use ruff_diagnostics::Diagnostic;
use ruff_python_index::Indexer;
use ruff_python_parser::Program;
use ruff_python_parser::Parsed;
use ruff_source_file::Locator;
use ruff_text_size::Ranged;

Expand All @@ -23,7 +23,7 @@ use crate::settings::LinterSettings;

#[allow(clippy::too_many_arguments)]
pub(crate) fn check_tokens(
program: &Program<ModModule>,
parsed: &Parsed<ModModule>,
path: &Path,
locator: &Locator,
indexer: &Indexer,
Expand All @@ -34,8 +34,8 @@ pub(crate) fn check_tokens(
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];

let tokens = program.tokens();
let comment_ranges = program.comment_ranges();
let tokens = parsed.tokens();
let comment_ranges = parsed.comment_ranges();

if settings.rules.any_enabled(&[
Rule::BlankLineBetweenMethods,
Expand Down
18 changes: 9 additions & 9 deletions crates/ruff_linter/src/directives.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use std::str::FromStr;

use bitflags::bitflags;
use ruff_python_ast::ModModule;
use ruff_python_parser::{Program, TokenKind, Tokens};
use ruff_python_parser::{Parsed, TokenKind, Tokens};
use ruff_python_trivia::CommentRanges;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

Expand Down Expand Up @@ -52,19 +52,19 @@ pub struct Directives {
}

pub fn extract_directives(
program: &Program<ModModule>,
parsed: &Parsed<ModModule>,
flags: Flags,
locator: &Locator,
indexer: &Indexer,
) -> Directives {
Directives {
noqa_line_for: if flags.intersects(Flags::NOQA) {
extract_noqa_line_for(program.tokens(), locator, indexer)
extract_noqa_line_for(parsed.tokens(), locator, indexer)
} else {
NoqaMapping::default()
},
isort: if flags.intersects(Flags::ISORT) {
extract_isort_directives(locator, program.comment_ranges())
extract_isort_directives(locator, parsed.comment_ranges())
} else {
IsortDirectives::default()
},
Expand Down Expand Up @@ -393,11 +393,11 @@ mod tests {
use super::IsortDirectives;

fn noqa_mappings(contents: &str) -> NoqaMapping {
let program = parse_module(contents).unwrap();
let parsed = parse_module(contents).unwrap();
let locator = Locator::new(contents);
let indexer = Indexer::from_tokens(program.tokens(), &locator);
let indexer = Indexer::from_tokens(parsed.tokens(), &locator);

extract_noqa_line_for(program.tokens(), &locator, &indexer)
extract_noqa_line_for(parsed.tokens(), &locator, &indexer)
}

#[test]
Expand Down Expand Up @@ -568,9 +568,9 @@ assert foo, \
}

fn isort_directives(contents: &str) -> IsortDirectives {
let program = parse_module(contents).unwrap();
let parsed = parse_module(contents).unwrap();
let locator = Locator::new(contents);
extract_isort_directives(&locator, program.comment_ranges())
extract_isort_directives(&locator, parsed.comment_ranges())
}

#[test]
Expand Down
6 changes: 3 additions & 3 deletions crates/ruff_linter/src/fix/edits.rs
Original file line number Diff line number Diff line change
Expand Up @@ -664,9 +664,9 @@ x = 1 \
fn add_to_dunder_all_test(raw: &str, names: &[&str], expect: &str) -> Result<()> {
let locator = Locator::new(raw);
let edits = {
let program = parse_expression(raw)?;
let stylist = Stylist::from_tokens(program.tokens(), &locator);
add_to_dunder_all(names.iter().copied(), program.expr(), &stylist)
let parsed = parse_expression(raw)?;
let stylist = Stylist::from_tokens(parsed.tokens(), &locator);
add_to_dunder_all(names.iter().copied(), parsed.expr(), &stylist)
};
let diag = {
use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
Expand Down
16 changes: 6 additions & 10 deletions crates/ruff_linter/src/importer/insertion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -327,14 +327,10 @@ mod tests {
#[test]
fn start_of_file() -> Result<()> {
fn insert(contents: &str) -> Result<Insertion> {
let program = parse_module(contents)?;
let parsed = parse_module(contents)?;
let locator = Locator::new(contents);
let stylist = Stylist::from_tokens(program.tokens(), &locator);
Ok(Insertion::start_of_file(
program.suite(),
&locator,
&stylist,
))
let stylist = Stylist::from_tokens(parsed.tokens(), &locator);
Ok(Insertion::start_of_file(parsed.suite(), &locator, &stylist))
}

let contents = "";
Expand Down Expand Up @@ -442,10 +438,10 @@ x = 1
#[test]
fn start_of_block() {
fn insert(contents: &str, offset: TextSize) -> Insertion {
let program = parse_module(contents).unwrap();
let parsed = parse_module(contents).unwrap();
let locator = Locator::new(contents);
let stylist = Stylist::from_tokens(program.tokens(), &locator);
Insertion::start_of_block(offset, &locator, &stylist, program.tokens())
let stylist = Stylist::from_tokens(parsed.tokens(), &locator);
Insertion::start_of_block(offset, &locator, &stylist, parsed.tokens())
}

let contents = "if True: pass";
Expand Down
8 changes: 4 additions & 4 deletions crates/ruff_linter/src/importer/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use std::error::Error;
use anyhow::Result;
use libcst_native::{ImportAlias, Name, NameOrAttribute};
use ruff_python_ast::{self as ast, ModModule, Stmt};
use ruff_python_parser::{Program, Tokens};
use ruff_python_parser::{Parsed, Tokens};
use ruff_text_size::{Ranged, TextSize};

use ruff_diagnostics::Edit;
Expand Down Expand Up @@ -42,13 +42,13 @@ pub(crate) struct Importer<'a> {

impl<'a> Importer<'a> {
pub(crate) fn new(
program: &'a Program<ModModule>,
parsed: &'a Parsed<ModModule>,
locator: &'a Locator<'a>,
stylist: &'a Stylist<'a>,
) -> Self {
Self {
python_ast: program.suite(),
tokens: program.tokens(),
python_ast: parsed.suite(),
tokens: parsed.tokens(),
locator,
stylist,
runtime_imports: Vec::default(),
Expand Down
Loading

0 comments on commit 83e61d4

Please sign in to comment.