
feat: add to feature #803

Merged
merged 10 commits on Feb 23, 2024
2 changes: 2 additions & 0 deletions docs/cli.md
@@ -64,6 +64,7 @@ It will only add if the package with its version constraint is able to work with
- `--no-install`: Don't install the package to the environment, only add the package to the lock-file.
- `--no-lockfile-update`: Don't update the lock-file, implies the `--no-install` flag.
- `--platform <PLATFORM> (-p)`: The platform for which the dependency should be added. (Allowed to be used more than once)
- `--feature <FEATURE> (-f)`: The feature for which the dependency should be added.

```shell
pixi add numpy
@@ -76,6 +77,7 @@ pixi add --pypi requests[security]
pixi add --platform osx-64 --build clang
pixi add --no-install numpy
pixi add --no-lockfile-update numpy
pixi add --feature featurex numpy
```

## `install`
148 changes: 114 additions & 34 deletions src/cli/add.rs
@@ -7,6 +7,7 @@ use crate::{
use clap::Parser;
use itertools::{Either, Itertools};

use crate::project::grouped_environment::GroupedEnvironment;
use indexmap::IndexMap;
use miette::{IntoDiagnostic, WrapErr};
use rattler_conda_types::{
@@ -86,6 +87,10 @@ pub struct Args {
/// The platform(s) for which the dependency should be added
#[arg(long, short)]
pub platform: Vec<Platform>,

/// The feature for which the dependency should be added
#[arg(long, short)]
pub feature: Option<String>,
}

impl DependencyType {
@@ -126,6 +131,10 @@ pub async fn execute(args: Args) -> miette::Result<()> {
.manifest
.add_platforms(platforms_to_add.iter(), &FeatureName::Default)?;

let feature_name = args
.feature
.map_or(FeatureName::Default, FeatureName::Named);

match dependency_type {
DependencyType::CondaDependency(spec_type) => {
let specs = args
@@ -137,6 +146,7 @@ .into_diagnostic()?;
.into_diagnostic()?;
add_conda_specs_to_project(
&mut project,
&feature_name,
specs,
spec_type,
args.no_install,
@@ -247,6 +257,7 @@ pub async fn add_pypi_specs_to_project(

pub async fn add_conda_specs_to_project(
project: &mut Project,
feature_name: &FeatureName,
specs: Vec<MatchSpec>,
spec_type: SpecType,
no_install: bool,
@@ -268,45 +279,67 @@ pub async fn add_conda_specs_to_project(
// Determine the best version per platform
let mut package_versions = HashMap::<PackageName, HashSet<Version>>::new();

let platforms = if specs_platforms.is_empty() {
Either::Left(project.platforms().into_iter())
} else {
Either::Right(specs_platforms.iter().copied())
};
// Get the grouped environments that contain the feature
let grouped_environments: Vec<GroupedEnvironment> = project
.grouped_environments()
.iter()
.filter(|env| {
env.features()
.map(|feat| &feat.name)
.contains(&feature_name)
})
.cloned()
.collect();

// TODO: show progress of this set of solves
// TODO: Make this parallel
// TODO: Make this more efficient by reusing the solves in the get_up_to_date_prefix
for grouped_environment in grouped_environments {
let platforms = if specs_platforms.is_empty() {
Either::Left(grouped_environment.platforms().into_iter())
} else {
Either::Right(specs_platforms.iter().copied())
};

for platform in platforms {
// Solve the environment with the new specs added
let solved_versions = match determine_best_version(
project,
&new_specs,
spec_type,
&sparse_repo_data,
platform,
) {
Ok(versions) => versions,
Err(err) => {
return Err(err).wrap_err_with(|| miette::miette!(
for platform in platforms {
// Solve the environment with the new specs added
let solved_versions = match determine_best_version(
&grouped_environment,
&new_specs,
spec_type,
&sparse_repo_data,
platform,
) {
Ok(versions) => versions,
Err(err) => {
return Err(err).wrap_err_with(|| miette::miette!(
"could not determine any available versions for {} on {platform}. Either the package could not be found or version constraints on other dependencies result in a conflict.",
new_specs.keys().map(|s| s.as_source()).join(", ")
));
}
};
}
};

// Collect all the versions seen.
for (name, version) in solved_versions {
package_versions.entry(name).or_default().insert(version);
// Collect all the versions seen.
for (name, version) in solved_versions {
package_versions.entry(name).or_default().insert(version);
}
}
}

// Update the specs passed on the command line with the best available versions.
for (name, spec) in new_specs {
let versions_seen = package_versions
.get(&name)
.cloned()
.expect("a version must have been previously selected");
let updated_spec = if spec.version.is_none() {
let mut updated_spec = spec.clone();
updated_spec.version = determine_version_constraint(&versions_seen);
if let Some(versions_seen) = package_versions.get(&name).cloned() {
updated_spec.version = determine_version_constraint(&versions_seen);
} else {
updated_spec.version = determine_version_constraint(&determine_latest_versions(
project,
specs_platforms,
&sparse_repo_data,
&name,
)?);
}
updated_spec
} else {
spec
@@ -315,12 +348,14 @@ pub async fn add_conda_specs_to_project(

// Add the dependency to the project
if specs_platforms.is_empty() {
project.manifest.add_dependency(&spec, spec_type, None)?;
project
.manifest
.add_dependency(&spec, spec_type, None, feature_name)?;
} else {
for platform in specs_platforms.iter() {
project
.manifest
.add_dependency(&spec, spec_type, Some(*platform))?;
.add_dependency(&spec, spec_type, Some(*platform), feature_name)?;
}
}
}
@@ -330,21 +365,67 @@ pub async fn add_conda_specs_to_project(
LockFileUsage::Update
};

// Update the prefix
get_up_to_date_prefix(
&project.default_environment(),
lock_file_usage,
no_install,
sparse_repo_data,
)
.await?;

project.save()?;

Ok(())
}

/// Get all the latest versions found in the platforms repodata.
fn determine_latest_versions(
project: &Project,
platforms: &Vec<Platform>,
sparse_repo_data: &IndexMap<(Channel, Platform), SparseRepoData>,
name: &PackageName,
) -> miette::Result<Vec<Version>> {
// If we didn't find any versions, we'll just use the latest version we can find in the repodata.
let mut found_records = Vec::new();

// Get platforms to search for including NoArch
let platforms = if platforms.is_empty() {
let mut temp = project.platforms().into_iter().collect_vec();
temp.push(Platform::NoArch);
temp
} else {
let mut temp = platforms.clone();
temp.push(Platform::NoArch);
temp
};

// Search for the package in all the channels and platforms
for channel in project.channels() {
for platform in &platforms {
let sparse_repo_data = sparse_repo_data.get(&(channel.clone(), *platform));
if let Some(sparse_repo_data) = sparse_repo_data {
let records = sparse_repo_data.load_records(name).into_diagnostic()?;
// Add max of every channel and platform
if let Some(max_record) = records
.into_iter()
.max_by_key(|record| record.package_record.version.version().clone())
{
found_records.push(max_record);
}
};
}
}

// Determine the version constraint based on the max of every channel and platform.
Ok(found_records
.iter()
.map(|record| record.package_record.version.version().clone())
.collect_vec())
}
/// Given several specs determines the highest installable version for them.
pub fn determine_best_version(
project: &Project,
environment: &GroupedEnvironment,
new_specs: &HashMap<PackageName, NamelessMatchSpec>,
new_specs_type: SpecType,
sparse_repo_data: &IndexMap<(Channel, Platform), SparseRepoData>,
@@ -353,7 +434,7 @@ pub fn determine_best_version(
// Build the combined set of specs while updating the dependencies with the new specs.
let dependencies = SpecType::all()
.map(|spec_type| {
let mut deps = project.dependencies(Some(spec_type), Some(platform));
let mut deps = environment.dependencies(Some(spec_type), Some(platform));
if spec_type == new_specs_type {
for (new_name, new_spec) in new_specs.iter() {
deps.remove(new_name); // Remove any existing specs
@@ -369,7 +450,7 @@ pub fn determine_best_version(
let package_names = dependencies.names().cloned().collect_vec();

// Get the repodata for the current platform and for NoArch
let platform_sparse_repo_data = project
let platform_sparse_repo_data = environment
.channels()
.into_iter()
.cloned()
@@ -393,9 +474,8 @@

available_packages: &available_packages,

virtual_packages: project.virtual_packages(platform),
virtual_packages: environment.virtual_packages(platform),

// TODO: Add the information from the current lock file here.
locked_packages: vec![],

pinned_packages: vec![],
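The heart of the CLI change above is turning the optional `--feature` argument into a `FeatureName` (`Default` when the flag is omitted, `Named` otherwise) before it is threaded into `add_conda_specs_to_project` and `Manifest::add_dependency`. A minimal, self-contained sketch of that `map_or` pattern, using a stand-in enum rather than pixi's actual type:

```rust
// Stand-in for pixi's FeatureName; the real type lives in the manifest code.
#[derive(Debug, PartialEq)]
enum FeatureName {
    Default,
    Named(String),
}

// Maps an optional `--feature <FEATURE>` value onto the enum:
// no flag means the default feature, otherwise a named feature.
fn feature_from_arg(arg: Option<String>) -> FeatureName {
    arg.map_or(FeatureName::Default, FeatureName::Named)
}

fn main() {
    assert_eq!(feature_from_arg(None), FeatureName::Default);
    assert_eq!(
        feature_from_arg(Some("featurex".into())),
        FeatureName::Named("featurex".into())
    );
}
```

Resolving the flag once, up front, means every downstream call site receives a concrete `FeatureName` instead of an `Option<String>`.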
12 changes: 8 additions & 4 deletions src/cli/init.rs
@@ -1,8 +1,8 @@
use crate::environment::{get_up_to_date_prefix, LockFileUsage};
use crate::project::manifest::PyPiRequirement;
use crate::utils::conda_environment_file::{CondaEnvDep, CondaEnvFile};
use crate::Project;
use crate::{config::get_default_author, consts};
use crate::{FeatureName, Project};
use clap::Parser;
use indexmap::IndexMap;
use itertools::Itertools;
@@ -124,16 +124,20 @@ pub async fn execute(args: Args) -> miette::Result<()> {
let mut project = Project::from_str(&dir, &rv)?;
for spec in conda_deps {
match &args.platforms.is_empty() {
true => project
.manifest
.add_dependency(&spec, crate::SpecType::Run, None)?,
true => project.manifest.add_dependency(
&spec,
crate::SpecType::Run,
None,
&FeatureName::default(),
)?,
false => {
for platform in args.platforms.iter() {
// TODO: fix serialization of channels in rattler_conda_types::MatchSpec
project.manifest.add_dependency(
&spec,
crate::SpecType::Run,
Some(platform.parse().into_diagnostic()?),
&FeatureName::default(),
)?;
}
}
7 changes: 4 additions & 3 deletions src/environment.rs
@@ -1,18 +1,19 @@
use miette::IntoDiagnostic;

use crate::project::grouped_environment::GroupedEnvironmentName;
use crate::{
consts, install, install_pypi,
lock_file::UpdateLockFileOptions,
prefix::Prefix,
progress,
project::{
grouped_environment::GroupedEnvironment,
manifest::{EnvironmentName, SystemRequirements},
virtual_packages::verify_current_platform_has_required_virtual_packages,
Environment, GroupedEnvironment, GroupedEnvironmentName,
Environment,
},
Project,
};
use indexmap::IndexMap;
use miette::IntoDiagnostic;
use rattler::{
install::{PythonInfo, Transaction},
package_cache::PackageCache,
3 changes: 2 additions & 1 deletion src/lock_file/update.rs
@@ -1,3 +1,4 @@
use crate::project::grouped_environment::GroupedEnvironmentName;
use crate::{
config, consts,
environment::{
@@ -10,7 +11,7 @@ use crate::{
},
prefix::Prefix,
progress::global_multi_progress,
project::{Environment, GroupedEnvironment, GroupedEnvironmentName},
project::{grouped_environment::GroupedEnvironment, Environment},
repodata::fetch_sparse_repodata_targets,
utils::BarrierCell,
EnvironmentName, Project,
21 changes: 21 additions & 0 deletions src/project/grouped_environment.rs
@@ -1,3 +1,4 @@
use crate::project::manifest::Feature;
use crate::{
consts,
prefix::Prefix,
@@ -9,7 +10,9 @@ use crate::{
EnvironmentName, Project, SpecType,
};
use indexmap::{IndexMap, IndexSet};
use itertools::Either;
use rattler_conda_types::{Channel, GenericVirtualPackage, Platform};
use std::collections::HashSet;
use std::path::PathBuf;

/// Either a solve group or an individual environment without a solve group.
@@ -137,13 +140,31 @@ impl<'p> GroupedEnvironment<'p> {
}
}

pub fn platforms(&self) -> HashSet<Platform> {
match self {
GroupedEnvironment::Group(group) => group
.environments()
.flat_map(|env| env.platforms())
.collect(),
GroupedEnvironment::Environment(env) => env.platforms(),
}
}

/// Returns true if the group has any Pypi dependencies.
pub fn has_pypi_dependencies(&self) -> bool {
match self {
GroupedEnvironment::Group(group) => group.has_pypi_dependencies(),
GroupedEnvironment::Environment(env) => env.has_pypi_dependencies(),
}
}

/// Returns the features of the group
pub fn features(&self) -> impl Iterator<Item = &'p Feature> + DoubleEndedIterator + 'p {
match self {
GroupedEnvironment::Group(group) => Either::Left(group.features(true)),
GroupedEnvironment::Environment(env) => Either::Right(env.features(true)),
}
}
}

/// A name of a [`GroupedEnvironment`].
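The new `GroupedEnvironment::features` method returns a single `impl Iterator` even though its two match arms produce different concrete iterator types; `itertools::Either` (imported at the top of the file) makes that possible, since `Either<L, R>` is itself an iterator whenever both sides are. A standalone sketch of the pattern, with illustrative names rather than pixi's types and assuming the `itertools` crate is available:

```rust
use itertools::Either;

// Either<L, R> implements Iterator when both sides do, so two differently
// typed iterators can be returned behind one `impl Iterator` type.
fn numbers(evens_only: bool) -> impl Iterator<Item = u32> {
    if evens_only {
        Either::Left((0..10).filter(|n| n % 2 == 0))
    } else {
        Either::Right(0..10)
    }
}

fn main() {
    assert_eq!(numbers(true).count(), 5);
    assert_eq!(numbers(false).count(), 10);
}
```

The same trick appears earlier in `add.rs`, where the loop iterates either over the grouped environment's platforms or over the platforms given on the command line.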