[FEAT] Support intersect as a DataFrame API
Commit f0ed52b (1 parent: ced8c4b)
Showing 6 changed files with 206 additions and 0 deletions.
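As a quick orientation before the diff, here is a minimal usage sketch of the new API, mirroring the first test case added in this commit (`daft.from_pydict` stands in for the tests' `make_df` fixture):

```python
import daft

df1 = daft.from_pydict({"foo": [1, 2, 3]})
df2 = daft.from_pydict({"bar": [2, 3, 4]})

# Columns are matched by position, so the result keeps the left side's column name.
print(df1.intersect(df2).to_pydict())  # expected, per the tests below: {"foo": [2, 3]}
```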
@@ -0,0 +1,112 @@
use std::sync::Arc;

use common_error::{DaftError, DaftResult};
use daft_core::join::JoinType;
use daft_dsl::{col, zero_lit, ExprRef};
use daft_schema::schema::Schema;
use itertools::Itertools;
use snafu::ResultExt;

use crate::{logical_plan, logical_plan::CreationSnafu, LogicalPlan};

// null safe equal (a <=> b) is equivalent to:
// nvl(a, zero_lit_of_type(a)) = nvl(b, zero_lit_of_type(b)) and is_null(a) = is_null(b)
fn to_null_safe_equal_join_keys(schema: &Schema) -> DaftResult<Vec<ExprRef>> {
    schema
        .names()
        .iter()
        .map(|k| {
            let field = schema.get_field(k).unwrap();
            // TODO: expr name should reflect the expression itself, e.g. is_null(a)'s field name
            // should be `is_null(a)`.
            let col_or_zero = col(field.name.clone())
                .fill_null(zero_lit(&field.dtype)?)
                .alias(field.name.clone() + "__or_zero__");
            let is_null = col(field.name.clone())
                .is_null()
                .alias(field.name.clone() + "__is_null__");
            Ok(vec![col_or_zero, is_null])
        })
        .flatten_ok()
        .collect()
}

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Intersect {
    // Upstream nodes.
    pub lhs: Arc<LogicalPlan>,
    pub rhs: Arc<LogicalPlan>,
    pub is_all: bool,
}

impl Intersect {
    pub(crate) fn try_new(
        lhs: Arc<LogicalPlan>,
        rhs: Arc<LogicalPlan>,
        is_all: bool,
    ) -> logical_plan::Result<Self> {
        let lhs_schema = lhs.schema();
        let rhs_schema = rhs.schema();
        if lhs_schema.len() != rhs_schema.len() {
            return Err(DaftError::SchemaMismatch(format!(
                "Both plans must have the same number of fields to intersect, \
                but got [lhs: {} vs rhs: {}], lhs schema: {}, rhs schema: {}",
                lhs_schema.len(),
                rhs_schema.len(),
                lhs_schema,
                rhs_schema
            )))
            .context(CreationSnafu);
        }
        // lhs and rhs should have the same type for each field to intersect
        if lhs_schema
            .fields
            .values()
            .zip(rhs_schema.fields.values())
            .any(|(l, r)| l.dtype != r.dtype)
        {
            return Err(DaftError::SchemaMismatch(format!(
                "Both plans' schemas should have the same type for each field to intersect, \
                but got lhs schema: {}, rhs schema: {}",
                lhs_schema, rhs_schema
            )))
            .context(CreationSnafu);
        }
        Ok(Self { lhs, rhs, is_all })
    }

    /// Intersect distinct can be represented as a semi join + distinct:
    /// the following intersect operator:
    /// ```sql
    /// select a1, a2 from t1 intersect select b1, b2 from t2
    /// ```
    /// is the same as:
    /// ```sql
    /// select distinct a1, a2 from t1 left semi join t2
    /// on t1.a1 <=> t2.b1 and t1.a2 <=> t2.b2
    /// ```
    /// TODO: Move this logic to the logical optimization rules
    pub(crate) fn to_optimized_join(&self) -> logical_plan::Result<LogicalPlan> {
        if self.is_all {
            Err(logical_plan::Error::CreationError {
                source: DaftError::InternalError("intersect all is not supported yet".to_string()),
            })
        } else {
            let left_on = to_null_safe_equal_join_keys(&self.lhs.schema())
                .map_err(|e| logical_plan::Error::CreationError { source: e })?;
            let right_on = to_null_safe_equal_join_keys(&self.rhs.schema())
                .map_err(|e| logical_plan::Error::CreationError { source: e })?;
            let join = logical_plan::Join::try_new(
                self.lhs.clone(),
                self.rhs.clone(),
                left_on,
                right_on,
                JoinType::Semi,
                None,
                None,
                None,
            );
            join.map(|j| logical_plan::Distinct::new(j.into()).into())
        }
    }
}
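The comment on `to_null_safe_equal_join_keys` above is the heart of the rewrite: a null-safe equality `a <=> b` is decomposed into ordinary equality on null-filled values plus equality on the null flags. A plain-Python sketch of that decomposition (hypothetical helper; `0` stands in for `zero_lit` of the column's type):

```python
def null_safe_eq(a, b):
    # nvl(a, 0) == nvl(b, 0)  and  is_null(a) == is_null(b)
    a_or_zero = a if a is not None else 0  # fill_null(zero_lit) -> the "__or_zero__" key
    b_or_zero = b if b is not None else 0
    return a_or_zero == b_or_zero and (a is None) == (b is None)  # the "__is_null__" key


assert null_safe_eq(None, None)   # two NULLs compare equal
assert not null_safe_eq(None, 0)  # a NULL does not collide with the zero literal
assert null_safe_eq(2, 2)
```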
@@ -0,0 +1,47 @@
from __future__ import annotations

import daft
from daft import col


def test_simple_intersect(make_df):
    df1 = make_df({"foo": [1, 2, 3]})
    df2 = make_df({"bar": [2, 3, 4]})
    result = df1.intersect(df2)
    assert result.to_pydict() == {"foo": [2, 3]}


def test_intersect_with_duplicate(make_df):
    df1 = make_df({"foo": [1, 2, 2, 3]})
    df2 = make_df({"bar": [2, 3, 3]})
    result = df1.intersect(df2)
    assert result.to_pydict() == {"foo": [2, 3]}


def test_self_intersect(make_df):
    df = make_df({"foo": [1, 2, 3]})
    result = df.intersect(df).sort(by="foo")
    assert result.to_pydict() == {"foo": [1, 2, 3]}


def test_intersect_empty(make_df):
    df1 = make_df({"foo": [1, 2, 3]})
    df2 = make_df({"bar": []}).select(col("bar").cast(daft.DataType.int64()))
    result = df1.intersect(df2)
    assert result.to_pydict() == {"foo": []}


def test_intersect_with_nulls(make_df):
    df1 = make_df({"foo": [1, 2, None]})
    df1_without_null = make_df({"foo": [1, 2]})
    df2 = make_df({"bar": [2, 3, None]})
    df2_without_null = make_df({"bar": [2, 3]})

    result = df1.intersect(df2)
    assert result.to_pydict() == {"foo": [2, None]}

    result = df1_without_null.intersect(df2)
    assert result.to_pydict() == {"foo": [2]}

    result = df1.intersect(df2_without_null)
    assert result.to_pydict() == {"foo": [2]}
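Taken together, the tests match what the `to_optimized_join` doc comment promises: distinct semantics (duplicates collapse) and null-safe matching (NULL intersects NULL). A plain-Python model of that semi-join-plus-distinct behaviour, reproducing the expectations above; this is only an illustration of the semantics, not how Daft executes the plan:

```python
def intersect_distinct(lhs, rhs):
    # Left semi join on null-safe-equal keys, then distinct, keeping lhs order.
    rhs_keys = set(rhs)  # None is hashable, so NULL <=> NULL matching falls out of set membership
    seen, out = set(), []
    for value in lhs:
        if value in rhs_keys and value not in seen:
            seen.add(value)
            out.append(value)
    return out


assert intersect_distinct([1, 2, 2, 3], [2, 3, 3]) == [2, 3]        # duplicates collapse
assert intersect_distinct([1, 2, None], [2, 3, None]) == [2, None]  # NULLs match each other
```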