Commit 4e0c16e

fix test_hash_agg_with_nan_keys floating point sum failure (#10148)
Signed-off-by: Jim Brennan <[email protected]>
jbrennan333 authored Jan 4, 2024
1 parent beba547 commit 4e0c16e
Showing 1 changed file with 3 additions and 4 deletions.
7 changes: 3 additions & 4 deletions integration_tests/src/main/python/hash_aggregate_test.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2020-2023, NVIDIA CORPORATION.
+# Copyright (c) 2020-2024, NVIDIA CORPORATION.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -159,7 +159,7 @@
 
 _grpkey_doubles_with_nan_zero_grouping_keys = [
     ('a', RepeatSeqGen(DoubleGen(nullable=(True, 10.0), special_cases=_nan_zero_double_special_cases), length=50)),
-    ('b', FloatGen(nullable=(True, 10.0))),
+    ('b', IntegerGen(nullable=(True, 10.0))),
     ('c', LongGen())]
 
 # Schema for xfail cases
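Context, not part of the diff: the failure in #10148 is a floating point sum mismatch, and this hunk swaps the aggregated column 'b' from FloatGen to IntegerGen so its sum is exact. Floating point addition is not associative, so CPU and GPU plans that accumulate a float column in different orders can legitimately disagree. A minimal Python sketch of that ordering effect (illustrative values only, not taken from the test data):

vals = [1e16, 1.0, -1e16, 1.0]

# Left-to-right accumulation: 1e16 + 1.0 rounds back to 1e16 because 1.0 is
# below the representable precision at that magnitude, so one of the 1.0s is lost.
left_to_right = ((vals[0] + vals[1]) + vals[2]) + vals[3]   # 1.0

# Cancel the large values first and both 1.0s survive.
reordered = (vals[0] + vals[2]) + (vals[1] + vals[3])       # 2.0

print(left_to_right, reordered)   # 1.0 2.0 -- same inputs, different sums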
@@ -1154,7 +1154,6 @@ def test_hash_multiple_filters(data_gen, conf):
         'min(a), max(b) filter (where c > 250) from hash_agg_table group by a',
         conf)
 
-@datagen_overrides(seed=0, reason='https://github.com/NVIDIA/spark-rapids/issues/10026')
 @approximate_float
 @ignore_order
 @pytest.mark.parametrize('data_gen', [_grpkey_floats_with_nan_zero_grouping_keys,
@@ -1222,7 +1221,7 @@ def test_hash_agg_with_struct_of_array_fallback(data_gen):
 
 @approximate_float
 @ignore_order
-@pytest.mark.parametrize('data_gen', [ _grpkey_doubles_with_nan_zero_grouping_keys], ids=idfn)
+@pytest.mark.parametrize('data_gen', [ _grpkey_floats_with_nulls_and_nans ], ids=idfn)
 def test_count_distinct_with_nan_floats(data_gen):
     assert_gpu_and_cpu_are_equal_sql(
         lambda spark : gen_df(spark, data_gen, length=1024),
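Aside, not part of the commit: test_count_distinct_with_nan_floats exercises Spark's documented NaN semantics, where all NaN values compare equal for grouping and DISTINCT, so a generator mixing nulls and NaNs still yields a well-defined distinct count. A small self-contained PySpark sketch of that behavior (table name and values are made up for illustration; the test's actual SQL is not shown in the truncated hunk above):

from pyspark.sql import SparkSession

spark = SparkSession.builder.master("local[1]").appName("nan-distinct-sketch").getOrCreate()

nan = float("nan")
df = spark.createDataFrame([(nan,), (nan,), (1.0,), (None,)], "a double")
df.createOrReplaceTempView("t")

# All NaNs collapse to one distinct value and NULL is ignored by count(distinct),
# so the expected result is 2 (NaN and 1.0).
spark.sql("select count(distinct a) from t").show()

spark.stop()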
