[ci][train] Remove unnecessary xgboost_ray/lightgbm_ray reinstalls for release tests #43176

Merged · 9 commits · Feb 16, 2024
release/lightgbm_tests/workloads/tune_16x4.py (2 additions, 0 deletions)

```diff
@@ -9,6 +9,7 @@
 as the Ray Tune results table. No trials should error. All trials should
 run in parallel.
 """
+
 from collections import Counter
 import json
 import os
@@ -58,6 +59,7 @@ def train_wrapper(config, ray_params):
         config=search_space,
         num_samples=16,
         resources_per_trial=ray_params.get_tune_resources(),
+        storage_path="/mnt/cluster_storage",
     )
     taken = time.time() - start
```
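For context, the added `storage_path` argument sits inside each workload's `tune.run(...)` call (the same two-line change repeats in the files below). A minimal, self-contained sketch of that pattern, assuming a stub trainable and a toy search space; the real scripts define their own `train_wrapper`, search space, and `RayParams`:

```python
import time

from ray import train, tune
from xgboost_ray import RayParams


def train_wrapper(config, ray_params):
    # Stand-in for the real trainable, which runs distributed
    # XGBoost/LightGBM training via xgboost_ray/lightgbm_ray.
    train.report({"done": 1})


ray_params = RayParams(num_actors=4, cpus_per_actor=4)
search_space = {"eta": tune.loguniform(1e-4, 1e-1)}  # illustrative only

start = time.time()
analysis = tune.run(
    tune.with_parameters(train_wrapper, ray_params=ray_params),
    config=search_space,
    num_samples=16,
    resources_per_trial=ray_params.get_tune_resources(),
    # The line added by this PR: persist trial results to shared
    # cluster storage instead of the node-local ~/ray_results default.
    storage_path="/mnt/cluster_storage",
)
taken = time.time() - start
print(f"Tuning took {taken:.2f} seconds.")
```

Pointing `storage_path` at `/mnt/cluster_storage` gives all trials a shared persistent location rather than node-local storage, which matters for multi-node release tests.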
release/lightgbm_tests/workloads/tune_4x16.py (2 additions, 0 deletions)

```diff
@@ -9,6 +9,7 @@
 as the Ray Tune results table. No trials should error. All trials should
 run in parallel.
 """
+
 from collections import Counter
 import json
 import os
@@ -58,6 +59,7 @@ def train_wrapper(config, ray_params):
         config=search_space,
         num_samples=4,
         resources_per_trial=ray_params.get_tune_resources(),
+        storage_path="/mnt/cluster_storage",
     )
     taken = time.time() - start
```
release/lightgbm_tests/workloads/tune_small.py (2 additions, 0 deletions)

```diff
@@ -9,6 +9,7 @@
 as the Ray Tune results table. No trials should error. All trials should
 run in parallel.
 """
+
 from collections import Counter
 import json
 import os
@@ -58,6 +59,7 @@ def train_wrapper(config, ray_params):
         config=search_space,
         num_samples=4,
         resources_per_trial=ray_params.get_tune_resources(),
+        storage_path="/mnt/cluster_storage",
     )
     taken = time.time() - start
```
release/ray_release/byod/byod_xgboost_cuj_test.sh (1 addition, 1 deletion)

```diff
@@ -4,4 +4,4 @@
 
 set -exo pipefail
 
-pip3 install -U --force-reinstall --no-deps xgboost_ray
+pip install -U "git+https://github.com/ray-project/xgboost_ray.git"
```
release/ray_release/byod/byod_xgboost_master_test.sh (1 addition, 1 deletion)

```diff
@@ -4,7 +4,7 @@
 
 set -exo pipefail
 
-pip install -U "git+https://github.com/ray-project/xgboost_ray@master#egg=xgboost_ray" petastorm
+pip install -U "git+https://github.com/ray-project/xgboost_ray.git" petastorm
 sudo mkdir -p /data || true
 sudo chown ray:1000 /data || true
 rm -rf /data/classification.parquet || true
```
release/ray_release/byod/byod_xgboost_test.sh (1 addition, 1 deletion)

```diff
@@ -4,7 +4,7 @@
 
 set -exo pipefail
 
-pip3 install -U --force-reinstall --no-deps xgboost_ray lightgbm_ray
+pip3 install -U --force-reinstall --no-deps "git+https://github.com/ray-project/xgboost_ray.git" "git+https://github.com/ray-project/lightgbm_ray.git"
 sudo mkdir -p /data || true
 sudo chown ray:1000 /data || true
 rm -rf /data/classification.parquet || true
```
release/ray_release/byod/byod_xgboost_tune_test.sh (1 addition, 1 deletion)

```diff
@@ -4,7 +4,7 @@
 
 set -exo pipefail
 
-pip3 install -U --force-reinstall --no-deps xgboost_ray
+pip3 install -U --force-reinstall --no-deps "git+https://github.com/ray-project/xgboost_ray.git" "git+https://github.com/ray-project/lightgbm_ray.git"
 sudo mkdir -p /data || true
 sudo chown ray:1000 /data || true
 rm -rf /data/train.parquet || true
```
release/xgboost_tests/workloads/tune_32x4.py (2 additions, 0 deletions)

```diff
@@ -9,6 +9,7 @@
 as the Ray Tune results table. No trials should error. All trials should
 run in parallel.
 """
+
 from collections import Counter
 import json
 import os
@@ -58,6 +59,7 @@ def train_wrapper(config, ray_params):
         config=search_space,
         num_samples=32,
         resources_per_trial=ray_params.get_tune_resources(),
+        storage_path="/mnt/cluster_storage",
     )
     taken = time.time() - start
```
release/xgboost_tests/workloads/tune_4x32.py (2 additions, 0 deletions)

```diff
@@ -9,6 +9,7 @@
 as the Ray Tune results table. No trials should error. All trials should
 run in parallel.
 """
+
 from collections import Counter
 import json
 import os
@@ -58,6 +59,7 @@ def train_wrapper(config, ray_params):
         config=search_space,
         num_samples=4,
         resources_per_trial=ray_params.get_tune_resources(),
+        storage_path="/mnt/cluster_storage",
     )
     taken = time.time() - start
```
release/xgboost_tests/workloads/tune_small.py (2 additions, 0 deletions)

```diff
@@ -9,6 +9,7 @@
 as the Ray Tune results table. No trials should error. All trials should
 run in parallel.
 """
+
 from collections import Counter
 import json
 import os
@@ -58,6 +59,7 @@ def train_wrapper(config, ray_params):
         config=search_space,
         num_samples=4,
         resources_per_trial=ray_params.get_tune_resources(),
+        storage_path="/mnt/cluster_storage",
     )
     taken = time.time() - start
```
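The `taken = time.time() - start` context line visible in each hunk, together with the `json` and `os` imports, points at the usual release-test reporting tail in these workload scripts. A plausible sketch of that tail, assuming the common `TEST_OUTPUT_JSON` convention of Ray release tests; the variable name, default path, and result keys here are assumptions, not part of this diff:

```python
import json
import os
import time

start = time.time()
# ... tune.run(...) as in the workload scripts ...
taken = time.time() - start

# Release-test harnesses typically read a JSON result file from a path
# given by an environment variable (assumed convention, not shown above).
result = {"time_taken": taken, "success": 1}
test_output_json = os.environ.get("TEST_OUTPUT_JSON", "/tmp/tune_test_output.json")
with open(test_output_json, "wt") as f:
    json.dump(result, f)
```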