diff --git a/src/deadline/client/cli/_groups/bundle_group.py b/src/deadline/client/cli/_groups/bundle_group.py index e01903841..be4536746 100644 --- a/src/deadline/client/cli/_groups/bundle_group.py +++ b/src/deadline/client/cli/_groups/bundle_group.py @@ -82,6 +82,7 @@ def validate_parameters(ctx, param, value): @click.option("--profile", help="The AWS profile to use.") @click.option("--farm-id", help="The farm to use.") @click.option("--queue-id", help="The queue to use.") +@click.option("--storage-profile-id", help="The storage profile to use.") @click.option("--name", help="The job name to use in place of the one in the job bundle.") @click.option( "--priority", @@ -135,13 +136,26 @@ def bundle_submit( max_retries_per_task, require_paths_exist, submitter_name, + storage_profile_id, **args, ): """ Submits an Open Job Description job bundle. """ - # Get a temporary config object with the standard options handled - config = _apply_cli_options_to_config(required_options={"farm_id", "queue_id"}, **args) + storage_profile_config_name = "settings.storage_profile_id" + + # Force loading the config so that it's not None when passed to set_setting() + config = config_file.read_config() + + # Apply the CLI args to the config + config = _apply_cli_options_to_config( + config=config, required_options={"farm_id", "queue_id"}, **args + ) + + # Override the storage profile ID in the config, but save the previous setting so that we can restore it.
+ prev_storage_id = get_setting(storage_profile_config_name, config=config) + if storage_profile_id: + set_setting(storage_profile_config_name, storage_profile_id, config=config) hash_callback_manager = _ProgressBarCallbackManager(length=100, label="Hashing Attachments") upload_callback_manager = _ProgressBarCallbackManager(length=100, label="Uploading Attachments") @@ -218,6 +232,12 @@ def _decide_cancel_submission(upload_group: AssetUploadGroup) -> bool: submitter_name=submitter_name, ) + # Reset the storage profile ID so that we don't save it to disk. + # Note: The config object is a singleton, so the config object that + # we may have modified above if a storage_profile_id was given is saved + # into that singleton. Setting the job_id writes that singleton to disk. + set_setting(storage_profile_config_name, prev_storage_id, config=config) + # Check Whether the CLI options are modifying any of the default settings that affect # the job id. If not, we'll save the job id submitted as the default job id. # If the submission is canceled by the user job_id will be None, so ignore this case as well.
diff --git a/test/unit/deadline_client/cli/test_cli_bundle.py b/test/unit/deadline_client/cli/test_cli_bundle.py index 77e06620c..1283f744e 100644 --- a/test/unit/deadline_client/cli/test_cli_bundle.py +++ b/test/unit/deadline_client/cli/test_cli_bundle.py @@ -15,10 +15,11 @@ from deadline.client import config from deadline.client.api import _queue_parameters +import deadline.client.api as api_module from deadline.client.cli import main from deadline.client.cli._groups import bundle_group from deadline.client.api import _submit_job_bundle -from deadline.client.config.config_file import set_setting +from deadline.client.config.config_file import get_setting, set_setting from deadline.job_attachments.upload import S3AssetManager from deadline.job_attachments.models import JobAttachmentsFileSystem from deadline.job_attachments.progress_tracker import SummaryStatistics @@ -290,6 +291,52 @@ def test_cli_bundle_job_name(fresh_deadline_config): assert result.exit_code == 0 +def test_cli_bundle_storage_profile_id(fresh_deadline_config): + """ + Confirm that --storage-profile-id sets the ID that the job is submitted with, but does not + change the value of the storage profile saved to the configuration file.
+ """ + PRE_STORAGE_PROFILE_ID = "sp-11223344556677889900abbccddeeff" + CLI_STORAGE_PROFILE_ID = "sp-0000000000000000000000000000000" + + config.set_setting("defaults.farm_id", MOCK_FARM_ID) + config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) + + # Set the storage profile ID in the config; as someone may have by using `deadline config set` + config.set_setting("settings.storage_profile_id", PRE_STORAGE_PROFILE_ID) + + # Use a temporary directory for the job bundle + with tempfile.TemporaryDirectory() as tmpdir, patch.object(boto3, "Session") as session_mock: + session_mock().client("deadline").create_job.return_value = MOCK_CREATE_JOB_RESPONSE + session_mock().client("deadline").get_job.return_value = MOCK_GET_JOB_RESPONSE + session_mock.reset_mock() + + # Write a JSON template + with open(os.path.join(tmpdir, "template.json"), "w", encoding="utf8") as f: + f.write(MOCK_JOB_TEMPLATE_CASES["MINIMAL_JSON"][1]) + + runner = CliRunner() + with patch.object(api_module, "get_storage_profile_for_queue"): + result = runner.invoke( + main, + ["bundle", "submit", tmpdir, "--storage-profile-id", CLI_STORAGE_PROFILE_ID], + ) + + assert tmpdir in result.output + assert MOCK_CREATE_JOB_RESPONSE["jobId"] in result.output + assert MOCK_GET_JOB_RESPONSE["lifecycleStatusMessage"] in result.output + session_mock().client().create_job.assert_called_once_with( + farmId=MOCK_FARM_ID, + queueId=MOCK_QUEUE_ID, + template=ANY, + templateType="JSON", + priority=50, + storageProfileId=CLI_STORAGE_PROFILE_ID, + ) + assert result.exit_code == 0 + assert get_setting("settings.storage_profile_id") == PRE_STORAGE_PROFILE_ID + + @pytest.mark.parametrize("loading_method", [e.value for e in JobAttachmentsFileSystem] + [None]) def test_cli_bundle_asset_load_method(fresh_deadline_config, temp_job_bundle_dir, loading_method): """