Merge branch 'u/mpiano/SEC-19431'
piax93 committed Sep 2, 2024
2 parents e838a74 + 685e773 commit be6c3d6
Showing 2 changed files with 66 additions and 3 deletions.
16 changes: 16 additions & 0 deletions paasta_tools/cli/cmds/spark_run.py
@@ -26,6 +26,7 @@
 from paasta_tools.cli.cmds.check import makefile_responds_to
 from paasta_tools.cli.cmds.cook_image import paasta_cook_image
 from paasta_tools.cli.utils import get_instance_config
+from paasta_tools.cli.utils import get_service_auth_token
 from paasta_tools.cli.utils import lazy_choices_completer
 from paasta_tools.cli.utils import list_instances
 from paasta_tools.clusterman import get_clusterman_metrics
@@ -337,6 +338,18 @@ def add_subparser(subparsers):
         default=None,
     )
 
+    list_parser.add_argument(
+        "--use-service-auth-token",
+        help=(
+            "Acquire service authentication token for the underlying instance,"
+            " and set it in the container environment"
+        ),
+        action="store_true",
+        dest="use_service_auth_token",
+        required=False,
+        default=False,
+    )
+
     aws_group = list_parser.add_argument_group(
         title="AWS credentials options",
         description="If --aws-credentials-yaml is specified, it overrides all "
@@ -795,6 +808,9 @@ def configure_and_run_docker_container(
     ) # type:ignore
     environment.update(extra_driver_envs)
 
+    if args.use_service_auth_token:
+        environment["YELP_SVC_AUTHZ_TOKEN"] = get_service_auth_token()
+
     webui_url = get_webui_url(spark_conf["spark.ui.port"])
     webui_url_msg = PaastaColors.green(f"\nSpark monitoring URL: ") + f"{webui_url}\n"
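The functional change to spark_run.py is just the pair of hunks above: an opt-in boolean CLI flag, plus a guard in configure_and_run_docker_container that injects the token into the driver container's environment. For reference, here is a minimal standalone sketch of that flag-to-environment pattern. It is not the paasta_tools code itself; fetch_token is a hypothetical stand-in for get_service_auth_token, whose real implementation lives in paasta_tools.cli.utils.

    import argparse


    def fetch_token() -> str:
        # Hypothetical stand-in for paasta_tools.cli.utils.get_service_auth_token.
        return "dummy-token"


    def build_environment(args: argparse.Namespace) -> dict:
        environment = {}
        # Mirrors the diff: only touch the environment when the flag was passed.
        if args.use_service_auth_token:
            environment["YELP_SVC_AUTHZ_TOKEN"] = fetch_token()
        return environment


    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--use-service-auth-token",
        action="store_true",  # store_true implies default=False, as in the diff
    )

    print(build_environment(parser.parse_args(["--use-service-auth-token"])))
    # -> {'YELP_SVC_AUTHZ_TOKEN': 'dummy-token'}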
53 changes: 50 additions & 3 deletions tests/cli/test_cmds_spark_run.py
@@ -493,6 +493,7 @@ def test_configure_and_run_docker_container(
         args.docker_shm_size = False
         args.tronfig = None
         args.job_id = None
+        args.use_service_auth_token = False
         with mock.patch.object(
             self.instance_config, "get_env_dictionary", return_value={"env1": "val1"}
         ):
@@ -607,6 +608,7 @@ def test_configure_and_run_docker_driver_resource_limits_config(
         args.docker_cpu_limit = 3
         args.docker_memory_limit = "4g"
         args.docker_shm_size = "1g"
+        args.use_service_auth_token = False
         with mock.patch.object(
             self.instance_config, "get_env_dictionary", return_value={"env1": "val1"}
         ):
@@ -721,6 +723,7 @@ def test_configure_and_run_docker_driver_resource_limits(
         args.docker_cpu_limit = False
         args.docker_memory_limit = False
         args.docker_shm_size = False
+        args.use_service_auth_token = False
         with mock.patch.object(
             self.instance_config, "get_env_dictionary", return_value={"env1": "val1"}
         ):
@@ -792,7 +795,9 @@ def test_configure_and_run_docker_container_nvidia(
             "spark.app.name": "fake app",
             "spark.executorEnv.PAASTA_CLUSTER": "test-cluster",
         }
-        args = mock.MagicMock(cmd="pyspark", nvidia=True)
+        args = mock.MagicMock(
+            cmd="pyspark", nvidia=True, use_service_auth_token=False
+        )
 
         configure_and_run_docker_container(
             args=args,
@@ -828,7 +833,9 @@ def test_configure_and_run_docker_container_mrjob(
             "spark.app.name": "fake_app",
             "spark.executorEnv.PAASTA_CLUSTER": "test-cluster",
         }
-        args = mock.MagicMock(cmd="python mrjob_wrapper.py", mrjob=True)
+        args = mock.MagicMock(
+            cmd="python mrjob_wrapper.py", mrjob=True, use_service_auth_token=False
+        )
 
         configure_and_run_docker_container(
             args=args,
@@ -856,7 +863,7 @@ def test_dont_emit_metrics_for_inappropriate_commands(
             "paasta_tools.cli.cmds.spark_run.clusterman_metrics", autospec=True
         ):
             mock_create_spark_config_str.return_value = "--conf spark.cores.max=5"
-            args = mock.MagicMock(cmd="bash", mrjob=False)
+            args = mock.MagicMock(cmd="bash", mrjob=False, use_service_auth_token=False)
 
             configure_and_run_docker_container(
                 args=args,
@@ -869,6 +876,46 @@ def test_dont_emit_metrics_for_inappropriate_commands(
                 pod_template_path="unique-run",
             )
 
+    @mock.patch("paasta_tools.cli.cmds.spark_run.get_service_auth_token", autospec=True)
+    def test_configure_and_run_docker_container_auth_token(
+        self,
+        mock_get_service_auth_token,
+        mock_create_spark_config_str,
+        mock_get_docker_cmd,
+        mock_get_webui_url,
+        mock_run_docker_container,
+        mock_get_username,
+    ):
+        mock_get_service_auth_token.return_value = "foobar"
+        with mock.patch(
+            "paasta_tools.cli.cmds.spark_run.clusterman_metrics", autospec=True
+        ):
+            spark_conf = {
+                "spark.cores.max": "5",
+                "spark.executor.cores": 1,
+                "spark.executor.memory": "2g",
+                "spark.master": "mesos://spark.master",
+                "spark.ui.port": "1234",
+                "spark.app.name": "fake app",
+                "spark.executorEnv.PAASTA_CLUSTER": "test-cluster",
+            }
+            args = mock.MagicMock(
+                cmd="pyspark",
+                use_service_auth_token=True,
+            )
+            configure_and_run_docker_container(
+                args=args,
+                docker_img="fake-registry/fake-service",
+                instance_config=self.instance_config,
+                system_paasta_config=self.system_paasta_config,
+                aws_creds=("id", "secret", "token"),
+                spark_conf=spark_conf,
+                cluster_manager=spark_run.CLUSTER_MANAGER_K8S,
+                pod_template_path="unique-run",
+            )
+            args, kwargs = mock_run_docker_container.call_args
+            assert kwargs["environment"]["YELP_SVC_AUTHZ_TOKEN"] == "foobar"
+
 
 @pytest.mark.parametrize(
     "cmd,expected_name",
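One detail worth calling out in the test changes: every pre-existing test gained an explicit args.use_service_auth_token = False (or the equivalent MagicMock keyword argument). That is necessary because reading any attribute off a mock.MagicMock auto-creates another MagicMock, which is truthy, so the new guard in configure_and_run_docker_container would otherwise fire in every test and attempt a real token fetch. A minimal sketch of the pitfall, using only the standard library:

    from unittest import mock

    args = mock.MagicMock()

    # Attribute access auto-creates a child MagicMock, and MagicMock is truthy.
    assert bool(args.use_service_auth_token) is True

    # Pinning the attribute restores the behavior a real argparse Namespace would have.
    args.use_service_auth_token = False
    assert bool(args.use_service_auth_token) is False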
