Commit cdab25b

Update submit_job_to_cluster.py (#1708)
Switch the region to the new 'global' region and remove the now-unnecessary get_region_from_zone() helper.
jameswinegar authored and engelke committed Nov 20, 2018
1 parent 0645553 commit cdab25b
Showing 1 changed file with 1 addition and 9 deletions.
10 changes: 1 addition & 9 deletions dataproc/submit_job_to_cluster.py
@@ -34,14 +34,6 @@ def get_pyspark_file(filename):
     return f, os.path.basename(filename)
 
 
-def get_region_from_zone(zone):
-    try:
-        region_as_list = zone.split('-')[:-1]
-        return '-'.join(region_as_list)
-    except (AttributeError, IndexError, ValueError):
-        raise ValueError('Invalid zone provided, please check your input.')
-
-
 def upload_pyspark_file(project_id, bucket_name, filename, file):
     """Uploads the PySpark file in this directory to the configured
     input bucket."""
@@ -199,7 +191,7 @@ def get_client():
 def main(project_id, zone, cluster_name, bucket_name,
          pyspark_file=None, create_new_cluster=True):
     dataproc = get_client()
-    region = get_region_from_zone(zone)
+    region = 'global'
     try:
         if pyspark_file:
             spark_file, spark_filename = get_pyspark_file(pyspark_file)
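
For context: the removed helper derived a regional endpoint from the cluster's zone (for example, 'us-central1-a' would map to 'us-central1'); after this change the sample always targets Dataproc's multi-regional 'global' endpoint, so the zone no longer needs to be parsed. Below is a minimal, hypothetical sketch of how a region value like 'global' is typically passed to the Dataproc v1 REST API via google-api-python-client; it assumes the sample's get_client() builds the client with googleapiclient.discovery.build('dataproc', 'v1'), which is not shown in this diff.

# Minimal sketch, not part of this commit. Assumes a Dataproc v1 client built
# with google-api-python-client, as the sample's get_client() is expected to do.
import googleapiclient.discovery


def list_clusters_sketch(project_id, region='global'):
    """List Dataproc clusters for a project in the given region.

    With region='global', the request goes to the multi-regional Dataproc
    endpoint instead of a zone-derived regional endpoint.
    """
    dataproc = googleapiclient.discovery.build('dataproc', 'v1')
    request = dataproc.projects().regions().clusters().list(
        projectId=project_id, region=region)
    return request.execute()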
