Commit

cmd-cloud-prune: Fix indent-level for json and compare only builds data
For consistency, keep the JSON indent level at 4, and compare only the
respective JSON's builds data instead of the whole file, since we update
the timestamp in builds.json in save_builds_json. Also, double-check that
the size of the builds array is the same after modifying it.
gursewak1997 authored and jlebon committed Aug 28, 2024
1 parent 77118ab commit 23f0d16
Showing 1 changed file with 9 additions and 4 deletions.
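
To illustrate the comparison change described above, here is a minimal standalone sketch (not code from the repository) using made-up data shaped like builds.json, with a 'builds' list and a top-level 'timestamp'. Because save_builds_json refreshes the timestamp on every run, comparing the whole documents always reports a difference; comparing only the builds data does not.

# Minimal sketch with made-up data; only the 'builds'/'timestamp' layout is
# assumed from the diff below, nothing else comes from the repository.
builds_json_source_data = {
    "builds": [{"id": "39.20240407.3.0", "arches": ["x86_64"]}],
    "timestamp": "2024-08-27T10:00:00Z",
}
current_builds_json = {
    "builds": [{"id": "39.20240407.3.0", "arches": ["x86_64"]}],
    "timestamp": "2024-08-28T10:00:00Z",  # refreshed by save_builds_json
}

# Comparing the whole documents always flags the new timestamp as a change...
print(builds_json_source_data == current_builds_json)                              # False
# ...while comparing only the builds data ignores the timestamp churn.
print(builds_json_source_data.get('builds') == current_builds_json.get('builds'))  # True
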
13 changes: 9 additions & 4 deletions src/cmd-cloud-prune
@@ -203,7 +203,7 @@ def get_json_from_s3(s3, bucket, key):
 def save_builds_json(builds_json_data, location):
     builds_json_data["timestamp"] = datetime.datetime.now(pytz.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
     with open(location, "w") as json_file:
-        json.dump(builds_json_data, json_file, indent=2)
+        json.dump(builds_json_data, json_file, indent=4)


 def handle_upload_builds_json(s3_client, bucket, prefix, dry_run, acl):
@@ -217,7 +217,7 @@ def handle_upload_builds_json(s3_client, bucket, prefix, dry_run, acl):

     # If there are no changes to the local builds/builds.json we won't need to upload
     # anything to the s3 bucket. Will return in this scenario.
-    if builds_json_source_data == current_builds_json:
+    if builds_json_source_data.get('builds') == current_builds_json.get('builds'):
         print("There are no changes to the local builds/builds.json. No upload needed")
         return

@@ -233,11 +233,16 @@ def handle_upload_builds_json(s3_client, bucket, prefix, dry_run, acl):

     # Print the updated builds.json before the s3 update
     with open(BUILDFILES['list'], 'r') as file:
-        data = json.load(file)
+        updated_builds_json = json.load(file)
     print("----")
-    print(json.dumps(data, indent=4))
+    print(json.dumps(updated_builds_json, indent=4))
     print("----")

+    with open(BUILDFILES['sourcedata'], 'r') as file:
+        builds_json_source_data = json.load(file)
+    # Make sure the size of the builds array is the same in the original and our modified builds.json
+    assert len(builds_json_source_data.get('builds')) == len(updated_builds_json.get('builds'))
+
     # Before uploading builds.json, copy the updated tmp/builds-source.json as builds.json.bak as a backup
     s3_copy(s3_client, BUILDFILES['sourcedata'], bucket, f'{prefix}/builds.json.bak', CACHE_MAX_AGE_METADATA, acl, extra_args={}, dry_run=dry_run)

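And a short sketch of the new size check, again with made-up data (the extra 'policy-cleanup' field is purely illustrative): the modification is only expected to edit fields inside existing build entries, so the entry count must stay the same, and the assert stops the upload if an entry were ever dropped or duplicated by mistake.

# Made-up before/after builds arrays; mirrors the assert added above.
original_builds = [
    {"id": "39.20240407.3.0"},
    {"id": "39.20240322.3.1"},
]
modified_builds = [
    {"id": "39.20240407.3.0", "policy-cleanup": ["cloud-uploads"]},  # field edited in place
    {"id": "39.20240322.3.1"},
]

# Same number of entries, so this passes; losing an entry would raise
# AssertionError before the truncated file could be uploaded.
assert len(original_builds) == len(modified_builds)
print("builds array size unchanged:", len(modified_builds))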
