From be3b35a828fb47239814b24f3d2e5f2016cbd6ed Mon Sep 17 00:00:00 2001
From: Siddharth Manoj
Date: Thu, 30 Nov 2023 04:49:14 -0500
Subject: [PATCH] fix tests

---
 .../unit/sidecars/live_data_watcher_tests.py | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/tests/unit/sidecars/live_data_watcher_tests.py b/tests/unit/sidecars/live_data_watcher_tests.py
index 828e7026b..09050dc51 100644
--- a/tests/unit/sidecars/live_data_watcher_tests.py
+++ b/tests/unit/sidecars/live_data_watcher_tests.py
@@ -13,13 +13,14 @@
 
 from baseplate.sidecars.live_data_watcher import NodeWatcher
 
+
+BUCKET_NAME = "test_bucket"
+DEFAULT_FILE_KEY = "test_file_key"
 NUM_FILE_SHARDS = 5
 
 
 class NodeWatcherTests(unittest.TestCase):
     mock_s3 = mock_s3()
-    bucket_name = "test_bucket"
-    default_file_key = "test_file_key"
 
     def setUp(self):
         self.mock_s3.start()
@@ -29,16 +30,16 @@ def setUp(self):
         )
 
         s3_data = {"foo_encrypted": "bar_encrypted"}
-        s3_client.create_bucket(Bucket=self.bucket_name)
+        s3_client.create_bucket(Bucket=BUCKET_NAME)
         for file_shard_num in range(NUM_FILE_SHARDS):
             if file_shard_num == 0:
                 # The first copy should just be the original file.
-                sharded_file_key = self.default_file_key
+                sharded_file_key = DEFAULT_FILE_KEY
             else:
                 # All other copies should include the sharded prefix.
-                sharded_file_key = str(file_shard_num) + "/" + self.default_file_key
+                sharded_file_key = str(file_shard_num) + "/" + DEFAULT_FILE_KEY
             s3_client.put_object(
-                Bucket=self.bucket_name,
+                Bucket=BUCKET_NAME,
                 Key=sharded_file_key,
                 Body=json.dumps(s3_data).encode(),
                 SSECustomerKey="test_decryption_key",
@@ -69,7 +70,7 @@ def test_s3_load_type_on_change(self):
             "s3",
             region_name="us-east-1",
         )
-        obj = s3_client.get_object(Bucket=self.bucket_name, Key=self.default_file_key)
+        obj = s3_client.get_object(Bucket=BUCKET_NAME, Key=DEFAULT_FILE_KEY)
         actual_data = obj["Body"].read().decode("utf-8")
         assert actual_data == json.loads(expected_content)
 
@@ -94,15 +95,15 @@ def test_s3_load_type_sharded_on_change(self):
             "s3",
             region_name="us-east-1",
         )
-        obj = s3_client.get_object(Bucket=self.bucket_name, Key=self.default_file_key)
+        obj = s3_client.get_object(Bucket=BUCKET_NAME, Key=DEFAULT_FILE_KEY)
         actual_data = obj["Body"].read().decode("utf-8")
         assert actual_data == json.loads(expected_content)
 
         # Assert that all copies of the file are fetchable and contain the same
         # data as the original.
         for i in range(1, NUM_FILE_SHARDS):
-            file_key = str(i) + "/" + self.default_file_key
-            obj = s3_client.get_object(Bucket=self.bucket_name, Key=file_key)
+            file_key = str(i) + "/" + DEFAULT_FILE_KEY
+            obj = s3_client.get_object(Bucket=BUCKET_NAME, Key=file_key)
             actual_data = obj["Body"].read().decode("utf-8")
             assert actual_data == json.loads(expected_content)