Skip to content

Commit

Permalink
fix tests
Browse files — browse the repository at this point in the history
  • Loading branch information
Siddharth Manoj committed Nov 30, 2023
1 parent ab92e17 commit be3b35a
Showing 1 changed file with 11 additions and 10 deletions.
21 changes: 11 additions & 10 deletions tests/unit/sidecars/live_data_watcher_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,14 @@

from baseplate.sidecars.live_data_watcher import NodeWatcher


BUCKET_NAME = "test_bucket"
DEFAULT_FILE_KEY = "test_file_key"
NUM_FILE_SHARDS = 5


class NodeWatcherTests(unittest.TestCase):
mock_s3 = mock_s3()
bucket_name = "test_bucket"
default_file_key = "test_file_key"

def setUp(self):
self.mock_s3.start()
Expand All @@ -29,16 +30,16 @@ def setUp(self):
)
s3_data = {"foo_encrypted": "bar_encrypted"}

s3_client.create_bucket(Bucket=self.bucket_name)
s3_client.create_bucket(Bucket=BUCKET_NAME)
for file_shard_num in range(NUM_FILE_SHARDS):
if file_shard_num == 0:
# The first copy should just be the original file.
sharded_file_key = self.default_file_key
sharded_file_key = DEFAULT_FILE_KEY
else:
# All other copies should include the sharded prefix.
sharded_file_key = str(file_shard_num) + "/" + self.default_file_key
sharded_file_key = str(file_shard_num) + "/" + DEFAULT_FILE_KEY
s3_client.put_object(
Bucket=self.bucket_name,
Bucket=BUCKET_NAME,
Key=sharded_file_key,
Body=json.dumps(s3_data).encode(),
SSECustomerKey="test_decryption_key",
Expand Down Expand Up @@ -69,7 +70,7 @@ def test_s3_load_type_on_change(self):
"s3",
region_name="us-east-1",
)
obj = s3_client.get_object(Bucket=self.bucket_name, Key=self.default_file_key)
obj = s3_client.get_object(Bucket=BUCKET_NAME, Key=DEFAULT_FILE_KEY)
actual_data = obj["Body"].read().decode("utf-8")
assert actual_data == json.loads(expected_content)

Expand All @@ -94,15 +95,15 @@ def test_s3_load_type_sharded_on_change(self):
"s3",
region_name="us-east-1",
)
obj = s3_client.get_object(Bucket=self.bucket_name, Key=self.default_file_key)
obj = s3_client.get_object(Bucket=BUCKET_NAME, Key=DEFAULT_FILE_KEY)
actual_data = obj["Body"].read().decode("utf-8")
assert actual_data == json.loads(expected_content)

# Assert that all copies of the file are fetchable and contain the same
# data as the original.
for i in range(1, NUM_FILE_SHARDS):
file_key = str(i) + "/" + self.default_file_key
obj = s3_client.get_object(Bucket=self.bucket_name, Key=file_key)
file_key = str(i) + "/" + DEFAULT_FILE_KEY
obj = s3_client.get_object(Bucket=BUCKET_NAME, Key=file_key)
actual_data = obj["Body"].read().decode("utf-8")
assert actual_data == json.loads(expected_content)

Expand Down

0 comments on commit be3b35a

Please sign in to comment.