🐛 Fix S3 destination integration tests to use bucket path (#18031)
* Fix S3 destination tests

* Add Changelog

* CR Fixes

* add unit test

* version bump

* fix dockerfile

* auto-bump connector version

Co-authored-by: Edward Gao <[email protected]>
Co-authored-by: Octavia Squidington III <[email protected]>
3 people authored and nataly committed Nov 3, 2022
1 parent bbf9b23 commit 45e2446
Showing 8 changed files with 66 additions and 19 deletions.
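In short: the connection check previously probed S3 using a prefix derived from the bucket name rather than the configured bucket path, so credentials restricted to that path could fail the check even though actual syncs would succeed. The commit threads the bucket path through every check helper and, instead of rejecting paths that end with "/", normalizes them. The pattern below recurs throughout the diff; this is an illustrative sketch, not the connector's code:

// Normalization idiom this commit applies in S3BaseChecks and
// S3StorageOperations: treat "data/sync" and "data/sync/" as the same prefix.
static String asPrefix(final String bucketPath) {
  return bucketPath.endsWith("/") ? bucketPath : bucketPath + "/";
}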
@@ -257,7 +257,7 @@
 - name: S3
   destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362
   dockerRepository: airbyte/destination-s3
-  dockerImageTag: 0.3.16
+  dockerImageTag: 0.3.17
   documentationUrl: https://docs.airbyte.com/integrations/destinations/s3
   icon: s3.svg
   resourceRequirements:
@@ -4664,7 +4664,7 @@
     supported_destination_sync_modes:
     - "append"
     - "overwrite"
-- dockerImage: "airbyte/destination-s3:0.3.16"
+- dockerImage: "airbyte/destination-s3:0.3.17"
   spec:
     documentationUrl: "https://docs.airbyte.com/integrations/destinations/s3"
     connectionSpecification:
@@ -42,9 +42,8 @@ public AirbyteConnectionStatus check(final JsonNode config) {
     try {
       final S3DestinationConfig destinationConfig = configFactory.getS3DestinationConfig(config, storageProvider());
       final AmazonS3 s3Client = destinationConfig.getS3Client();
       final S3StorageOperations storageOperations = new S3StorageOperations(nameTransformer, s3Client, destinationConfig);
 
-      S3BaseChecks.attemptS3WriteAndDelete(storageOperations, destinationConfig, destinationConfig.getBucketName());
-      S3BaseChecks.testIAMUserHasListObjectPermission(s3Client, destinationConfig.getBucketName());
+      S3BaseChecks.attemptS3WriteAndDelete(storageOperations, destinationConfig, destinationConfig.getBucketPath());
       S3BaseChecks.testSingleUpload(s3Client, destinationConfig.getBucketName(), destinationConfig.getBucketPath());
       S3BaseChecks.testMultipartUpload(s3Client, destinationConfig.getBucketName(), destinationConfig.getBucketPath());
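The check method now exercises write, list, and delete inside the configured bucket path. This matters when the IAM user is scoped to a key prefix rather than the whole bucket, as in the hypothetical policy below (placeholder bucket and path, not taken from this PR):

// Hypothetical prefix-scoped IAM policy; "my-bucket" and "data/sync" are
// placeholders. With this policy, probe objects written outside data/sync/
// are denied, while writes under the configured bucket path succeed.
final String examplePolicy = """
    {
      "Version": "2012-10-17",
      "Statement": [
        { "Effect": "Allow",
          "Action": ["s3:PutObject", "s3:GetObject", "s3:DeleteObject"],
          "Resource": "arn:aws:s3:::my-bucket/data/sync/*" },
        { "Effect": "Allow",
          "Action": "s3:ListBucket",
          "Resource": "arn:aws:s3:::my-bucket" }
      ]
    }
    """;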
@@ -37,10 +37,8 @@ public static void attemptS3WriteAndDelete(final S3StorageOperations storageOperations,
 
   public static void testSingleUpload(final AmazonS3 s3Client, final String bucketName, final String bucketPath) {
     LOGGER.info("Started testing if all required credentials assigned to user for single file uploading");
-    if (bucketPath.endsWith("/")) {
-      throw new RuntimeException("Bucket Path should not end with /");
-    }
-    final String testFile = bucketPath + "/" + "test_" + System.currentTimeMillis();
+    final var prefix = bucketPath.endsWith("/") ? bucketPath : bucketPath + "/";
+    final String testFile = prefix + "test_" + System.currentTimeMillis();
     try {
       s3Client.putObject(bucketName, testFile, "this is a test file");
     } finally {
@@ -51,10 +49,8 @@ public static void testSingleUpload(final AmazonS3 s3Client, final String bucketName, final String bucketPath) {
 
   public static void testMultipartUpload(final AmazonS3 s3Client, final String bucketName, final String bucketPath) throws IOException {
     LOGGER.info("Started testing if all required credentials assigned to user for multipart upload");
-    if (bucketPath.endsWith("/")) {
-      throw new RuntimeException("Bucket Path should not end with /");
-    }
-    final String testFile = bucketPath + "/" + "test_" + System.currentTimeMillis();
+    final var prefix = bucketPath.endsWith("/") ? bucketPath : bucketPath + "/";
+    final String testFile = prefix + "test_" + System.currentTimeMillis();
     final StreamTransferManager manager = StreamTransferManagerFactory.create(bucketName, testFile, s3Client).get();
     boolean success = false;
     try (final MultiPartOutputStream outputStream = manager.getMultiPartOutputStreams().get(0);
@@ -96,7 +92,7 @@ static void attemptS3WriteAndDelete(final S3StorageOperations storageOperations,
                                       final S3DestinationConfig s3Config,
                                       final String bucketPath,
                                       final AmazonS3 s3) {
-    final var prefix = bucketPath.isEmpty() ? "" : bucketPath + (bucketPath.endsWith("/") ? "" : "/");
+    final var prefix = bucketPath.endsWith("/") ? bucketPath : bucketPath + "/";
     final String outputTableName = prefix + "_airbyte_connection_test_" + UUID.randomUUID().toString().replaceAll("-", "");
     attemptWriteAndDeleteS3Object(storageOperations, s3Config, outputTableName, s3);
   }
@@ -106,8 +102,9 @@ private static void attemptWriteAndDeleteS3Object(final S3StorageOperations storageOperations,
                                                     final String outputTableName,
                                                     final AmazonS3 s3) {
     final var s3Bucket = s3Config.getBucketName();
+    final var bucketPath = s3Config.getBucketPath();
 
-    storageOperations.createBucketObjectIfNotExists(s3Bucket);
+    storageOperations.createBucketObjectIfNotExists(bucketPath);
     s3.putObject(s3Bucket, outputTableName, "check-content");
     testIAMUserHasListObjectPermission(s3, s3Bucket);
     s3.deleteObject(s3Bucket, outputTableName);
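Net effect of the S3BaseChecks changes: the old guard that threw RuntimeException("Bucket Path should not end with /") is gone, and both spellings of a bucket path resolve to the same key prefix. A self-contained sketch with hypothetical path values:

// Demo: both spellings of a bucket path now yield the same key prefix.
public final class PrefixDemo {
  public static void main(final String[] args) {
    for (final String bucketPath : new String[] {"data/sync", "data/sync/"}) {
      final var prefix = bucketPath.endsWith("/") ? bucketPath : bucketPath + "/";
      // prints e.g. data/sync/test_1667... for both inputs
      System.out.println(prefix + "test_" + System.currentTimeMillis());
    }
  }
}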
@@ -96,14 +96,15 @@ public String getBucketObjectPath(final String namespace, final String streamNam
   @Override
   public void createBucketObjectIfNotExists(final String objectPath) {
     final String bucket = s3Config.getBucketName();
+    final String folderPath = objectPath.endsWith("/") ? objectPath : objectPath + "/";
     if (!doesBucketExist(bucket)) {
       LOGGER.info("Bucket {} does not exist; creating...", bucket);
       s3Client.createBucket(bucket);
       LOGGER.info("Bucket {} has been created.", bucket);
     }
-    if (!s3Client.doesObjectExist(bucket, objectPath)) {
+    if (!s3Client.doesObjectExist(bucket, folderPath)) {
       LOGGER.info("Storage Object {}/{} does not exist in bucket; creating...", bucket, objectPath);
-      s3Client.putObject(bucket, objectPath.endsWith("/") ? objectPath : objectPath + "/", "");
+      s3Client.putObject(bucket, folderPath, "");
       LOGGER.info("Storage Object {}/{} has been created in bucket.", bucket, objectPath);
     }
   }
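The subtle fix in this hunk is that the existence check and the write now use the same slash-terminated key, folderPath. Previously the code checked doesObjectExist(bucket, objectPath) but wrote objectPath + "/", so a slash-free path was never found and the marker object was re-created on every call. For background, an S3 "folder" is just a zero-byte object whose key ends in "/"; a sketch under placeholder names:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

// Sketch of the folder-marker convention (AWS SDK v1); bucket and path are
// placeholders, and default credentials/region are assumed.
public final class FolderMarkerDemo {
  public static void main(final String[] args) {
    final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
    final String bucket = "my-bucket";
    final String folderPath = "data/sync/";    // always slash-terminated
    if (!s3.doesObjectExist(bucket, folderPath)) {
      s3.putObject(bucket, folderPath, "");    // zero-byte marker object
    }
  }
}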
@@ -0,0 +1,49 @@
package io.airbyte.integrations.destination.s3;

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.matches;
import static org.mockito.ArgumentMatchers.startsWith;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import io.airbyte.integrations.destination.s3.util.S3NameTransformer;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatchers;

public class S3BaseChecksTest {

private AmazonS3 s3Client;

@BeforeEach
public void setup() {
s3Client = mock(AmazonS3.class);
}

@Test
public void attemptWriteAndDeleteS3Object_should_createSpecificFiles() {
S3DestinationConfig config = new S3DestinationConfig(
null,
"test_bucket",
"test/bucket/path",
null,
null,
null,
null,
s3Client
);
S3StorageOperations operations = new S3StorageOperations(new S3NameTransformer(), s3Client, config);
when(s3Client.doesObjectExist("test_bucket", "test/bucket/path/")).thenReturn(false);

S3BaseChecks.attemptS3WriteAndDelete(operations, config, "test/bucket/path");

verify(s3Client).putObject("test_bucket", "test/bucket/path/", "");
verify(s3Client).putObject(eq("test_bucket"), startsWith("test/bucket/path/_airbyte_connection_test_"), anyString());
verify(s3Client).listObjects(ArgumentMatchers.<ListObjectsRequest>argThat(request -> "test_bucket".equals(request.getBucketName())));
verify(s3Client).deleteObject(eq("test_bucket"), startsWith("test/bucket/path/_airbyte_connection_test_"));
}
}
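One orientation note on the new test: S3DestinationConfig is built with eight positional arguments, and the constructor itself is not part of this diff. The mapping below is an assumption for readability only; the test relies just on the bucket name, the bucket path, and the mocked client.

// Assumed parameter order (not shown in this diff; verify against the class):
// new S3DestinationConfig(endpoint, bucketName, bucketPath, bucketRegion,
//     pathFormat, credentialConfig, formatConfig, s3Client);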
6 changes: 3 additions & 3 deletions airbyte-integrations/connectors/destination-s3/Dockerfile
@@ -22,8 +22,8 @@ RUN /bin/bash -c 'set -e && \
     yum install lzop lzo lzo-dev -y; \
   elif [ "$ARCH" == "aarch64" ] || [ "$ARCH" = "arm64" ]; then \
     echo "$ARCH" && \
-    yum group install -y "Development Tools" \
-    yum install lzop lzo lzo-dev wget curl unzip zip maven git -y; \
+    yum group install -y "Development Tools"; \
+    yum install lzop lzo lzo-dev wget curl unzip zip maven git which -y; \
     wget http://www.oberhumer.com/opensource/lzo/download/lzo-2.10.tar.gz -P /tmp; \
     cd /tmp && tar xvfz lzo-2.10.tar.gz; \
     cd /tmp/lzo-2.10/ && ./configure --enable-shared --prefix /usr/local/lzo-2.10; \
@@ -40,5 +40,5 @@ RUN /bin/bash -c 'set -e && \
     echo "unknown arch" ;\
   fi'
 
-LABEL io.airbyte.version=0.3.16
+LABEL io.airbyte.version=0.3.17
 LABEL io.airbyte.name=airbyte/destination-s3
1 change: 1 addition & 0 deletions docs/integrations/destinations/s3.md
@@ -323,6 +323,7 @@ In order for everything to work correctly, it is also necessary that the user wh
 
 | Version | Date       | Pull Request                                               | Subject                                                                                                                    |
 |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------|
+| 0.3.17  | 2022-10-15 | [\#18031](https://github.com/airbytehq/airbyte/pull/18031) | Fix integration tests to use bucket path                                                                                   |
 | 0.3.16  | 2022-10-03 | [\#17340](https://github.com/airbytehq/airbyte/pull/17340) | Enforced encrypted only traffic to S3 buckets and check logic                                                              |
 | 0.3.15  | 2022-09-01 | [\#16243](https://github.com/airbytehq/airbyte/pull/16243) | Fix Json to Avro conversion when there is field name clash from combined restrictions (`anyOf`, `oneOf`, `allOf` fields).  |
 | 0.3.14  | 2022-08-24 | [\#15207](https://github.com/airbytehq/airbyte/pull/15207) | Fix S3 bucket path to be used for check.                                                                                   |
