diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
index d84582706ba2..b05591295659 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
@@ -40,7 +40,7 @@
 - name: BigQuery
   destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133
   dockerRepository: airbyte/destination-bigquery
-  dockerImageTag: 1.2.11
+  dockerImageTag: 1.2.12
   documentationUrl: https://docs.airbyte.com/integrations/destinations/bigquery
   icon: bigquery.svg
   normalizationConfig:
@@ -139,7 +139,7 @@
 - name: Google Cloud Storage (GCS)
   destinationDefinitionId: ca8f6566-e555-4b40-943a-545bf123117a
   dockerRepository: airbyte/destination-gcs
-  dockerImageTag: 0.2.12
+  dockerImageTag: 0.2.13
   documentationUrl: https://docs.airbyte.com/integrations/destinations/gcs
   icon: googlecloudstorage.svg
   resourceRequirements:
@@ -290,7 +290,7 @@
 - name: Redshift
   destinationDefinitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc
   dockerRepository: airbyte/destination-redshift
-  dockerImageTag: 0.3.53
+  dockerImageTag: 0.3.54
   documentationUrl: https://docs.airbyte.com/integrations/destinations/redshift
   icon: redshift.svg
   normalizationConfig:
@@ -321,7 +321,7 @@
 - name: S3
   destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362
   dockerRepository: airbyte/destination-s3
-  dockerImageTag: 0.3.18
+  dockerImageTag: 0.3.19
   documentationUrl: https://docs.airbyte.com/integrations/destinations/s3
   icon: s3.svg
   resourceRequirements:
@@ -348,7 +348,7 @@
 - name: Snowflake
   destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba
   dockerRepository: airbyte/destination-snowflake
-  dockerImageTag: 0.4.43
+  dockerImageTag: 0.4.44
   documentationUrl: https://docs.airbyte.com/integrations/destinations/snowflake
   icon: snowflake.svg
   normalizationConfig:
diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
index aa768b304061..7f34e408d5b9 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
@@ -621,7 +621,7 @@
     supported_destination_sync_modes:
     - "overwrite"
     - "append"
-- dockerImage: "airbyte/destination-bigquery:1.2.11"
+- dockerImage: "airbyte/destination-bigquery:1.2.12"
   spec:
     documentationUrl: "https://docs.airbyte.com/integrations/destinations/bigquery"
     connectionSpecification:
@@ -2325,7 +2325,7 @@
     supported_destination_sync_modes:
     - "overwrite"
     - "append"
-- dockerImage: "airbyte/destination-gcs:0.2.12"
+- dockerImage: "airbyte/destination-gcs:0.2.13"
   spec:
     documentationUrl: "https://docs.airbyte.com/integrations/destinations/gcs"
     connectionSpecification:
@@ -5123,7 +5123,7 @@
     supported_destination_sync_modes:
    - "overwrite"
    - "append"
-- dockerImage: "airbyte/destination-redshift:0.3.53"
+- dockerImage: "airbyte/destination-redshift:0.3.54"
   spec:
     documentationUrl: "https://docs.airbyte.com/integrations/destinations/redshift"
     connectionSpecification:
@@ -5492,7 +5492,7 @@
     supported_destination_sync_modes:
     - "append"
     - "overwrite"
-- dockerImage: "airbyte/destination-s3:0.3.18"
+- dockerImage: "airbyte/destination-s3:0.3.19"
  spec:
     documentationUrl: "https://docs.airbyte.com/integrations/destinations/s3"
     connectionSpecification:
@@ -6109,7 +6109,7 @@
     supported_destination_sync_modes:
     - "overwrite"
     - "append"
-- dockerImage: "airbyte/destination-snowflake:0.4.43"
"airbyte/destination-snowflake:0.4.43" +- dockerImage: "airbyte/destination-snowflake:0.4.44" spec: documentationUrl: "https://docs.airbyte.com/integrations/destinations/snowflake" connectionSpecification: diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/BlobStorageOperations.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/BlobStorageOperations.java index d131470e976e..96681b4216f8 100644 --- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/BlobStorageOperations.java +++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/BlobStorageOperations.java @@ -22,9 +22,9 @@ protected BlobStorageOperations() { public abstract String getBucketObjectPath(String namespace, String streamName, DateTime writeDatetime, String customFormat); /** - * Create a storage object where to store data in the destination for a @param objectPath + * Ensure that the bucket specified in the config exists */ - public abstract void createBucketObjectIfNotExists(String objectPath) throws Exception; + public abstract void createBucketIfNotExists() throws Exception; /** * Upload the data files into the storage area. diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3BaseChecks.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3BaseChecks.java index b5f2037e842b..0873cbb87a01 100644 --- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3BaseChecks.java +++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3BaseChecks.java @@ -120,7 +120,7 @@ private static void attemptWriteAndDeleteS3Object(final S3StorageOperations stor final var bucketPath = s3Config.getBucketPath(); if (!Strings.isNullOrEmpty(bucketPath)) { - storageOperations.createBucketObjectIfNotExists(bucketPath); + storageOperations.createBucketIfNotExists(); } s3.putObject(s3Bucket, outputTableName, "check-content"); testIAMUserHasListObjectPermission(s3, s3Bucket); diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java index 599463435142..ba9f3a19ceab 100644 --- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java +++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java @@ -8,6 +8,7 @@ import alex.mojaki.s3upload.StreamTransferManager; import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; import com.amazonaws.services.s3.model.ListObjectsRequest; @@ -15,12 +16,14 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.string.Strings; import io.airbyte.integrations.destination.NamingConventionTransformer; import io.airbyte.integrations.destination.record_buffer.SerializableBuffer; import 
diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java
index 599463435142..ba9f3a19ceab 100644
--- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java
+++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3StorageOperations.java
@@ -8,6 +8,7 @@
 
 import alex.mojaki.s3upload.StreamTransferManager;
 import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.model.AmazonS3Exception;
 import com.amazonaws.services.s3.model.DeleteObjectsRequest;
 import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion;
 import com.amazonaws.services.s3.model.ListObjectsRequest;
@@ -15,12 +16,14 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableMap;
+import io.airbyte.commons.exceptions.ConfigErrorException;
 import io.airbyte.commons.string.Strings;
 import io.airbyte.integrations.destination.NamingConventionTransformer;
 import io.airbyte.integrations.destination.record_buffer.SerializableBuffer;
 import io.airbyte.integrations.destination.s3.template.S3FilenameTemplateManager;
 import io.airbyte.integrations.destination.s3.template.S3FilenameTemplateParameterObject;
 import io.airbyte.integrations.destination.s3.util.StreamTransferManagerFactory;
+import io.airbyte.integrations.util.ConnectorExceptionUtil;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -95,23 +98,15 @@ public String getBucketObjectPath(final String namespace, final String streamNam
 
   /**
-   * Create a directory object at the specified location. Creates the bucket if necessary.
-   *
-   * @param objectPath The directory to create. Must be a nonempty string.
+   * Create the bucket in the destination if it does not already exist.
    */
   @Override
-  public void createBucketObjectIfNotExists(final String objectPath) {
+  public void createBucketIfNotExists() {
     final String bucket = s3Config.getBucketName();
-    final String folderPath = objectPath.endsWith("/") ? objectPath : objectPath + "/";
     if (!doesBucketExist(bucket)) {
       LOGGER.info("Bucket {} does not exist; creating...", bucket);
       s3Client.createBucket(bucket);
       LOGGER.info("Bucket {} has been created.", bucket);
     }
-    if (!s3Client.doesObjectExist(bucket, folderPath)) {
-      LOGGER.info("Storage Object {}/{} does not exist in bucket; creating...", bucket, objectPath);
-      s3Client.putObject(bucket, folderPath, "");
-      LOGGER.info("Storage Object {}/{} has been created in bucket.", bucket, objectPath);
-    }
   }
 
   protected boolean doesBucketExist(final String bucket) {
@@ -138,7 +133,18 @@ public String uploadRecordsToBucket(final SerializableBuffer recordsData,
         exceptionsThrown.add(e);
       }
     }
-    throw new RuntimeException(String.format("Exceptions thrown while uploading records into storage: %s", Strings.join(exceptionsThrown, "\n")));
+    // Verifying that ALL exceptions are authentication related before assuming this is a
+    // configuration issue reduces the risk of misidentifying errors or reporting a transient
+    // error.
+    final boolean areAllExceptionsAuthExceptions = exceptionsThrown.stream().filter(e -> e instanceof AmazonS3Exception)
+        .map(s3e -> ((AmazonS3Exception) s3e).getStatusCode())
+        .filter(ConnectorExceptionUtil.HTTP_AUTHENTICATION_ERROR_CODES::contains)
+        .count() == exceptionsThrown.size();
+    if (areAllExceptionsAuthExceptions) {
+      throw new ConfigErrorException(exceptionsThrown.get(0).getMessage(), exceptionsThrown.get(0));
+    } else {
+      throw new RuntimeException(String.format("Exceptions thrown while uploading records into storage: %s", Strings.join(exceptionsThrown, "\n")));
+    }
   }
 
   /**
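
Reviewer note: the classification above is deliberately all-or-nothing. A small, self-contained sketch of the rule (`AuthErrorClassificationSketch` and the sample messages are hypothetical; `AmazonS3Exception` and its inherited status-code accessors are real AWS SDK v1 API):

    import com.amazonaws.services.s3.model.AmazonS3Exception;
    import java.util.List;

    public final class AuthErrorClassificationSketch {

      private static final List<Integer> HTTP_AUTHENTICATION_ERROR_CODES = List.of(401, 403);

      // True only when every exception is an AmazonS3Exception carrying a 401/403
      // status; a single non-auth failure makes the count fall short of the list size.
      static boolean allAuthRelated(final List<Exception> exceptionsThrown) {
        return exceptionsThrown.stream()
            .filter(e -> e instanceof AmazonS3Exception)
            .map(e -> ((AmazonS3Exception) e).getStatusCode())
            .filter(HTTP_AUTHENTICATION_ERROR_CODES::contains)
            .count() == exceptionsThrown.size();
      }

      public static void main(final String[] args) {
        final AmazonS3Exception forbidden = new AmazonS3Exception("Access Denied");
        forbidden.setStatusCode(403);

        System.out.println(allAuthRelated(List.<Exception>of(forbidden)));                                  // true
        System.out.println(allAuthRelated(List.<Exception>of(forbidden, new RuntimeException("timeout")))); // false
      }
    }

A mixed batch — say one 403 plus one socket timeout — fails the `count()` comparison and still surfaces as a generic RuntimeException, which matches the comment's intent of not misreporting transient errors as config errors.
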
" + "Please see https://docs.airbyte.com/integrations/sources/postgres/#sync-data-from-postgres-hot-standby-server for options and workarounds"; + + public static final List HTTP_AUTHENTICATION_ERROR_CODES = ImmutableList.of(401, 403); private static final List> configErrorPredicates = List.of(getConfigErrorPredicate(), getConnectionErrorPredicate(), isRecoveryConnectionExceptionPredicate(), isUnknownColumnInFieldListException()); diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile index 1cfd9ee045d5..e6cd0710659c 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.2.11 +LABEL io.airbyte.version=1.2.12 LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized diff --git a/airbyte-integrations/connectors/destination-bigquery/Dockerfile b/airbyte-integrations/connectors/destination-bigquery/Dockerfile index f79d950d1d7f..5e0eba63c40d 100644 --- a/airbyte-integrations/connectors/destination-bigquery/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.2.11 +LABEL io.airbyte.version=1.2.12 LABEL io.airbyte.name=airbyte/destination-bigquery diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java index 51a269725bef..e955d005b871 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java @@ -13,11 +13,14 @@ import com.google.cloud.bigquery.LoadJobConfiguration; import com.google.cloud.bigquery.Schema; import com.google.cloud.bigquery.TableId; +import com.google.common.collect.ImmutableList; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.integrations.destination.StandardNameTransformer; import io.airbyte.integrations.destination.bigquery.uploader.AbstractBigQueryUploader; import io.airbyte.integrations.destination.gcs.GcsDestinationConfig; import io.airbyte.integrations.destination.gcs.GcsStorageOperations; import io.airbyte.integrations.destination.record_buffer.SerializableBuffer; +import io.airbyte.integrations.util.ConnectorExceptionUtil; import io.airbyte.protocol.models.v0.DestinationSyncMode; import java.util.HashSet; import java.util.List; @@ -84,7 +87,15 @@ public String getStagingFullPath(final String datasetId, final String stream) { public void createSchemaIfNotExists(final String datasetId, final String datasetLocation) { if (!existingSchemas.contains(datasetId)) { LOGGER.info("Creating dataset {}", datasetId); - BigQueryUtils.getOrCreateDataset(bigQuery, datasetId, datasetLocation); + try { + BigQueryUtils.getOrCreateDataset(bigQuery, datasetId, datasetLocation); + } catch (BigQueryException e) { + if (ConnectorExceptionUtil.HTTP_AUTHENTICATION_ERROR_CODES.contains(e.getCode())) { + throw new 
diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile
index 1cfd9ee045d5..e6cd0710659c 100644
--- a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile
+++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile
@@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true
 
 COPY --from=build /airbyte /airbyte
 
-LABEL io.airbyte.version=1.2.11
+LABEL io.airbyte.version=1.2.12
 LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized
diff --git a/airbyte-integrations/connectors/destination-bigquery/Dockerfile b/airbyte-integrations/connectors/destination-bigquery/Dockerfile
index f79d950d1d7f..5e0eba63c40d 100644
--- a/airbyte-integrations/connectors/destination-bigquery/Dockerfile
+++ b/airbyte-integrations/connectors/destination-bigquery/Dockerfile
@@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true
 
 COPY --from=build /airbyte /airbyte
 
-LABEL io.airbyte.version=1.2.11
+LABEL io.airbyte.version=1.2.12
 LABEL io.airbyte.name=airbyte/destination-bigquery
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java
index 51a269725bef..e955d005b871 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java
+++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java
@@ -13,11 +13,14 @@
 import com.google.cloud.bigquery.LoadJobConfiguration;
 import com.google.cloud.bigquery.Schema;
 import com.google.cloud.bigquery.TableId;
+import com.google.common.collect.ImmutableList;
+import io.airbyte.commons.exceptions.ConfigErrorException;
 import io.airbyte.integrations.destination.StandardNameTransformer;
 import io.airbyte.integrations.destination.bigquery.uploader.AbstractBigQueryUploader;
 import io.airbyte.integrations.destination.gcs.GcsDestinationConfig;
 import io.airbyte.integrations.destination.gcs.GcsStorageOperations;
 import io.airbyte.integrations.destination.record_buffer.SerializableBuffer;
+import io.airbyte.integrations.util.ConnectorExceptionUtil;
 import io.airbyte.protocol.models.v0.DestinationSyncMode;
 import java.util.HashSet;
 import java.util.List;
@@ -84,7 +87,15 @@ public String getStagingFullPath(final String datasetId, final String stream) {
   public void createSchemaIfNotExists(final String datasetId, final String datasetLocation) {
     if (!existingSchemas.contains(datasetId)) {
       LOGGER.info("Creating dataset {}", datasetId);
-      BigQueryUtils.getOrCreateDataset(bigQuery, datasetId, datasetLocation);
+      try {
+        BigQueryUtils.getOrCreateDataset(bigQuery, datasetId, datasetLocation);
+      } catch (BigQueryException e) {
+        if (ConnectorExceptionUtil.HTTP_AUTHENTICATION_ERROR_CODES.contains(e.getCode())) {
+          throw new ConfigErrorException(e.getMessage(), e);
+        } else {
+          throw e;
+        }
+      }
       existingSchemas.add(datasetId);
     }
   }
@@ -99,7 +110,7 @@ public void createTmpTableIfNotExists(final TableId tmpTableId, final Schema tab
   public void createStageIfNotExists(final String datasetId, final String stream) {
     final String objectPath = getStagingFullPath(datasetId, stream);
     LOGGER.info("Creating staging path for stream {} (dataset {}): {}", stream, datasetId, objectPath);
-    gcsStorageOperations.createBucketObjectIfNotExists(objectPath);
+    gcsStorageOperations.createBucketIfNotExists();
   }
 
   @Override
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java
index 065ff49844ce..3f2c05210cf0 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java
+++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java
@@ -84,8 +84,10 @@ class BigQueryDestinationTest {
 
   protected static final Path CREDENTIALS_WITH_GCS_STAGING_PATH = Path.of("secrets/credentials-gcs-staging.json");
-  protected static final Path[] ALL_PATHS = {CREDENTIALS_WITH_GCS_STAGING_PATH, CREDENTIALS_BAD_PROJECT_PATH, CREDENTIALS_NO_DATASET_CREATION_PATH,
+
+  protected static final Path[] ALL_PATHS = {CREDENTIALS_STANDARD_INSERT_PATH, CREDENTIALS_BAD_PROJECT_PATH, CREDENTIALS_NO_DATASET_CREATION_PATH,
       CREDENTIALS_NO_EDIT_PUBLIC_SCHEMA_ROLE_PATH, CREDENTIALS_NON_BILLABLE_PROJECT_PATH, CREDENTIALS_WITH_GCS_STAGING_PATH};
+
   private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryDestinationTest.class);
 
   private static final String DATASET_NAME_PREFIX = "bq_dest_integration_test";
diff --git a/airbyte-integrations/connectors/destination-gcs/Dockerfile b/airbyte-integrations/connectors/destination-gcs/Dockerfile
index c5534e0a9128..f4d34c04bceb 100644
--- a/airbyte-integrations/connectors/destination-gcs/Dockerfile
+++ b/airbyte-integrations/connectors/destination-gcs/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION destination-gcs
 
 COPY --from=build /airbyte /airbyte
 
-LABEL io.airbyte.version=0.2.12
+LABEL io.airbyte.version=0.2.13
 LABEL io.airbyte.name=airbyte/destination-gcs
diff --git a/airbyte-integrations/connectors/destination-redshift/Dockerfile b/airbyte-integrations/connectors/destination-redshift/Dockerfile
index 5f06bd32cc8e..882852e88811 100644
--- a/airbyte-integrations/connectors/destination-redshift/Dockerfile
+++ b/airbyte-integrations/connectors/destination-redshift/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION destination-redshift
 
 COPY --from=build /airbyte /airbyte
 
-LABEL io.airbyte.version=0.3.53
+LABEL io.airbyte.version=0.3.54
 LABEL io.airbyte.name=airbyte/destination-redshift
diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java
index 63abde3f6966..657a582d6eeb 100644
--- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java
+++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/operations/RedshiftS3StagingSqlOperations.java
@@ -77,7 +77,7 @@ public String getStagingPath(UUID connectionId, String namespace, String streamN
   public void createStageIfNotExists(JdbcDatabase database, String stageName) throws Exception {
     final String bucketPath = s3Config.getBucketPath();
     final String prefix = bucketPath.isEmpty() ? "" : bucketPath + (bucketPath.endsWith("/") ? "" : "/");
-    s3StorageOperations.createBucketObjectIfNotExists(prefix + stageName);
+    s3StorageOperations.createBucketIfNotExists();
   }
 
   @Override
diff --git a/airbyte-integrations/connectors/destination-s3/Dockerfile b/airbyte-integrations/connectors/destination-s3/Dockerfile
index e11e5ea30b84..73f794c6b721 100644
--- a/airbyte-integrations/connectors/destination-s3/Dockerfile
+++ b/airbyte-integrations/connectors/destination-s3/Dockerfile
@@ -40,5 +40,5 @@ RUN /bin/bash -c 'set -e && \
     echo "unknown arch" ;\
     fi'
 
-LABEL io.airbyte.version=0.3.18
+LABEL io.airbyte.version=0.3.19
 LABEL io.airbyte.name=airbyte/destination-s3
diff --git a/airbyte-integrations/connectors/destination-snowflake/Dockerfile b/airbyte-integrations/connectors/destination-snowflake/Dockerfile
index 4b8f627b28ea..da6515de1feb 100644
--- a/airbyte-integrations/connectors/destination-snowflake/Dockerfile
+++ b/airbyte-integrations/connectors/destination-snowflake/Dockerfile
@@ -20,5 +20,5 @@ RUN tar xf ${APPLICATION}.tar --strip-components=1
 
 ENV ENABLE_SENTRY true
 
-LABEL io.airbyte.version=0.4.43
+LABEL io.airbyte.version=0.4.44
 LABEL io.airbyte.name=airbyte/destination-snowflake
diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StagingSqlOperations.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StagingSqlOperations.java
index 9cdc3058be4e..e9c40de09116 100644
--- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StagingSqlOperations.java
+++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StagingSqlOperations.java
@@ -82,7 +82,7 @@ public String uploadRecordsToStage(final JdbcDatabase database,
 
   @Override
   public void createStageIfNotExists(final JdbcDatabase database, final String stageName) {
-    s3StorageOperations.createBucketObjectIfNotExists(stageName);
+    s3StorageOperations.createBucketIfNotExists();
   }
 
   @Override
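
Reviewer note: with the Redshift and Snowflake hunks above, every staging implementation touched by this PR (GCS for BigQuery, S3 for Redshift and Snowflake) now delegates stage creation to the same bucket-level call. A test-style sketch of that contract — create the bucket only when it is missing, and never write a placeholder object (JUnit 5 and Mockito assumed available, as elsewhere in the repo; class and bucket names are illustrative):

    import static org.mockito.ArgumentMatchers.anyString;
    import static org.mockito.ArgumentMatchers.eq;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.never;
    import static org.mockito.Mockito.verify;
    import static org.mockito.Mockito.when;

    import com.amazonaws.services.s3.AmazonS3;
    import org.junit.jupiter.api.Test;

    class CreateBucketIfNotExistsSketch {

      @Test
      void createsBucketOnlyWhenMissing() {
        final AmazonS3 s3Client = mock(AmazonS3.class);
        when(s3Client.doesBucketExistV2("staging-bucket")).thenReturn(false);

        // The behavior under test, inlined for the sketch.
        if (!s3Client.doesBucketExistV2("staging-bucket")) {
          s3Client.createBucket("staging-bucket");
        }

        verify(s3Client).createBucket("staging-bucket");
        // The old code wrote a zero-byte "directory" object here; the new code must not.
        verify(s3Client, never()).putObject(eq("staging-bucket"), anyString(), anyString());
      }
    }
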
diff --git a/docs/integrations/destinations/bigquery.md b/docs/integrations/destinations/bigquery.md
index e418918e1f78..6b6759b152f5 100644
--- a/docs/integrations/destinations/bigquery.md
+++ b/docs/integrations/destinations/bigquery.md
@@ -136,6 +136,7 @@ Now that you have set up the BigQuery destination connector, check out the follo
 
 | Version | Date       | Pull Request                                               | Subject                                                          |
 |:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------|
+| 1.2.12 | 2023-01-18 | [#21087](https://github.com/airbytehq/airbyte/pull/21087) | Wrap Authentication Errors as Config Exceptions |
 | 1.2.11 | 2023-01-18 | [#21144](https://github.com/airbytehq/airbyte/pull/21144) | Added explicit error message if sync fails due to a config issue |
 | 1.2.9 | 2022-12-14 | [#20501](https://github.com/airbytehq/airbyte/pull/20501) | Report GCS staging failures that occur during connection check |
 | 1.2.8 | 2022-11-22 | [#19489](https://github.com/airbytehq/airbyte/pull/19489) | Added non-billable projects handle to check connection stage |
diff --git a/docs/integrations/destinations/gcs.md b/docs/integrations/destinations/gcs.md
index aa07d098ed17..e988530da08c 100644
--- a/docs/integrations/destinations/gcs.md
+++ b/docs/integrations/destinations/gcs.md
@@ -235,31 +235,32 @@ Under the hood, an Airbyte data stream in Json schema is first converted to an A
 
 ## CHANGELOG
 
-| Version | Date | Pull Request | Subject |
-|:--------| :--- | :--- | :--- |
-| 0.2.12 | 2022-10-18 | [\#17901](https://github.com/airbytehq/airbyte/pull/17901) | Fix logging to GCS |
-| 0.2.11 | 2022-09-01 | [\#16243](https://github.com/airbytehq/airbyte/pull/16243) | Fix Json to Avro conversion when there is field name clash from combined restrictions (`anyOf`, `oneOf`, `allOf` fields) |
-| 0.2.10 | 2022-08-05 | [\#14801](https://github.com/airbytehq/airbyte/pull/14801) | Fix multiple log bindings |
-| 0.2.9 | 2022-06-24 | [\#14114](https://github.com/airbytehq/airbyte/pull/14114) | Remove "additionalProperties": false from specs for connectors with staging |
-| 0.2.8 | 2022-06-17 | [\#13753](https://github.com/airbytehq/airbyte/pull/13753) | Deprecate and remove PART_SIZE_MB fields from connectors based on StreamTransferManager |
-| 0.2.7 | 2022-06-14 | [\#13483](https://github.com/airbytehq/airbyte/pull/13483) | Added support for int, long, float data types to Avro/Parquet formats. |
-| 0.2.6 | 2022-05-17 | [12820](https://github.com/airbytehq/airbyte/pull/12820) | Improved 'check' operation performance |
-| 0.2.5 | 2022-05-04 | [\#12578](https://github.com/airbytehq/airbyte/pull/12578) | In JSON to Avro conversion, log JSON field values that do not follow Avro schema for debugging. |
-| 0.2.4 | 2022-04-22 | [\#12167](https://github.com/airbytehq/airbyte/pull/12167) | Add gzip compression option for CSV and JSONL formats. |
-| 0.2.3 | 2022-04-22 | [\#11795](https://github.com/airbytehq/airbyte/pull/11795) | Fix the connection check to verify the provided bucket path. |
-| 0.2.2 | 2022-04-05 | [\#11728](https://github.com/airbytehq/airbyte/pull/11728) | Properly clean-up bucket when running OVERWRITE sync mode |
-| 0.2.1 | 2022-04-05 | [\#11499](https://github.com/airbytehq/airbyte/pull/11499) | Updated spec and documentation. |
-| 0.2.0 | 2022-04-04 | [\#11686](https://github.com/airbytehq/airbyte/pull/11686) | Use serialized buffering strategy to reduce memory consumption; compress CSV and JSONL formats. |
-| 0.1.22 | 2022-02-12 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | Add JVM flag to exist on OOME. |
-| 0.1.21 | 2022-02-12 | [\#10299](https://github.com/airbytehq/airbyte/pull/10299) | Fix connection check to require only the necessary permissions. |
-| 0.1.20 | 2022-01-11 | [\#9367](https://github.com/airbytehq/airbyte/pull/9367) | Avro & Parquet: support array field with unknown item type; default any improperly typed field to string. |
-| 0.1.19 | 2022-01-10 | [\#9121](https://github.com/airbytehq/airbyte/pull/9121) | Fixed check method for GCS mode to verify if all roles assigned to user |
-| 0.1.18 | 2021-12-30 | [\#8809](https://github.com/airbytehq/airbyte/pull/8809) | Update connector fields title/description |
-| 0.1.17 | 2021-12-21 | [\#8574](https://github.com/airbytehq/airbyte/pull/8574) | Added namespace to Avro and Parquet record types |
-| 0.1.16 | 2021-12-20 | [\#8974](https://github.com/airbytehq/airbyte/pull/8974) | Release a new version to ensure there is no excessive logging. |
-| 0.1.15 | 2021-12-03 | [\#8386](https://github.com/airbytehq/airbyte/pull/8386) | Add new GCP regions |
-| 0.1.14 | 2021-12-01 | [\#7732](https://github.com/airbytehq/airbyte/pull/7732) | Support timestamp in Avro and Parquet |
-| 0.1.13 | 2021-11-03 | [\#7288](https://github.com/airbytehq/airbyte/issues/7288) | Support Json `additionalProperties`. |
-| 0.1.2 | 2021-09-12 | [\#5720](https://github.com/airbytehq/airbyte/issues/5720) | Added configurable block size for stream. Each stream is limited to 10,000 by GCS |
-| 0.1.1 | 2021-08-26 | [\#5296](https://github.com/airbytehq/airbyte/issues/5296) | Added storing gcsCsvFileLocation property for CSV format. This is used by destination-bigquery \(GCS Staging upload type\) |
-| 0.1.0 | 2021-07-16 | [\#4329](https://github.com/airbytehq/airbyte/pull/4784) | Initial release. |
+| Version | Date | Pull Request | Subject |
+|:--------| :--- |:------------------------------------------------------------| :--- |
+| 0.2.13 | 2023-01-18 | [#21087](https://github.com/airbytehq/airbyte/pull/21087) | Wrap Authentication Errors as Config Exceptions |
+| 0.2.12 | 2022-10-18 | [\#17901](https://github.com/airbytehq/airbyte/pull/17901) | Fix logging to GCS |
+| 0.2.11 | 2022-09-01 | [\#16243](https://github.com/airbytehq/airbyte/pull/16243) | Fix Json to Avro conversion when there is field name clash from combined restrictions (`anyOf`, `oneOf`, `allOf` fields) |
+| 0.2.10 | 2022-08-05 | [\#14801](https://github.com/airbytehq/airbyte/pull/14801) | Fix multiple log bindings |
+| 0.2.9 | 2022-06-24 | [\#14114](https://github.com/airbytehq/airbyte/pull/14114) | Remove "additionalProperties": false from specs for connectors with staging |
+| 0.2.8 | 2022-06-17 | [\#13753](https://github.com/airbytehq/airbyte/pull/13753) | Deprecate and remove PART_SIZE_MB fields from connectors based on StreamTransferManager |
+| 0.2.7 | 2022-06-14 | [\#13483](https://github.com/airbytehq/airbyte/pull/13483) | Added support for int, long, float data types to Avro/Parquet formats. |
+| 0.2.6 | 2022-05-17 | [\#12820](https://github.com/airbytehq/airbyte/pull/12820) | Improved 'check' operation performance |
+| 0.2.5 | 2022-05-04 | [\#12578](https://github.com/airbytehq/airbyte/pull/12578) | In JSON to Avro conversion, log JSON field values that do not follow Avro schema for debugging. |
+| 0.2.4 | 2022-04-22 | [\#12167](https://github.com/airbytehq/airbyte/pull/12167) | Add gzip compression option for CSV and JSONL formats. |
+| 0.2.3 | 2022-04-22 | [\#11795](https://github.com/airbytehq/airbyte/pull/11795) | Fix the connection check to verify the provided bucket path. |
+| 0.2.2 | 2022-04-05 | [\#11728](https://github.com/airbytehq/airbyte/pull/11728) | Properly clean-up bucket when running OVERWRITE sync mode |
+| 0.2.1 | 2022-04-05 | [\#11499](https://github.com/airbytehq/airbyte/pull/11499) | Updated spec and documentation. |
+| 0.2.0 | 2022-04-04 | [\#11686](https://github.com/airbytehq/airbyte/pull/11686) | Use serialized buffering strategy to reduce memory consumption; compress CSV and JSONL formats. |
+| 0.1.22 | 2022-02-12 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | Add JVM flag to exit on OOME. |
+| 0.1.21 | 2022-02-12 | [\#10299](https://github.com/airbytehq/airbyte/pull/10299) | Fix connection check to require only the necessary permissions. |
+| 0.1.20 | 2022-01-11 | [\#9367](https://github.com/airbytehq/airbyte/pull/9367) | Avro & Parquet: support array field with unknown item type; default any improperly typed field to string. |
+| 0.1.19 | 2022-01-10 | [\#9121](https://github.com/airbytehq/airbyte/pull/9121) | Fixed check method for GCS mode to verify if all roles assigned to user |
+| 0.1.18 | 2021-12-30 | [\#8809](https://github.com/airbytehq/airbyte/pull/8809) | Update connector fields title/description |
+| 0.1.17 | 2021-12-21 | [\#8574](https://github.com/airbytehq/airbyte/pull/8574) | Added namespace to Avro and Parquet record types |
+| 0.1.16 | 2021-12-20 | [\#8974](https://github.com/airbytehq/airbyte/pull/8974) | Release a new version to ensure there is no excessive logging. |
+| 0.1.15 | 2021-12-03 | [\#8386](https://github.com/airbytehq/airbyte/pull/8386) | Add new GCP regions |
+| 0.1.14 | 2021-12-01 | [\#7732](https://github.com/airbytehq/airbyte/pull/7732) | Support timestamp in Avro and Parquet |
+| 0.1.13 | 2021-11-03 | [\#7288](https://github.com/airbytehq/airbyte/issues/7288) | Support Json `additionalProperties`. |
+| 0.1.2 | 2021-09-12 | [\#5720](https://github.com/airbytehq/airbyte/issues/5720) | Added configurable block size for stream. Each stream is limited to 10,000 by GCS |
+| 0.1.1 | 2021-08-26 | [\#5296](https://github.com/airbytehq/airbyte/issues/5296) | Added storing gcsCsvFileLocation property for CSV format. This is used by destination-bigquery \(GCS Staging upload type\) |
+| 0.1.0 | 2021-07-16 | [\#4329](https://github.com/airbytehq/airbyte/pull/4784) | Initial release. |
diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md
index a2a7168dcd03..0f2b0ac8e50f 100644
--- a/docs/integrations/destinations/redshift.md
+++ b/docs/integrations/destinations/redshift.md
@@ -141,6 +141,7 @@ Each stream will be output into its own raw table in Redshift. Each table will c
 
 | Version | Date       | Pull Request                                                | Subject                                                         |
 |:--------|:-----------|:------------------------------------------------------------|:-----------------------------------------------------------------|
+| 0.3.54 | 2023-01-18 | [#21087](https://github.com/airbytehq/airbyte/pull/21087) | Wrap Authentication Errors as Config Exceptions |
 | 0.3.53 | 2023-01-03 | [\#17273](https://github.com/airbytehq/airbyte/pull/17273) | Flatten JSON arrays to fix maximum size check for SUPER field |
 | 0.3.52 | 2022-12-30 | [\#20879](https://github.com/airbytehq/airbyte/pull/20879) | Added configurable parameter for number of file buffers |
 | 0.3.51 | 2022-10-26 | [\#18434](https://github.com/airbytehq/airbyte/pull/18434) | Fix empty S3 bucket path handling |
@@ -178,5 +179,3 @@ Each stream will be output into its own raw table in Redshift. Each table will c
 | 0.3.13 | 2021-09-02 | [5745](https://github.com/airbytehq/airbyte/pull/5745) | Disable STATUPDATE flag when using S3 staging to speed up performance |
 | 0.3.12 | 2021-07-21 | [3555](https://github.com/airbytehq/airbyte/pull/3555) | Enable partial checkpointing for halfway syncs |
 | 0.3.11 | 2021-07-20 | [4874](https://github.com/airbytehq/airbyte/pull/4874) | allow `additionalProperties` in connector spec |
-
-
diff --git a/docs/integrations/destinations/s3.md b/docs/integrations/destinations/s3.md
index c642624c6ea3..db26ac102e0d 100644
--- a/docs/integrations/destinations/s3.md
+++ b/docs/integrations/destinations/s3.md
@@ -330,6 +330,7 @@ In order for everything to work correctly, it is also necessary that the user wh
 
 | Version | Date       | Pull Request                                                | Subject                                                        |
 |:--------|:-----------|:------------------------------------------------------------|:----------------------------------------------------------------|
+| 0.3.19 | 2023-01-18 | [#21087](https://github.com/airbytehq/airbyte/pull/21087) | Wrap Authentication Errors as Config Exceptions |
 | 0.3.18 | 2022-12-15 | [\#20088](https://github.com/airbytehq/airbyte/pull/20088) | New data type support v0/v1 |
 | 0.3.17 | 2022-10-15 | [\#18031](https://github.com/airbytehq/airbyte/pull/18031) | Fix integration tests to use bucket path |
 | 0.3.16 | 2022-10-03 | [\#17340](https://github.com/airbytehq/airbyte/pull/17340) | Enforced encrypted only traffic to S3 buckets and check logic |
diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md
index b9c6ba290ce2..1bbadee5053a 100644
--- a/docs/integrations/destinations/snowflake.md
+++ b/docs/integrations/destinations/snowflake.md
@@ -277,7 +277,8 @@ Now that you have set up the Snowflake destination connector, check out the foll
 
 | Version | Date       | Pull Request                                                | Subject                                                           |
 |:--------|:-----------|:-------------------------------------------------------------|:--------------------------------------------------------------------|
-| 0.4.43 | 2023-01-20 | [\#21450](https://github.com/airbytehq/airbyte/pull/21450) | Updated Check methods to handle more possible s3 and gcs stagings issues |
+| 0.4.44 | 2023-01-20 | [#21087](https://github.com/airbytehq/airbyte/pull/21087) | Wrap Authentication Errors as Config Exceptions |
+| 0.4.43 | 2023-01-20 | [\#21450](https://github.com/airbytehq/airbyte/pull/21450) | Updated Check methods to handle more possible s3 and gcs stagings issues |
 | 0.4.42 | 2023-01-12 | [\#21342](https://github.com/airbytehq/airbyte/pull/21342) | Better handling for conflicting destination streams |
 | 0.4.41 | 2022-12-16 | [\#20566](https://github.com/airbytehq/airbyte/pull/20566) | Improve spec to adhere to standards |
 | 0.4.40 | 2022-11-11 | [\#19302](https://github.com/airbytehq/airbyte/pull/19302) | Set jdbc application env variable depends on env - airbyte_oss or airbyte_cloud |