Skip to content

Commit

Permalink
Merge branch 'master' into ddavydov/#21076-source-gitlab-fix-missing-…
Browse files Browse the repository at this point in the history
…records
  • Loading branch information
davydov-d authored Jan 23, 2023
2 parents ef5fb51 + ea5574a commit 47289b4
Show file tree
Hide file tree
Showing 99 changed files with 955 additions and 415 deletions.
9 changes: 0 additions & 9 deletions .github/workflows/test-command.yml
Original file line number Diff line number Diff line change
Expand Up @@ -95,19 +95,12 @@ jobs:
uses: actions/setup-python@v4
with:
python-version: "3.9"
- name: Install Pyenv
run: |
python3 -m pip install --quiet virtualenv==16.7.9 --user
python3 -m virtualenv venv
source venv/bin/activate
- name: Install CI scripts
# all CI python packages have the prefix "ci_"
run: |
source venv/bin/activate
pip install --quiet -e ./tools/ci_*
- name: Write Integration Test Credentials for ${{ github.event.inputs.connector }}
run: |
source venv/bin/activate
ci_credentials ${{ github.event.inputs.connector }} write-to-storage
# normalization also runs destination-specific tests, so fetch their creds also
if [ 'bases/base-normalization' = "${{ github.event.inputs.connector }}" ] || [ 'base-normalization' = "${{ github.event.inputs.connector }}" ]; then
Expand All @@ -117,7 +110,6 @@ jobs:
fi
env:
GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }}

- name: Test ${{ github.event.inputs.connector }}
id: test
env:
Expand All @@ -132,7 +124,6 @@ jobs:
- name: Update Integration Test Credentials after test run for ${{ github.event.inputs.connector }}
if: always()
run: |
source venv/bin/activate
ci_credentials ${{ github.event.inputs.connector }} update-secrets
# normalization also runs destination-specific tests, so fetch their creds also
if [ 'bases/base-normalization' = "${{ github.event.inputs.connector }}" ] || [ 'base-normalization' = "${{ github.event.inputs.connector }}" ]; then
Expand Down
2 changes: 1 addition & 1 deletion airbyte-cdk/python/.bumpversion.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.21.0
current_version = 0.22.0
commit = False

[bumpversion:file:setup.py]
3 changes: 3 additions & 0 deletions airbyte-cdk/python/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
# Changelog

## 0.22.0
Surface the resolved manifest in the CDK

## 0.21.0
Add AvailabilityStrategy concept and use check_availability within CheckStream

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,10 @@ def __init__(self, source_config: ConnectionDefinition, debug: bool = False, con
if unknown_fields:
raise InvalidConnectorDefinitionException(f"Found unknown top-level fields: {unknown_fields}")

@property
def resolved_manifest(self) -> Mapping[str, Any]:
return self._new_source_config

@property
def connection_checker(self) -> ConnectionChecker:
check = self._new_source_config["check"] if self.construct_using_pydantic_models else self._legacy_source_config["check"]
Expand Down
2 changes: 1 addition & 1 deletion airbyte-cdk/python/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

setup(
name="airbyte-cdk",
version="0.21.0",
version="0.22.0",
description="A framework for writing Airbyte Connectors.",
long_description=README,
long_description_content_type="text/markdown",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
/*
 * Copyright (c) 2022 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.commons.auth;

import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * This enum describes the standard auth levels for a given resource. It is currently used for only
 * two resources: Workspace and Instance (i.e. the entire instance or deployment of Airbyte).
 *
 * In the context of a workspace, there is a 1:1 mapping.
 * <ul>
 * <li>OWNER => WORKSPACE OWNER. Superadmin of the instance (typically the person that created it),
 * has all the rights on the instance including deleting it.</li>
 * <li>ADMIN => WORKSPACE ADMIN. Admin of the instance, can invite other users, update their
 * permission and change settings of the instance.</li>
 * <li>EDITOR => WORKSPACE EDITOR</li>
 * <li>READER => WORKSPACE READER</li>
 * <li>AUTHENTICATED_USER => INVALID</li>
 * <li>NONE => NONE (does not have access to this resource)</li>
 * </ul>
 * In the context of the instance, there are currently only 3 levels.
 * <ul>
 * <li>ADMIN => INSTANCE ADMIN</li>
 * <li>AUTHENTICATED_USER => Denotes that all that is required for access is an active Airbyte
 * account. This should only ever be used when the associated resource is an INSTANCE. All other
 * uses are invalid. It is a special value in the enum to handle a case that only applies to
 * instances and no other resources.</li>
 * <li>NONE => NONE (not applicable. anyone being checked in our auth stack already has an account
 * so by definition they have some access to the instance.)</li>
 * </ul>
 */
public enum AuthRole {

  OWNER(500, AuthRoleConstants.OWNER),
  ADMIN(400, AuthRoleConstants.ADMIN),
  EDITOR(300, AuthRoleConstants.EDITOR),
  READER(200, AuthRoleConstants.READER),
  AUTHENTICATED_USER(100, AuthRoleConstants.AUTHENTICATED_USER), // ONLY USE WITH INSTANCE RESOURCE!
  NONE(0, AuthRoleConstants.NONE);

  // Numeric rank of the role; higher values imply a superset of the access of lower values.
  private final int authority;
  // Human/API-facing label for the role, sourced from AuthRoleConstants.
  private final String label;

  AuthRole(final int authority, final String label) {
    this.authority = authority;
    this.label = label;
  }

  public int getAuthority() {
    return authority;
  }

  public String getLabel() {
    return label;
  }

  /**
   * Builds the set of roles based on the provided {@link AuthRole} value.
   * <p>
   * The generated set of auth roles contains the provided {@link AuthRole} (if not {@code null}) and
   * any other authentication roles with a lesser {@link #getAuthority()} value, excluding
   * {@link #NONE} (which is only ever present when it is the provided role itself).
   * </p>
   *
   * @param authRole An {@link AuthRole} (may be {@code null}).
   * @return The set of {@link AuthRole}s based on the provided {@link AuthRole}, ordered by
   *         descending authority. Empty if {@code authRole} is {@code null}.
   */
  public static Set<AuthRole> buildAuthRolesSet(final AuthRole authRole) {
    final Set<AuthRole> authRoles = new HashSet<>();

    if (authRole != null) {
      authRoles.add(authRole);
      authRoles.addAll(Stream.of(values())
          .filter(role -> !NONE.equals(role))
          .filter(role -> role.getAuthority() < authRole.getAuthority())
          .collect(Collectors.toSet()));
    }

    // Sort final set by descending authority order.
    // BUG FIX: the comparator previously sorted ascending, contradicting this stated intent;
    // reversed() makes the LinkedHashSet iterate from highest authority to lowest.
    return authRoles.stream()
        .sorted(Comparator.comparingInt(AuthRole::getAuthority).reversed())
        .collect(Collectors.toCollection(LinkedHashSet::new));
  }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
/*
* Copyright (c) 2022 Airbyte, Inc., all rights reserved.
*/

package io.airbyte.commons.auth;

/**
* Collection of constants that defines authorization roles.
*/
public final class AuthRoleConstants {

public static final String ADMIN = "ADMIN";
public static final String AUTHENTICATED_USER = "AUTHENTICATED_USER";
public static final String EDITOR = "EDITOR";
public static final String OWNER = "OWNER";
public static final String NONE = "NONE";
public static final String READER = "READER";

private AuthRoleConstants() {}

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
/*
 * Copyright (c) 2022 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.commons.auth;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.Set;
import org.junit.jupiter.api.Test;

/**
 * Test suite for the {@link AuthRole} enumeration.
 */
class AuthRoleTest {

  @Test
  void testBuildingAuthRoleSet() {
    // Each role should expand to itself plus every lesser-authority role (excluding NONE).
    verifyBuiltSet(AuthRole.OWNER,
        Set.of(AuthRole.OWNER, AuthRole.ADMIN, AuthRole.EDITOR, AuthRole.READER, AuthRole.AUTHENTICATED_USER));
    verifyBuiltSet(AuthRole.ADMIN,
        Set.of(AuthRole.ADMIN, AuthRole.EDITOR, AuthRole.READER, AuthRole.AUTHENTICATED_USER));
    verifyBuiltSet(AuthRole.EDITOR,
        Set.of(AuthRole.EDITOR, AuthRole.READER, AuthRole.AUTHENTICATED_USER));
    verifyBuiltSet(AuthRole.READER,
        Set.of(AuthRole.READER, AuthRole.AUTHENTICATED_USER));
    verifyBuiltSet(AuthRole.AUTHENTICATED_USER,
        Set.of(AuthRole.AUTHENTICATED_USER));

    // NONE expands only to itself; null input yields an empty set.
    verifyBuiltSet(AuthRole.NONE, Set.of(AuthRole.NONE));
    assertEquals(0, AuthRole.buildAuthRolesSet(null).size());
  }

  /**
   * Asserts that building the role set for {@code input} yields exactly {@code expected}.
   */
  private void verifyBuiltSet(final AuthRole input, final Set<AuthRole> expected) {
    final Set<AuthRole> actual = AuthRole.buildAuthRolesSet(input);
    assertEquals(expected.size(), actual.size());
    assertEquals(expected, actual);
  }

}
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
- name: BigQuery
destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133
dockerRepository: airbyte/destination-bigquery
dockerImageTag: 1.2.11
dockerImageTag: 1.2.12
documentationUrl: https://docs.airbyte.com/integrations/destinations/bigquery
icon: bigquery.svg
normalizationConfig:
Expand Down Expand Up @@ -139,7 +139,7 @@
- name: Google Cloud Storage (GCS)
destinationDefinitionId: ca8f6566-e555-4b40-943a-545bf123117a
dockerRepository: airbyte/destination-gcs
dockerImageTag: 0.2.12
dockerImageTag: 0.2.13
documentationUrl: https://docs.airbyte.com/integrations/destinations/gcs
icon: googlecloudstorage.svg
resourceRequirements:
Expand Down Expand Up @@ -290,7 +290,7 @@
- name: Redshift
destinationDefinitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc
dockerRepository: airbyte/destination-redshift
dockerImageTag: 0.3.53
dockerImageTag: 0.3.54
documentationUrl: https://docs.airbyte.com/integrations/destinations/redshift
icon: redshift.svg
normalizationConfig:
Expand Down Expand Up @@ -321,7 +321,7 @@
- name: S3
destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362
dockerRepository: airbyte/destination-s3
dockerImageTag: 0.3.18
dockerImageTag: 0.3.19
documentationUrl: https://docs.airbyte.com/integrations/destinations/s3
icon: s3.svg
resourceRequirements:
Expand All @@ -348,7 +348,7 @@
- name: Snowflake
destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba
dockerRepository: airbyte/destination-snowflake
dockerImageTag: 0.4.43
dockerImageTag: 0.4.44
documentationUrl: https://docs.airbyte.com/integrations/destinations/snowflake
icon: snowflake.svg
normalizationConfig:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -621,7 +621,7 @@
supported_destination_sync_modes:
- "overwrite"
- "append"
- dockerImage: "airbyte/destination-bigquery:1.2.11"
- dockerImage: "airbyte/destination-bigquery:1.2.12"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/destinations/bigquery"
connectionSpecification:
Expand Down Expand Up @@ -2325,7 +2325,7 @@
supported_destination_sync_modes:
- "overwrite"
- "append"
- dockerImage: "airbyte/destination-gcs:0.2.12"
- dockerImage: "airbyte/destination-gcs:0.2.13"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/destinations/gcs"
connectionSpecification:
Expand Down Expand Up @@ -5123,7 +5123,7 @@
supported_destination_sync_modes:
- "overwrite"
- "append"
- dockerImage: "airbyte/destination-redshift:0.3.53"
- dockerImage: "airbyte/destination-redshift:0.3.54"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/destinations/redshift"
connectionSpecification:
Expand Down Expand Up @@ -5492,7 +5492,7 @@
supported_destination_sync_modes:
- "append"
- "overwrite"
- dockerImage: "airbyte/destination-s3:0.3.18"
- dockerImage: "airbyte/destination-s3:0.3.19"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/destinations/s3"
connectionSpecification:
Expand Down Expand Up @@ -6109,7 +6109,7 @@
supported_destination_sync_modes:
- "overwrite"
- "append"
- dockerImage: "airbyte/destination-snowflake:0.4.43"
- dockerImage: "airbyte/destination-snowflake:0.4.44"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/destinations/snowflake"
connectionSpecification:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1019,7 +1019,7 @@
- name: Microsoft SQL Server (MSSQL)
sourceDefinitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1
dockerRepository: airbyte/source-mssql
dockerImageTag: 0.4.27
dockerImageTag: 0.4.28
documentationUrl: https://docs.airbyte.com/integrations/sources/mssql
icon: mssql.svg
sourceType: database
Expand Down Expand Up @@ -1338,7 +1338,7 @@
- name: Postgres
sourceDefinitionId: decd338e-5647-4c0b-adf4-da0e75f5a750
dockerRepository: airbyte/source-postgres
dockerImageTag: 1.0.38
dockerImageTag: 1.0.39
documentationUrl: https://docs.airbyte.com/integrations/sources/postgres
icon: postgresql.svg
sourceType: database
Expand Down
16 changes: 14 additions & 2 deletions airbyte-config/init/src/main/resources/seed/source_specs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8113,7 +8113,7 @@
supportsNormalization: false
supportsDBT: false
supported_destination_sync_modes: []
- dockerImage: "airbyte/source-mssql:0.4.27"
- dockerImage: "airbyte/source-mssql:0.4.28"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/destinations/mssql"
connectionSpecification:
Expand Down Expand Up @@ -8272,6 +8272,18 @@
\ the \"Snapshot\" level, you must enable the <a href=\"https://docs.microsoft.com/en-us/dotnet/framework/data/adonet/sql/snapshot-isolation-in-sql-server\"\
>snapshot isolation mode</a> on the database."
order: 2
initial_waiting_seconds:
type: "integer"
title: "Initial Waiting Time in Seconds (Advanced)"
description: "The amount of time the connector will wait when it launches\
\ to determine if there is new data to sync or not. Defaults to\
\ 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about\
\ <a href=\"https://docs.airbyte.com/integrations/sources/mysql/#change-data-capture-cdc\"\
>initial waiting time</a>."
default: 300
min: 120
max: 1200
order: 3
tunnel_method:
type: "object"
title: "SSH Tunnel Method"
Expand Down Expand Up @@ -11476,7 +11488,7 @@
supportsNormalization: false
supportsDBT: false
supported_destination_sync_modes: []
- dockerImage: "airbyte/source-postgres:1.0.38"
- dockerImage: "airbyte/source-postgres:1.0.39"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/sources/postgres"
connectionSpecification:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,9 @@ protected BlobStorageOperations() {
public abstract String getBucketObjectPath(String namespace, String streamName, DateTime writeDatetime, String customFormat);

/**
* Create a storage object where to store data in the destination for a @param objectPath
* Ensure that the bucket specified in the config exists
*/
public abstract void createBucketObjectIfNotExists(String objectPath) throws Exception;
public abstract void createBucketIfNotExists() throws Exception;

/**
* Upload the data files into the storage area.
Expand Down
Loading

0 comments on commit 47289b4

Please sign in to comment.