From 7525ebab89e4e0e83a0e6fbbd6909218afbc5e33 Mon Sep 17 00:00:00 2001 From: AlexanderBatoulis <85283328+AlexanderBatoulis@users.noreply.github.com> Date: Tue, 26 Oct 2021 09:34:34 +0300 Subject: [PATCH 01/12] added source zenloop --- .../connectors/source-zenloop/.Rhistory | 0 .../connectors/source-zenloop/.dockerignore | 7 + .../connectors/source-zenloop/Dockerfile | 38 +++ .../connectors/source-zenloop/README.md | 132 ++++++++++ .../source-zenloop/acceptance-test-config.yml | 29 +++ .../source-zenloop/acceptance-test-docker.sh | 16 ++ .../connectors/source-zenloop/bootstrap.md | 19 ++ .../connectors/source-zenloop/build.gradle | 14 ++ .../integration_tests/__init__.py | 3 + .../integration_tests/abnormal_state.json | 8 + .../integration_tests/acceptance.py | 14 ++ .../integration_tests/catalog.json | 30 +++ .../integration_tests/configured_catalog.json | 40 ++++ .../integration_tests/invalid_config.json | 5 + .../integration_tests/sample_config.json | 5 + .../integration_tests/sample_state.json | 8 + .../connectors/source-zenloop/main.py | 13 + .../source-zenloop/requirements.txt | 2 + .../source-zenloop/sample_files/config.json | 5 + .../connectors/source-zenloop/setup.py | 29 +++ .../source-zenloop/source_zenloop/__init__.py | 8 + .../source_zenloop/schemas/answers.json | 77 ++++++ .../schemas/answers_survey_group.json | 63 +++++ .../source_zenloop/schemas/survey_groups.json | 35 +++ .../source_zenloop/schemas/surveys.json | 18 ++ .../source-zenloop/source_zenloop/source.py | 225 ++++++++++++++++++ .../source-zenloop/source_zenloop/spec.json | 27 +++ .../source-zenloop/unit_tests/__init__.py | 3 + .../source-zenloop/unit_tests/conftest.py | 9 + .../unit_tests/test_incremental_streams.py | 62 +++++ .../source-zenloop/unit_tests/test_source.py | 21 ++ .../source-zenloop/unit_tests/test_streams.py | 87 +++++++ docs/SUMMARY.md | 2 +- docs/integrations/README.md | 2 +- docs/integrations/sources/zenloop.md | 49 ++++ 35 files changed, 1103 insertions(+), 2 deletions(-) create mode 100644 airbyte-integrations/connectors/source-zenloop/.Rhistory create mode 100644 airbyte-integrations/connectors/source-zenloop/.dockerignore create mode 100644 airbyte-integrations/connectors/source-zenloop/Dockerfile create mode 100644 airbyte-integrations/connectors/source-zenloop/README.md create mode 100644 airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml create mode 100644 airbyte-integrations/connectors/source-zenloop/acceptance-test-docker.sh create mode 100644 airbyte-integrations/connectors/source-zenloop/bootstrap.md create mode 100644 airbyte-integrations/connectors/source-zenloop/build.gradle create mode 100644 airbyte-integrations/connectors/source-zenloop/integration_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-zenloop/integration_tests/abnormal_state.json create mode 100644 airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-zenloop/integration_tests/catalog.json create mode 100644 airbyte-integrations/connectors/source-zenloop/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-zenloop/integration_tests/sample_state.json create mode 100644 airbyte-integrations/connectors/source-zenloop/main.py create mode 
100644 airbyte-integrations/connectors/source-zenloop/requirements.txt create mode 100644 airbyte-integrations/connectors/source-zenloop/sample_files/config.json create mode 100644 airbyte-integrations/connectors/source-zenloop/setup.py create mode 100644 airbyte-integrations/connectors/source-zenloop/source_zenloop/__init__.py create mode 100644 airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers.json create mode 100644 airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers_survey_group.json create mode 100644 airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/survey_groups.json create mode 100644 airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/surveys.json create mode 100644 airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py create mode 100644 airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json create mode 100644 airbyte-integrations/connectors/source-zenloop/unit_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py create mode 100644 airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py create mode 100644 airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py create mode 100644 airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py create mode 100644 docs/integrations/sources/zenloop.md diff --git a/airbyte-integrations/connectors/source-zenloop/.Rhistory b/airbyte-integrations/connectors/source-zenloop/.Rhistory new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/airbyte-integrations/connectors/source-zenloop/.dockerignore b/airbyte-integrations/connectors/source-zenloop/.dockerignore new file mode 100644 index 000000000000..19bdf12d9d7a --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/.dockerignore @@ -0,0 +1,7 @@ +* +!Dockerfile +!Dockerfile.test +!main.py +!source_zenloop +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-zenloop/Dockerfile b/airbyte-integrations/connectors/source-zenloop/Dockerfile new file mode 100644 index 000000000000..67385332a6d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.7.11-alpine3.14 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_zenloop ./source_zenloop + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-zenloop diff --git a/airbyte-integrations/connectors/source-zenloop/README.md b/airbyte-integrations/connectors/source-zenloop/README.md new file mode 100644 index 000000000000..c93616b0c476 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/README.md @@ -0,0 +1,132 @@ +# Zenloop Source + +This is the repository for the Zenloop source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/zenloop). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-zenloop:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/zenloop) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_zenloop/spec.json` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source zenloop test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-zenloop:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-zenloop:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-zenloop:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-zenloop:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-zenloop:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-zenloop:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-zenloop:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-zenloop:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. 
Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml new file mode 100644 index 000000000000..d9dc45554be6 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml @@ -0,0 +1,29 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-zenloop:dev +tests: + spec: + - spec_path: "source_zenloop/spec.json" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-zenloop/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-zenloop/acceptance-test-docker.sh new file mode 100644 index 000000000000..e4d8b1cef896 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-zenloop/bootstrap.md b/airbyte-integrations/connectors/source-zenloop/bootstrap.md new file mode 100644 index 000000000000..fd0f7097c8d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/bootstrap.md @@ -0,0 +1,19 @@ +# Zenloop + +## Overview + +Zenloop is an integrated experience management platform (IXM). The SaaS solution automatically collects customer feedback through various channels along the customer journey, analyzes and clusters it, and derives tailored measures to retain satisfied customers as well as win back churning customers. The Zenloop REST API allows a developer to retrieve survey and answer information from the Zenloop platform. + +## Endpoints + +The Zenloop API consists of four endpoints from which data can be extracted: + +1. **Surveys**: This endpoint is used for getting basic data for all surveys from the user's organization. +2. **Answers**: This endpoint is used for fetching survey answers along with basic survey data and aggregated NPS scores. +3. **Survey Groups**: This endpoint is used for getting basic data for all survey groups from the user's organization. +4. 
**Survey Group Answers**: This endpoint is used for fetching survey group answers along with basic survey data and aggregated NPS scores. + + +## API Reference + +The API reference documents: [https://docs.zenloop.com/reference](https://docs.zenloop.com/reference) diff --git a/airbyte-integrations/connectors/source-zenloop/build.gradle b/airbyte-integrations/connectors/source-zenloop/build.gradle new file mode 100644 index 000000000000..637af6a16d64 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/build.gradle @@ -0,0 +1,14 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_zenloop' +} + +dependencies { + implementation files(project(':airbyte-integrations:bases:source-acceptance-test').airbyteDocker.outputs) + implementation files(project(':airbyte-integrations:bases:base-python').airbyteDocker.outputs) +} diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/__init__.py b/airbyte-integrations/connectors/source-zenloop/integration_tests/__init__.py new file mode 100644 index 000000000000..46b7376756ec --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..681b89335463 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/abnormal_state.json @@ -0,0 +1,8 @@ +{ + "answers": { + "inserted_at": "2099-08-18T08:35:49.540Z" + }, + "answers_survey_group": { + "inserted_at": "2099-08-18T08:35:49.540Z" + } +} diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py new file mode 100644 index 000000000000..108075487440 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """ This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/catalog.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/catalog.json new file mode 100644 index 000000000000..14c918c25287 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/catalog.json @@ -0,0 +1,30 @@ +{ + "streams": [ + { + "name": "answers", + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": "test", + "json_schema": {} + }, + { + "name": "surveys", + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "json_schema": {} + }, + { + "name": "survey_groups", + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": true, + "json_schema": {} + }, + { + "name": "answers_survey_group", + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": "test", + "json_schema": {} + } + ] +} diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..47fe90a48f23 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/configured_catalog.json @@ -0,0 +1,40 @@ +{ + "streams": [ + { + "stream": { + "name": "answers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "surveys", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "answers_survey_group", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "survey_groups", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json new file mode 100644 index 000000000000..f96e9cbfb1df --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "api_token": "wrong key", + "date_from": "2021-04-01T04:20:02Z", + "public_hash_id": "wrong key" +} diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json new file mode 100644 index 000000000000..94a179571097 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "api_token": "", + "date_from": "2021-01-01", + "public_hash_id": "" +} diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_state.json new file mode 100644 index 000000000000..cbc8e3dfa4ee 
--- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_state.json @@ -0,0 +1,8 @@ +{ + "answers": { + "inserted_at": "2021-08-18T08:35:49.540Z" + }, + "answers_survey_group": { + "inserted_at": "2021-08-18T08:35:49.540Z" + } +} diff --git a/airbyte-integrations/connectors/source-zenloop/main.py b/airbyte-integrations/connectors/source-zenloop/main.py new file mode 100644 index 000000000000..4a69290ac202 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_zenloop import SourceZenloop + +if __name__ == "__main__": + source = SourceZenloop() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-zenloop/requirements.txt b/airbyte-integrations/connectors/source-zenloop/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-zenloop/sample_files/config.json b/airbyte-integrations/connectors/source-zenloop/sample_files/config.json new file mode 100644 index 000000000000..94a179571097 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/sample_files/config.json @@ -0,0 +1,5 @@ +{ + "api_token": "", + "date_from": "2021-01-01", + "public_hash_id": "" +} diff --git a/airbyte-integrations/connectors/source-zenloop/setup.py b/airbyte-integrations/connectors/source-zenloop/setup.py new file mode 100644 index 000000000000..999bc3c427f3 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_zenloop", + description="Source implementation for Zenloop.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/__init__.py b/airbyte-integrations/connectors/source-zenloop/source_zenloop/__init__.py new file mode 100644 index 000000000000..222f86afc829 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceZenloop + +__all__ = ["SourceZenloop"] diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers.json new file mode 100644 index 000000000000..86c9fd63cee6 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers.json @@ -0,0 +1,77 @@ +{ + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "score_type": { + "type": ["null", "string"] + }, + "score": { + "type": ["null", "number"] + }, + "sentiment": { + "type": ["null", "string"] + }, + "sentiment_per_label_name": { + "type": ["null", "object"] + }, + "name": { + "type": ["null", "string"] + }, + "recipient_id": { + "type": ["null", "string"] + }, + "property_ids": { + "type": ["null", "array"] + }, + "metatags": { + "type": ["null", "object"] + }, + "labels": { + "type": ["null", "array"] + }, + "labels_with_keywords": { + "type": ["null", "object"] + }, + "inserted_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "email": { + "type": ["null", "string"] + }, + "identity": { + "type": ["null", "string"] + }, + "identity_type": { + "type": ["null", "string"] + }, + "comment": { + "type": ["null", "string"] + }, + "translated_comment": { + "type": ["null", "string"] + }, + "additional_answers": { + "type": ["null", "array"], + "items": { + "properties": { + "additional_question_id": { + "type": ["null", "string"] + }, + "answer": { + "type": ["null", "string"] + }, + "inserted_at": { + "type": ["null", "string"], + "format": "date-time" + } + } + } + }, + "additional_questions": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers_survey_group.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers_survey_group.json new file mode 100644 index 000000000000..b871dbe6ee14 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers_survey_group.json @@ -0,0 +1,63 @@ +{ + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "survey_public_hash_id": { + "type": ["null", "string"] + }, + "score_type": { + "type": ["null", "string"] + }, + "score": { + "type": ["null", "number"] + }, + "sentiment": { + "type": ["null", "string"] + }, + "sentiment_per_label_name": { + "type": ["null", "object"] + }, + "name": { + "type": ["null", "string"] + }, + "recipient_id": { + "type": ["null", "string"] + }, + "property_ids": { + "type": ["null", "array"] + }, + "metatags": { + "type": ["null", "object"] + }, + "labels": { + "type": ["null", "array"] + }, + "labels_with_keywords": { + "type": ["null", "object"] + }, + "inserted_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "email": { + "type": ["null", "string"] + }, + "identity": { + "type": ["null", "string"] + }, + "identity_type": { + "type": ["null", "string"] + }, + "comment": { + "type": ["null", "string"] + }, + "translated_comment": { + "type": ["null", "string"] + }, + "additional_questions": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/survey_groups.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/survey_groups.json new file mode 100644 index 000000000000..e22e0f5bf32f --- /dev/null +++ 
b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/survey_groups.json @@ -0,0 +1,35 @@ +{ + "type": ["null", "object"], + "properties": { + "surveys": { + "type": ["null", "array"], + "items": { + "properties": { + "title": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "public_hash_id": { + "type": ["null", "string"] + }, + "inserted_at": { + "type": ["null", "string"], + "format": "date-time" + } + } + } + }, + "name": { + "type": ["null", "string"] + }, + "public_hash_id": { + "type": ["null", "string"] + }, + "inserted_at": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/surveys.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/surveys.json new file mode 100644 index 000000000000..96e5988801b3 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/surveys.json @@ -0,0 +1,18 @@ +{ + "type": ["null", "object"], + "properties": { + "title": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "public_hash_id": { + "type": ["null", "string"] + }, + "inserted_at": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py b/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py new file mode 100644 index 000000000000..b69d99726548 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py @@ -0,0 +1,225 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +# from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator +from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator + +import math +from datetime import datetime, timedelta + +""" +TODO: Most comments in this class are instructive and should be deleted after the source is implemented. + +This file provides a stubbed example of how to use the Airbyte CDK to develop both a source connector which supports full refresh or and an +incremental syncs from an HTTP API. + +The various TODOs are both implementation hints and steps - fulfilling all the TODOs should be sufficient to implement one basic and one incremental +stream from a source. This pattern is the same one used by Airbyte internally to implement connectors. + +The approach here is not authoritative, and devs are free to use their own judgement. + +There are additional required TODOs in the files within the integration_tests folder and the spec.json file. 
+""" + + +# Basic full refresh stream +class ZenloopStream(HttpStream, ABC): + + url_base = "https://api.zenloop.com/v1/" + extra_params = None + has_date_param = False + + def __init__(self, api_token: str, date_from: Optional[str], public_hash_id: Optional[str], **kwargs): + super().__init__(authenticator=api_token) + self.api_token=api_token + self.date_from = date_from or datetime.today().strftime('%Y-%m-%d') + self.public_hash_id = public_hash_id or None + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + decoded_response = response.json() + page = decoded_response['meta']['page'] + per_page = decoded_response['meta']['per_page'] + total = decoded_response['meta']['total'] + + if page < math.ceil(total/per_page): + return {'page': page + 1} + else: + return None + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> MutableMapping[str, Any]: + if self.has_date_param: + params = {"date_from": self.date_from} + else: + params = {} + if self.extra_params: + params.update(self.extra_params) + if next_page_token: + params.update(**next_page_token) + return params + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + yield response_json + +class ChildStreamMixin: + parent_stream_class: Optional[ZenloopStream] = None + + def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + # loop through all public_hash_id's if None was provided + # return nothing otherwise + if not self.public_hash_id: + for item in self.parent_stream_class(api_token=self.api_token, date_from = self.date_from, public_hash_id = self.public_hash_id).read_records(sync_mode=sync_mode): + # set date_from to most current cursor_field or date_from if not incremental + if stream_state: + date_from = stream_state[self.cursor_field] + else: + date_from = self.date_from + yield {"survey_id": item["public_hash_id"], "date_from": date_from} + else: + yield None + +# Basic incremental stream +class IncrementalZenloopStream(ZenloopStream, ABC): + # checkpoint stream reads after 100 records. 
+ state_checkpoint_interval = 100 + cursor_field = "inserted_at" + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + # latest_record has objects in answers + if len(latest_record) > 0: + # add 1 second to not pull latest_record again + latest_record_date = (datetime.strptime(latest_record[self.cursor_field], '%Y-%m-%dT%H:%M:%S.%fZ') + timedelta(seconds=1)).isoformat() + str('Z') + else: + latest_record_date = "" + max_record = max(latest_record_date, current_stream_state.get(self.cursor_field, "")) + return {self.cursor_field: max_record} + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + params = super().request_params(stream_state, stream_slice, next_page_token) + if stream_state: + # if looped through all slices take its date_from parameter + # else no public_hash_id provided -> take cursor_field + if stream_slice: + params["date_from"] = stream_slice["date_from"] + else: + params["date_from"] = stream_state[self.cursor_field] + return params + +class Surveys(ZenloopStream): + # API Doc: https://docs.zenloop.com/reference#get-list-of-surveys + primary_key = None + has_date_param = False + extra_params = {"page": "1"} + + def path( + self, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None + ) -> str: + return "surveys" + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + yield from response_json.get("surveys", []) + +class Answers(ChildStreamMixin, IncrementalZenloopStream): + # API Doc: https://docs.zenloop.com/reference#get-answers + primary_key = "id" + has_date_param = True + parent_stream_class = Surveys + extra_params = {"page": "1", "order_type": "desc", "order_by": "inserted_at", "date_shortcut": "custom", "date_to": datetime.today().strftime('%Y-%m-%d')} + + def path( + self, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None + ) -> str: + # take optional public_hash_id if entered + if self.public_hash_id: + return f"surveys/{self.public_hash_id}/answers" + # slice all public_hash_id's if nothing provided + else: + return f"surveys/{stream_slice['survey_id']}/answers" + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + # select answers and surveys to be able to link answer to a survey + yield from response_json.get("answers", []) + +class SurveyGroups(ZenloopStream): + # API Doc: https://docs.zenloop.com/reference#get-list-of-survey-groups + primary_key = None + has_date_param = False + extra_params = {"page": "1"} + + def path( + self, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None + ) -> str: + return "survey_groups" + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + yield from response_json.get("survey_groups", []) + +class AnswersSurveyGroup(ChildStreamMixin, IncrementalZenloopStream): + # API Doc: https://docs.zenloop.com/reference#get-answers-for-survey-group + primary_key = "id" + has_date_param = True + parent_stream_class = SurveyGroups + extra_params = {"page": "1", "order_type": "desc", "order_by": 
"inserted_at", "date_shortcut": "custom", "date_to": datetime.today().strftime('%Y-%m-%d')} + + def path( + self, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None + ) -> str: + # take optional public_hash_id if entered + if self.public_hash_id: + return f"survey_groups/{self.public_hash_id}/answers" + # slice all public_hash_id's if nothing provided + else: + return f"survey_groups/{stream_slice['survey_id']}/answers" + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + # select answers and surveys to be able to link answer to a survey + yield from response_json.get("answers", []) + +# Source +class SourceZenloop(AbstractSource): + def check_connection(self, logger, config) -> Tuple[bool, any]: + try: + authenticator = TokenAuthenticator(config["api_token"]) + url = f"{ZenloopStream.url_base}surveys" + + session = requests.get(url, headers=authenticator.get_auth_header()) + session.raise_for_status() + return True, None + except Exception as error: + return False, f"Unable to connect to Zenloop API with the provided credentials - {error}" + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + args = {"api_token": TokenAuthenticator(token=config["api_token"]), "date_from": config["date_from"], "public_hash_id": config.get("public_hash_id")} + return [Surveys(**args), Answers(**args), SurveyGroups(**args), AnswersSurveyGroup(**args)] diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json new file mode 100644 index 000000000000..9ebb8bf499ce --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json @@ -0,0 +1,27 @@ +{ + "documentationUrl": "https://docsurl.com", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Zenloop Spec", + "type": "object", + "required": ["api_token"], + "additionalProperties": false, + "properties": { + "api_token": { + "type": "string", + "description": "Zenloop API Token. You can get the API token in settings page here ", + "airbyte_secret": true + }, + "date_from": { + "type": "string", + "description": "Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24. Leave empty if only data from current data should be synced", + "examples": ["2021-10-24T03:30:30Z"] + }, + "public_hash_id": { + "type": "string", + "description": "Zenloop Survey (Group) ID. Can be found here. Leave empty to pull answers from all surveys", + "airbyte_secret": true + } + } + } +} diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/__init__.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/__init__.py new file mode 100644 index 000000000000..46b7376756ec --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py new file mode 100644 index 000000000000..76471308022b --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py @@ -0,0 +1,9 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# +from pytest import fixture + + +@fixture +def config(): + return {"api_token": '', "date_from": "2021-07-01", "public_hash_id": ""} diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py new file mode 100644 index 000000000000..040e3a505927 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py @@ -0,0 +1,62 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +from airbyte_cdk.models import SyncMode +from pytest import fixture +from source_zenloop.source import IncrementalZenloopStream + + +@fixture +def patch_incremental_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(IncrementalZenloopStream, "path", "v0/example_endpoint") + mocker.patch.object(IncrementalZenloopStream, "primary_key", "test_primary_key") + mocker.patch.object(IncrementalZenloopStream, "__abstractmethods__", set()) + + +def test_cursor_field(patch_incremental_base_class, config): + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + expected_cursor_field = "inserted_at" + assert stream.cursor_field == expected_cursor_field + + +def test_get_updated_state(patch_incremental_base_class, config): + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + expected_cursor_field = "inserted_at" + inputs = { + "current_stream_state": {expected_cursor_field: "2021-07-24T03:30:30.038549Z"}, + "latest_record": {"inserted_at": "2021-10-20T03:30:30.038549Z"}, + } + expected_state = {expected_cursor_field: "2021-10-20T03:30:31.038549Z"} + assert stream.get_updated_state(**inputs) == expected_state + + +def test_stream_slices(patch_incremental_base_class, config): + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + expected_cursor_field = "inserted_at" + inputs = { + "sync_mode": SyncMode.incremental, + "cursor_field": expected_cursor_field, + "stream_state": {expected_cursor_field: "2021-10-20T03:30:30Z"} + } + expected_stream_slice = [None] + assert stream.stream_slices(**inputs) == expected_stream_slice + + +def test_supports_incremental(patch_incremental_base_class, mocker, config): + mocker.patch.object(IncrementalZenloopStream, "cursor_field", "dummy_field") + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + assert stream.supports_incremental + + +def test_source_defined_cursor(patch_incremental_base_class, config): + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + assert stream.source_defined_cursor + + +def test_stream_checkpoint_interval(patch_incremental_base_class, config): + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + expected_checkpoint_interval = 100 + assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py new file mode 100644 index 000000000000..8d6cd4f4d831 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py @@ -0,0 +1,21 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +from unittest.mock import MagicMock + +from source_zenloop.source import SourceZenloop + + +def test_check_connection(mocker, config): + source = SourceZenloop() + logger_mock = MagicMock() + assert source.check_connection(logger_mock, config) == (True, None) + + +def test_streams(mocker): + source = SourceZenloop() + config_mock = MagicMock() + streams = source.streams(config_mock) + expected_streams_number = 4 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py new file mode 100644 index 000000000000..2677bb292128 --- /dev/null +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py @@ -0,0 +1,87 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_zenloop.source import ZenloopStream +import requests +import json + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(ZenloopStream, "path", "v0/example_endpoint") + mocker.patch.object(ZenloopStream, "primary_key", "test_primary_key") + mocker.patch.object(ZenloopStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class, config): + stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + # TODO: replace this with your input parameters + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {'page': "1"}} + # TODO: replace this with your expected request parameters + expected_params = {'page': "1"} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class, config): + stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + # TODO: replace this with your input parameters + inputs = {"response": MagicMock()} + inputs["response"].json.return_value = {"meta": {"page": 1, "per_page": 12, "total": 8}} + # TODO: replace this with your expected next page token + expected_token = None + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response(patch_base_class, config): + stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + # TODO: replace this with your input parameters + response = MagicMock() + response.json.return_value = {"answers": [{"id": 123, "name": "John Doe"}]} + inputs = {"response": response} + # TODO: replace this with your expected parced object + expected_parsed_object = {"answers": [{"id": 123, "name": "John Doe"}]} + assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_request_headers(patch_base_class, config): + stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + # TODO: replace this with your input parameters + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + # TODO: replace this with your expected request headers + expected_headers = {} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class, config): + stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + 
(HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, config, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class, config): + response_mock = MagicMock() + stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 0b2e912cb581..b88d826de2e5 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -133,6 +133,7 @@ * [Zendesk Sunshine](integrations/sources/zendesk-sunshine.md) * [Zendesk Support](integrations/sources/zendesk-support.md) * [Zendesk Talk](integrations/sources/zendesk-talk.md) + * [Zenloop](integrations/sources/zenloop.md) * [Zoom](integrations/sources/zoom.md) * [Zuora](integrations/sources/zuora.md) * [Destinations](integrations/destinations/README.md) @@ -229,4 +230,3 @@ * [On Setting up a New Connection](troubleshooting/new-connection.md) * [On Running a Sync](troubleshooting/running-sync.md) * [On Upgrading](troubleshooting/on-upgrading.md) - diff --git a/docs/integrations/README.md b/docs/integrations/README.md index 715d13a997c0..de4e87f491c0 100644 --- a/docs/integrations/README.md +++ b/docs/integrations/README.md @@ -115,6 +115,7 @@ Airbyte uses a grading system for connectors to help users understand what to ex | [Zendesk Sunshine](sources/zendesk-sunshine.md) | Beta | | [Zendesk Support](sources/zendesk-support.md) | Certified | | [Zendesk Talk](sources/zendesk-talk.md) | Certified | +| [Zenloop](./sources/zenloop.md)| Alpha | | [Zoom](sources/zoom.md) | Beta | | [Zuora](sources/zuora.md) | Beta | @@ -141,4 +142,3 @@ Airbyte uses a grading system for connectors to help users understand what to ex | [S3](destinations/s3.md) | Certified | | [SQL Server \(MSSQL\)](destinations/mssql.md) | Alpha | | [Snowflake](destinations/snowflake.md) | Certified | - diff --git a/docs/integrations/sources/zenloop.md b/docs/integrations/sources/zenloop.md new file mode 100644 index 000000000000..92b90158e6b8 --- /dev/null +++ b/docs/integrations/sources/zenloop.md @@ -0,0 +1,49 @@ +# Zenloop + +## Sync overview + +This source can sync data for the [Zenloop API](https://docs.zenloop.com/reference). It supports both Full Refresh and Incremental syncs for Answer endpoints. You can choose if this connector will copy only the new or updated data, or all rows in the tables and columns you set up for replication, every time a sync is run. + +### Output schema + +This Source is capable of syncing the following core Streams: + +* [Answers](https://docs.zenloop.com/reference#get-answers) \(Incremental\) +* [Surveys](https://docs.zenloop.com/reference#get-list-of-surveys) +* [AnswersSurveyGroup](https://docs.zenloop.com/reference#get-answers-for-survey-group) \(Incremental\) +* [SurveyGroups](https://docs.zenloop.com/reference#get-list-of-survey-groups) + +The `Answers` and `AnswersSurveyGroup` streams each accept an optional survey `public_hash_id` parameter. If it is not provided, answers for all surveys (or survey groups) will be pulled. 
+ +### Data type mapping + +| Integration Type | Airbyte Type | Notes | +| :--- | :--- | :--- | +| `string` | `string` | | +| `integer` | `integer` | | +| `number` | `number` | | +| `array` | `array` | | +| `object` | `object` | | + +### Features + +| Feature | Supported?\(Yes/No\) | Notes | +| :--- | :--- | :--- | +| Full Refresh Sync | Yes | | +| Incremental Sync | Yes | | +| Namespaces | No | | + +### Performance considerations + +The Zenloop connector should not run into Zenloop API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully. + +## Getting started + +### Requirements + +* Zenloop account +* Zenloop API token + +### Setup guide + +Please register on Zenloop and retrieve your API token [here](https://app.zenloop.com/settings/api). From 7e4f47b1d0349faf8ee7b2837a4e15f2e76702fd Mon Sep 17 00:00:00 2001 From: AlexanderBatoulis <85283328+AlexanderBatoulis@users.noreply.github.com> Date: Tue, 26 Oct 2021 10:04:59 +0300 Subject: [PATCH 02/12] Added Zenloop integration --- airbyte-integrations/connectors/source-zenloop/.Rhistory | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 airbyte-integrations/connectors/source-zenloop/.Rhistory diff --git a/airbyte-integrations/connectors/source-zenloop/.Rhistory b/airbyte-integrations/connectors/source-zenloop/.Rhistory deleted file mode 100644 index e69de29bb2d1..000000000000 From 6b822ce94c2fa44670e53a4d4d0a00e44c32a837 Mon Sep 17 00:00:00 2001 From: AlexanderBatoulis <85283328+AlexanderBatoulis@users.noreply.github.com> Date: Tue, 26 Oct 2021 10:32:13 +0300 Subject: [PATCH 03/12] updated PR number in change log --- docs/integrations/sources/zenloop.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/integrations/sources/zenloop.md b/docs/integrations/sources/zenloop.md index 92b90158e6b8..2563023cf6e9 100644 --- a/docs/integrations/sources/zenloop.md +++ b/docs/integrations/sources/zenloop.md @@ -47,3 +47,9 @@ The Zenloop connector should not run into Zenloop API limitations under normal u ### Setup guide Please register on Zenloop and retrieve your API token [here](https://app.zenloop.com/settings/api). 
+ +## Changelog + +| Version | Date | Pull Request | Subject | +| :--- | :--- | :--- | :--- | +| 0.1.0 | 2021-10-26 | [7380](https://github.com/airbytehq/airbyte/pull/7380) | Initial Release | From 2667973496c8018102adb81162cc99573f68d2e4 Mon Sep 17 00:00:00 2001 From: AlexanderBatoulis <85283328+AlexanderBatoulis@users.noreply.github.com> Date: Tue, 26 Oct 2021 10:40:05 +0300 Subject: [PATCH 04/12] updated README in docs/integrations --- docs/integrations/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/integrations/README.md b/docs/integrations/README.md index de4e87f491c0..edb7446d1731 100644 --- a/docs/integrations/README.md +++ b/docs/integrations/README.md @@ -115,7 +115,7 @@ Airbyte uses a grading system for connectors to help users understand what to ex | [Zendesk Sunshine](sources/zendesk-sunshine.md) | Beta | | [Zendesk Support](sources/zendesk-support.md) | Certified | | [Zendesk Talk](sources/zendesk-talk.md) | Certified | -| [Zenloop](./sources/zenloop.md)| Alpha | +| [Zenloop](sources/zenloop.md)| Alpha | | [Zoom](sources/zoom.md) | Beta | | [Zuora](sources/zuora.md) | Beta | From 2807b196c74963ab0096915f4dcecea1dfbc4dcf Mon Sep 17 00:00:00 2001 From: AlexanderBatoulis <85283328+AlexanderBatoulis@users.noreply.github.com> Date: Tue, 26 Oct 2021 10:51:50 +0300 Subject: [PATCH 05/12] updated source_definitions.yaml --- .../init/src/main/resources/seed/source_definitions.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 0b8509c994ef..6369eda65f4b 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -578,3 +578,9 @@ dockerImageTag: 0.1.0 documentationUrl: https://docs.airbyte.io/integrations/sources/lever-onesignal sourceType: api +- sourceDefinitionId: f1e4c7f6-db5c-4035-981f-d35ab4998794 + name: Zenloop + dockerRepository: airbyte/source-zenloop + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/zenloop + sourceType: api From bd19c8c23219d20a158107f038b6b92d0f4802f0 Mon Sep 17 00:00:00 2001 From: AlexanderBatoulis <85283328+AlexanderBatoulis@users.noreply.github.com> Date: Tue, 26 Oct 2021 17:10:36 +0300 Subject: [PATCH 06/12] cleaned up todo comments --- .../source-zenloop/source_zenloop/source.py | 18 ------------------ .../source-zenloop/unit_tests/test_streams.py | 8 -------- 2 files changed, 26 deletions(-) diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py b/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py index b69d99726548..04852fae8082 100644 --- a/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py @@ -10,28 +10,12 @@ from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http import HttpStream -# from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator import math from datetime import datetime, timedelta -""" -TODO: Most comments in this class are instructive and should be deleted after the source is implemented. 
-This file provides a stubbed example of how to use the Airbyte CDK to develop both a source connector which supports full refresh or and an -incremental syncs from an HTTP API. - -The various TODOs are both implementation hints and steps - fulfilling all the TODOs should be sufficient to implement one basic and one incremental -stream from a source. This pattern is the same one used by Airbyte internally to implement connectors. - -The approach here is not authoritative, and devs are free to use their own judgement. - -There are additional required TODOs in the files within the integration_tests folder and the spec.json file. -""" - - -# Basic full refresh stream class ZenloopStream(HttpStream, ABC): url_base = "https://api.zenloop.com/v1/" @@ -92,7 +76,6 @@ def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kw else: yield None -# Basic incremental stream class IncrementalZenloopStream(ZenloopStream, ABC): # checkpoint stream reads after 100 records. state_checkpoint_interval = 100 @@ -207,7 +190,6 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp # select answers and surveys to be able to link answer to a survey yield from response_json.get("answers", []) -# Source class SourceZenloop(AbstractSource): def check_connection(self, logger, config) -> Tuple[bool, any]: try: diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py index 2677bb292128..b94195e567e3 100644 --- a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py @@ -21,39 +21,31 @@ def patch_base_class(mocker): def test_request_params(patch_base_class, config): stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) - # TODO: replace this with your input parameters inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {'page': "1"}} - # TODO: replace this with your expected request parameters expected_params = {'page': "1"} assert stream.request_params(**inputs) == expected_params def test_next_page_token(patch_base_class, config): stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) - # TODO: replace this with your input parameters inputs = {"response": MagicMock()} inputs["response"].json.return_value = {"meta": {"page": 1, "per_page": 12, "total": 8}} - # TODO: replace this with your expected next page token expected_token = None assert stream.next_page_token(**inputs) == expected_token def test_parse_response(patch_base_class, config): stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) - # TODO: replace this with your input parameters response = MagicMock() response.json.return_value = {"answers": [{"id": 123, "name": "John Doe"}]} inputs = {"response": response} - # TODO: replace this with your expected parced object expected_parsed_object = {"answers": [{"id": 123, "name": "John Doe"}]} assert next(stream.parse_response(**inputs)) == expected_parsed_object def test_request_headers(patch_base_class, config): stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) - # TODO: replace this with your input parameters inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} - # TODO: replace this with your expected request headers expected_headers = {} assert 
stream.request_headers(**inputs) == expected_headers From a18a442e755d8a9a28a7142249ebc658fcfa87af Mon Sep 17 00:00:00 2001 From: AlexanderBatoulis <85283328+AlexanderBatoulis@users.noreply.github.com> Date: Mon, 8 Nov 2021 18:18:30 +0200 Subject: [PATCH 07/12] ran ./gradlew format & removed commented code --- .../source-zenloop/acceptance-test-config.yml | 5 -- .../source-zenloop/source_zenloop/source.py | 86 +++++++++++-------- .../source-zenloop/unit_tests/conftest.py | 3 +- .../unit_tests/test_incremental_streams.py | 2 +- .../source-zenloop/unit_tests/test_streams.py | 6 +- 5 files changed, 55 insertions(+), 47 deletions(-) diff --git a/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml index d9dc45554be6..bb7a5757b6d6 100644 --- a/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml @@ -15,11 +15,6 @@ tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" empty_streams: [] -# expect_records: -# path: "integration_tests/expected_records.txt" -# extra_fields: no -# exact_order: no -# extra_records: yes incremental: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py b/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py index 04852fae8082..d5561f4cc3cf 100644 --- a/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py @@ -3,7 +3,9 @@ # +import math from abc import ABC +from datetime import datetime, timedelta from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple import requests @@ -12,9 +14,6 @@ from airbyte_cdk.sources.streams.http import HttpStream from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator -import math -from datetime import datetime, timedelta - class ZenloopStream(HttpStream, ABC): @@ -24,26 +23,26 @@ class ZenloopStream(HttpStream, ABC): def __init__(self, api_token: str, date_from: Optional[str], public_hash_id: Optional[str], **kwargs): super().__init__(authenticator=api_token) - self.api_token=api_token - self.date_from = date_from or datetime.today().strftime('%Y-%m-%d') + self.api_token = api_token + self.date_from = date_from or datetime.today().strftime("%Y-%m-%d") self.public_hash_id = public_hash_id or None def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: decoded_response = response.json() - page = decoded_response['meta']['page'] - per_page = decoded_response['meta']['per_page'] - total = decoded_response['meta']['total'] + page = decoded_response["meta"]["page"] + per_page = decoded_response["meta"]["per_page"] + total = decoded_response["meta"]["total"] - if page < math.ceil(total/per_page): - return {'page': page + 1} + if page < math.ceil(total / per_page): + return {"page": page + 1} else: return None def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, ) -> MutableMapping[str, Any]: if self.has_date_param: params = {"date_from": 
self.date_from} @@ -59,14 +58,17 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp response_json = response.json() yield response_json + class ChildStreamMixin: parent_stream_class: Optional[ZenloopStream] = None - def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: # loop through all public_hash_id's if None was provided # return nothing otherwise if not self.public_hash_id: - for item in self.parent_stream_class(api_token=self.api_token, date_from = self.date_from, public_hash_id = self.public_hash_id).read_records(sync_mode=sync_mode): + for item in self.parent_stream_class( + api_token=self.api_token, date_from=self.date_from, public_hash_id=self.public_hash_id + ).read_records(sync_mode=sync_mode): # set date_from to most current cursor_field or date_from if not incremental if stream_state: date_from = stream_state[self.cursor_field] @@ -76,6 +78,7 @@ def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kw else: yield None + class IncrementalZenloopStream(ZenloopStream, ABC): # checkpoint stream reads after 100 records. state_checkpoint_interval = 100 @@ -85,7 +88,9 @@ def get_updated_state(self, current_stream_state: MutableMapping[str, Any], late # latest_record has objects in answers if len(latest_record) > 0: # add 1 second to not pull latest_record again - latest_record_date = (datetime.strptime(latest_record[self.cursor_field], '%Y-%m-%dT%H:%M:%S.%fZ') + timedelta(seconds=1)).isoformat() + str('Z') + latest_record_date = ( + datetime.strptime(latest_record[self.cursor_field], "%Y-%m-%dT%H:%M:%S.%fZ") + timedelta(seconds=1) + ).isoformat() + str("Z") else: latest_record_date = "" max_record = max(latest_record_date, current_stream_state.get(self.cursor_field, "")) @@ -104,6 +109,7 @@ def request_params( params["date_from"] = stream_state[self.cursor_field] return params + class Surveys(ZenloopStream): # API Doc: https://docs.zenloop.com/reference#get-list-of-surveys primary_key = None @@ -111,10 +117,7 @@ class Surveys(ZenloopStream): extra_params = {"page": "1"} def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> str: return "surveys" @@ -122,18 +125,22 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp response_json = response.json() yield from response_json.get("surveys", []) + class Answers(ChildStreamMixin, IncrementalZenloopStream): # API Doc: https://docs.zenloop.com/reference#get-answers primary_key = "id" has_date_param = True parent_stream_class = Surveys - extra_params = {"page": "1", "order_type": "desc", "order_by": "inserted_at", "date_shortcut": "custom", "date_to": datetime.today().strftime('%Y-%m-%d')} + extra_params = { + "page": "1", + "order_type": "desc", + "order_by": "inserted_at", + "date_shortcut": "custom", + "date_to": datetime.today().strftime("%Y-%m-%d"), + } def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> 
str: # take optional public_hash_id if entered if self.public_hash_id: @@ -147,6 +154,7 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp # select answers and surveys to be able to link answer to a survey yield from response_json.get("answers", []) + class SurveyGroups(ZenloopStream): # API Doc: https://docs.zenloop.com/reference#get-list-of-survey-groups primary_key = None @@ -154,10 +162,7 @@ class SurveyGroups(ZenloopStream): extra_params = {"page": "1"} def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> str: return "survey_groups" @@ -165,18 +170,22 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp response_json = response.json() yield from response_json.get("survey_groups", []) + class AnswersSurveyGroup(ChildStreamMixin, IncrementalZenloopStream): # API Doc: https://docs.zenloop.com/reference#get-answers-for-survey-group primary_key = "id" has_date_param = True parent_stream_class = SurveyGroups - extra_params = {"page": "1", "order_type": "desc", "order_by": "inserted_at", "date_shortcut": "custom", "date_to": datetime.today().strftime('%Y-%m-%d')} + extra_params = { + "page": "1", + "order_type": "desc", + "order_by": "inserted_at", + "date_shortcut": "custom", + "date_to": datetime.today().strftime("%Y-%m-%d"), + } def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> str: # take optional public_hash_id if entered if self.public_hash_id: @@ -190,6 +199,7 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp # select answers and surveys to be able to link answer to a survey yield from response_json.get("answers", []) + class SourceZenloop(AbstractSource): def check_connection(self, logger, config) -> Tuple[bool, any]: try: @@ -203,5 +213,9 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: return False, f"Unable to connect to Zenloop API with the provided credentials - {error}" def streams(self, config: Mapping[str, Any]) -> List[Stream]: - args = {"api_token": TokenAuthenticator(token=config["api_token"]), "date_from": config["date_from"], "public_hash_id": config.get("public_hash_id")} + args = { + "api_token": TokenAuthenticator(token=config["api_token"]), + "date_from": config["date_from"], + "public_hash_id": config.get("public_hash_id"), + } return [Surveys(**args), Answers(**args), SurveyGroups(**args), AnswersSurveyGroup(**args)] diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py index 76471308022b..1b65a421441d 100644 --- a/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py @@ -1,9 +1,10 @@ # # Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
# + from pytest import fixture @fixture def config(): - return {"api_token": '', "date_from": "2021-07-01", "public_hash_id": ""} + return {"api_token": "", "date_from": "2021-07-01", "public_hash_id": ""} diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py index 040e3a505927..8640cf302b87 100644 --- a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py @@ -39,7 +39,7 @@ def test_stream_slices(patch_incremental_base_class, config): inputs = { "sync_mode": SyncMode.incremental, "cursor_field": expected_cursor_field, - "stream_state": {expected_cursor_field: "2021-10-20T03:30:30Z"} + "stream_state": {expected_cursor_field: "2021-10-20T03:30:30Z"}, } expected_stream_slice = [None] assert stream.stream_slices(**inputs) == expected_stream_slice diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py index b94195e567e3..038ec29def01 100644 --- a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py @@ -7,8 +7,6 @@ import pytest from source_zenloop.source import ZenloopStream -import requests -import json @pytest.fixture @@ -21,8 +19,8 @@ def patch_base_class(mocker): def test_request_params(patch_base_class, config): stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {'page': "1"}} - expected_params = {'page': "1"} + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"page": "1"}} + expected_params = {"page": "1"} assert stream.request_params(**inputs) == expected_params From 98f4e55b4acbf3fbce04fb0260176ff9942ec8bc Mon Sep 17 00:00:00 2001 From: AlexanderBatoulis <85283328+AlexanderBatoulis@users.noreply.github.com> Date: Thu, 18 Nov 2021 20:25:04 +0100 Subject: [PATCH 08/12] resolving review comments; added own survey_group_id input parameter --- .../resources/seed/source_definitions.yaml | 4 +- .../integration_tests/invalid_config.json | 3 +- .../integration_tests/sample_config.json | 3 +- .../source-zenloop/sample_files/config.json | 3 +- .../connectors/source-zenloop/setup.py | 4 +- .../source-zenloop/source_zenloop/source.py | 52 ++++++++++------- .../source-zenloop/source_zenloop/spec.json | 9 ++- .../source-zenloop/unit_tests/conftest.py | 2 +- .../unit_tests/test_incremental_streams.py | 57 ++++++++++++++++--- .../source-zenloop/unit_tests/test_source.py | 19 ++++++- .../source-zenloop/unit_tests/test_streams.py | 46 ++++++++++++--- docs/integrations/sources/zenloop.md | 2 +- 12 files changed, 153 insertions(+), 51 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 3994c570b058..fe4389f4b023 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -654,7 +654,5 @@ name: Zenloop dockerRepository: airbyte/source-zenloop dockerImageTag: 0.1.0 - documentationUrl: https://docs.airbyte.io/integrations/sources/url + documentationUrl: 
https://docs.airbyte.io/integrations/sources/zenloop sourceType: api - documentationUrl: https://docs.airbyte.io/integrations/sources/azure-table - sourceType: database \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json index f96e9cbfb1df..91f11b9b150a 100644 --- a/airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json @@ -1,5 +1,6 @@ { "api_token": "wrong key", "date_from": "2021-04-01T04:20:02Z", - "public_hash_id": "wrong key" + "survey_id": "wrong key", + "survey_group_id": "wrong key" } diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json index 94a179571097..194d4b1404f4 100644 --- a/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json @@ -1,5 +1,6 @@ { "api_token": "", "date_from": "2021-01-01", - "public_hash_id": "" + "survey_id": "", + "survey_group_id": "" } diff --git a/airbyte-integrations/connectors/source-zenloop/sample_files/config.json b/airbyte-integrations/connectors/source-zenloop/sample_files/config.json index 94a179571097..194d4b1404f4 100644 --- a/airbyte-integrations/connectors/source-zenloop/sample_files/config.json +++ b/airbyte-integrations/connectors/source-zenloop/sample_files/config.json @@ -1,5 +1,6 @@ { "api_token": "", "date_from": "2021-01-01", - "public_hash_id": "" + "survey_id": "", + "survey_group_id": "" } diff --git a/airbyte-integrations/connectors/source-zenloop/setup.py b/airbyte-integrations/connectors/source-zenloop/setup.py index 999bc3c427f3..c95143f684cd 100644 --- a/airbyte-integrations/connectors/source-zenloop/setup.py +++ b/airbyte-integrations/connectors/source-zenloop/setup.py @@ -18,8 +18,8 @@ setup( name="source_zenloop", description="Source implementation for Zenloop.", - author="Airbyte", - author_email="contact@airbyte.io", + author="Alexander Batoulis", + author_email="alexander.batoulis@hometogo.com", packages=find_packages(), install_requires=MAIN_REQUIREMENTS, package_data={"": ["*.json", "schemas/*.json", "schemas/shared/*.json"]}, diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py b/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py index d5561f4cc3cf..e9d5f4cf6741 100644 --- a/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py @@ -21,11 +21,12 @@ class ZenloopStream(HttpStream, ABC): extra_params = None has_date_param = False - def __init__(self, api_token: str, date_from: Optional[str], public_hash_id: Optional[str], **kwargs): + def __init__(self, api_token: str, date_from: Optional[str], survey_id, survey_group_id: Optional[str], **kwargs): super().__init__(authenticator=api_token) self.api_token = api_token self.date_from = date_from or datetime.today().strftime("%Y-%m-%d") - self.public_hash_id = public_hash_id or None + self.survey_id = survey_id or None + self.survey_group_id = survey_group_id or None def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: decoded_response = response.json() @@ -60,33 +61,39 @@ def 
parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp class ChildStreamMixin: + parent_stream_class: Optional[ZenloopStream] = None def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - # loop through all public_hash_id's if None was provided + # determine if parent_stream_class is Surveys or SurveyGroups + if self.parent_stream_class.__name__ == "Surveys": + public_hash_id = self.survey_id + else: + public_hash_id = self.survey_group_id + # loop through all survey_id's if None was provided # return nothing otherwise - if not self.public_hash_id: + if not public_hash_id: for item in self.parent_stream_class( - api_token=self.api_token, date_from=self.date_from, public_hash_id=self.public_hash_id + api_token=self.api_token, date_from=self.date_from, survey_id=self.survey_id, survey_group_id=self.survey_group_id ).read_records(sync_mode=sync_mode): # set date_from to most current cursor_field or date_from if not incremental if stream_state: date_from = stream_state[self.cursor_field] else: date_from = self.date_from - yield {"survey_id": item["public_hash_id"], "date_from": date_from} + yield {"survey_slice": item["public_hash_id"], "date_from": date_from} else: yield None class IncrementalZenloopStream(ZenloopStream, ABC): - # checkpoint stream reads after 100 records. - state_checkpoint_interval = 100 + # checkpoint stream reads after 1000 records. + state_checkpoint_interval = 1000 cursor_field = "inserted_at" def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: # latest_record has objects in answers - if len(latest_record) > 0: + if latest_record: # add 1 second to not pull latest_record again latest_record_date = ( datetime.strptime(latest_record[self.cursor_field], "%Y-%m-%dT%H:%M:%S.%fZ") + timedelta(seconds=1) @@ -102,7 +109,7 @@ def request_params( params = super().request_params(stream_state, stream_slice, next_page_token) if stream_state: # if looped through all slices take its date_from parameter - # else no public_hash_id provided -> take cursor_field + # else no survey_id or survey_group_id provided -> take cursor_field if stream_slice: params["date_from"] = stream_slice["date_from"] else: @@ -115,6 +122,7 @@ class Surveys(ZenloopStream): primary_key = None has_date_param = False extra_params = {"page": "1"} + use_cache = True def path( self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None @@ -142,12 +150,12 @@ class Answers(ChildStreamMixin, IncrementalZenloopStream): def path( self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> str: - # take optional public_hash_id if entered - if self.public_hash_id: - return f"surveys/{self.public_hash_id}/answers" - # slice all public_hash_id's if nothing provided + # take optional survey_id if entered + if self.survey_id: + return f"surveys/{self.survey_id}/answers" + # slice all survey_id's if nothing provided else: - return f"surveys/{stream_slice['survey_id']}/answers" + return f"surveys/{stream_slice['survey_slice']}/answers" def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: response_json = response.json() @@ -160,6 +168,7 @@ class SurveyGroups(ZenloopStream): primary_key = None has_date_param = False extra_params = {"page": "1"} + use_cache = True def path( self, 
stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None @@ -187,12 +196,12 @@ class AnswersSurveyGroup(ChildStreamMixin, IncrementalZenloopStream): def path( self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> str: - # take optional public_hash_id if entered - if self.public_hash_id: - return f"survey_groups/{self.public_hash_id}/answers" - # slice all public_hash_id's if nothing provided + # take optional survey_group_id if entered + if self.survey_group_id: + return f"survey_groups/{self.survey_group_id}/answers" + # slice all survey_group_id's if nothing provided else: - return f"survey_groups/{stream_slice['survey_id']}/answers" + return f"survey_groups/{stream_slice['survey_slice']}/answers" def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: response_json = response.json() @@ -216,6 +225,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: args = { "api_token": TokenAuthenticator(token=config["api_token"]), "date_from": config["date_from"], - "public_hash_id": config.get("public_hash_id"), + "survey_id": config.get("survey_id"), + "survey_group_id": config.get("survey_group_id"), } return [Surveys(**args), Answers(**args), SurveyGroups(**args), AnswersSurveyGroup(**args)] diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json index 9ebb8bf499ce..cbf078676d32 100644 --- a/airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json +++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json @@ -17,9 +17,14 @@ "description": "Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24. Leave empty if only data from current data should be synced", "examples": ["2021-10-24T03:30:30Z"] }, - "public_hash_id": { + "survey_id": { "type": "string", - "description": "Zenloop Survey (Group) ID. Can be found here. Leave empty to pull answers from all surveys", + "description": "Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys", + "airbyte_secret": true + }, + "survey_group_id": { + "type": "string", + "description": "Zenloop Survey Group ID. Can be found by pulling All Survey Groups via SurveyGroups stream. 
Leave empty to pull answers from all survey groups", "airbyte_secret": true } } diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py index 1b65a421441d..9e213ecc565c 100644 --- a/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py @@ -7,4 +7,4 @@ @fixture def config(): - return {"api_token": "", "date_from": "2021-07-01", "public_hash_id": ""} + return {"api_token": "", "date_from": "2021-07-01", "survey_id": "", "survey_group_id": ""} diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py index 8640cf302b87..78b6d70b1d58 100644 --- a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py @@ -3,9 +3,11 @@ # +from unittest.mock import MagicMock + from airbyte_cdk.models import SyncMode from pytest import fixture -from source_zenloop.source import IncrementalZenloopStream +from source_zenloop.source import Answers, AnswersSurveyGroup, IncrementalZenloopStream @fixture @@ -17,13 +19,13 @@ def patch_incremental_base_class(mocker): def test_cursor_field(patch_incremental_base_class, config): - stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"]) expected_cursor_field = "inserted_at" assert stream.cursor_field == expected_cursor_field def test_get_updated_state(patch_incremental_base_class, config): - stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"]) expected_cursor_field = "inserted_at" inputs = { "current_stream_state": {expected_cursor_field: "2021-07-24T03:30:30.038549Z"}, @@ -34,7 +36,6 @@ def test_get_updated_state(patch_incremental_base_class, config): def test_stream_slices(patch_incremental_base_class, config): - stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) expected_cursor_field = "inserted_at" inputs = { "sync_mode": SyncMode.incremental, @@ -42,21 +43,59 @@ def test_stream_slices(patch_incremental_base_class, config): "stream_state": {expected_cursor_field: "2021-10-20T03:30:30Z"}, } expected_stream_slice = [None] - assert stream.stream_slices(**inputs) == expected_stream_slice + + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"]) + assert list(stream.stream_slices(**inputs)) == expected_stream_slice + + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], None) + assert list(stream.stream_slices(**inputs)) == expected_stream_slice + + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], None, config["survey_group_id"]) + assert list(stream.stream_slices(**inputs)) == expected_stream_slice def test_supports_incremental(patch_incremental_base_class, mocker, config): mocker.patch.object(IncrementalZenloopStream, "cursor_field", "dummy_field") - stream = IncrementalZenloopStream(config["api_token"], 
config["date_from"], config["public_hash_id"]) + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"]) assert stream.supports_incremental def test_source_defined_cursor(patch_incremental_base_class, config): - stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"]) assert stream.source_defined_cursor def test_stream_checkpoint_interval(patch_incremental_base_class, config): - stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) - expected_checkpoint_interval = 100 + stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"]) + expected_checkpoint_interval = 1000 assert stream.state_checkpoint_interval == expected_checkpoint_interval + + +def test_parse_response_answers(patch_incremental_base_class, config): + stream = Answers(**config) + response = MagicMock() + response.json.return_value = {"answers": [{"id": 123, "name": "John Doe"}]} + inputs = {"response": response} + expected_parsed_object = {"id": 123, "name": "John Doe"} + assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_parse_response_answers_survey_groups(patch_incremental_base_class, config): + stream = AnswersSurveyGroup(**config) + response = MagicMock() + response.json.return_value = {"answers": [{"id": 123, "name": "John Doe"}]} + inputs = {"response": response} + expected_parsed_object = {"id": 123, "name": "John Doe"} + assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_surveys_path(config): + stream = Answers(**config) + expected = "surveys//answers" + assert stream.path() == expected + + +def test_survey_groups_path(config): + stream = AnswersSurveyGroup(**config) + expected = "survey_groups//answers" + assert stream.path() == expected diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py index 8d6cd4f4d831..019577d9208c 100644 --- a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py @@ -4,15 +4,32 @@ from unittest.mock import MagicMock +import responses from source_zenloop.source import SourceZenloop -def test_check_connection(mocker, config): +@responses.activate +def test_check_connection_success(mocker, config): + responses.add( + responses.GET, + "https://api.zenloop.com/v1/surveys", + ) source = SourceZenloop() logger_mock = MagicMock() assert source.check_connection(logger_mock, config) == (True, None) +@responses.activate +def test_check_connection_fail(mocker, config): + responses.add(responses.GET, "https://api.zenloop.com/v1/surveys", json={"error": "Unauthorized"}, status=401) + source = SourceZenloop() + logger_mock = MagicMock() + assert source.check_connection(logger_mock, config) == ( + False, + "Unable to connect to Zenloop API with the provided credentials - 401 Client Error: Unauthorized for url: https://api.zenloop.com/v1/surveys", + ) + + def test_streams(mocker): source = SourceZenloop() config_mock = MagicMock() diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py index 
038ec29def01..07765274b448 100644 --- a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py @@ -6,7 +6,7 @@ from unittest.mock import MagicMock import pytest -from source_zenloop.source import ZenloopStream +from source_zenloop.source import SurveyGroups, Surveys, ZenloopStream @pytest.fixture @@ -18,14 +18,14 @@ def patch_base_class(mocker): def test_request_params(patch_base_class, config): - stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + stream = ZenloopStream(**config) inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"page": "1"}} expected_params = {"page": "1"} assert stream.request_params(**inputs) == expected_params def test_next_page_token(patch_base_class, config): - stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + stream = ZenloopStream(**config) inputs = {"response": MagicMock()} inputs["response"].json.return_value = {"meta": {"page": 1, "per_page": 12, "total": 8}} expected_token = None @@ -33,7 +33,7 @@ def test_next_page_token(patch_base_class, config): def test_parse_response(patch_base_class, config): - stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + stream = ZenloopStream(**config) response = MagicMock() response.json.return_value = {"answers": [{"id": 123, "name": "John Doe"}]} inputs = {"response": response} @@ -41,15 +41,45 @@ def test_parse_response(patch_base_class, config): assert next(stream.parse_response(**inputs)) == expected_parsed_object +def test_parse_response_surveys(patch_base_class, config): + stream = Surveys(**config) + response = MagicMock() + response.json.return_value = {"surveys": [{"id": 123, "name": "John Doe"}]} + inputs = {"response": response} + expected_parsed_object = {"id": 123, "name": "John Doe"} + assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_parse_response_survey_groups(patch_base_class, config): + stream = SurveyGroups(**config) + response = MagicMock() + response.json.return_value = {"survey_groups": [{"id": 123, "name": "John Doe"}]} + inputs = {"response": response} + expected_parsed_object = {"id": 123, "name": "John Doe"} + assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_surveys_path(config): + stream = Surveys(**config) + expected = "surveys" + assert stream.path() == expected + + +def test_survey_groups_path(config): + stream = SurveyGroups(**config) + expected = "survey_groups" + assert stream.path() == expected + + def test_request_headers(patch_base_class, config): - stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + stream = ZenloopStream(**config) inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} expected_headers = {} assert stream.request_headers(**inputs) == expected_headers def test_http_method(patch_base_class, config): - stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + stream = ZenloopStream(**config) expected_method = "GET" assert stream.http_method == expected_method @@ -66,12 +96,12 @@ def test_http_method(patch_base_class, config): def test_should_retry(patch_base_class, config, http_status, should_retry): response_mock = MagicMock() response_mock.status_code = http_status - stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + 
stream = ZenloopStream(**config) assert stream.should_retry(response_mock) == should_retry def test_backoff_time(patch_base_class, config): response_mock = MagicMock() - stream = ZenloopStream(config["api_token"], config["date_from"], config["public_hash_id"]) + stream = ZenloopStream(**config) expected_backoff_time = None assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/docs/integrations/sources/zenloop.md b/docs/integrations/sources/zenloop.md index 2563023cf6e9..572224686846 100644 --- a/docs/integrations/sources/zenloop.md +++ b/docs/integrations/sources/zenloop.md @@ -13,7 +13,7 @@ This Source is capable of syncing the following core Streams: * [AnswersSurveyGroup](https://docs.zenloop.com/reference#get-answers-for-survey-group) \(Incremental\) * [SurveyGroups](https://docs.zenloop.com/reference#get-list-of-survey-groups) -The `Answers` and `AnswersSurveyGroup` stream respectively have an optional survey_id parameter. If not provided answers for all surveys (groups) will be pulled. +The `Answers` and `AnswersSurveyGroup` stream respectively have an optional survey_id parameter that can be set by filling the `public_hash_id` field of the connector configuration. If not provided answers for all surveys (groups) will be pulled. ### Data type mapping From 5fe8892ac1908efadbe8b471513751e128bc73a5 Mon Sep 17 00:00:00 2001 From: AlexanderBatoulis <85283328+AlexanderBatoulis@users.noreply.github.com> Date: Thu, 18 Nov 2021 20:35:32 +0100 Subject: [PATCH 09/12] resolving source defintion conflict --- .../init/src/main/resources/seed/source_definitions.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index fe4389f4b023..8978058a9495 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -648,8 +648,8 @@ sourceDefinitionId: 798ae795-5189-42b6-b64e-3cb91db93338 dockerRepository: airbyte/source-azure-table dockerImageTag: 0.1.0 - documentationUrl: https://docs.airbyte.io/integrations/sources/lever-onesignal - sourceType: api + documentationUrl: https://docs.airbyte.io/integrations/sources/azure-table + sourceType: database - sourceDefinitionId: f1e4c7f6-db5c-4035-981f-d35ab4998794 name: Zenloop dockerRepository: airbyte/source-zenloop From 145a21449072093afdcf6296fbd7bd36c858df50 Mon Sep 17 00:00:00 2001 From: alafanechere Date: Fri, 19 Nov 2021 13:36:21 +0100 Subject: [PATCH 10/12] add creds for zenloop --- tools/bin/ci_credentials.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index e6c25027741b..3f63a074dd47 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -292,6 +292,7 @@ read_secrets source-zendesk-sunshine "$ZENDESK_SUNSHINE_TEST_CREDS" read_secrets source-zendesk-support "$ZENDESK_SUPPORT_TEST_CREDS" read_secrets source-zendesk-support "$ZENDESK_SUPPORT_OAUTH_TEST_CREDS" "config_oauth.json" read_secrets source-zendesk-talk "$ZENDESK_TALK_TEST_CREDS" +read_secrets source-zenloop "$SOURCE_ZENLOOP_TEST_CREDS" read_secrets source-zoom-singer "$ZOOM_INTEGRATION_TEST_CREDS" read_secrets source-zuora "$SOURCE_ZUORA_TEST_CREDS" From 19f3b0a785f1cf6a36b280323ddf20c760c3f6c7 Mon Sep 17 00:00:00 2001 From: AlexanderBatoulis <85283328+AlexanderBatoulis@users.noreply.github.com> Date: Sat, 20 Nov 2021 13:15:48 +0100 
Subject: [PATCH 11/12] added responses module to setup.py --- .../connectors/source-zenloop/integration_tests/acceptance.py | 2 +- airbyte-integrations/connectors/source-zenloop/setup.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py index 108075487440..0347f2a0b143 100644 --- a/airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py +++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py @@ -10,5 +10,5 @@ @pytest.fixture(scope="session", autouse=True) def connector_setup(): - """ This fixture is a placeholder for external resources that acceptance test might require.""" + """This fixture is a placeholder for external resources that acceptance test might require.""" yield diff --git a/airbyte-integrations/connectors/source-zenloop/setup.py b/airbyte-integrations/connectors/source-zenloop/setup.py index c95143f684cd..92509be18873 100644 --- a/airbyte-integrations/connectors/source-zenloop/setup.py +++ b/airbyte-integrations/connectors/source-zenloop/setup.py @@ -13,6 +13,7 @@ "pytest~=6.1", "pytest-mock~=3.6.1", "source-acceptance-test", + "responses~=0.13.3", ] setup( From 5b82d1e3fe6d86a0587d5c9be625af7c320182aa Mon Sep 17 00:00:00 2001 From: alafanechere Date: Mon, 22 Nov 2021 13:00:41 +0100 Subject: [PATCH 12/12] update source specs --- .../src/main/resources/seed/source_specs.yaml | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 4f0063d8e2c7..1dde0aec9416 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -6486,3 +6486,39 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-zenloop:0.1.0" + spec: + documentationUrl: "https://docsurl.com" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Zenloop Spec" + type: "object" + required: + - "api_token" + additionalProperties: false + properties: + api_token: + type: "string" + description: "Zenloop API Token. You can get the API token in settings page\ + \ here " + airbyte_secret: true + date_from: + type: "string" + description: "Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24.\ + \ Leave empty if only data from current data should be synced" + examples: + - "2021-10-24T03:30:30Z" + survey_id: + type: "string" + description: "Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys" + airbyte_secret: true + survey_group_id: + type: "string" + description: "Zenloop Survey Group ID. Can be found by pulling All Survey\ + \ Groups via SurveyGroups stream. Leave empty to pull answers from all\ + \ survey groups" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: []
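
The unit tests added in this series stub the Zenloop API with the `responses` library rather than calling it live. Below is a minimal standalone sketch of that same pattern, assuming placeholder credentials and an empty surveys payload; it mirrors unit_tests/test_source.py and is not taken verbatim from the patches above.

# Minimal sketch (assumptions: fake credentials, empty surveys payload):
# stubs the surveys endpoint that SourceZenloop.check_connection queries,
# per the mocked unit test added in this series.
from unittest.mock import MagicMock

import responses
from source_zenloop.source import SourceZenloop


@responses.activate
def check_connection_with_mocked_api():
    # Stub the endpoint the connection check hits; a 200 response is sufficient.
    responses.add(responses.GET, "https://api.zenloop.com/v1/surveys", json={"surveys": []}, status=200)
    # Placeholder config shaped like integration_tests/sample_config.json (values are fake).
    config = {"api_token": "dummy-token", "date_from": "2021-01-01", "survey_id": "", "survey_group_id": ""}
    ok, error = SourceZenloop().check_connection(MagicMock(), config)
    print(ok, error)  # expected (True, None), matching the added unit test


if __name__ == "__main__":
    check_connection_with_mocked_api()

With a real api_token from the Zenloop settings page, the same check can be run against the live API by dropping the responses decorator and stub.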