diff --git a/.env.dist b/.env.dist index 3779d38..6d0928b 100644 --- a/.env.dist +++ b/.env.dist @@ -1,4 +1,3 @@ -STORAGE_API_TOKEN= KBC_URL= KBC_RUNID= DB_HOST= diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 81e0faa..082a036 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -1,16 +1,33 @@ name: 'GitHub Actions' -'on': - - push +on: [ push ] concurrency: 'ci-${{ github.ref }}' env: - APP_IMAGE: keboola-component + # Name of the image + APP_IMAGE: keboola/wr-db-snowflake-gcs # must be same in docker-compose.yml + + # Developer portal login KBC_DEVELOPERPORTAL_VENDOR: keboola - KBC_DEVELOPERPORTAL_APP: keboola.db-writer-snowflake-gcs + KBC_DEVELOPERPORTAL_APP: keboola.wr-db-snowflake-gcs KBC_DEVELOPERPORTAL_USERNAME: keboola+wr_db_snowflake_gcs - KBC_DEVELOPERPORTAL_PASSWORD: '${{ secrets.KBC_DEVELOPERPORTAL_PASSWORD }}' - DOCKERHUB_USER: '' - DOCKERHUB_TOKEN: '${{ secrets.DOCKERHUB_TOKEN }}' - KBC_STORAGE_TOKEN: '${{ secrets.KBC_STORAGE_TOKEN }}' + KBC_DEVELOPERPORTAL_PASSWORD: ${{ secrets.KBC_DEVELOPERPORTAL_PASSWORD }} + + # DockerHub login + DOCKERHUB_USER: keboolabot + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + + # Test DB + DB_HOST: kebooladev.snowflakecomputing.com + DB_PORT: 443 + DB_USER: snowflake_writer_gcs + DB_PASSWORD: ${{ secrets.DB_PASSWORD }} + DB_DATABASE: snowflake_writer_gcs + DB_SCHEMA: snowflake_writer_gcs + DB_WAREHOUSE: snowflake_writer_gcs + + # Testing staging storage projects + KBC_RUNID: 123456 + KBC_STORAGE_TOKEN: '' + KBC_URL: 'https://connection.keboola.com/' KBC_TEST_PROJECT_URL: '' KBC_TEST_PROJECT_CONFIGS: '' jobs: @@ -21,7 +38,7 @@ jobs: is_semantic_tag: '${{ steps.tag.outputs.is_semantic_tag }}' steps: - name: 'Check out the repo' - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: 'Print Docker version' run: 'docker -v' - name: 'Docker login' @@ -40,31 +57,33 @@ jobs: - name: 'Push image to ECR' uses: keboola/action-push-to-ecr@master with: - vendor: '${{ env.KBC_DEVELOPERPORTAL_VENDOR }}' - app_id: '${{ env.KBC_DEVELOPERPORTAL_APP }}' - username: '${{ env.KBC_DEVELOPERPORTAL_USERNAME }}' - password: '${{ env.KBC_DEVELOPERPORTAL_PASSWORD }}' - tag: '${{ steps.tag.outputs.app_image_tag }}' - push_latest: '${{ steps.tag.outputs.is_semantic_tag }}' - source_image: '${{ env.APP_IMAGE}}' + vendor: ${{ env.KBC_DEVELOPERPORTAL_VENDOR }} + app_id: ${{ env.KBC_DEVELOPERPORTAL_APP }} + username: ${{ env.KBC_DEVELOPERPORTAL_USERNAME }} + password: ${{ env.KBC_DEVELOPERPORTAL_PASSWORD }} + tag: ${{ steps.tag.outputs.app_image_tag }} + push_latest: ${{ steps.tag.outputs.is_semantic_tag }} + source_image: ${{ env.APP_IMAGE}} tests: needs: build runs-on: ubuntu-latest steps: - name: 'Check out the repo' - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: 'Pull image from ECR' uses: keboola/action-pull-from-ecr@master with: - vendor: '${{ env.KBC_DEVELOPERPORTAL_VENDOR }}' - app_id: '${{ env.KBC_DEVELOPERPORTAL_APP }}' - username: '${{ env.KBC_DEVELOPERPORTAL_USERNAME }}' - password: '${{ env.KBC_DEVELOPERPORTAL_PASSWORD }}' - tag: '${{ needs.build.outputs.app_image_tag }}' - target_image: '${{ env.APP_IMAGE}}' + vendor: ${{ env.KBC_DEVELOPERPORTAL_VENDOR }} + app_id: ${{ env.KBC_DEVELOPERPORTAL_APP }} + username: ${{ env.KBC_DEVELOPERPORTAL_USERNAME }} + password: ${{ env.KBC_DEVELOPERPORTAL_PASSWORD }} + tag: ${{ needs.build.outputs.app_image_tag }} + target_image: ${{ env.APP_IMAGE}} tag_as_latest: true - name: 'Run tests' - run: 'docker run ${{env.APP_IMAGE}} composer ci' + run: | + 
docker-compose run --rm \ + app composer ci tests-in-kbc: needs: build runs-on: ubuntu-latest @@ -73,10 +92,10 @@ jobs: if: 'env.KBC_STORAGE_TOKEN && env.KBC_TEST_PROJECT_CONFIGS' uses: keboola/action-run-configs-parallel@master with: - token: '${{ env.KBC_STORAGE_TOKEN }}' - componentId: '${{ env.KBC_DEVELOPERPORTAL_APP }}' - tag: '${{ needs.build.outputs.app_image_tag }}' - configs: '${{ env.KBC_TEST_PROJECT_CONFIGS }}' + token: ${{ env.KBC_STORAGE_TOKEN }} + componentId: ${{ env.KBC_DEVELOPERPORTAL_APP }} + tag: ${{ needs.build.outputs.app_image_tag }} + configs: ${{ env.KBC_TEST_PROJECT_CONFIGS }} deploy: needs: - build @@ -88,8 +107,8 @@ jobs: - name: 'Set tag in the Developer Portal' uses: keboola/action-set-tag-developer-portal@master with: - vendor: '${{ env.KBC_DEVELOPERPORTAL_VENDOR }}' - app_id: '${{ env.KBC_DEVELOPERPORTAL_APP }}' - username: '${{ env.KBC_DEVELOPERPORTAL_USERNAME }}' - password: '${{ env.KBC_DEVELOPERPORTAL_PASSWORD }}' - tag: '${{ needs.build.outputs.app_image_tag }}' + vendor: ${{ env.KBC_DEVELOPERPORTAL_VENDOR }} + app_id: ${{ env.KBC_DEVELOPERPORTAL_APP }} + username: ${{ env.KBC_DEVELOPERPORTAL_USERNAME }} + password: ${{ env.KBC_DEVELOPERPORTAL_PASSWORD }} + tag: ${{ needs.build.outputs.app_image_tag }} diff --git a/README.md b/README.md index 03ca452..744b11c 100644 --- a/README.md +++ b/README.md @@ -9,97 +9,104 @@ Writes data to Snowflake Database. ## Example configuration ```json - { - "db": { - "host": "HOST", - "port": "PORT", - "database": "DATABASE", - "user": "USERNAME", - "password": "PASSWORD", - "schema": "SCHEMA", - "warehouse": "WAREHOUSE", - "ssh": { - "enabled": true, - "keys": { - "private": "ENCRYPTED_PRIVATE_SSH_KEY", - "public": "PUBLIC_SSH_KEY" - }, - "sshHost": "PROXY_HOSTNAME" - } +{ + "parameters": { + "db": { + "host": "HOST", + "port": "PORT", + "database": "DATABASE", + "user": "USERNAME", + "password": "PASSWORD", + "schema": "SCHEMA", + "warehouse": "WAREHOUSE", + "ssh": { + "enabled": true, + "keys": { + "private": "ENCRYPTED_PRIVATE_SSH_KEY", + "public": "PUBLIC_SSH_KEY" + }, + "sshHost": "PROXY_HOSTNAME" + } + }, + "tableId": "simple", + "dbName": "simple", + "export": true, + "incremental": false, + "primaryKey": ["id"], + "items": [ + { + "name": "id", + "dbName": "id", + "type": "int", + "size": null, + "nullable": null, + "default": null + }, + { + "name": "name", + "dbName": "name", + "type": "varchar", + "size": 255, + "nullable": null, + "default": null }, + { + "name": "glasses", + "dbName": "glasses", + "type": "varchar", + "size": 255, + "nullable": null, + "default": null + } + ] + }, + "storage": { + "input": { "tables": [ { - "tableId": "simple", - "dbName": "simple", - "export": true, - "incremental": true, - "primaryKey": ["id"], - "items": [ - { - "name": "id", - "dbName": "id", - "type": "int", - "size": null, - "nullable": null, - "default": null - }, - { - "name": "name", - "dbName": "name", - "type": "nvarchar", - "size": 255, - "nullable": null, - "default": null - }, - { - "name": "glasses", - "dbName": "glasses", - "type": "nvarchar", - "size": 255, - "nullable": null, - "default": null - } - ] + "source": "simple", + "destination": "simple.csv" } ] } + } +} ``` ## Development Required snowflake resource for writer: ```sql -CREATE DATABASE "snowflake_writer"; -USE DATABASE "snowflake_writer"; -CREATE TRANSIENT SCHEMA "snowflake_writer"; -CREATE WAREHOUSE "snowflake_writer" WITH +CREATE DATABASE "snowflake_writer_gcs"; +USE DATABASE "snowflake_writer_gcs"; +CREATE TRANSIENT SCHEMA 
"snowflake_writer_gcs"; +CREATE WAREHOUSE "snowflake_writer_gcs" WITH WAREHOUSE_SIZE = 'XSMALL' WAREHOUSE_TYPE = 'STANDARD' AUTO_SUSPEND = 900 AUTO_RESUME = TRUE; -CREATE ROLE "snowflake_writer"; -GRANT USAGE ON WAREHOUSE "snowflake_writer" TO ROLE "snowflake_writer"; -GRANT USAGE ON DATABASE "snowflake_writer" TO ROLE "snowflake_writer"; -GRANT ALL ON SCHEMA "snowflake_writer" TO ROLE "snowflake_writer"; -GRANT ALL ON FUTURE TABLES IN SCHEMA "snowflake_writer" TO ROLE "snowflake_writer"; -GRANT ALL ON FUTURE VIEWS IN SCHEMA "snowflake_writer" TO ROLE "snowflake_writer"; -CREATE USER "snowflake_writer" +CREATE ROLE "snowflake_writer_gcs"; +GRANT USAGE ON WAREHOUSE "snowflake_writer_gcs" TO ROLE "snowflake_writer_gcs"; +GRANT USAGE ON DATABASE "snowflake_writer_gcs" TO ROLE "snowflake_writer_gcs"; +GRANT ALL ON SCHEMA "snowflake_writer_gcs" TO ROLE "snowflake_writer_gcs"; +GRANT ALL ON FUTURE TABLES IN SCHEMA "snowflake_writer_gcs" TO ROLE "snowflake_writer_gcs"; +GRANT ALL ON FUTURE VIEWS IN SCHEMA "snowflake_writer_gcs" TO ROLE "snowflake_writer_gcs"; +CREATE USER "snowflake_writer_gcs" PASSWORD = 'password' - DEFAULT_ROLE = "snowflake_writer" - DEFAULT_WAREHOUSE = "snowflake_writer" - DEFAULT_NAMESPACE = "snowflake_writer"."snowflake_writer" + DEFAULT_ROLE = "snowflake_writer_gcs" + DEFAULT_WAREHOUSE = "snowflake_writer_gcs" + DEFAULT_NAMESPACE = "snowflake_writer_gcs"."snowflake_writer_gcs" MUST_CHANGE_PASSWORD = FALSE; -GRANT ROLE "snowflake_writer" TO USER "snowflake_writer"; +GRANT ROLE "snowflake_writer_gcs" TO USER "snowflake_writer_gcs"; ``` App is developed on localhost using TDD. 1. Clone from repository: `git clone git@github.com:keboola/db-writer-snowflake.git` 2. Change directory: `cd db-writer-snowflake` -3. Install dependencies: `docker-compose run --rm php composer install -n` +3. Install dependencies: `docker-compose run --rm dev composer install -n` 4. 
Create `.env` file: ```bash -STORAGE_API_TOKEN= KBC_URL= KBC_RUNID= DB_HOST= diff --git a/composer.json b/composer.json index 5ac4fd7..494d242 100644 --- a/composer.json +++ b/composer.json @@ -14,6 +14,7 @@ "ext-PDO": "*", "ext-json": "*", "ext-mbstring": "*", + "ext-odbc": "*", "keboola/db-writer-adapter": "^0.1.0", "keboola/db-writer-common": "^6.0", "keboola/db-writer-config": "^0.1.0", @@ -24,7 +25,6 @@ "require-dev": { "cweagans/composer-patches": "^1.7", "keboola/coding-standard": "^15.0", - "keboola/csv": "^1.5", "keboola/datadir-tests": "^5.6", "keboola/storage-api-client": "^14.15", "phpstan/phpstan": "^1.10", @@ -40,17 +40,14 @@ "psr-4": { "Keboola\\DbWriter\\Snowflake\\Tests\\": "tests/phpunit/", "Keboola\\DbWriter\\Snowflake\\TraitTests\\": "tests/traits/", - "Keboola\\DbWriter\\Snowflake\\FunctionalTests\\": "tests/functional/", - "Keboola\\DbWriter\\Snowflake\\PrepareTestsData\\": "tests/prepare-data/" + "Keboola\\DbWriter\\Snowflake\\FunctionalTests\\": "tests/functional/" } }, "scripts": { "tests": [ - "@tests-prepare-data", "@tests-phpunit", "@tests-datadir" ], - "tests-prepare-data": "php ./tests/prepare-data/prepareData.php", "tests-phpunit": "phpunit --testsuite unit", "tests-datadir": "phpunit --testsuite functional", "tests-perf": "phpunit --testsuite perf", diff --git a/composer.lock b/composer.lock index 7e36d3b..9b12061 100644 --- a/composer.lock +++ b/composer.lock @@ -4,7 +4,7 @@ "Read more about it at https://getcomposer.org/doc/01-basic-usage.md#installing-dependencies", "This file is @generated automatically" ], - "content-hash": "ca4364b2304b540b0043a721b1556c8e", + "content-hash": "fcfac1f31381cc2849df0de0c526bfc3", "packages": [ { "name": "guzzlehttp/guzzle", @@ -493,16 +493,16 @@ }, { "name": "keboola/db-writer-config", - "version": "0.1.0", + "version": "0.1.1", "source": { "type": "git", "url": "https://github.com/keboola/db-writer-config.git", - "reference": "3118fe15c7bf2f594e603a7a0d039e18622bada0" + "reference": "48c1e5fbe39fae925e905355e7c0e8621b7220cc" }, "dist": { "type": "zip", - "url": "https://api.github.com/repos/keboola/db-writer-config/zipball/3118fe15c7bf2f594e603a7a0d039e18622bada0", - "reference": "3118fe15c7bf2f594e603a7a0d039e18622bada0", + "url": "https://api.github.com/repos/keboola/db-writer-config/zipball/48c1e5fbe39fae925e905355e7c0e8621b7220cc", + "reference": "48c1e5fbe39fae925e905355e7c0e8621b7220cc", "shasum": "" }, "require": { @@ -538,9 +538,9 @@ ], "description": "Config definition for database writer component", "support": { - "source": "https://github.com/keboola/db-writer-config/tree/0.1.0" + "source": "https://github.com/keboola/db-writer-config/tree/0.1.1" }, - "time": "2024-02-09T20:11:58+00:00" + "time": "2024-03-13T14:01:23+00:00" }, { "name": "keboola/php-component", @@ -5482,7 +5482,8 @@ "php": "^8.2", "ext-pdo": "*", "ext-json": "*", - "ext-mbstring": "*" + "ext-mbstring": "*", + "ext-odbc": "*" }, "platform-dev": [], "plugin-api-version": "2.6.0" diff --git a/docker-compose.yml b/docker-compose.yml index e00fb75..c669e6c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,11 +2,10 @@ version: '3' services: app: &default build: . 
- image: keboola/wr-db-snowflake + image: keboola/wr-db-snowflake-gcs environment: - KBC_RUNID - KBC_URL - - STORAGE_API_TOKEN - DB_HOST - DB_PORT - DB_USER @@ -21,7 +20,7 @@ services: dev: <<: *default - platform: linux/amd64 # explicitly set for arm64 developers (snowflake odbc is amd54 only) + platform: linux/amd64 # explicitly set for arm64 developers (snowflake odbc is amd64 only) volumes: - .:/code - ./data:/data diff --git a/driver/simba.snowflake.ini b/driver/simba.snowflake.ini index c3e28e7..d5ce504 100644 --- a/driver/simba.snowflake.ini +++ b/driver/simba.snowflake.ini @@ -1,9 +1,10 @@ [Driver] DriverManagerEncoding=UTF-16 DriverLocale=en-US -ErrorMessagesPath=/usr/bin/snowflake_odbc/ErrorMessages/ +ErrorMessagesPath=/usr/lib/snowflake/odbc/ErrorMessages/ LogNamespace= LogPath=/tmp ODBCInstLib=libodbcinst.so -CABundleFile=/usr/bin/snowflake_odbc/lib/cacert.pem +#LogLevel=6 +CABundleFile=/usr/lib/snowflake/odbc/lib/cacert.pem DisableOCSPCheck=true diff --git a/phpstan.neon b/phpstan.neon index ac981b6..059b170 100644 --- a/phpstan.neon +++ b/phpstan.neon @@ -1,4 +1,3 @@ parameters: checkMissingIterableValueType: false ignoreErrors: - - '#Call to an undefined method Symfony\\Component\\Config\\Definition\\Builder\\NodeDefinition::arrayPrototype\(\)\.#' diff --git a/src/Configuration/NodeDefinition/SnowflakeDbNode.php b/src/Configuration/NodeDefinition/SnowflakeDbNode.php index 64d31cf..cc40954 100644 --- a/src/Configuration/NodeDefinition/SnowflakeDbNode.php +++ b/src/Configuration/NodeDefinition/SnowflakeDbNode.php @@ -5,10 +5,7 @@ namespace Keboola\DbWriter\Configuration\NodeDefinition; use Keboola\DbWriterConfig\Configuration\NodeDefinition\DbNode; -use Keboola\DbWriterConfig\Configuration\NodeDefinition\SshNode; -use Keboola\DbWriterConfig\Configuration\NodeDefinition\SslNode; use Symfony\Component\Config\Definition\Builder\NodeBuilder; -use Symfony\Component\Config\Definition\Builder\NodeParentInterface; class SnowflakeDbNode extends DbNode { diff --git a/src/Configuration/SnowflakeTableNodesDecorator.php b/src/Configuration/SnowflakeTableNodesDecorator.php deleted file mode 100644 index d7ed6c1..0000000 --- a/src/Configuration/SnowflakeTableNodesDecorator.php +++ /dev/null @@ -1,45 +0,0 @@ -arrayNode('items') - ->validate()->always(function ($v) { - $validItem = false; - foreach ($v as $item) { - if ($item['type'] !== 'ignore') { - $validItem = true; - break; - } - } - if (!$validItem) { - throw new InvalidConfigurationException( - 'At least one item must be defined and cannot be ignored.', - ); - } - return $v; - })->end() - ->arrayPrototype() - ->children() - ->scalarNode('name')->isRequired()->cannotBeEmpty()->end() - ->scalarNode('dbName')->isRequired()->cannotBeEmpty()->end() - ->scalarNode('type')->isRequired()->cannotBeEmpty()->end() - ->scalarNode('size')->beforeNormalization()->always(fn($v) => (string) $v)->end()->end() - ->scalarNode('nullable')->end() - ->scalarNode('default')->end() - ->scalarNode('foreignKeyTable')->cannotBeEmpty()->end() - ->scalarNode('foreignKeyColumn')->cannotBeEmpty()->end() - ->end() - ->end(); - } -} diff --git a/src/Configuration/ValueObject/SnowflakeExportConfig.php b/src/Configuration/ValueObject/SnowflakeExportConfig.php deleted file mode 100644 index e090685..0000000 --- a/src/Configuration/ValueObject/SnowflakeExportConfig.php +++ /dev/null @@ -1,40 +0,0 @@ - $v['source'] === $config['tableId']); - if (count($filteredInputMapping) === 0) { - throw new UserException( - sprintf('Table "%s" in storage input mapping cannot be found.', 
$config['tableId']), - ); - } - $tableFilePath = sprintf( - '%s/in/tables/%s', - $config['data_dir'], - current($filteredInputMapping)['destination'], - ); - - return new self( - $config['data_dir'], - $config['writer_class'], - $databaseConfig ?? DatabaseConfig::fromArray($config['db']), - $config['tableId'], - $config['dbName'], - $config['incremental'] ?? false, - $config['export'] ?? true, - !empty($config['primaryKey']) ? $config['primaryKey'] : null, - array_map(fn($v) => SnowflakeItemConfig::fromArray($v), $config['items']), - $tableFilePath, - ); - } -} diff --git a/src/Configuration/ValueObject/SnowflakeItemConfig.php b/src/Configuration/ValueObject/SnowflakeItemConfig.php deleted file mode 100644 index f7acdb2..0000000 --- a/src/Configuration/ValueObject/SnowflakeItemConfig.php +++ /dev/null @@ -1,71 +0,0 @@ -foreignKeyTable) && !empty($this->foreignKeyColumn); - } - - public function getForeignKeyTable(): string - { - if (!$this->foreignKeyTable) { - throw new PropertyNotSetException('Property "foreignKeyTable" is not set.'); - } - return $this->foreignKeyTable; - } - - public function getForeignKeyColumn(): string - { - if (!$this->foreignKeyColumn) { - throw new PropertyNotSetException('Property "foreignKeyColumn" is not set.'); - } - return $this->foreignKeyColumn; - } -} diff --git a/src/SnowflakeApplication.php b/src/SnowflakeApplication.php index f0c8ff1..8b9d275 100644 --- a/src/SnowflakeApplication.php +++ b/src/SnowflakeApplication.php @@ -7,12 +7,11 @@ use Keboola\Component\Config\BaseConfig; use Keboola\Component\UserException; use Keboola\DbWriter\Configuration\NodeDefinition\SnowflakeDbNode; -use Keboola\DbWriter\Configuration\SnowflakeTableNodesDecorator; use Keboola\DbWriter\Configuration\ValueObject\SnowflakeDatabaseConfig; -use Keboola\DbWriter\Configuration\ValueObject\SnowflakeExportConfig; use Keboola\DbWriter\Writer\Snowflake; use Keboola\DbWriterConfig\Configuration\ConfigDefinition; use Keboola\DbWriterConfig\Configuration\ConfigRowDefinition; +use Keboola\DbWriterConfig\Configuration\NodeDefinition\TableNodesDecorator; use Symfony\Component\Config\Definition\Exception\InvalidConfigurationException; class SnowflakeApplication extends Application @@ -22,26 +21,12 @@ class SnowflakeApplication extends Application protected function run(): void { $parameters = $this->getConfig()->getParameters(); + $parameters = $this->validateTableItems($parameters); + $writerFactory = new WriterFactory($this->getConfig()); /** @var Snowflake $writer */ $writer = $writerFactory->create($this->getLogger(), $this->createDatabaseConfig($parameters['db'])); - - if (!$this->isRowConfiguration($parameters)) { - $filteredTables = array_filter($parameters['tables'], fn($table) => $table['export']); - unset($parameters['tables']); - foreach ($filteredTables as $k => $filteredTable) { - $filteredTable = $this->validateTableItems($filteredTable); - $filteredTable = array_merge($parameters, $filteredTable); - $filteredTables[$k] = $filteredTable; - $writer->write($this->createExportConfig($filteredTable)); - } - foreach ($filteredTables as $filteredTable) { - $writer->createForeignKeys($this->createExportConfig($filteredTable)); - } - } else { - $parameters = $this->validateTableItems($parameters); - $writer->write($this->createExportConfig($parameters)); - } + $writer->write($this->createExportConfig($parameters)); } protected function loadConfig(): void @@ -52,7 +37,7 @@ protected function loadConfig(): void if (in_array($configDefinitionClass, [ConfigRowDefinition::class, 
ConfigDefinition::class])) { $definition = new $configDefinitionClass( dbNode: (new SnowflakeDbNode())->ignoreExtraKeys(), - tableNodesDecorator: new SnowflakeTableNodesDecorator(), + tableNodesDecorator: new TableNodesDecorator(), ); } else { $definition = new $configDefinitionClass(dbNode: new SnowflakeDbNode()); @@ -74,13 +59,4 @@ protected function createDatabaseConfig(array $dbParams): SnowflakeDatabaseConfi { return SnowflakeDatabaseConfig::fromArray($dbParams); } - - protected function createExportConfig(array $table): SnowflakeExportConfig - { - return SnowflakeExportConfig::fromArray( - $table, - $this->getConfig()->getInputTables(), - $this->createDatabaseConfig($table['db']), - ); - } } diff --git a/src/Writer/QuoteTrait.php b/src/Writer/QuoteTrait.php index df81bc7..2d9308d 100644 --- a/src/Writer/QuoteTrait.php +++ b/src/Writer/QuoteTrait.php @@ -15,4 +15,9 @@ public function quoteIdentifier(string $str): string { return '"' . str_replace('"', '""', $str) . '"'; } + + public function quoteManyIdentifiers(array $items, ?callable $mapper = null): array + { + return array_map(fn($item) => $this->quoteIdentifier($mapper ? $mapper($item) : $item), $items); + } } diff --git a/src/Writer/Snowflake.php b/src/Writer/Snowflake.php index 86c807e..3b6458f 100644 --- a/src/Writer/Snowflake.php +++ b/src/Writer/Snowflake.php @@ -5,7 +5,6 @@ namespace Keboola\DbWriter\Writer; use Keboola\DbWriter\Configuration\ValueObject\SnowflakeDatabaseConfig; -use Keboola\DbWriter\Configuration\ValueObject\SnowflakeItemConfig; use Keboola\DbWriter\Exception\UserException; use Keboola\DbWriterAdapter\Connection\Connection; use Keboola\DbWriterAdapter\WriteAdapter; @@ -33,48 +32,12 @@ public function __construct(DatabaseConfig $databaseConfig, LoggerInterface $log parent::__construct($this->databaseConfig, $logger); } - public function createForeignKeys(ExportConfig $exportConfig): void - { - /** @var SnowflakeItemConfig[] $items */ - $items = $exportConfig->getItems(); - $items = array_filter($items, fn(SnowflakeItemConfig $item) => $item->hasForeignKey()); - if (empty($items)) { - return; - } - - foreach ($items as $item) { - if (!$this->adapter->tableExists($item->getForeignKeyTable())) { - continue; - } - - $isSameTypeColumns = $this->adapter->isSameTypeColumns( - $exportConfig->getDbName(), - $item->getName(), - $item->getForeignKeyTable(), - $item->getForeignKeyColumn(), - ); - - if (!$isSameTypeColumns) { - throw new UserException(sprintf( - 'Foreign key column "%s" in table "%s" has different type than column in table "%s"', - $item->getForeignKeyColumn(), - $item->getForeignKeyTable(), - $item->getName(), - )); - } - - $this->adapter->addUniqueKeyIfMissing($item->getForeignKeyTable(), $item->getForeignKeyColumn()); - - $this->adapter->addForeignKey($exportConfig->getDbName(), $item); - } - } - protected function writeFull(ExportConfig $exportConfig): void { - $stagingName = $this->adapter->generateTmpName($exportConfig->getDbName()); + $stageTableName = $this->adapter->generateTmpName($exportConfig->getDbName()); $this->adapter->create( - $stagingName, + $stageTableName, false, $exportConfig->getItems(), $exportConfig->hasPrimaryKey() ? $exportConfig->getPrimaryKey() : null, @@ -88,10 +51,10 @@ protected function writeFull(ExportConfig $exportConfig): void $exportConfig->hasPrimaryKey() ? 
$exportConfig->getPrimaryKey() : null, ); - $this->adapter->writeData($stagingName, $exportConfig); - $this->adapter->swapTable($this->connection, $exportConfig->getDbName(), $stagingName); + $this->adapter->writeData($stageTableName, $exportConfig); + $this->adapter->swapTable($this->connection, $exportConfig->getDbName(), $stageTableName); } finally { - $this->adapter->drop($stagingName); + $this->adapter->drop($stageTableName); } } @@ -147,9 +110,8 @@ private function validateAndSetWarehouse(SnowflakeConnection $connection, ?strin } catch (Throwable $e) { if (preg_match('/Object does not exist/ui', $e->getMessage())) { throw new UserException(sprintf('Invalid warehouse "%s" specified', $warehouse)); - } else { - throw $e; } + throw $e; } } @@ -187,9 +149,8 @@ private function validateAndSetSchema(SnowflakeConnection $connection, string $s } catch (Throwable $e) { if (preg_match('/Object does not exist/ui', $e->getMessage())) { throw new UserException(sprintf('Invalid schema "%s" specified', $schema)); - } else { - throw $e; } + throw $e; } } } diff --git a/src/Writer/SnowflakeConnectionFactory.php b/src/Writer/SnowflakeConnectionFactory.php index 1165533..e7995f7 100644 --- a/src/Writer/SnowflakeConnectionFactory.php +++ b/src/Writer/SnowflakeConnectionFactory.php @@ -15,7 +15,7 @@ class SnowflakeConnectionFactory public function create(SnowflakeDatabaseConfig $databaseConfig, LoggerInterface $logger): SnowflakeConnection { - $connection = new SnowflakeConnection( + return new SnowflakeConnection( $logger, $this->generateDsn($databaseConfig), $databaseConfig->getUser(), @@ -35,8 +35,6 @@ function ($connection) use ($databaseConfig) { } }, ); - - return $connection; } public static function escapePassword(string $password): string diff --git a/src/Writer/SnowflakeQueryBuilder.php b/src/Writer/SnowflakeQueryBuilder.php index 6645159..4e607fb 100644 --- a/src/Writer/SnowflakeQueryBuilder.php +++ b/src/Writer/SnowflakeQueryBuilder.php @@ -39,14 +39,6 @@ public function createQueryStatement( ); } - public function dropStageStatement(Connection $connection, string $stageName): string - { - return sprintf( - 'DROP STAGE IF EXISTS %s', - $connection->quoteIdentifier($stageName), - ); - } - public function tableExistsQueryStatement(Connection $connection, string $tableName): string { return sprintf( @@ -73,6 +65,68 @@ public function addPrimaryKeyQueryStatement(Connection $connection, string $tabl ); } + public function putFileQueryStatement(Connection $connection, string $tableFilePath, string $tmpTableName): string + { + $warehouse = $this->databaseConfig->hasWarehouse() ? $this->databaseConfig->getWarehouse() : null; + $database = $this->databaseConfig->getDatabase(); + $schema = $this->databaseConfig->hasSchema() ? 
$this->databaseConfig->getSchema() : null; + + $sql = []; + if ($warehouse) { + $sql[] = sprintf('USE WAREHOUSE %s;', $connection->quoteIdentifier($warehouse)); + } + + $sql[] = sprintf('USE DATABASE %s;', $connection->quoteIdentifier($database)); + + if ($schema) { + $sql[] = sprintf( + 'USE SCHEMA %s.%s;', + $connection->quoteIdentifier($database), + $connection->quoteIdentifier($schema), + ); + } + + $sql[] = sprintf( + 'PUT file://%s @~/%s;', + $tableFilePath, + $tmpTableName, + ); + + return trim(implode("\n", $sql)); + } + + public function copyIntoTableQueryStatement(Connection $connection, string $tmpTableName, array $items): string + { + $csvOptions = [ + 'SKIP_HEADER = 1', + sprintf('FIELD_DELIMITER = %s', $connection->quote(',')), + sprintf('FIELD_OPTIONALLY_ENCLOSED_BY = %s', $connection->quote('"')), + sprintf('ESCAPE_UNENCLOSED_FIELD = %s', $connection->quote('\\')), + sprintf('COMPRESSION = %s', $connection->quote('GZIP')), + ]; + + $tmpTableNameWithSchema = sprintf( + '%s.%s', + $connection->quoteIdentifier($this->databaseConfig->getSchema()), + $connection->quoteIdentifier($tmpTableName), + ); + + $columns = array_map(fn(ItemConfig $column) => $connection->quoteIdentifier($column->getDbName()), $items); + + return sprintf( + ' + COPY INTO %s(%s) + FROM @~/%s + FILE_FORMAT = (TYPE=CSV %s) + ; + ', + $tmpTableNameWithSchema, + implode(', ', $columns), + $tmpTableName, + implode(' ', $csvOptions), + ); + } + public function upsertUpdateRowsQueryStatement( Connection $connection, ExportConfig $exportConfig, @@ -146,45 +200,6 @@ public function tableInfoQueryStatement(Connection $connection, string $dbName): ); } - public function describeTableColumnsQueryStatement(Connection $connection, string $tableName): string - { - return sprintf( - 'SHOW COLUMNS IN %s.%s', - $connection->quoteIdentifier($this->databaseConfig->getSchema()), - $connection->quoteIdentifier($tableName), - ); - } - - public function addUniqueKeyQueryStatement(Connection $connection, string $tableName, string $columnName): string - { - return sprintf( - 'ALTER TABLE %s.%s ADD UNIQUE (%s)', - $connection->quoteIdentifier($this->databaseConfig->getSchema()), - $connection->quoteIdentifier($tableName), - $connection->quoteIdentifier($columnName), - ); - } - - public function addForeignKeyQueryStatement( - Connection $connection, - string $tableName, - string $columnName, - string $foreignKeyTable, - string $foreignKeyColumn, - ): string { - return sprintf( - 'ALTER TABLE %s.%s ADD CONSTRAINT FK_%s_%s FOREIGN KEY (%s) REFERENCES %s.%s(%s)', - $connection->quoteIdentifier($this->databaseConfig->getSchema()), - $connection->quoteIdentifier($tableName), - $foreignKeyTable, - $foreignKeyColumn, - $connection->quoteIdentifier($columnName), - $connection->quoteIdentifier($this->databaseConfig->getSchema()), - $connection->quoteIdentifier($foreignKeyTable), - $connection->quoteIdentifier($foreignKeyColumn), - ); - } - private function buildItemsSqlDefinition(Connection $connection, array $items, ?array $primaryKeys = []): string { $sqlItems = []; diff --git a/src/Writer/SnowflakeWriteAdapter.php b/src/Writer/SnowflakeWriteAdapter.php index f12efaa..09cbde2 100644 --- a/src/Writer/SnowflakeWriteAdapter.php +++ b/src/Writer/SnowflakeWriteAdapter.php @@ -5,60 +5,106 @@ namespace Keboola\DbWriter\Writer; use Keboola\DbWriter\Configuration\ValueObject\SnowflakeDatabaseConfig; -use Keboola\DbWriter\Configuration\ValueObject\SnowflakeItemConfig; use Keboola\DbWriter\Exception\UserException; -use 
Keboola\DbWriter\Writer\Strategy\AbsWriteStrategy; -use Keboola\DbWriter\Writer\Strategy\S3WriteStrategy; -use Keboola\DbWriter\Writer\Strategy\WriteStrategy; use Keboola\DbWriterAdapter\ODBC\OdbcWriteAdapter; +use Keboola\DbWriterAdapter\Query\QueryBuilder; use Keboola\DbWriterConfig\Configuration\ValueObject\ExportConfig; use Keboola\DbWriterConfig\Configuration\ValueObject\ItemConfig; +use Keboola\Temp\Temp; +use Psr\Log\LoggerInterface; +use RuntimeException; +use SplFileInfo; +use Symfony\Component\Process\Process; /** * @property-read SnowflakeQueryBuilder $queryBuilder */ class SnowflakeWriteAdapter extends OdbcWriteAdapter { - public function writeData(string $tableName, ExportConfig $exportConfig): void + use QuoteTrait; + + private Temp $tempDir; + + private SplFileInfo $snowSqlConfig; + + public function __construct( + SnowflakeConnection $connection, + QueryBuilder $queryBuilder, + LoggerInterface $logger, + ) { + parent::__construct($connection, $queryBuilder, $logger); + + $this->tempDir = new Temp('wr-snowflake-adapter'); + } + + public function getName(): string { - $this->logger->info(sprintf('Writing data to table "%s"', $tableName)); + return 'Snowsql'; + } + public function writeData(string $tableName, ExportConfig $exportConfig): void + { /** @var SnowflakeDatabaseConfig $databaseConfig */ $databaseConfig = $exportConfig->getDatabaseConfig(); - $stageName = $this->generateStageName($databaseConfig->hasRunId() ? $databaseConfig->getRunId() : ''); + $this->snowSqlConfig = $this->createSnowSqlConfig($databaseConfig); - $this->logger->info(sprintf('Dropping stage "%s"', $stageName)); - $this->connection->exec($this->queryBuilder->dropStageStatement($this->connection, $stageName)); + // Upload to internal stage + $this->logger->info(sprintf('Uploading data to internal stage "@~/%s"', $tableName)); + $this->putIntoInternalStage($exportConfig, $tableName); - $writeStrategy = $this->getTableWriteStrategy($exportConfig->getTableFilePath()); - - $this->logger->info(sprintf('Creating stage "%s"', $stageName)); - $this->connection->exec($writeStrategy->generateCreateStageCommand($stageName)); - - $tableNameWithSchema = sprintf( - '%s.%s', - $this->connection->quoteIdentifier($databaseConfig->getSchema()), - $this->connection->quoteIdentifier($tableName), - ); try { $items = array_filter( $exportConfig->getItems(), fn(ItemConfig $item) => strtolower($item->getType()) !== 'ignore', ); - $commands = $writeStrategy->generateCopyCommands( - tableName: $tableNameWithSchema, - stageName: $stageName, - items: $items, - ); - foreach ($commands as $command) { - $this->connection->exec($command); - } + + // Copy from internal stage to staging table + $this->logger->info(sprintf('Copying data from internal stage to staging table "%s"', $tableName)); + $query = $this->queryBuilder->copyIntoTableQueryStatement($this->connection, $tableName, $items); + $this->connection->exec($query); } finally { - $this->connection->exec($this->queryBuilder->dropStageStatement($this->connection, $stageName)); + $this->cleanupInternalStage($tableName); } } + private function putIntoInternalStage(ExportConfig $exportConfig, string $tmpTableName): void + { + $putSql = $this->queryBuilder + ->putFileQueryStatement($this->connection, $exportConfig->getTableFilePath(), $tmpTableName); + + $sqlFile = $this->tempDir->createTmpFile('snowsql.sql'); + file_put_contents($sqlFile->getPathname(), $putSql); + + $command = sprintf( + 'snowsql --config %s -c writer -f %s', + $this->snowSqlConfig, + $sqlFile, + ); + + 
$this->logger->debug($putSql); + $this->logger->debug(trim($command)); + + $process = Process::fromShellCommandline($command); + $process->setTimeout(null); + $process->run(); + + if (!$process->isSuccessful()) { + $this->logger->error(sprintf('Snowsql error, process output %s', $process->getOutput())); + $this->logger->error(sprintf('Snowsql error: %s', $process->getErrorOutput())); + throw new RuntimeException(sprintf( + 'File upload error occurred processing [%s]', + $exportConfig->getTableFilePath(), + )); + } + } + + private function cleanupInternalStage(string $tmpTableName): void + { + $sql = sprintf('REMOVE @~/%s;', $tmpTableName); + $this->connection->exec($sql); + } + public function upsert(ExportConfig $exportConfig, string $stageTableName): void { $this->logger->info(sprintf('Upserting data to table "%s"', $exportConfig->getDbName())); @@ -75,8 +121,8 @@ public function swapTable(SnowflakeConnection $connection, string $tableName, st $this->logger->info(sprintf('Swapping table "%s" with "%s"', $stagingTableName, $tableName)); $connection->exec(sprintf( 'ALTER TABLE %s SWAP WITH %s', - $this->connection->quoteIdentifier($stagingTableName), - $this->connection->quoteIdentifier($tableName), + $this->quoteIdentifier($stagingTableName), + $this->quoteIdentifier($tableName), )); } @@ -101,73 +147,6 @@ public function validateTable(string $tableName, array $items): void // turn off validation } - public function isSameTypeColumns( - string $sourceTable, - string $sourceColumnName, - string $targetTable, - string $targetColumnName, - ): bool { - $sourceColumnDataType = $this->getColumnDataType( - $sourceTable, - $sourceColumnName, - ); - - $targetColumnDataType = $this->getColumnDataType( - $targetTable, - $targetColumnName, - ); - - return - $sourceColumnDataType['type'] === $targetColumnDataType['type'] && - $sourceColumnDataType['length'] === $targetColumnDataType['length'] && - $sourceColumnDataType['nullable'] === $targetColumnDataType['nullable']; - } - - public function addUniqueKeyIfMissing(string $targetTable, string $targetColumn): void - { - $this->logger->info(sprintf( - 'Adding unique key to table "%s" on column "%s"', - $targetTable, - $targetColumn, - )); - $tableInfo = $this->connection->fetchAll( - $this->queryBuilder->tableInfoQueryStatement($this->connection, $targetTable), - ); - - $uniquesInDb = array_filter($tableInfo, fn($v) => $v['unique key'] === 'Y'); - $uniquesInDb = array_map(fn(array $item) => $item['name'], $uniquesInDb); - - $primaryKeysInDb = $this->getPrimaryKeys($targetTable); - $primaryKeysInDb = array_map(fn(array $item) => $item['name'], $primaryKeysInDb); - - if (in_array($targetColumn, $uniquesInDb) || !empty($primaryKeysInDb)) { - return; - } - - $this->connection->exec( - $this->queryBuilder->addUniqueKeyQueryStatement($this->connection, $targetTable, $targetColumn), - ); - } - - public function addForeignKey(string $targetTable, SnowflakeItemConfig $item): void - { - $this->logger->info(sprintf( - 'Creating foreign key from table "%s" to table "%s" on column "%s"', - $item->getDbName(), - $item->getForeignKeyTable(), - $item->getForeignKeyColumn(), - )); - $this->connection->exec( - $this->queryBuilder->addForeignKeyQueryStatement( - $this->connection, - $targetTable, - $item->getDbName(), - $item->getForeignKeyTable(), - $item->getForeignKeyColumn(), - ), - ); - } - public function getPrimaryKeys(string $tableName): array { $sqlPrimaryKeysInDb = $this->connection->fetchAll( @@ -176,37 +155,6 @@ public function getPrimaryKeys(string 
$tableName): array return array_filter($sqlPrimaryKeysInDb, fn($v) => $v['primary key'] === 'Y'); } - private function getTableWriteStrategy(string $getTableFilePath): WriteStrategy - { - /** - * @var array{s3?: array, abs?: array} $manifest - */ - $manifest = json_decode( - (string) file_get_contents($getTableFilePath . '.manifest'), - true, - ); - - if (isset($manifest[WriteStrategy::FILE_STORAGE_S3])) { - $this->logger->info('Using S3 write strategy'); - return new S3WriteStrategy($manifest[WriteStrategy::FILE_STORAGE_S3]); - } - if (isset($manifest[WriteStrategy::FILE_STORAGE_ABS])) { - $this->logger->info('Using ABS write strategy'); - return new AbsWriteStrategy($manifest[WriteStrategy::FILE_STORAGE_ABS]); - } - throw new UserException('Unknown input adapter'); - } - - private function generateStageName(string $runId): string - { - $stageName = sprintf( - 'db-writer-%s', - str_replace('.', '-', $runId), - ); - - return rtrim(mb_substr($stageName, 0, 255), '-'); - } - private function addPrimaryKeyIfMissing(array $primaryKeys, string $tableName): void { $primaryKeysInDb = $this->getPrimaryKeys($tableName); @@ -239,20 +187,35 @@ private function checkPrimaryKey(array $primaryKeys, string $tableName): void } } - private function getColumnDataType(string $table, string $column): array + private function createSnowSqlConfig(SnowflakeDatabaseConfig $databaseConfig): SplFileInfo { - $columns = $this->connection->fetchAll( - $this->queryBuilder->describeTableColumnsQueryStatement($this->connection, $table), - ); - /** - * @var array{column_name: string, data_type: string}[] $columnData - */ - $columnData = array_values(array_filter($columns, fn($v) => $v['column_name'] === $column)); + $cliConfig[] = ''; + $cliConfig[] = '[options]'; + $cliConfig[] = 'exit_on_error = true'; + $cliConfig[] = ''; + $cliConfig[] = '[connections.writer]'; + $cliConfig[] = sprintf('accountname = "%s"', self::getAccountUrlFromHost($databaseConfig->getHost())); + $cliConfig[] = sprintf('username = "%s"', $databaseConfig->getUser()); + $cliConfig[] = sprintf('password = "%s"', $databaseConfig->getPassword()); + $cliConfig[] = sprintf('dbname = "%s"', $databaseConfig->getDatabase()); + + if ($databaseConfig->hasWarehouse()) { + $cliConfig[] = sprintf('warehousename = "%s"', $databaseConfig->getWarehouse()); + } - if (count($columnData) === 0) { - throw new UserException(sprintf('Column \'%s\' in table \'%s\' not found', $column, $table)); + if ($databaseConfig->hasSchema()) { + $cliConfig[] = sprintf('schemaname = "%s"', $databaseConfig->getSchema()); } - return (array) json_decode($columnData[0]['data_type'], true); + $file = $this->tempDir->createFile('snowsql.config'); + file_put_contents($file->getPathname(), implode("\n", $cliConfig)); + + return $file; + } + + private static function getAccountUrlFromHost(string $host): string + { + $hostParts = explode('.', $host); + return implode('.', array_slice($hostParts, 0, count($hostParts) - 2)); } } diff --git a/src/Writer/Strategy/AbsWriteStrategy.php b/src/Writer/Strategy/AbsWriteStrategy.php deleted file mode 100644 index 115b2f7..0000000 --- a/src/Writer/Strategy/AbsWriteStrategy.php +++ /dev/null @@ -1,126 +0,0 @@ -isSliced = $absInfo['is_sliced']; - $this->container = $absInfo['container']; - $this->name = $absInfo['name']; - $this->connectionEndpoint = $connectionInfo[1]; - $this->connectionAccessSignature = $connectionInfo[2]; - } - - public function generateCreateStageCommand(string $stageName): string - { - $csvOptions = []; - $csvOptions[] = 
sprintf('FIELD_DELIMITER = %s', $this->quote(',')); - $csvOptions[] = sprintf('FIELD_OPTIONALLY_ENCLOSED_BY = %s', $this->quote('"')); - $csvOptions[] = sprintf('ESCAPE_UNENCLOSED_FIELD = %s', $this->quote('\\')); - - if (!$this->isSliced) { - $csvOptions[] = 'SKIP_HEADER = 1'; - } - - return sprintf( - "CREATE OR REPLACE STAGE %s - FILE_FORMAT = (TYPE=CSV %s) - URL = 'azure://%s/%s' - CREDENTIALS = (AZURE_SAS_TOKEN = %s) - ", - $this->quoteIdentifier($stageName), - implode(' ', $csvOptions), - $this->connectionEndpoint, - $this->container, - $this->quote($this->connectionAccessSignature), - ); - } - - public function generateCopyCommands(string $tableName, string $stageName, array $items): iterable - { - $filesToImport = $this->getManifestEntries(); - foreach (array_chunk($filesToImport, self::SLICED_FILES_CHUNK_SIZE) as $files) { - $quotedFiles = array_map( - fn($entry) => $this->quote(strtr($entry, [$this->getContainerUrl() . '/' => ''])), - $files, - ); - - yield sprintf( - 'COPY INTO %s(%s) FROM (SELECT %s FROM %s t) FILES = (%s)', - $tableName, - implode(', ', SqlHelper::getQuotedColumnsNames($items)), - implode(', ', SqlHelper::getColumnsTransformation($items)), - $this->quote('@' . $this->quoteIdentifier($stageName) . '/'), - implode(',', $quotedFiles), - ); - } - } - - private function getManifestEntries(): array - { - $blobClient = $this->getClient(); - if (!$this->isSliced) { - return [$this->getContainerUrl() . $this->name]; - } - try { - $manifestBlob = $blobClient->getBlob($this->container, $this->name); - } catch (ServiceException $e) { - throw new UserException('Load error: manifest file was not found.', 0, $e); - } - - /** @var array{'entries': array{'url': string}[]} $manifest */ - $manifest = json_decode((string) stream_get_contents($manifestBlob->getContentStream()), true); - return array_map(function (array $entry) { - return str_replace('azure://', 'https://', $entry['url']); - }, $manifest['entries']); - } - - private function getContainerUrl(): string - { - return sprintf('https://%s/%s', $this->connectionEndpoint, $this->container); - } - - private function getClient(): BlobRestProxy - { - $sasConnectionString = sprintf( - '%s=https://%s;%s=%s', - Resources::BLOB_ENDPOINT_NAME, - $this->connectionEndpoint, - Resources::SAS_TOKEN_NAME, - $this->connectionAccessSignature, - ); - - $blobRestProxy = BlobRestProxy::createBlobService($sasConnectionString); - $blobRestProxy->pushMiddleware(RetryMiddlewareFactory::create()); - - return $blobRestProxy; - } -} diff --git a/src/Writer/Strategy/S3WriteStrategy.php b/src/Writer/Strategy/S3WriteStrategy.php deleted file mode 100644 index 259e6f9..0000000 --- a/src/Writer/Strategy/S3WriteStrategy.php +++ /dev/null @@ -1,139 +0,0 @@ -isSliced = $s3info['isSliced']; - $this->region = $s3info['region']; - $this->bucket = $s3info['bucket']; - $this->key = $s3info['key']; - $this->accessKeyId = $s3info['credentials']['access_key_id']; - $this->secretAccessKey = $s3info['credentials']['secret_access_key']; - $this->sessionToken = $s3info['credentials']['session_token']; - } - - public function generateCreateStageCommand(string $stageName): string - { - $csvOptions = []; - $csvOptions[] = sprintf('FIELD_DELIMITER = %s', $this->quote(',')); - $csvOptions[] = sprintf('FIELD_OPTIONALLY_ENCLOSED_BY = %s', $this->quote('"')); - $csvOptions[] = sprintf('ESCAPE_UNENCLOSED_FIELD = %s', $this->quote('\\')); - - if (!$this->isSliced) { - $csvOptions[] = 'SKIP_HEADER = 1'; - } - - return sprintf( - "CREATE OR REPLACE STAGE %s - FILE_FORMAT = 
(TYPE=CSV %s) - URL = 's3://%s' - CREDENTIALS = (AWS_KEY_ID = %s AWS_SECRET_KEY = %s AWS_TOKEN = %s) - ", - $this->quoteIdentifier($stageName), - implode(' ', $csvOptions), - $this->bucket, - $this->quote($this->accessKeyId), - $this->quote($this->secretAccessKey), - $this->quote($this->sessionToken), - ); - } - - /** - * @param ItemConfig[] $items - */ - public function generateCopyCommands(string $tableName, string $stageName, array $items): iterable - { - $filesToImport = $this->getManifestEntries(); - foreach (array_chunk($filesToImport, self::SLICED_FILES_CHUNK_SIZE) as $files) { - $quotedFiles = array_map( - function ($entry) { - return $this->quote( - strtr($entry, [$this->getS3Prefix() . '/' => '']), - ); - }, - $files, - ); - - yield sprintf( - 'COPY INTO %s(%s) - FROM (SELECT %s FROM %s t) - FILES = (%s)', - $tableName, - implode(', ', SqlHelper::getQuotedColumnsNames($items)), - implode(', ', SqlHelper::getColumnsTransformation($items)), - $this->quote('@' . $this->quoteIdentifier($stageName) . '/'), - implode(',', $quotedFiles), - ); - } - } - - private function getManifestEntries(): array - { - if (!$this->isSliced) { - return [$this->getS3Prefix() . '/' . $this->key]; - } - - $client = $this->getClient(); - try { - /** - * @var array{ - * Body: string - * } $response - */ - $response = $client->getObject([ - 'Bucket' => $this->bucket, - 'Key' => ltrim($this->key, '/'), - ]); - } catch (AwsException $e) { - throw new UserException('Load error: ' . $e->getMessage(), $e->getCode(), $e); - } - - /** - * @var array{ - * entries: array - * } $manifest - */ - $manifest = (array) json_decode((string) $response['Body'], true); - return array_map(static function (array $entry) { - return $entry['url']; - }, (array) $manifest['entries']); - } - - private function getS3Prefix(): string - { - return sprintf('s3://%s', $this->bucket); - } - - private function getClient(): S3Client - { - return new S3Client([ - 'credentials' => [ - 'key' => $this->accessKeyId, - 'secret' => $this->secretAccessKey, - 'token' => $this->sessionToken, - ], - 'region' => $this->region, - 'version' => '2006-03-01', - ]); - } -} diff --git a/src/Writer/Strategy/SqlHelper.php b/src/Writer/Strategy/SqlHelper.php deleted file mode 100644 index a7ca8db..0000000 --- a/src/Writer/Strategy/SqlHelper.php +++ /dev/null @@ -1,37 +0,0 @@ - (new SqlHelper)->quoteIdentifier($column->getDbName()), $items); - } - - /** - * @param ItemConfig[] $items - */ - public static function getColumnsTransformation(array $items): array - { - return array_map( - function (ItemConfig $item, int $index) { - if ($item->getNullable()) { - return sprintf("IFF(t.$%d = '', null, t.$%d)", $index + 1, $index + 1); - } - return sprintf('t.$%d', $index + 1); - }, - $items, - array_keys($items), - ); - } -} diff --git a/src/Writer/Strategy/WriteStrategy.php b/src/Writer/Strategy/WriteStrategy.php deleted file mode 100644 index 10c2954..0000000 --- a/src/Writer/Strategy/WriteStrategy.php +++ /dev/null @@ -1,15 +0,0 @@ -getTempDatadir($specification); - $finder = new Finder(); - $files = $finder - ->files() - ->in($this->testProjectDir . '/source/data/in/tables') - ->name('*.manifest'); - - /** @var array $manifestData */ - $manifestData = json_decode( - (string) file_get_contents( - __DIR__ . 
'/../prepare-data/manifestData.json', - ), - true, - ); - foreach ($files as $file) { - $filename = $file->getFilenameWithoutExtension(); - if (!isset($manifestData[$filename])) { - throw new RuntimeException(sprintf('Table in storage for file "%s" not found!', $filename)); - } - $manifestPath = sprintf( - '%s/in/tables/%s.manifest', - $tempDatadir->getTmpFolder(), - $filename, - ); - - $stage = $manifestData[$filename]; - /** @var array $manifest */ - $manifest = json_decode((string) file_get_contents($manifestPath), true); - $manifest[$stage['stagingStorage']] = $stage['manifest']; - file_put_contents($manifestPath, json_encode($manifest, JSON_PRETTY_PRINT)); - } - $process = $this->runScript($tempDatadir->getTmpFolder()); $this->dumpTables($tempDatadir->getTmpFolder()); @@ -213,7 +181,7 @@ private function dumpTableData(string $tableName, string $tmpFolder): void public function getDatabaseConfig(): SnowflakeDatabaseConfig { $config = [ - 'host' => getenv('DB_HOST'), + 'host' => getenv('DB_HOST'), 'port' => getenv('DB_PORT'), 'database' => getenv('DB_DATABASE'), 'user' => getenv('DB_USER'), diff --git a/tests/functional/error-foreign-key-invalid-column/expected-code b/tests/functional/error-foreign-key-invalid-column/expected-code deleted file mode 100644 index 56a6051..0000000 --- a/tests/functional/error-foreign-key-invalid-column/expected-code +++ /dev/null @@ -1 +0,0 @@ -1 \ No newline at end of file diff --git a/tests/functional/error-foreign-key-invalid-column/expected-stderr b/tests/functional/error-foreign-key-invalid-column/expected-stderr deleted file mode 100644 index 6583ec5..0000000 --- a/tests/functional/error-foreign-key-invalid-column/expected-stderr +++ /dev/null @@ -1 +0,0 @@ -Column 'invalidColumn' in table 'special' not found \ No newline at end of file diff --git a/tests/functional/error-foreign-key-invalid-column/expected-stdout b/tests/functional/error-foreign-key-invalid-column/expected-stdout deleted file mode 100644 index d26283b..0000000 --- a/tests/functional/error-foreign-key-invalid-column/expected-stdout +++ /dev/null @@ -1,19 +0,0 @@ -Creating ODBC connection to "Driver=SnowflakeDSIIDriver;Server=%s;Port=443;Tracing=0;Login_timeout=30;Database="%s";Schema="%s";Warehouse=%s;CLIENT_SESSION_KEEP_ALIVE=TRUE;application="Keboola_Connection"". 
-Validating warehouse "%s" -Validating schema "%s" -Creating table "simple_%s" -Creating table "simple" -Writing data to table "simple_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "simple_%s" with "simple" -Dropping table "simple_%s" -Creating table "special_%s" -Creating table "special" -Writing data to table "special_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "special_%s" with "special" -Dropping table "special_%s" diff --git a/tests/functional/error-foreign-key-invalid-column/expected/data/out/db-dump/special.csv b/tests/functional/error-foreign-key-invalid-column/expected/data/out/db-dump/special.csv deleted file mode 100644 index 6818cda..0000000 --- a/tests/functional/error-foreign-key-invalid-column/expected/data/out/db-dump/special.csv +++ /dev/null @@ -1,11 +0,0 @@ -"col1","col2" -"column with \n \t \\","second col" -"column with backslash \ inside","column with backslash and enclosure \""" -"column with enclosure "", and comma inside text","second column enclosure in text """ -"columns with -new line","columns with tab" -"first","something with - -double new line" -"line with enclosure","second column" -"single quote'","two single''quotes" diff --git a/tests/functional/error-foreign-key-invalid-column/source/data/config.json b/tests/functional/error-foreign-key-invalid-column/source/data/config.json deleted file mode 100644 index 7ca4156..0000000 --- a/tests/functional/error-foreign-key-invalid-column/source/data/config.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "parameters": { - "db": { - "host": "%env(string:DB_HOST)%", - "port": "%env(string:DB_PORT)%", - "user": "%env(string:DB_USER)%", - "#password": "%env(string:DB_PASSWORD)%", - "database": "%env(string:DB_DATABASE)%", - "schema": "%env(string:DB_SCHEMA)%", - "warehouse": "%env(string:DB_WAREHOUSE)%" - }, - "tables": [ - { - "tableId": "simple", - "dbName": "simple", - "export": true, - "incremental": false, - "primaryKey": ["id"], - "items": [ - { - "name": "id", - "dbName": "id", - "type": "int", - "size": null, - "nullable": null, - "default": null, - "foreignKeyTable": "special", - "foreignKeyColumn": "invalidColumn" - }, - { - "name": "name", - "dbName": "name", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - }, - { - "name": "glasses", - "dbName": "glasses", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - } - ] - }, - { - "tableId": "special", - "dbName": "special", - "export": true, - "incremental": false, - "items": [ - { - "name": "col1", - "dbName": "col1", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - }, - { - "name": "col2", - "dbName": "col2", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - } - ] - } - ] - }, - "storage": { - "input": { - "tables": [ - { - "source": "simple", - "destination": "simple.csv" - }, - { - "source": "special", - "destination": "special.csv" - } - ] - } - } -} \ No newline at end of file diff --git a/tests/functional/error-foreign-key-invalid-column/source/data/in/tables/special.csv.manifest b/tests/functional/error-foreign-key-invalid-column/source/data/in/tables/special.csv.manifest deleted file mode 100644 index 1ca3575..0000000 --- a/tests/functional/error-foreign-key-invalid-column/source/data/in/tables/special.csv.manifest +++ /dev/null @@ -1,7 +0,0 @@ -{ - "id": "special", - "columns": [ - "col1", - "col2" - ] -} \ No newline at end of file diff --git 
a/tests/functional/error-foreign-key-invalid-type/expected-code b/tests/functional/error-foreign-key-invalid-type/expected-code deleted file mode 100644 index 56a6051..0000000 --- a/tests/functional/error-foreign-key-invalid-type/expected-code +++ /dev/null @@ -1 +0,0 @@ -1 \ No newline at end of file diff --git a/tests/functional/error-foreign-key-invalid-type/expected-stderr b/tests/functional/error-foreign-key-invalid-type/expected-stderr deleted file mode 100644 index b08092c..0000000 --- a/tests/functional/error-foreign-key-invalid-type/expected-stderr +++ /dev/null @@ -1 +0,0 @@ -Foreign key column "col1" in table "special" has different type than column in table "id" \ No newline at end of file diff --git a/tests/functional/error-foreign-key-invalid-type/expected-stdout b/tests/functional/error-foreign-key-invalid-type/expected-stdout deleted file mode 100644 index d26283b..0000000 --- a/tests/functional/error-foreign-key-invalid-type/expected-stdout +++ /dev/null @@ -1,19 +0,0 @@ -Creating ODBC connection to "Driver=SnowflakeDSIIDriver;Server=%s;Port=443;Tracing=0;Login_timeout=30;Database="%s";Schema="%s";Warehouse=%s;CLIENT_SESSION_KEEP_ALIVE=TRUE;application="Keboola_Connection"". -Validating warehouse "%s" -Validating schema "%s" -Creating table "simple_%s" -Creating table "simple" -Writing data to table "simple_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "simple_%s" with "simple" -Dropping table "simple_%s" -Creating table "special_%s" -Creating table "special" -Writing data to table "special_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "special_%s" with "special" -Dropping table "special_%s" diff --git a/tests/functional/error-foreign-key-invalid-type/expected/data/out/db-dump/special.csv b/tests/functional/error-foreign-key-invalid-type/expected/data/out/db-dump/special.csv deleted file mode 100644 index 6818cda..0000000 --- a/tests/functional/error-foreign-key-invalid-type/expected/data/out/db-dump/special.csv +++ /dev/null @@ -1,11 +0,0 @@ -"col1","col2" -"column with \n \t \\","second col" -"column with backslash \ inside","column with backslash and enclosure \""" -"column with enclosure "", and comma inside text","second column enclosure in text """ -"columns with -new line","columns with tab" -"first","something with - -double new line" -"line with enclosure","second column" -"single quote'","two single''quotes" diff --git a/tests/functional/error-foreign-key-invalid-type/source/data/config.json b/tests/functional/error-foreign-key-invalid-type/source/data/config.json deleted file mode 100644 index 01cccc8..0000000 --- a/tests/functional/error-foreign-key-invalid-type/source/data/config.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "parameters": { - "db": { - "host": "%env(string:DB_HOST)%", - "port": "%env(string:DB_PORT)%", - "user": "%env(string:DB_USER)%", - "#password": "%env(string:DB_PASSWORD)%", - "database": "%env(string:DB_DATABASE)%", - "schema": "%env(string:DB_SCHEMA)%", - "warehouse": "%env(string:DB_WAREHOUSE)%" - }, - "tables": [ - { - "tableId": "simple", - "dbName": "simple", - "export": true, - "incremental": false, - "primaryKey": ["id"], - "items": [ - { - "name": "id", - "dbName": "id", - "type": "int", - "size": null, - "nullable": null, - "default": null, - "foreignKeyTable": "special", - "foreignKeyColumn": "col1" - }, - { - "name": "name", - "dbName": "name", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - }, - { - "name": "glasses", - 
"dbName": "glasses", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - } - ] - }, - { - "tableId": "special", - "dbName": "special", - "export": true, - "incremental": false, - "items": [ - { - "name": "col1", - "dbName": "col1", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - }, - { - "name": "col2", - "dbName": "col2", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - } - ] - } - ] - }, - "storage": { - "input": { - "tables": [ - { - "source": "simple", - "destination": "simple.csv" - }, - { - "source": "special", - "destination": "special.csv" - } - ] - } - } -} \ No newline at end of file diff --git a/tests/functional/error-foreign-key-invalid-type/source/data/in/tables/simple.csv.manifest b/tests/functional/error-foreign-key-invalid-type/source/data/in/tables/simple.csv.manifest deleted file mode 100644 index 5e5556b..0000000 --- a/tests/functional/error-foreign-key-invalid-type/source/data/in/tables/simple.csv.manifest +++ /dev/null @@ -1,7 +0,0 @@ -{ - "columns": [ - "id", - "name", - "glasses" - ] -} \ No newline at end of file diff --git a/tests/functional/error-foreign-key-invalid-type/source/data/in/tables/special.csv.manifest b/tests/functional/error-foreign-key-invalid-type/source/data/in/tables/special.csv.manifest deleted file mode 100644 index 1ca3575..0000000 --- a/tests/functional/error-foreign-key-invalid-type/source/data/in/tables/special.csv.manifest +++ /dev/null @@ -1,7 +0,0 @@ -{ - "id": "special", - "columns": [ - "col1", - "col2" - ] -} \ No newline at end of file diff --git a/tests/functional/error-foreign-key-invalid-column/expected/data/out/db-dump/simple.csv b/tests/functional/error-ignore-all-columns/source/data/in/tables/simple.csv similarity index 100% rename from tests/functional/error-foreign-key-invalid-column/expected/data/out/db-dump/simple.csv rename to tests/functional/error-ignore-all-columns/source/data/in/tables/simple.csv diff --git a/tests/functional/error-incremental-write-different-pk/expected-stdout b/tests/functional/error-incremental-write-different-pk/expected-stdout index 7a189c6..8a65cf8 100644 --- a/tests/functional/error-incremental-write-different-pk/expected-stdout +++ b/tests/functional/error-incremental-write-different-pk/expected-stdout @@ -3,8 +3,6 @@ Validating warehouse "%s" Validating schema "%s" Dropping table "incremental_temp_%s" Creating temporary table "incremental_temp_%s" -Writing data to table "incremental_temp_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" +Uploading data to internal stage "@~/incremental_temp_%s" +Copying data from internal stage to staging table "incremental_temp_%s" Upserting data to table "incremental" diff --git a/tests/prepare-data/tables/incremental.csv b/tests/functional/error-incremental-write-different-pk/source/data/in/tables/incremental.csv similarity index 100% rename from tests/prepare-data/tables/incremental.csv rename to tests/functional/error-incremental-write-different-pk/source/data/in/tables/incremental.csv diff --git a/tests/functional/error-foreign-key-invalid-type/expected/data/out/db-dump/simple.csv b/tests/functional/error-invalid-schema/source/data/in/tables/simple.csv similarity index 100% rename from tests/functional/error-foreign-key-invalid-type/expected/data/out/db-dump/simple.csv rename to tests/functional/error-invalid-schema/source/data/in/tables/simple.csv diff --git a/tests/functional/foreign-key/expected/data/out/db-dump/simple.csv 
b/tests/functional/error-invalid-warehouse/source/data/in/tables/simple.csv similarity index 100% rename from tests/functional/foreign-key/expected/data/out/db-dump/simple.csv rename to tests/functional/error-invalid-warehouse/source/data/in/tables/simple.csv diff --git a/tests/functional/error-missing-input-mapping-table/expected-stdout b/tests/functional/error-missing-input-mapping-table/expected-stdout index 81a3701..e69de29 100644 --- a/tests/functional/error-missing-input-mapping-table/expected-stdout +++ b/tests/functional/error-missing-input-mapping-table/expected-stdout @@ -1,3 +0,0 @@ -Creating ODBC connection to "Driver=SnowflakeDSIIDriver;Server=%s;Port=443;Tracing=0;Login_timeout=30;Database="%s";Schema="%s";Warehouse=%s;CLIENT_SESSION_KEEP_ALIVE=TRUE;application="Keboola_Connection"". -Validating warehouse "%s" -Validating schema "%s" diff --git a/tests/functional/run-old-config/expected/data/out/db-dump/simple.csv b/tests/functional/error-missing-input-mapping-table/source/data/in/tables/simple.csv similarity index 100% rename from tests/functional/run-old-config/expected/data/out/db-dump/simple.csv rename to tests/functional/error-missing-input-mapping-table/source/data/in/tables/simple.csv diff --git a/tests/functional/error-missing-input-mapping/expected-stdout b/tests/functional/error-missing-input-mapping/expected-stdout index 81a3701..e69de29 100644 --- a/tests/functional/error-missing-input-mapping/expected-stdout +++ b/tests/functional/error-missing-input-mapping/expected-stdout @@ -1,3 +0,0 @@ -Creating ODBC connection to "Driver=SnowflakeDSIIDriver;Server=%s;Port=443;Tracing=0;Login_timeout=30;Database="%s";Schema="%s";Warehouse=%s;CLIENT_SESSION_KEEP_ALIVE=TRUE;application="Keboola_Connection"". -Validating warehouse "%s" -Validating schema "%s" diff --git a/tests/prepare-data/tables/simple.csv b/tests/functional/error-missing-input-mapping/source/data/in/tables/simple.csv similarity index 100% rename from tests/prepare-data/tables/simple.csv rename to tests/functional/error-missing-input-mapping/source/data/in/tables/simple.csv diff --git a/tests/functional/error-run-old-config/expected-code b/tests/functional/error-run-old-config/expected-code new file mode 100644 index 0000000..d8263ee --- /dev/null +++ b/tests/functional/error-run-old-config/expected-code @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/tests/functional/error-run-old-config/expected-stderr b/tests/functional/error-run-old-config/expected-stderr new file mode 100644 index 0000000..cc9e2b1 --- /dev/null +++ b/tests/functional/error-run-old-config/expected-stderr @@ -0,0 +1 @@ +%sUndefined array key "tableId"%s \ No newline at end of file diff --git a/tests/functional/error-foreign-key-invalid-column/expected/data/out/files/.gitkeep b/tests/functional/error-run-old-config/expected-stdout similarity index 100% rename from tests/functional/error-foreign-key-invalid-column/expected/data/out/files/.gitkeep rename to tests/functional/error-run-old-config/expected-stdout diff --git a/tests/functional/error-foreign-key-invalid-column/expected/data/out/tables/.gitkeep b/tests/functional/error-run-old-config/expected/data/out/db-dump/.gitkeep similarity index 100% rename from tests/functional/error-foreign-key-invalid-column/expected/data/out/tables/.gitkeep rename to tests/functional/error-run-old-config/expected/data/out/db-dump/.gitkeep diff --git a/tests/functional/error-foreign-key-invalid-type/expected/data/out/files/.gitkeep 
b/tests/functional/error-run-old-config/expected/data/out/files/.gitkeep similarity index 100% rename from tests/functional/error-foreign-key-invalid-type/expected/data/out/files/.gitkeep rename to tests/functional/error-run-old-config/expected/data/out/files/.gitkeep diff --git a/tests/functional/error-foreign-key-invalid-type/expected/data/out/tables/.gitkeep b/tests/functional/error-run-old-config/expected/data/out/tables/.gitkeep similarity index 100% rename from tests/functional/error-foreign-key-invalid-type/expected/data/out/tables/.gitkeep rename to tests/functional/error-run-old-config/expected/data/out/tables/.gitkeep diff --git a/tests/functional/run-old-config/source/data/config.json b/tests/functional/error-run-old-config/source/data/config.json similarity index 100% rename from tests/functional/run-old-config/source/data/config.json rename to tests/functional/error-run-old-config/source/data/config.json diff --git a/tests/functional/error-run-old-config/source/data/in/tables/simple.csv b/tests/functional/error-run-old-config/source/data/in/tables/simple.csv new file mode 100644 index 0000000..4a8ab84 --- /dev/null +++ b/tests/functional/error-run-old-config/source/data/in/tables/simple.csv @@ -0,0 +1,10 @@ +"id","name","glasses" +"0","Miro cillik","yes" +"1","Ondrej Hlavacek","no" +"2","Martin Halamicek","yes" +"3","Tomas Kacur","yes" +"4","Erik Zigo","no" +"5","Marc Raiser","sometimes" +"6","Petr Simecek","yes" +"7","Jakub Matejka","yes" +"8","Ondrej Popelka","no" diff --git a/tests/functional/error-foreign-key-invalid-column/source/data/in/tables/simple.csv.manifest b/tests/functional/error-run-old-config/source/data/in/tables/simple.csv.manifest similarity index 100% rename from tests/functional/error-foreign-key-invalid-column/source/data/in/tables/simple.csv.manifest rename to tests/functional/error-run-old-config/source/data/in/tables/simple.csv.manifest diff --git a/tests/functional/foreign-key/expected-code b/tests/functional/foreign-key/expected-code deleted file mode 100644 index 573541a..0000000 --- a/tests/functional/foreign-key/expected-code +++ /dev/null @@ -1 +0,0 @@ -0 diff --git a/tests/functional/foreign-key/expected-stderr b/tests/functional/foreign-key/expected-stderr deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/foreign-key/expected-stdout b/tests/functional/foreign-key/expected-stdout deleted file mode 100644 index 15c4788..0000000 --- a/tests/functional/foreign-key/expected-stdout +++ /dev/null @@ -1,21 +0,0 @@ -Creating ODBC connection to "Driver=SnowflakeDSIIDriver;Server=%s;Port=443;Tracing=0;Login_timeout=30;Database="%s";Schema="%s";Warehouse=%s;CLIENT_SESSION_KEEP_ALIVE=TRUE;application="Keboola_Connection"". 
-Validating warehouse "%s" -Validating schema "%s" -Creating table "simple_%s" -Creating table "simple" -Writing data to table "simple_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "simple_%s" with "simple" -Dropping table "simple_%s" -Creating table "special_%s" -Creating table "special" -Writing data to table "special_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "special_%s" with "special" -Dropping table "special_%s" -Adding unique key to table "special" on column "col1" -Creating foreign key from table "name" to table "special" on column "col1" diff --git a/tests/functional/foreign-key/expected/data/out/db-dump/special.csv b/tests/functional/foreign-key/expected/data/out/db-dump/special.csv deleted file mode 100644 index 6818cda..0000000 --- a/tests/functional/foreign-key/expected/data/out/db-dump/special.csv +++ /dev/null @@ -1,11 +0,0 @@ -"col1","col2" -"column with \n \t \\","second col" -"column with backslash \ inside","column with backslash and enclosure \""" -"column with enclosure "", and comma inside text","second column enclosure in text """ -"columns with -new line","columns with tab" -"first","something with - -double new line" -"line with enclosure","second column" -"single quote'","two single''quotes" diff --git a/tests/functional/foreign-key/expected/data/out/files/.gitkeep b/tests/functional/foreign-key/expected/data/out/files/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/foreign-key/expected/data/out/tables/.gitkeep b/tests/functional/foreign-key/expected/data/out/tables/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/foreign-key/source/data/config.json b/tests/functional/foreign-key/source/data/config.json deleted file mode 100644 index 498b8b8..0000000 --- a/tests/functional/foreign-key/source/data/config.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "parameters": { - "db": { - "host": "%env(string:DB_HOST)%", - "port": "%env(string:DB_PORT)%", - "user": "%env(string:DB_USER)%", - "#password": "%env(string:DB_PASSWORD)%", - "database": "%env(string:DB_DATABASE)%", - "schema": "%env(string:DB_SCHEMA)%", - "warehouse": "%env(string:DB_WAREHOUSE)%" - }, - "tables": [ - { - "tableId": "simple", - "dbName": "simple", - "export": true, - "incremental": false, - "primaryKey": ["id"], - "items": [ - { - "name": "id", - "dbName": "id", - "type": "int", - "size": null, - "nullable": null, - "default": null - }, - { - "name": "name", - "dbName": "name", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null, - "foreignKeyTable": "special", - "foreignKeyColumn": "col1" - }, - { - "name": "glasses", - "dbName": "glasses", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - } - ] - }, - { - "tableId": "special", - "dbName": "special", - "export": true, - "incremental": false, - "items": [ - { - "name": "col1", - "dbName": "col1", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - }, - { - "name": "col2", - "dbName": "col2", - "type": "varchar", - "size": 255, - "nullable": null, - "default": null - } - ] - } - ] - }, - "storage": { - "input": { - "tables": [ - { - "source": "simple", - "destination": "simple.csv" - }, - { - "source": "special", - "destination": "special.csv" - } - ] - } - } -} \ No newline at end of file diff --git a/tests/functional/foreign-key/source/data/in/tables/simple.csv.manifest 
b/tests/functional/foreign-key/source/data/in/tables/simple.csv.manifest deleted file mode 100644 index 5e5556b..0000000 --- a/tests/functional/foreign-key/source/data/in/tables/simple.csv.manifest +++ /dev/null @@ -1,7 +0,0 @@ -{ - "columns": [ - "id", - "name", - "glasses" - ] -} \ No newline at end of file diff --git a/tests/functional/foreign-key/source/data/in/tables/special.csv.manifest b/tests/functional/foreign-key/source/data/in/tables/special.csv.manifest deleted file mode 100644 index 1ca3575..0000000 --- a/tests/functional/foreign-key/source/data/in/tables/special.csv.manifest +++ /dev/null @@ -1,7 +0,0 @@ -{ - "id": "special", - "columns": [ - "col1", - "col2" - ] -} \ No newline at end of file diff --git a/tests/functional/foreign-key/tearDown.php b/tests/functional/foreign-key/tearDown.php deleted file mode 100644 index bee8a1f..0000000 --- a/tests/functional/foreign-key/tearDown.php +++ /dev/null @@ -1,25 +0,0 @@ -$databaseConfig = $test->getDatabaseConfig(); - $foreignKeys = $test->connection->fetchAll(sprintf( - $sql, - $databaseConfig->getSchema(), - 'simple', - )); - - Assert::assertCount(1, $foreignKeys); - Assert::assertEquals('FK_SPECIAL_COL1', $foreignKeys[0]['CONSTRAINT_NAME']); -}; diff --git a/tests/functional/incremental-write-date-no-pk/expected-stdout b/tests/functional/incremental-write-date-no-pk/expected-stdout index b7e25ec..4e58001 100644 --- a/tests/functional/incremental-write-date-no-pk/expected-stdout +++ b/tests/functional/incremental-write-date-no-pk/expected-stdout @@ -3,9 +3,7 @@ Validating warehouse "%s" Validating schema "%s" Dropping table "incremental_temp_%s" Creating temporary table "incremental_temp_%s" -Writing data to table "incremental_temp_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" +Uploading data to internal stage "@~/incremental_temp_%s" +Copying data from internal stage to staging table "incremental_temp_%s" Upserting data to table "incremental" Data upserted to table "incremental". diff --git a/tests/functional/incremental-write-date-no-pk/source/data/in/tables/incremental.csv b/tests/functional/incremental-write-date-no-pk/source/data/in/tables/incremental.csv new file mode 100644 index 0000000..6feed97 --- /dev/null +++ b/tests/functional/incremental-write-date-no-pk/source/data/in/tables/incremental.csv @@ -0,0 +1,5 @@ +"name","int","float","date","datetime","timestamp" +"name120","12","22.5","2021-01-13","2021-01-13 12:12:12","2021-01-13 20:12:12" +"name130","1","13.5","2020-01-14","2020-01-14 13:13:13","2020-01-14 21:13:13" +"name140","4","14.1","2023-01-15","2023-11-15 14:14:14","2023-02-15 22:14:14" +"name150","5","19.5","2123-01-16","2013-01-16 15:10:15","2023-01-16 23:15:15" \ No newline at end of file diff --git a/tests/functional/incremental-write-date/expected-stdout b/tests/functional/incremental-write-date/expected-stdout index c52c5a8..8a0a25b 100644 --- a/tests/functional/incremental-write-date/expected-stdout +++ b/tests/functional/incremental-write-date/expected-stdout @@ -3,10 +3,8 @@ Validating warehouse "%s" Validating schema "%s" Dropping table "incremental_temp_%s" Creating temporary table "incremental_temp_%s" -Writing data to table "incremental_temp_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" +Uploading data to internal stage "@~/%s" +Copying data from internal stage to staging table "incremental_temp_%s" Upserting data to table "incremental" Table "incremental" has primary key, using upsert. Data upserted to table "incremental".
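Note on the fixture changes above: the writer no longer creates a named external stage and picks a write strategy per table. It now PUTs the CSV into the Snowflake user stage (`@~`) and COPYs it into the staging table, which is why four log lines collapse into two. A minimal sketch of that two-step load over plain ODBC follows; the DSN, credentials, file path, schema, and the `_temp` suffix are illustrative, while the SQL shape mirrors what `putFileQueryStatement()` and `copyIntoTableQueryStatement()` are asserted to produce in tests/phpunit/SnowflakeTest.php further down (the asserted PUT is additionally prefixed with USE WAREHOUSE/DATABASE/SCHEMA statements).

```php
<?php

declare(strict_types=1);

// Sketch only: connection details are placeholders, not the component's wiring.
$odbc = odbc_connect(
    'Driver=SnowflakeDSIIDriver;Server=example.snowflakecomputing.com;Port=443',
    'writer_user',
    'writer_password',
);

// Step 1: upload the local CSV into the user stage "@~"
// ("Uploading data to internal stage ...").
odbc_exec($odbc, 'PUT file:///data/in/tables/incremental.csv @~/incremental_temp_123;');

// Step 2: load the staged, gzipped file into the staging table
// ("Copying data from internal stage to staging table ...").
odbc_exec($odbc, <<<'SQL'
COPY INTO "my_schema"."incremental_temp_123"("name", "int", "float", "date", "datetime", "timestamp")
FROM @~/incremental_temp_123
FILE_FORMAT = (TYPE=CSV SKIP_HEADER = 1 FIELD_DELIMITER = ',' FIELD_OPTIONALLY_ENCLOSED_BY = '\"' ESCAPE_UNENCLOSED_FIELD = '\\' COMPRESSION = 'GZIP')
;
SQL);
```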
diff --git a/tests/functional/incremental-write-date/source/data/in/tables/incremental.csv b/tests/functional/incremental-write-date/source/data/in/tables/incremental.csv new file mode 100644 index 0000000..6feed97 --- /dev/null +++ b/tests/functional/incremental-write-date/source/data/in/tables/incremental.csv @@ -0,0 +1,5 @@ +"name","int","float","date","datetime","timestamp" +"name120","12","22.5","2021-01-13","2021-01-13 12:12:12","2021-01-13 20:12:12" +"name130","1","13.5","2020-01-14","2020-01-14 13:13:13","2020-01-14 21:13:13" +"name140","4","14.1","2023-01-15","2023-11-15 14:14:14","2023-02-15 22:14:14" +"name150","5","19.5","2123-01-16","2013-01-16 15:10:15","2023-01-16 23:15:15" \ No newline at end of file diff --git a/tests/functional/incremental-write-float-no-pk/expected-stdout b/tests/functional/incremental-write-float-no-pk/expected-stdout index b7e25ec..4e58001 100644 --- a/tests/functional/incremental-write-float-no-pk/expected-stdout +++ b/tests/functional/incremental-write-float-no-pk/expected-stdout @@ -3,9 +3,7 @@ Validating warehouse "%s" Validating schema "%s" Dropping table "incremental_temp_%s" Creating temporary table "incremental_temp_%s" -Writing data to table "incremental_temp_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" +Uploading data to internal stage "@~/incremental_temp_%s" +Copying data from internal stage to staging table "incremental_temp_%s" Upserting data to table "incremental" Data upserted to table "incremental". diff --git a/tests/functional/incremental-write-float-no-pk/source/data/in/tables/incremental.csv b/tests/functional/incremental-write-float-no-pk/source/data/in/tables/incremental.csv new file mode 100644 index 0000000..6feed97 --- /dev/null +++ b/tests/functional/incremental-write-float-no-pk/source/data/in/tables/incremental.csv @@ -0,0 +1,5 @@ +"name","int","float","date","datetime","timestamp" +"name120","12","22.5","2021-01-13","2021-01-13 12:12:12","2021-01-13 20:12:12" +"name130","1","13.5","2020-01-14","2020-01-14 13:13:13","2020-01-14 21:13:13" +"name140","4","14.1","2023-01-15","2023-11-15 14:14:14","2023-02-15 22:14:14" +"name150","5","19.5","2123-01-16","2013-01-16 15:10:15","2023-01-16 23:15:15" \ No newline at end of file diff --git a/tests/functional/incremental-write-float/expected-stdout b/tests/functional/incremental-write-float/expected-stdout index c52c5a8..ede4d6e 100644 --- a/tests/functional/incremental-write-float/expected-stdout +++ b/tests/functional/incremental-write-float/expected-stdout @@ -3,10 +3,8 @@ Validating warehouse "%s" Validating schema "%s" Dropping table "incremental_temp_%s" Creating temporary table "incremental_temp_%s" -Writing data to table "incremental_temp_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" +Uploading data to internal stage "@~/incremental_temp_%s" +Copying data from internal stage to staging table "incremental_temp_%s" Upserting data to table "incremental" Table "incremental" has primary key, using upsert. Data upserted to table "incremental". 
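The trailing "Upserting data" lines are untouched by this diff: only the staging load moved to the internal stage, and the final step still runs from the temp table into the target (a plain insert when there is no primary key). The diff does not show the upsert SQL itself; a MERGE keyed on the primary key is one plausible shape, sketched here with the incremental fixture's columns and a hypothetical `name` key, neither taken from the writer's source.

```php
<?php

declare(strict_types=1);

// Hypothetical MERGE builder for the upsert step; key, table and alias names
// are illustrative assumptions.
$primaryKey = ['name'];
$columns = ['name', 'int', 'float', 'date', 'datetime', 'timestamp'];

$on = implode(' AND ', array_map(
    fn (string $pk): string => sprintf('"dest"."%s" = "src"."%s"', $pk, $pk),
    $primaryKey,
));
$set = implode(', ', array_map(
    fn (string $col): string => sprintf('"%s" = "src"."%s"', $col, $col),
    $columns,
));
$cols = '"' . implode('", "', $columns) . '"';
$vals = implode(', ', array_map(
    fn (string $col): string => sprintf('"src"."%s"', $col),
    $columns,
));

$sql = sprintf(
    'MERGE INTO "incremental" AS "dest" USING "incremental_temp_123" AS "src" ON %s'
    . ' WHEN MATCHED THEN UPDATE SET %s'
    . ' WHEN NOT MATCHED THEN INSERT (%s) VALUES (%s);',
    $on,
    $set,
    $cols,
    $vals,
);
```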
diff --git a/tests/functional/incremental-write-float/source/data/in/tables/incremental.csv b/tests/functional/incremental-write-float/source/data/in/tables/incremental.csv new file mode 100644 index 0000000..6feed97 --- /dev/null +++ b/tests/functional/incremental-write-float/source/data/in/tables/incremental.csv @@ -0,0 +1,5 @@ +"name","int","float","date","datetime","timestamp" +"name120","12","22.5","2021-01-13","2021-01-13 12:12:12","2021-01-13 20:12:12" +"name130","1","13.5","2020-01-14","2020-01-14 13:13:13","2020-01-14 21:13:13" +"name140","4","14.1","2023-01-15","2023-11-15 14:14:14","2023-02-15 22:14:14" +"name150","5","19.5","2123-01-16","2013-01-16 15:10:15","2023-01-16 23:15:15" \ No newline at end of file diff --git a/tests/functional/incremental-write-int-no-pk/expected-stdout b/tests/functional/incremental-write-int-no-pk/expected-stdout index b7e25ec..4e58001 100644 --- a/tests/functional/incremental-write-int-no-pk/expected-stdout +++ b/tests/functional/incremental-write-int-no-pk/expected-stdout @@ -3,9 +3,7 @@ Validating warehouse "%s" Validating schema "%s" Dropping table "incremental_temp_%s" Creating temporary table "incremental_temp_%s" -Writing data to table "incremental_temp_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" +Uploading data to internal stage "@~/incremental_temp_%s" +Copying data from internal stage to staging table "incremental_temp_%s" Upserting data to table "incremental" Data upserted to table "incremental". diff --git a/tests/functional/incremental-write-int-no-pk/source/data/in/tables/incremental.csv b/tests/functional/incremental-write-int-no-pk/source/data/in/tables/incremental.csv new file mode 100644 index 0000000..6feed97 --- /dev/null +++ b/tests/functional/incremental-write-int-no-pk/source/data/in/tables/incremental.csv @@ -0,0 +1,5 @@ +"name","int","float","date","datetime","timestamp" +"name120","12","22.5","2021-01-13","2021-01-13 12:12:12","2021-01-13 20:12:12" +"name130","1","13.5","2020-01-14","2020-01-14 13:13:13","2020-01-14 21:13:13" +"name140","4","14.1","2023-01-15","2023-11-15 14:14:14","2023-02-15 22:14:14" +"name150","5","19.5","2123-01-16","2013-01-16 15:10:15","2023-01-16 23:15:15" \ No newline at end of file diff --git a/tests/functional/incremental-write-timestamp-no-pk/expected-stdout b/tests/functional/incremental-write-timestamp-no-pk/expected-stdout index b7e25ec..4e58001 100644 --- a/tests/functional/incremental-write-timestamp-no-pk/expected-stdout +++ b/tests/functional/incremental-write-timestamp-no-pk/expected-stdout @@ -3,9 +3,7 @@ Validating warehouse "%s" Validating schema "%s" Dropping table "incremental_temp_%s" Creating temporary table "incremental_temp_%s" -Writing data to table "incremental_temp_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" +Uploading data to internal stage "@~/incremental_temp_%s" +Copying data from internal stage to staging table "incremental_temp_%s" Upserting data to table "incremental" Data upserted to table "incremental". 
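A reading aid for all of these expected-stdout fixtures: `%s` is a format wildcard, not literal output. The datadir comparison behaves like PHPUnit's format assertions, where `%s` matches one or more characters up to a newline, so run-specific temp-table suffixes and hostnames still satisfy the expectation:

```php
<?php

declare(strict_types=1);

use PHPUnit\Framework\Assert;

// The %s in the fixture line absorbs the generated suffix.
Assert::assertStringMatchesFormat(
    'Uploading data to internal stage "@~/incremental_temp_%s"',
    'Uploading data to internal stage "@~/incremental_temp_6422acf00b1db"',
);
```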
diff --git a/tests/functional/incremental-write-timestamp-no-pk/source/data/in/tables/incremental.csv b/tests/functional/incremental-write-timestamp-no-pk/source/data/in/tables/incremental.csv new file mode 100644 index 0000000..6feed97 --- /dev/null +++ b/tests/functional/incremental-write-timestamp-no-pk/source/data/in/tables/incremental.csv @@ -0,0 +1,5 @@ +"name","int","float","date","datetime","timestamp" +"name120","12","22.5","2021-01-13","2021-01-13 12:12:12","2021-01-13 20:12:12" +"name130","1","13.5","2020-01-14","2020-01-14 13:13:13","2020-01-14 21:13:13" +"name140","4","14.1","2023-01-15","2023-11-15 14:14:14","2023-02-15 22:14:14" +"name150","5","19.5","2123-01-16","2013-01-16 15:10:15","2023-01-16 23:15:15" \ No newline at end of file diff --git a/tests/functional/incremental-write-timestamp/expected-stdout b/tests/functional/incremental-write-timestamp/expected-stdout index c52c5a8..ede4d6e 100644 --- a/tests/functional/incremental-write-timestamp/expected-stdout +++ b/tests/functional/incremental-write-timestamp/expected-stdout @@ -3,10 +3,8 @@ Validating warehouse "%s" Validating schema "%s" Dropping table "incremental_temp_%s" Creating temporary table "incremental_temp_%s" -Writing data to table "incremental_temp_%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" +Uploading data to internal stage "@~/incremental_temp_%s" +Copying data from internal stage to staging table "incremental_temp_%s" Upserting data to table "incremental" Table "incremental" has primary key, using upsert. Data upserted to table "incremental". diff --git a/tests/functional/incremental-write-timestamp/source/data/in/tables/incremental.csv b/tests/functional/incremental-write-timestamp/source/data/in/tables/incremental.csv new file mode 100644 index 0000000..6feed97 --- /dev/null +++ b/tests/functional/incremental-write-timestamp/source/data/in/tables/incremental.csv @@ -0,0 +1,5 @@ +"name","int","float","date","datetime","timestamp" +"name120","12","22.5","2021-01-13","2021-01-13 12:12:12","2021-01-13 20:12:12" +"name130","1","13.5","2020-01-14","2020-01-14 13:13:13","2020-01-14 21:13:13" +"name140","4","14.1","2023-01-15","2023-11-15 14:14:14","2023-02-15 22:14:14" +"name150","5","19.5","2123-01-16","2013-01-16 15:10:15","2023-01-16 23:15:15" \ No newline at end of file diff --git a/tests/functional/nullable-and-default-values-date/expected-code b/tests/functional/nullable-and-default-values-date/expected-code deleted file mode 100644 index 573541a..0000000 --- a/tests/functional/nullable-and-default-values-date/expected-code +++ /dev/null @@ -1 +0,0 @@ -0 diff --git a/tests/functional/nullable-and-default-values-date/expected-stderr b/tests/functional/nullable-and-default-values-date/expected-stderr deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/nullable-and-default-values-date/expected-stdout b/tests/functional/nullable-and-default-values-date/expected-stdout deleted file mode 100644 index e137256..0000000 --- a/tests/functional/nullable-and-default-values-date/expected-stdout +++ /dev/null @@ -1,11 +0,0 @@ -Creating ODBC connection to "Driver=SnowflakeDSIIDriver;Server=%s;Port=443;Tracing=0;Login_timeout=30;Database="%s";Schema="%s";Warehouse=%s;CLIENT_SESSION_KEEP_ALIVE=TRUE;application="Keboola_Connection"". 
-Validating warehouse "%s" -Validating schema "%s" -Creating table "%s" -Creating table "simple-date" -Writing data to table "%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "%s" with "simple-date" -Dropping table "%s" diff --git a/tests/functional/nullable-and-default-values-date/expected/data/out/db-dump/simple-date.csv b/tests/functional/nullable-and-default-values-date/expected/data/out/db-dump/simple-date.csv deleted file mode 100644 index 7505867..0000000 --- a/tests/functional/nullable-and-default-values-date/expected/data/out/db-dump/simple-date.csv +++ /dev/null @@ -1,3 +0,0 @@ -"id","col-true-null","col-true-empty-string","col-true-string" -"1","2020-06-05","2020-06-06","2020-06-07" -"2","","","" diff --git a/tests/functional/nullable-and-default-values-date/expected/data/out/files/.gitkeep b/tests/functional/nullable-and-default-values-date/expected/data/out/files/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/nullable-and-default-values-date/expected/data/out/tables/.gitkeep b/tests/functional/nullable-and-default-values-date/expected/data/out/tables/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/nullable-and-default-values-date/source/data/config.json b/tests/functional/nullable-and-default-values-date/source/data/config.json deleted file mode 100644 index a307bb0..0000000 --- a/tests/functional/nullable-and-default-values-date/source/data/config.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "parameters": { - "db": { - "host": "%env(string:DB_HOST)%", - "port": "%env(string:DB_PORT)%", - "user": "%env(string:DB_USER)%", - "#password": "%env(string:DB_PASSWORD)%", - "database": "%env(string:DB_DATABASE)%", - "schema": "%env(string:DB_SCHEMA)%", - "warehouse": "%env(string:DB_WAREHOUSE)%" - }, - "tableId": "simple-date", - "dbName": "simple-date", - "items": [ - { - "name": "id", - "dbName": "id", - "type": "int", - "size": null, - "nullable": null, - "default": null - }, - { - "name": "col-true-null", - "dbName": "col-true-null", - "type": "date", - "size": null, - "nullable": true, - "default": null - }, - { - "name": "col-true-empty-string", - "dbName": "col-true-empty-string", - "type": "date", - "size": null, - "nullable": true, - "default": "" - }, - { - "name": "col-true-string", - "dbName": "col-true-string", - "type": "date", - "size": null, - "nullable": true, - "default": "2020-01-01" - } - ] - }, - "storage": { - "input": { - "tables": [ - { - "source": "simple-date", - "destination": "simple-date.csv" - } - ] - } - } -} diff --git a/tests/functional/nullable-and-default-values-date/source/data/in/tables/simple-date.csv.manifest b/tests/functional/nullable-and-default-values-date/source/data/in/tables/simple-date.csv.manifest deleted file mode 100644 index 260a5dc..0000000 --- a/tests/functional/nullable-and-default-values-date/source/data/in/tables/simple-date.csv.manifest +++ /dev/null @@ -1,8 +0,0 @@ -{ - "columns": [ - "id", - "col-true-null", - "col-true-empty-string", - "col-true-string" - ] -} \ No newline at end of file diff --git a/tests/functional/nullable-and-default-values-datetime/expected-code b/tests/functional/nullable-and-default-values-datetime/expected-code deleted file mode 100644 index 573541a..0000000 --- a/tests/functional/nullable-and-default-values-datetime/expected-code +++ /dev/null @@ -1 +0,0 @@ -0 diff --git a/tests/functional/nullable-and-default-values-datetime/expected-stderr 
b/tests/functional/nullable-and-default-values-datetime/expected-stderr deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/nullable-and-default-values-datetime/expected-stdout b/tests/functional/nullable-and-default-values-datetime/expected-stdout deleted file mode 100644 index 846a171..0000000 --- a/tests/functional/nullable-and-default-values-datetime/expected-stdout +++ /dev/null @@ -1,11 +0,0 @@ -Creating ODBC connection to "Driver=SnowflakeDSIIDriver;Server=%s;Port=443;Tracing=0;Login_timeout=30;Database="%s";Schema="%s";Warehouse=%s;CLIENT_SESSION_KEEP_ALIVE=TRUE;application="Keboola_Connection"". -Validating warehouse "%s" -Validating schema "%s" -Creating table "%s" -Creating table "simple-datetime" -Writing data to table "%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "%s" with "simple-datetime" -Dropping table "%s" diff --git a/tests/functional/nullable-and-default-values-datetime/expected/data/out/db-dump/simple-datetime.csv b/tests/functional/nullable-and-default-values-datetime/expected/data/out/db-dump/simple-datetime.csv deleted file mode 100644 index f433907..0000000 --- a/tests/functional/nullable-and-default-values-datetime/expected/data/out/db-dump/simple-datetime.csv +++ /dev/null @@ -1,3 +0,0 @@ -"id","col-true-null","col-true-empty-string","col-true-string" -"1","2020-06-05 12:01:02","2020-06-06 12:01:02","2020-06-07 12:01:02" -"2","","","" diff --git a/tests/functional/nullable-and-default-values-datetime/expected/data/out/files/.gitkeep b/tests/functional/nullable-and-default-values-datetime/expected/data/out/files/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/nullable-and-default-values-datetime/expected/data/out/tables/.gitkeep b/tests/functional/nullable-and-default-values-datetime/expected/data/out/tables/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/nullable-and-default-values-datetime/source/data/config.json b/tests/functional/nullable-and-default-values-datetime/source/data/config.json deleted file mode 100644 index 10e0968..0000000 --- a/tests/functional/nullable-and-default-values-datetime/source/data/config.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "parameters": { - "db": { - "host": "%env(string:DB_HOST)%", - "port": "%env(string:DB_PORT)%", - "user": "%env(string:DB_USER)%", - "#password": "%env(string:DB_PASSWORD)%", - "database": "%env(string:DB_DATABASE)%", - "schema": "%env(string:DB_SCHEMA)%", - "warehouse": "%env(string:DB_WAREHOUSE)%" - }, - "tableId": "simple-datetime", - "dbName": "simple-datetime", - "items": [ - { - "name": "id", - "dbName": "id", - "type": "int", - "size": null, - "nullable": null, - "default": null - }, - { - "name": "col-true-null", - "dbName": "col-true-null", - "type": "datetime", - "size": null, - "nullable": true, - "default": null - }, - { - "name": "col-true-empty-string", - "dbName": "col-true-empty-string", - "type": "datetime", - "size": null, - "nullable": true, - "default": "" - }, - { - "name": "col-true-string", - "dbName": "col-true-string", - "type": "datetime", - "size": null, - "nullable": true, - "default": "2020-01-01 12:01:02" - } - ] - }, - "storage": { - "input": { - "tables": [ - { - "source": "simple-datetime", - "destination": "simple-datetime.csv" - } - ] - } - } -} diff --git a/tests/functional/nullable-and-default-values-datetime/source/data/in/tables/simple-datetime.csv.manifest 
b/tests/functional/nullable-and-default-values-datetime/source/data/in/tables/simple-datetime.csv.manifest deleted file mode 100644 index 260a5dc..0000000 --- a/tests/functional/nullable-and-default-values-datetime/source/data/in/tables/simple-datetime.csv.manifest +++ /dev/null @@ -1,8 +0,0 @@ -{ - "columns": [ - "id", - "col-true-null", - "col-true-empty-string", - "col-true-string" - ] -} \ No newline at end of file diff --git a/tests/functional/nullable-and-default-values-int/expected-code b/tests/functional/nullable-and-default-values-int/expected-code deleted file mode 100644 index 573541a..0000000 --- a/tests/functional/nullable-and-default-values-int/expected-code +++ /dev/null @@ -1 +0,0 @@ -0 diff --git a/tests/functional/nullable-and-default-values-int/expected-stderr b/tests/functional/nullable-and-default-values-int/expected-stderr deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/nullable-and-default-values-int/expected-stdout b/tests/functional/nullable-and-default-values-int/expected-stdout deleted file mode 100644 index f62d952..0000000 --- a/tests/functional/nullable-and-default-values-int/expected-stdout +++ /dev/null @@ -1,11 +0,0 @@ -Creating ODBC connection to "Driver=SnowflakeDSIIDriver;Server=%s;Port=443;Tracing=0;Login_timeout=30;Database="%s";Schema="%s";Warehouse=%s;CLIENT_SESSION_KEEP_ALIVE=TRUE;application="Keboola_Connection"". -Validating warehouse "%s" -Validating schema "%s" -Creating table "%s" -Creating table "simple-int" -Writing data to table "%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "%s" with "simple-int" -Dropping table "%s" diff --git a/tests/functional/nullable-and-default-values-int/expected/data/out/db-dump/simple-int.csv b/tests/functional/nullable-and-default-values-int/expected/data/out/db-dump/simple-int.csv deleted file mode 100644 index 3368c91..0000000 --- a/tests/functional/nullable-and-default-values-int/expected/data/out/db-dump/simple-int.csv +++ /dev/null @@ -1,4 +0,0 @@ -"id","col-true-null","col-true-empty-string","col-true-string","col-true-zero-value" -"1","0","1","2","3" -"2","0","0","0","0" -"3","","","","" diff --git a/tests/functional/nullable-and-default-values-int/expected/data/out/files/.gitkeep b/tests/functional/nullable-and-default-values-int/expected/data/out/files/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/nullable-and-default-values-int/expected/data/out/tables/.gitkeep b/tests/functional/nullable-and-default-values-int/expected/data/out/tables/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/nullable-and-default-values-int/source/data/config.json b/tests/functional/nullable-and-default-values-int/source/data/config.json deleted file mode 100644 index a65e16b..0000000 --- a/tests/functional/nullable-and-default-values-int/source/data/config.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "parameters": { - "db": { - "host": "%env(string:DB_HOST)%", - "port": "%env(string:DB_PORT)%", - "user": "%env(string:DB_USER)%", - "#password": "%env(string:DB_PASSWORD)%", - "database": "%env(string:DB_DATABASE)%", - "schema": "%env(string:DB_SCHEMA)%", - "warehouse": "%env(string:DB_WAREHOUSE)%" - }, - "tableId": "simple-int", - "dbName": "simple-int", - "items": [ - { - "name": "id", - "dbName": "id", - "type": "int", - "size": null, - "nullable": null, - "default": null - }, - { - "name": "col-true-null", - "dbName": "col-true-null", - "type": "int", - "size": 255, - "nullable": 
true, - "default": null - }, - { - "name": "col-true-empty-string", - "dbName": "col-true-empty-string", - "type": "int", - "size": 255, - "nullable": true, - "default": "" - }, - { - "name": "col-true-string", - "dbName": "col-true-string", - "type": "int", - "size": 255, - "nullable": true, - "default": "123" - }, - { - "name": "col-true-zero-value", - "dbName": "col-true-zero-value", - "type": "int", - "size": 255, - "nullable": true, - "default": "0" - } - ] - }, - "storage": { - "input": { - "tables": [ - { - "source": "simple-int", - "destination": "simple-int.csv" - } - ] - } - } -} diff --git a/tests/functional/nullable-and-default-values-int/source/data/in/tables/simple-int.csv.manifest b/tests/functional/nullable-and-default-values-int/source/data/in/tables/simple-int.csv.manifest deleted file mode 100644 index 980f424..0000000 --- a/tests/functional/nullable-and-default-values-int/source/data/in/tables/simple-int.csv.manifest +++ /dev/null @@ -1,9 +0,0 @@ -{ - "columns": [ - "id", - "col-true-null", - "col-true-empty-string", - "col-true-string", - "col-true-zero-value" - ] -} \ No newline at end of file diff --git a/tests/functional/nullable-and-default-values-string/expected-stdout b/tests/functional/nullable-and-default-values-string/expected-stdout index a0f52bb..5ab6971 100644 --- a/tests/functional/nullable-and-default-values-string/expected-stdout +++ b/tests/functional/nullable-and-default-values-string/expected-stdout @@ -3,9 +3,7 @@ Validating warehouse "%s" Validating schema "%s" Creating table "%s" Creating table "simple-string" -Writing data to table "%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "%s" with "simple-string" -Dropping table "%s" +Uploading data to internal stage "@~/simple-string_temp_%s" +Copying data from internal stage to staging table "simple-string_temp_%s" +Swapping table "simple-string_temp_%s" with "simple-string" +Dropping table "simple-string_temp_%s" diff --git a/tests/prepare-data/tables/simple-string.csv b/tests/functional/nullable-and-default-values-string/source/data/in/tables/simple-string.csv similarity index 100% rename from tests/prepare-data/tables/simple-string.csv rename to tests/functional/nullable-and-default-values-string/source/data/in/tables/simple-string.csv diff --git a/tests/functional/reorder-columns/expected-stdout b/tests/functional/reorder-columns/expected-stdout index 997ba29..f46fd6f 100644 --- a/tests/functional/reorder-columns/expected-stdout +++ b/tests/functional/reorder-columns/expected-stdout @@ -3,9 +3,7 @@ Validating warehouse "%s" Validating schema "%s" Creating table "%s" Creating table "simple" -Writing data to table "%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "%s" with "simple" -Dropping table "%s" +Uploading data to internal stage "@~/simple_temp_%s" +Copying data from internal stage to staging table "simple_temp_%s" +Swapping table "simple_temp_%s" with "simple" +Dropping table "simple_temp_%s" diff --git a/tests/functional/reorder-columns/source/data/in/tables/simple.csv b/tests/functional/reorder-columns/source/data/in/tables/simple.csv new file mode 100644 index 0000000..4a8ab84 --- /dev/null +++ b/tests/functional/reorder-columns/source/data/in/tables/simple.csv @@ -0,0 +1,10 @@ +"id","name","glasses" +"0","Miro cillik","yes" +"1","Ondrej Hlavacek","no" +"2","Martin Halamicek","yes" +"3","Tomas Kacur","yes" +"4","Erik Zigo","no" +"5","Marc Raiser","sometimes" +"6","Petr Simecek","yes" +"7","Jakub Matejka","yes" 
+"8","Ondrej Popelka","no" diff --git a/tests/functional/run-old-config/expected-code b/tests/functional/run-old-config/expected-code deleted file mode 100644 index 573541a..0000000 --- a/tests/functional/run-old-config/expected-code +++ /dev/null @@ -1 +0,0 @@ -0 diff --git a/tests/functional/run-old-config/expected-stderr b/tests/functional/run-old-config/expected-stderr deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/run-old-config/expected-stdout b/tests/functional/run-old-config/expected-stdout deleted file mode 100644 index 997ba29..0000000 --- a/tests/functional/run-old-config/expected-stdout +++ /dev/null @@ -1,11 +0,0 @@ -Creating ODBC connection to "Driver=SnowflakeDSIIDriver;Server=%s;Port=443;Tracing=0;Login_timeout=30;Database="%s";Schema="%s";Warehouse=%s;CLIENT_SESSION_KEEP_ALIVE=TRUE;application="Keboola_Connection"". -Validating warehouse "%s" -Validating schema "%s" -Creating table "%s" -Creating table "simple" -Writing data to table "%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "%s" with "simple" -Dropping table "%s" diff --git a/tests/functional/run-old-config/expected/data/out/files/.gitkeep b/tests/functional/run-old-config/expected/data/out/files/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/run-old-config/expected/data/out/tables/.gitkeep b/tests/functional/run-old-config/expected/data/out/tables/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/functional/run-old-config/source/data/in/tables/simple.csv.manifest b/tests/functional/run-old-config/source/data/in/tables/simple.csv.manifest deleted file mode 100644 index 5e5556b..0000000 --- a/tests/functional/run-old-config/source/data/in/tables/simple.csv.manifest +++ /dev/null @@ -1,7 +0,0 @@ -{ - "columns": [ - "id", - "name", - "glasses" - ] -} \ No newline at end of file diff --git a/tests/functional/run-row/expected-stdout b/tests/functional/run-row/expected-stdout index 997ba29..f46fd6f 100644 --- a/tests/functional/run-row/expected-stdout +++ b/tests/functional/run-row/expected-stdout @@ -3,9 +3,7 @@ Validating warehouse "%s" Validating schema "%s" Creating table "%s" Creating table "simple" -Writing data to table "%s" -Dropping stage "%s" -Using %s write strategy -Creating stage "%s" -Swapping table "%s" with "simple" -Dropping table "%s" +Uploading data to internal stage "@~/simple_temp_%s" +Copying data from internal stage to staging table "simple_temp_%s" +Swapping table "simple_temp_%s" with "simple" +Dropping table "simple_temp_%s" diff --git a/tests/functional/run-row/source/data/in/tables/simple.csv b/tests/functional/run-row/source/data/in/tables/simple.csv new file mode 100644 index 0000000..4a8ab84 --- /dev/null +++ b/tests/functional/run-row/source/data/in/tables/simple.csv @@ -0,0 +1,10 @@ +"id","name","glasses" +"0","Miro cillik","yes" +"1","Ondrej Hlavacek","no" +"2","Martin Halamicek","yes" +"3","Tomas Kacur","yes" +"4","Erik Zigo","no" +"5","Marc Raiser","sometimes" +"6","Petr Simecek","yes" +"7","Jakub Matejka","yes" +"8","Ondrej Popelka","no" diff --git a/tests/phpunit/SnowflakeTest.php b/tests/phpunit/SnowflakeTest.php index ff78831..fb195a6 100644 --- a/tests/phpunit/SnowflakeTest.php +++ b/tests/phpunit/SnowflakeTest.php @@ -4,18 +4,21 @@ namespace Keboola\DbWriter\Snowflake\Tests; +use Keboola\DbWriter\Configuration\NodeDefinition\SnowflakeDbNode; use Keboola\DbWriter\Configuration\ValueObject\SnowflakeDatabaseConfig; -use 
Keboola\DbWriter\Configuration\ValueObject\SnowflakeExportConfig; use Keboola\DbWriter\Exception\UserException; use Keboola\DbWriter\Writer\Snowflake; use Keboola\DbWriter\Writer\SnowflakeConnection; use Keboola\DbWriter\Writer\SnowflakeConnectionFactory; use Keboola\DbWriter\Writer\SnowflakeQueryBuilder; use Keboola\DbWriter\Writer\SnowflakeWriteAdapter; +use Keboola\DbWriterConfig\Configuration\ConfigRowDefinition; +use Keboola\DbWriterConfig\Configuration\ValueObject\ExportConfig; use PHPUnit\Framework\Assert; use PHPUnit\Framework\TestCase; use Psr\Log\LoggerInterface; use Psr\Log\Test\TestLogger; +use Symfony\Component\Config\Definition\Processor; class SnowflakeTest extends TestCase { @@ -196,7 +199,7 @@ public function testDefaultWarehouse(): void public function testInvalidWarehouse(): void { $config = $this->getConfig('simple'); - $config['parameters']['db']['warehouse'] = uniqid(); + $config['parameters']['db']['warehouse'] = uniqid('', true); /** @var SnowflakeDatabaseConfig $databaseConfig */ $databaseConfig = $this->getExportConfig($config)->getDatabaseConfig(); @@ -211,7 +214,7 @@ public function testInvalidWarehouse(): void public function testInvalidSchema(): void { $config = $this->getConfig('simple'); - $config['parameters']['db']['schema'] = uniqid(); + $config['parameters']['db']['schema'] = uniqid('', true); /** @var SnowflakeDatabaseConfig $databaseConfig */ $databaseConfig = $this->getExportConfig($config)->getDatabaseConfig(); @@ -335,6 +338,58 @@ public function queryTaggingProvider(): array ]; } + public function testGeneratePutQuery(): void + { + $config = $this->getConfig('simple'); + $exportConfig = $this->getExportConfig($config); + $connection = $this->getConnection($config); + + /** @var SnowflakeDatabaseConfig $databaseConfig */ + $databaseConfig = $exportConfig->getDatabaseConfig(); + $queryBuilder = new SnowflakeQueryBuilder($databaseConfig); + + $schema = $config['parameters']['db']['schema']; + $database = $config['parameters']['db']['database']; + $warehouse = $config['parameters']['db']['warehouse']; + + $expected = "USE WAREHOUSE \"$warehouse\"; +USE DATABASE \"$database\"; +USE SCHEMA \"$database\".\"$schema\"; +PUT file:///code/tests/phpunit/in/tables/simple.csv @~/simple_temp;"; + + $tableFilePath = $exportConfig->getTableFilePath(); + $actual = $queryBuilder->putFileQueryStatement($connection, $tableFilePath, 'simple_temp'); + + Assert::assertSame($expected, $actual); + } + + /** + * @phpcs:disable Generic.Files.LineLength + */ + public function testGenerateCopyQuery(): void + { + $config = $this->getConfig('simple'); + $exportConfig = $this->getExportConfig($config); + $connection = $this->getConnection($config); + + /** @var SnowflakeDatabaseConfig $databaseConfig */ + $databaseConfig = $exportConfig->getDatabaseConfig(); + $queryBuilder = new SnowflakeQueryBuilder($databaseConfig); + + $schema = $config['parameters']['db']['schema']; + + $expected = " + COPY INTO \"$schema\".\"simple_temp\"(\"id\", \"name\", \"glasses\", \"age\") + FROM @~/simple_temp + FILE_FORMAT = (TYPE=CSV SKIP_HEADER = 1 FIELD_DELIMITER = ',' FIELD_OPTIONALLY_ENCLOSED_BY = '\\\"' ESCAPE_UNENCLOSED_FIELD = '\\\\' COMPRESSION = 'GZIP') + ; + "; + + $actual = $queryBuilder->copyIntoTableQueryStatement($connection, 'simple_temp', $exportConfig->getItems()); + + Assert::assertSame($expected, $actual); + } + private function setUserDefaultWarehouse( SnowflakeConnection $connection, string $username, @@ -440,9 +495,9 @@ private function getConnection(array $config): 
SnowflakeConnection return $connection; } - private function getExportConfig(array $config): SnowflakeExportConfig + private function getExportConfig(array $config): ExportConfig { - return SnowflakeExportConfig::fromArray( + return ExportConfig::fromArray( $config['parameters'], $config['storage'], SnowflakeDatabaseConfig::fromArray($config['parameters']['db']), @@ -469,6 +524,7 @@ private function getConfig(string $table): array 'warehouse' => (string) getenv('DB_WAREHOUSE'), ]; - return $config; + $processor = new Processor(); + return $processor->processConfiguration(new ConfigRowDefinition(new SnowflakeDbNode()), [$config]); } } diff --git a/tests/prepare-data/StagingStorageLoader.php b/tests/prepare-data/StagingStorageLoader.php deleted file mode 100644 index da05a9e..0000000 --- a/tests/prepare-data/StagingStorageLoader.php +++ /dev/null @@ -1,106 +0,0 @@ -$this->dataDir = $dataDir; - $this->storageApi = $storageApiClient; - } - - private function getInputCsv(string $tableId): string - { - return sprintf($this->dataDir . '/%s.csv', $tableId); - } - - public function upload(string $table): array - { - $filePath = $this->getInputCsv($table); - $bucketId = 'in.c-test-wr-db-snowflake'; - $tableId = $bucketId . '.' . $table; - - if (!$this->storageApi->bucketExists($bucketId)) { - $this->storageApi->createBucket('test-wr-db-snowflake', Client::STAGE_IN, '', 'snowflake'); - } - if ($this->storageApi->tableExists($tableId)) { - $this->storageApi->dropTable($tableId); - } - - $sourceTableId = $this->storageApi->createTableAsync($bucketId, $table, new CsvFile($filePath)); - - $job = $this->storageApi->exportTableAsync( - $sourceTableId, - [ - 'gzip' => true, - ], - ); - $fileInfo = $this->storageApi->getFile( - $job['file']['id'], - (new GetFileOptions())->setFederationToken(true), - ); - - if (isset($fileInfo['absPath'])) { - return [ - 'stagingStorage' => self::STORAGE_ABS, - 'manifest' => $this->getAbsManifest($fileInfo), - ]; - } else { - return [ - 'stagingStorage' => self::STORAGE_S3, - 'manifest' => $this->getS3Manifest($fileInfo), - ]; - } - } - - private function getS3Manifest(array $fileInfo): array - { - // File is always exported to stage storage as sliced - Assert::assertTrue($fileInfo['isSliced']); - - return [ - 'isSliced' => $fileInfo['isSliced'], - 'region' => $fileInfo['region'], - 'bucket' => $fileInfo['s3Path']['bucket'], - 'key' => $fileInfo['s3Path']['key'] . 'manifest', - 'credentials' => [ - 'access_key_id' => $fileInfo['credentials']['AccessKeyId'], - 'secret_access_key' => $fileInfo['credentials']['SecretAccessKey'], - 'session_token' => $fileInfo['credentials']['SessionToken'], - ], - ]; - } - - private function getAbsManifest(array $fileInfo): array - { - // File is always exported to stage storage as sliced - Assert::assertTrue($fileInfo['isSliced']); - - return [ - 'is_sliced' => $fileInfo['isSliced'], - 'region' => $fileInfo['region'], - 'container' => $fileInfo['absPath']['container'], - 'name' => $fileInfo['absPath']['name'] . 'manifest', - 'credentials' => [ - 'sas_connection_string' => $fileInfo['absCredentials']['SASConnectionString'], - 'expiration' => $fileInfo['absCredentials']['expiration'], - ], - ]; - } -} diff --git a/tests/prepare-data/prepareData.php b/tests/prepare-data/prepareData.php deleted file mode 100644 index 7f9def5..0000000 --- a/tests/prepare-data/prepareData.php +++ /dev/null @@ -1,33 +0,0 @@ -'url' => getenv('KBC_URL'), - 'token' => getenv('STORAGE_API_TOKEN'), - ]), -); -$finder = new Finder(); -$files = $finder - ->files() - ->in(__DIR__ .
'/tables') - ->name('*.csv'); - -$dataFilesMetadata = []; -foreach ($files as $file) { - $dataFilesMetadata[$file->getFilename()] = $storageLoader->upload($file->getFilenameWithoutExtension()); -} - -file_put_contents( - __DIR__ . '/manifestData.json', - json_encode($dataFilesMetadata, JSON_PRETTY_PRINT), -); diff --git a/tests/prepare-data/tables/simple-date.csv b/tests/prepare-data/tables/simple-date.csv deleted file mode 100644 index 7505867..0000000 --- a/tests/prepare-data/tables/simple-date.csv +++ /dev/null @@ -1,3 +0,0 @@ -"id","col-true-null","col-true-empty-string","col-true-string" -"1","2020-06-05","2020-06-06","2020-06-07" -"2","","","" diff --git a/tests/prepare-data/tables/simple-datetime.csv b/tests/prepare-data/tables/simple-datetime.csv deleted file mode 100644 index f433907..0000000 --- a/tests/prepare-data/tables/simple-datetime.csv +++ /dev/null @@ -1,3 +0,0 @@ -"id","col-true-null","col-true-empty-string","col-true-string" -"1","2020-06-05 12:01:02","2020-06-06 12:01:02","2020-06-07 12:01:02" -"2","","","" diff --git a/tests/prepare-data/tables/simple-int.csv b/tests/prepare-data/tables/simple-int.csv deleted file mode 100644 index 3368c91..0000000 --- a/tests/prepare-data/tables/simple-int.csv +++ /dev/null @@ -1,4 +0,0 @@ -"id","col-true-null","col-true-empty-string","col-true-string","col-true-zero-value" -"1","0","1","2","3" -"2","0","0","0","0" -"3","","","","" diff --git a/tests/prepare-data/tables/special.csv b/tests/prepare-data/tables/special.csv deleted file mode 100644 index 6818cda..0000000 --- a/tests/prepare-data/tables/special.csv +++ /dev/null @@ -1,11 +0,0 @@ -"col1","col2" -"column with \n \t \\","second col" -"column with backslash \ inside","column with backslash and enclosure \""" -"column with enclosure "", and comma inside text","second column enclosure in text """ -"columns with -new line","columns with tab" -"first","something with - -double new line" -"line with enclosure","second column" -"single quote'","two single''quotes"
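With the internal-stage flow, the deleted prepare-data scripts above lose their purpose: they only existed to push these CSVs into a Storage staging project so the writer could COPY from an S3/ABS external stage. Each functional test now ships its input directly under source/data/in/tables, so a standard datadir test case is enough to drive the directories. A skeleton follows, under the assumption that keboola/datadir-tests is used; the namespace and class name are illustrative, and the component's real test case adds Snowflake helpers such as the `$test->connection` and `$test->getDatabaseConfig()` seen in the deleted tearDown.php.

```php
<?php

declare(strict_types=1);

namespace Keboola\DbWriter\Snowflake\FunctionalTests;

use Keboola\DatadirTests\AbstractDatadirTestCase;

// Skeleton only. Every directory under tests/functional is one scenario:
// source/data is the component's input, and expected-code, expected-stdout,
// expected-stderr and expected/data/out are compared against the actual run.
class DatadirTest extends AbstractDatadirTestCase
{
}
```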