feat: adding extra credentials argument to s3
mitchbregs committed Sep 15, 2024
1 parent 44d2630 commit e470b39
Showing 3 changed files with 8 additions and 3 deletions.
README.md (2 changes: 1 addition & 1 deletion)
@@ -228,8 +228,8 @@ keys used to populate the parameters of the S3 table function:
| structure | The column structure of the data in the bucket, as a list of name/datatype pairs, such as `['id UInt32', 'date DateTime', 'value String']`. If not provided, ClickHouse will infer the structure. |
| aws_access_key_id | The S3 access key id. |
| aws_secret_access_key | The S3 secret key. |
| role_arn | The ARN of a ClickhouseAccess IAM role used to securely access the S3 objects. See this [documentation](https://clickhouse.com/docs/en/cloud/security/secure-s3) for more information. |
| compression | The compression method used with the S3 objects. If not provided, ClickHouse will attempt to determine compression based on the file name. |

See the [S3 test file](https://github.com/ClickHouse/dbt-clickhouse/blob/main/tests/integration/adapter/clickhouse/test_clickhouse_s3.py) for examples of how to use this macro.
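
For illustration, a hypothetical model calling the macro with the new argument might look like the following; the config name, bucket, path, and role ARN are placeholder values, not examples taken from this repository:

    -- Hypothetical usage: every literal value below is a placeholder.
    select *
    from {{ clickhouse_s3source(
            config_name='taxi_s3',
            bucket='my-bucket.s3.amazonaws.com',
            path='/trips/*.parquet',
            fmt='Parquet',
            role_arn='arn:aws:iam::123456789012:role/ClickhouseAccess'
        ) }}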

# Contracts and Constraints
dbt/adapters/clickhouse/impl.py (6 changes: 5 additions & 1 deletion)
@@ -240,6 +240,7 @@ def s3source_clause(
        structure: Union[str, list, dict],
        aws_access_key_id: str,
        aws_secret_access_key: str,
        role_arn: str,
        compression: str = '',
    ) -> str:
        s3config = self.config.vars.vars.get(config_name, {})
@@ -273,7 +274,10 @@ def s3source_clause(
        comp = compression or s3config.get('compression', '')
        if comp:
            comp = f"', {comp}'"
        extra_credentials = ''
        if role_arn:
            extra_credentials = f", extra_credentials(role_arn='{role_arn}')"
        return f"s3('{url}'{access}, '{fmt}'{struct}{comp}{extra_credentials})"

    def check_schema_exists(self, database, schema):
        results = self.execute_macro(LIST_SCHEMAS_MACRO_NAME, kwargs={'database': database})
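
To make the new branch concrete: with only a URL, format, and role ARN supplied (access keys, structure, and compression left empty), the clause returned by this method would take roughly the following shape, shown wrapped in a select for context; the URL and ARN are placeholders:

    -- Illustrative only: placeholder URL and ARN; optional arguments omitted.
    select *
    from s3(
        'https://my-bucket.s3.amazonaws.com/data/trips.parquet',
        'Parquet',
        extra_credentials(role_arn='arn:aws:iam::123456789012:role/ClickhouseAccess')
    )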
dbt/include/clickhouse/macros/materializations/s3.sql (3 changes: 2 additions & 1 deletion)
@@ -1,5 +1,5 @@
{% macro clickhouse_s3source(config_name='', bucket='', path='', fmt='', structure='',
aws_access_key_id='', aws_secret_access_key='', compression='') %}
aws_access_key_id='', aws_secret_access_key='', role_arn='', compression='') %}
{% if config_name and not config_name.lower().endswith('s3') %}
{{ exceptions.raise_compiler_error("S3 configuration should end with 's3'") }}
{% endif %}
@@ -13,5 +13,6 @@
structure=structure,
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
role_arn=role_arn,
compression=compression) }}
{% endmacro %}
