Skip to content

Commit

Permalink
Fully qualified names in sql (#47)
Browse files Browse the repository at this point in the history
* added the NamedResourceTrait to support qualified naming of resources, e.g. `Table(name="db.schema.my_tbl")`
* fixed a bug with dangling schemas not being connected to the resource tree
* misc other bugfixes

---------

Co-authored-by: TJ Murphy <[email protected]>
  • Loading branch information
teej and teej authored Jun 18, 2024
1 parent 53ae73a commit 450d483
Show file tree
Hide file tree
Showing 67 changed files with 1,116 additions and 640 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ grants = [
Use Blueprint to apply those changes to your Snowflake account. Blueprint works similarly to Terraform - add resources, call `plan(...)` to see what changes will be applied, and then call `apply(...)` to run the changes.

```Python
from titan import Blueprint
from titan import Blueprint, print_plan

bp = Blueprint()
bp.add(
Expand All @@ -50,7 +50,7 @@ bp.add(
*grants,
)
plan = bp.plan(session)
print(plan) # =>
print_plan(plan) # =>
"""
account:ABC123
Expand Down
2 changes: 1 addition & 1 deletion conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def pytest_collection_modifyitems(items):

@pytest.fixture(scope="session")
def suffix():
return str(uuid.uuid4())[:8]
return str(uuid.uuid4())[:8].upper()


@pytest.fixture(scope="session")
Expand Down
2 changes: 1 addition & 1 deletion docs/resources/python_stored_procedure.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ Represents a Python stored procedure in Snowflake, allowing for the execution of
procedure = PythonStoredProcedure(
name="some_procedure",
args=[],
returns=DataType.STRING,
returns="STRING",
runtime_version="3.8",
packages=["snowflake-snowpark-python"],
handler="process_data",
Expand Down
3 changes: 1 addition & 2 deletions docs/resources/table.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,7 @@ tables:
* `name` (string, required) - The name of the table.
* `columns` (list, required) - The columns of the table.
* `constraints` (list) - The constraints of the table.
* `volatile` (bool) - Whether the table is volatile. Defaults to False.
* `transient` (bool) - Whether the table is transient. Defaults to False.
* `transient` (bool) - Whether the table is transient.
* `cluster_by` (list) - The clustering keys for the table.
* `enable_schema_evolution` (bool) - Whether schema evolution is enabled. Defaults to False.
* `data_retention_time_in_days` (int) - The data retention time in days.
Expand Down
4 changes: 2 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

setup(
name="titan",
version="0.5.2",
version="0.5.3",
description="Snowflake infrastructure as code",
long_description=open("README.md", encoding="utf-8").read(),
long_description_content_type="text/markdown",
Expand All @@ -22,11 +22,11 @@
install_requires=[
"click>=8.1.7",
"inflection>=0.5.1",
"pygithub==1.55",
"pyparsing>=3.0.9",
"pyyaml",
"snowflake-connector-python>=3.7.0",
"snowflake-snowpark-python>=1.14.0",
"pygithub==1.55",
],
extras_require={
"dev": [
Expand Down
17 changes: 13 additions & 4 deletions tests/fixtures/json/table.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,23 +3,32 @@
"columns": [
{
"name": "id",
"data_type": "INT"
"data_type": "INT",
"collate": null,
"comment": null,
"not_null": false,
"constraint": null,
"default": null
},
{
"name": "amount",
"data_type": "NUMBER"
"data_type": "NUMBER",
"collate": null,
"comment": null,
"not_null": false,
"constraint": null,
"default": null
}
],
"constraints": null,
"volatile": false,
"transient": false,
"cluster_by": null,
"enable_schema_evolution": false,
"data_retention_time_in_days": null,
"max_data_extension_time_in_days": null,
"change_tracking": false,
"default_ddl_collation": null,
"copy_grants": false,
"copy_grants": null,
"row_access_policy": null,
"tags": null,
"owner": "SYSADMIN",
Expand Down
5 changes: 5 additions & 0 deletions tests/fixtures/sql/session_policy.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
CREATE SESSION POLICY session_policy_prod_1
SESSION_IDLE_TIMEOUT_MINS = 60
SESSION_UI_IDLE_TIMEOUT_MINS = 30
COMMENT = 'session policy for use in the prod_1 environment'
;
49 changes: 38 additions & 11 deletions tests/integration/data_provider/test_fetch.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,7 @@
"data": {
"name": "PRODUCT",
"owner": TEST_ROLE,
"columns": [{"name": "ID", "data_type": "NUMBER(38,0)", "nullable": True}],
"columns": [{"name": "ID", "data_type": "NUMBER(38,0)", "not_null": False}],
"target_lag": "20 minutes",
"warehouse": "CI",
"refresh_mode": "AUTO",
Expand Down Expand Up @@ -269,16 +269,6 @@
"comment": "+3",
},
},
{
"resource_type": ResourceType.TABLE,
"setup_sql": "CREATE TABLE sometbl (id INT)",
"teardown_sql": "DROP TABLE IF EXISTS sometbl",
"data": {
"name": "SOMETBL",
"owner": TEST_ROLE,
"columns": [{"name": "ID", "nullable": True, "data_type": "NUMBER(38,0)"}],
},
},
{
"resource_type": ResourceType.TASK,
"setup_sql": "CREATE TASK sometask SCHEDULE = '60 MINUTE' AS SELECT 1",
Expand Down Expand Up @@ -711,3 +701,40 @@ def test_fetch_event_table(cursor, test_db, marked_for_cleanup):
assert result is not None
result = data_provider.remove_none_values(result)
assert result == data_provider.remove_none_values(event_table.to_dict())


def test_fetch_grant_with_fully_qualified_ref(cursor, test_db, suffix, marked_for_cleanup):
    """Round-trip a grant written with a fully qualified (db.schema) reference.

    Creates a schema and a role, issues a USAGE grant using the fully
    qualified schema name, then verifies that fetching the grant's URN
    returns data matching the locally parsed ``res.Grant``.
    """
    # Hoist the duplicated role name so the CREATE/GRANT/from_sql statements
    # cannot drift apart.
    role_name = f"test_role_grant_{suffix}"
    cursor.execute(f"USE DATABASE {test_db}")
    cursor.execute(f"CREATE SCHEMA if not exists {test_db}.my_schema")
    cursor.execute(f"CREATE ROLE {role_name}")
    # Fix: the role was created but never registered for teardown, leaking one
    # role per test run into the account. Register it like test_fetch_pipe does.
    marked_for_cleanup.append(res.Role(name=role_name))
    cursor.execute(f"GRANT USAGE ON SCHEMA {test_db}.my_schema TO ROLE {role_name}")
    grant = res.Grant.from_sql(f"GRANT USAGE ON SCHEMA {test_db}.my_schema TO ROLE {role_name}")
    grant._data.owner = TEST_ROLE
    result = safe_fetch(cursor, grant.urn)
    assert result is not None
    result = data_provider.remove_none_values(result)
    # Wrap the raw "on" string so the comparison uses ResourceName equality
    # (presumably case/quote-insensitive — NOTE(review): confirm semantics).
    result["on"] = ResourceName(result["on"])
    assert result == data_provider.remove_none_values(grant.to_dict())


def test_fetch_pipe(cursor, test_db, marked_for_cleanup):
    """Round-trip a Pipe: create it in Snowflake, fetch it back by URN, and
    verify the fetched state matches the local resource definition."""
    pipe = res.Pipe(
        name="PIPE_EXAMPLE",
        # COPY statement targeting the table's own table stage ('@%<table>').
        as_=f"""
    COPY INTO pipe_destination
    FROM '@%pipe_destination'
    FILE_FORMAT = (TYPE = 'CSV');
    """,
        comment="Pipe for testing",
        owner=TEST_ROLE,
    )
    cursor.execute(f"USE DATABASE {test_db}")
    cursor.execute("USE SCHEMA PUBLIC")
    # The pipe's COPY INTO target must exist before the pipe is created.
    cursor.execute("CREATE TABLE pipe_destination (id INT)")
    cursor.execute(pipe.create_sql(if_not_exists=True))
    # Register for teardown so the pipe does not leak between test runs.
    marked_for_cleanup.append(pipe)

    result = safe_fetch(cursor, pipe.urn)
    assert result is not None
    # remove_none_values strips unset fields on both sides so only explicitly
    # configured attributes are compared.
    result = data_provider.remove_none_values(result)
    assert result == data_provider.remove_none_values(pipe.to_dict())
76 changes: 76 additions & 0 deletions tests/integration/data_provider/test_fetch_table.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
import os

import pytest

from titan import data_provider
from titan import resources as res
from titan.client import reset_cache


pytestmark = pytest.mark.requires_snowflake

TEST_ROLE = os.environ.get("TEST_SNOWFLAKE_ROLE")
TEST_USER = os.environ.get("TEST_SNOWFLAKE_USER")


def safe_fetch(cursor, urn):
    """Fetch a resource by URN with the client cache cleared first, so every
    call reflects the live state in Snowflake rather than a cached answer."""
    reset_cache()
    fetched = data_provider.fetch_resource(cursor, urn)
    return fetched


def test_fetch_table_clustered(cursor, test_db, suffix):
    """Create a clustered table with schema evolution enabled and verify the
    fetched state round-trips against the local resource definition."""
    cursor.execute(f"USE DATABASE {test_db}")
    cursor.execute("USE SCHEMA PUBLIC")

    tbl = res.Table(
        name=f"TABLE_{suffix}",
        database=test_db,
        schema="PUBLIC",
        owner=TEST_ROLE,
        enable_schema_evolution=True,
        cluster_by=["ID"],
        columns=[res.Column(name="ID", data_type="NUMBER(38,0)")],
    )
    cursor.execute(tbl.create_sql(if_not_exists=True))
    fetched = safe_fetch(cursor, tbl.urn)
    assert fetched is not None
    # Compare only explicitly-set attributes on both sides.
    expected = data_provider.remove_none_values(tbl.to_dict())
    assert data_provider.remove_none_values(fetched) == expected
    cursor.execute(tbl.drop_sql(if_exists=True))


def test_fetch_table_simple(cursor, test_db, suffix):
    """Create a minimal single-column table and verify the fetched state
    round-trips against the local resource definition."""
    cursor.execute(f"USE DATABASE {test_db}")
    cursor.execute("USE SCHEMA PUBLIC")
    tbl = res.Table(
        name=f"TABLE_{suffix}",
        database=test_db,
        schema="PUBLIC",
        owner=TEST_ROLE,
        columns=[res.Column(name="ID", data_type="NUMBER(38,0)")],
    )
    cursor.execute(tbl.create_sql(if_not_exists=True))
    fetched = safe_fetch(cursor, tbl.urn)
    assert fetched is not None
    # Compare only explicitly-set attributes on both sides.
    expected = data_provider.remove_none_values(tbl.to_dict())
    assert data_provider.remove_none_values(fetched) == expected
    cursor.execute(tbl.drop_sql(if_exists=True))


def test_fetch_table_transient(cursor, test_db, suffix):
    """Create a transient table and verify the fetched state round-trips
    against the local resource definition (transient flag included)."""
    cursor.execute(f"USE DATABASE {test_db}")
    cursor.execute("USE SCHEMA PUBLIC")
    tbl = res.Table(
        name=f"TABLE_{suffix}",
        database=test_db,
        schema="PUBLIC",
        owner=TEST_ROLE,
        columns=[res.Column(name="ID", data_type="NUMBER(38,0)")],
        transient=True,
    )
    cursor.execute(tbl.create_sql(if_not_exists=True))
    fetched = safe_fetch(cursor, tbl.urn)
    assert fetched is not None
    # Compare only explicitly-set attributes on both sides.
    expected = data_provider.remove_none_values(tbl.to_dict())
    assert data_provider.remove_none_values(fetched) == expected
    cursor.execute(tbl.drop_sql(if_exists=True))
Loading

0 comments on commit 450d483

Please sign in to comment.