From 9a27cb1320fd28826143a3d56102e2d37b02402d Mon Sep 17 00:00:00 2001 From: Francois Campbell Date: Fri, 18 Oct 2024 10:27:43 -0400 Subject: [PATCH 1/7] Defer package script conversion until PDFv2 is loaded into Pydantic (#1735) When converting PDFv1 to PDFv2, we need to convert package scripts to post-deploy hooks, so we have to convert the `{{ package_name }}` template into the normal template syntax. When converting using `snow helpers v1-to-v2`, we convert `{{ package_name }}` to `<% ctx.entities.pkg.identifier %>` since we know that the next time the CLI is run, the template context will be v2-based and that `ctx.entities.pkg.identifier` will be a valid reference. When converting in-memory however, the template context is v1-based and since the conversion has to be transparent to the user (they could have other files with templates still using v1 references in it), we can't override the template context to v2, so `ctx.entities.pkg.identifier` is an invalid reference. Fortunately for in-memory conversions, package scripts are converted to post-deploy hooks using temp files (to avoid overwriting the user's files), so we can just insert the package name directly into the converted script instead of having to use a template reference. This requires us to defer conversion of package scripts until after the v2 definition is loaded into Pydantic since there are validators that can change the values of fields (like the test resource suffix being added to the package identifier). 
--- .../nativeapp/v2_conversions/compat.py | 9 -- src/snowflake/cli/api/cli_global_context.py | 3 - .../cli/api/project/definition_conversion.py | 144 ++++++++++++------ tests/project/test_project_definition_v2.py | 4 +- .../nativeapp/test_project_templating.py | 4 +- .../app/setup_script.sql | 1 + .../app/setup_script.sql | 1 + 7 files changed, 107 insertions(+), 59 deletions(-) diff --git a/src/snowflake/cli/_plugins/nativeapp/v2_conversions/compat.py b/src/snowflake/cli/_plugins/nativeapp/v2_conversions/compat.py index 5bab11e581..6a99b54f36 100644 --- a/src/snowflake/cli/_plugins/nativeapp/v2_conversions/compat.py +++ b/src/snowflake/cli/_plugins/nativeapp/v2_conversions/compat.py @@ -293,15 +293,6 @@ def wrapper(*args, **kwargs): # Override the project definition so that the command operates on the new entities cm.override_project_definition = pdfv2 - - # Override the template context so that templates refer to the new entities - # Reuse the old ctx.env and other top-level keys in the template context - # since they don't change between v1 and v2 - pdfv2_dump = pdfv2.model_dump( - exclude_none=True, warnings=False, by_alias=True - ) - new_ctx = pdfv2_dump | dict(env=cm.template_context["ctx"]["env"]) - cm.override_template_context = cm.template_context | dict(ctx=new_ctx) elif single_app_and_package: package_entity_id = kwargs.get("package_entity_id", "") app_entity_id = kwargs.get("app_entity_id", "") diff --git a/src/snowflake/cli/api/cli_global_context.py b/src/snowflake/cli/api/cli_global_context.py index a59cfc4d5d..9aae191ec7 100644 --- a/src/snowflake/cli/api/cli_global_context.py +++ b/src/snowflake/cli/api/cli_global_context.py @@ -58,7 +58,6 @@ class _CliGlobalContextManager: # Consider changing the way this calculation is provided to commands # in order to remove this logic (then make project_definition a non-cloned @property) override_project_definition: ProjectDefinition | None = None - override_template_context: dict | None = None 
_definition_manager: DefinitionManager | None = None @@ -98,8 +97,6 @@ def project_root(self) -> Path: @property def template_context(self) -> dict: - if self.override_template_context: - return self.override_template_context return self._definition_manager_or_raise().template_context @property diff --git a/src/snowflake/cli/api/project/definition_conversion.py b/src/snowflake/cli/api/project/definition_conversion.py index dc37092e67..ac7aa1c6cf 100644 --- a/src/snowflake/cli/api/project/definition_conversion.py +++ b/src/snowflake/cli/api/project/definition_conversion.py @@ -9,6 +9,9 @@ from snowflake.cli._plugins.nativeapp.artifacts import ( BundleMap, ) +from snowflake.cli._plugins.nativeapp.entities.application_package import ( + ApplicationPackageEntityModel, +) from snowflake.cli._plugins.snowpark.common import is_name_a_templated_one from snowflake.cli.api.constants import ( DEFAULT_ENV_FILE, @@ -19,6 +22,7 @@ ) from snowflake.cli.api.entities.utils import render_script_template from snowflake.cli.api.project.schemas.entities.common import ( + MetaField, SqlScriptHookType, ) from snowflake.cli.api.project.schemas.project_definition import ( @@ -83,23 +87,31 @@ def _is_field_defined(template_context: Optional[Dict[str, Any]], *path: str) -> def convert_project_definition_to_v2( project_root: Path, - pd: ProjectDefinition, + definition_v1: ProjectDefinition, accept_templates: bool = False, template_context: Optional[Dict[str, Any]] = None, in_memory: bool = False, ) -> ProjectDefinitionV2: - _check_if_project_definition_meets_requirements(pd, accept_templates) + _check_if_project_definition_meets_requirements(definition_v1, accept_templates) - snowpark_data = convert_snowpark_to_v2_data(pd.snowpark) if pd.snowpark else {} - streamlit_data = convert_streamlit_to_v2_data(pd.streamlit) if pd.streamlit else {} + snowpark_data = ( + convert_snowpark_to_v2_data(definition_v1.snowpark) + if definition_v1.snowpark + else {} + ) + streamlit_data = ( + 
convert_streamlit_to_v2_data(definition_v1.streamlit) + if definition_v1.streamlit + else {} + ) native_app_data = ( convert_native_app_to_v2_data( - project_root, pd.native_app, template_context, in_memory + project_root, definition_v1.native_app, template_context ) - if pd.native_app + if definition_v1.native_app else {} ) - envs = convert_envs_to_v2(pd) + envs = convert_envs_to_v2(definition_v1) data = { "definition_version": "2", @@ -116,8 +128,15 @@ def convert_project_definition_to_v2( if in_memory: # If this is an in-memory conversion, we need to evaluate templates right away # since the file won't be re-read as it would be for a permanent conversion - return render_definition_template(data, {}).project_definition - return ProjectDefinitionV2(**data) + definition_v2 = render_definition_template(data, {}).project_definition + else: + definition_v2 = ProjectDefinitionV2(**data) + + # If the user's files have any template tags in them, they + # also need to be migrated to point to the v2 entities + _convert_templates_in_files(project_root, definition_v1, definition_v2, in_memory) + + return definition_v2 def convert_snowpark_to_v2_data(snowpark: Snowpark) -> Dict[str, Any]: @@ -224,7 +243,6 @@ def convert_native_app_to_v2_data( project_root: Path, native_app: NativeApp, template_context: Optional[Dict[str, Any]] = None, - in_memory: bool = False, ) -> Dict[str, Any]: def _make_meta(obj: Application | Package): meta = {} @@ -269,34 +287,6 @@ def _find_manifest(): # which use POSIX paths as default values return manifest_path.relative_to(project_root).as_posix() - def _make_template(template: str) -> str: - return f"{PROJECT_TEMPLATE_VARIABLE_OPENING} {template} {PROJECT_TEMPLATE_VARIABLE_CLOSING}" - - def _convert_package_script_files(package_scripts: list[str]): - # PDFv2 doesn't support package scripts, only post-deploy scripts, so we - # need to convert the Jinja syntax from {{ }} to <% %> - # Luckily, package scripts only support {{ package_name }}, so let's 
convert that tag - # to v2 template syntax by running it though the template process with a fake - # package name that's actually a valid v2 template, which will be evaluated - # when the script is used as a post-deploy script - fake_package_replacement_template = _make_template( - f"ctx.entities.{package_entity_name}.identifier" - ) - jinja_context = dict(package_name=fake_package_replacement_template) - post_deploy_hooks = [] - for script_file in package_scripts: - new_contents = render_script_template( - project_root, jinja_context, script_file, get_basic_jinja_env() - ) - if in_memory: - # If we're converting the definition in-memory, we can't touch - # the package scripts on disk, so we'll write them to a temporary file - d = _get_temp_dir().name - _, script_file = mkstemp(dir=d, suffix="_converted.sql", text=True) - (project_root / script_file).write_text(new_contents) - post_deploy_hooks.append(SqlScriptHookType(sql_script=script_file)) - return post_deploy_hooks - package_entity_name = "pkg" if ( native_app.package @@ -334,12 +324,11 @@ def _convert_package_script_files(package_scripts: list[str]): package["distribution"] = native_app.package.distribution package_meta = _make_meta(native_app.package) if native_app.package.scripts: - converted_post_deploy_hooks = _convert_package_script_files( - native_app.package.scripts - ) - package_meta["post_deploy"] = ( - package_meta.get("post_deploy", []) + converted_post_deploy_hooks - ) + # Package scripts are not supported in PDFv2 but we + # don't convert them here, conversion is deferred until + # the final v2 Pydantic model is available + # (see _convert_templates_in_files()) + pass if package_meta: package["meta"] = package_meta @@ -383,6 +372,73 @@ def convert_envs_to_v2(pd: ProjectDefinition): return None +def _convert_templates_in_files( + project_root: Path, + definition_v1: ProjectDefinition, + definition_v2: ProjectDefinitionV2, + in_memory: bool, +): + """Converts templates in other files to the new 
format""" + # TODO handle artifacts using the "templates" processor + # For now this only handles Native App package scripts + + if (na := definition_v1.native_app) and (pkg := na.package) and pkg.scripts: + # If the v1 definition has a Native App with a package, we know + # that the v2 definition will have exactly one application package entity + pkg_entity: ApplicationPackageEntityModel = list( + definition_v2.get_entities_by_type( + ApplicationPackageEntityModel.get_type() + ).values() + )[0] + converted_post_deploy_hooks = _convert_package_script_files( + project_root, pkg.scripts, pkg_entity, in_memory + ) + if pkg_entity.meta is None: + pkg_entity.meta = MetaField() + if pkg_entity.meta.post_deploy is None: + pkg_entity.meta.post_deploy = [] + pkg_entity.meta.post_deploy += converted_post_deploy_hooks + + +def _convert_package_script_files( + project_root: Path, + package_scripts: list[str], + pkg_model: ApplicationPackageEntityModel, + in_memory: bool, +): + # PDFv2 doesn't support package scripts, only post-deploy scripts, so we + # need to convert the Jinja syntax from {{ }} to <% %> + # Luckily, package scripts only support {{ package_name }}, so let's convert that tag + # to v2 template syntax by running it through the template process with a fake + # package name that's actually a valid v2 template, which will be evaluated + # when the script is used as a post-deploy script + # If we're doing an in-memory conversion, we can just hardcode the converted + # package name directly into the script since it's being written to a temporary file + package_name_replacement = ( + pkg_model.fqn.name + if in_memory + else _make_template(f"ctx.entities.{pkg_model.entity_id}.identifier") + ) + jinja_context = dict(package_name=package_name_replacement) + post_deploy_hooks = [] + for script_file in package_scripts: + new_contents = render_script_template( + project_root, jinja_context, script_file, get_basic_jinja_env() + ) + if in_memory: + # If we're converting the 
definition in-memory, we can't touch + # the package scripts on disk, so we'll write them to a temporary file + d = _get_temp_dir().name + _, script_file = mkstemp(dir=d, suffix="_converted.sql", text=True) + (project_root / script_file).write_text(new_contents) + post_deploy_hooks.append(SqlScriptHookType(sql_script=script_file)) + return post_deploy_hooks + + +def _make_template(template: str) -> str: + return f"{PROJECT_TEMPLATE_VARIABLE_OPENING} {template} {PROJECT_TEMPLATE_VARIABLE_CLOSING}" + + def _check_if_project_definition_meets_requirements( pd: ProjectDefinition, accept_templates: bool ): diff --git a/tests/project/test_project_definition_v2.py b/tests/project/test_project_definition_v2.py index 0c5a143855..6587480ea7 100644 --- a/tests/project/test_project_definition_v2.py +++ b/tests/project/test_project_definition_v2.py @@ -390,10 +390,10 @@ def test_v1_to_v2_conversion_in_memory_package_scripts(temp_dir): assert Path(package_script_filename).read_text() == package_script # But the converted definition has a reference to a tempfile - # that contains the converted package script + # that contains the literal package name assert ( Path(definition_v2.entities["pkg"].meta.post_deploy[0].sql_script).read_text() - == "select '<% ctx.entities.pkg.identifier %>';" + == f"select '{definition_v2.entities['pkg'].fqn.name}';" ) diff --git a/tests_integration/nativeapp/test_project_templating.py b/tests_integration/nativeapp/test_project_templating.py index ebd3c2eb23..05e51e257e 100644 --- a/tests_integration/nativeapp/test_project_templating.py +++ b/tests_integration/nativeapp/test_project_templating.py @@ -387,7 +387,9 @@ def test_nativeapp_templates_processor_with_run( @pytest.mark.integration -@pytest.mark.parametrize("test_project", ["napp_templates_processors_v2"]) +@pytest.mark.parametrize( + "test_project", ["napp_templates_processors_v1", "napp_templates_processors_v2"] +) @pytest.mark.parametrize("with_project_flag", [True, False]) def 
test_nativeapp_templates_processor_with_deploy( runner, diff --git a/tests_integration/test_data/projects/napp_templates_processors_v1/app/setup_script.sql b/tests_integration/test_data/projects/napp_templates_processors_v1/app/setup_script.sql index 2aae4f834e..03cc1b2574 100644 --- a/tests_integration/test_data/projects/napp_templates_processors_v1/app/setup_script.sql +++ b/tests_integration/test_data/projects/napp_templates_processors_v1/app/setup_script.sql @@ -8,3 +8,4 @@ CREATE OR ALTER VERSIONED SCHEMA <% ctx.env.schema_name %>; EXECUTE IMMEDIATE from '/another_script.sql'; +select 'ctx.native_app.name: <% ctx.native_app.name %>'; diff --git a/tests_integration/test_data/projects/napp_templates_processors_v2/app/setup_script.sql b/tests_integration/test_data/projects/napp_templates_processors_v2/app/setup_script.sql index 2aae4f834e..41e8427e3f 100644 --- a/tests_integration/test_data/projects/napp_templates_processors_v2/app/setup_script.sql +++ b/tests_integration/test_data/projects/napp_templates_processors_v2/app/setup_script.sql @@ -8,3 +8,4 @@ CREATE OR ALTER VERSIONED SCHEMA <% ctx.env.schema_name %>; EXECUTE IMMEDIATE from '/another_script.sql'; +select 'ctx.entities.pkg.identifier: <% ctx.entities.pkg.identifier %>'; From b1725c15634238cd77d6f91cd975686c87e37209 Mon Sep 17 00:00:00 2001 From: Marcus Chok Date: Fri, 18 Oct 2024 14:04:01 -0400 Subject: [PATCH 2/7] don't pass post deploy/package scripts when validating and convert tests to use factories (#1743) * don't pass post deploy/package scripts when validating and convert tests to use factories * update test_manager for deploy in validate with no post deploy/package scripts * remove bad comment * nativeapp_project_directory -> nativeapp_teardown --- .../nativeapp/entities/application_package.py | 16 +-- .../cli/_plugins/nativeapp/manager.py | 4 - tests/nativeapp/test_manager.py | 4 +- tests_integration/nativeapp/test_validate.py | 128 +++++++++++++++--- 4 files changed, 112 insertions(+), 40 
deletions(-) diff --git a/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py b/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py index e68511570b..e2b1c37a3b 100644 --- a/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py +++ b/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py @@ -270,8 +270,6 @@ def action_validate( package_warehouse=( (model.meta and model.meta.warehouse) or workspace_ctx.default_warehouse ), - post_deploy_hooks=model.meta and model.meta.post_deploy, - package_scripts=[], # Package scripts are not supported in PDFv2 policy=policy, use_scratch_stage=use_scratch_stage, scratch_stage_fqn=f"{package_name}.{model.scratch_stage}", @@ -479,8 +477,6 @@ def deploy( paths=paths, stage_fqn=stage_fqn, package_warehouse=package_warehouse, - post_deploy_hooks=post_deploy_hooks, - package_scripts=package_scripts, policy=policy, use_scratch_stage=False, scratch_stage_fqn="", @@ -1201,8 +1197,6 @@ def validate_setup_script( recursive: bool, paths: List[Path] | None, stage_fqn: str, - post_deploy_hooks: list[PostDeployHook] | None, - package_scripts: List[str], policy: PolicyBase, use_scratch_stage: bool, scratch_stage_fqn: str, @@ -1224,8 +1218,6 @@ def validate_setup_script( paths=paths, stage_fqn=stage_fqn, package_warehouse=package_warehouse, - post_deploy_hooks=post_deploy_hooks, - package_scripts=package_scripts, policy=policy, use_scratch_stage=use_scratch_stage, scratch_stage_fqn=scratch_stage_fqn, @@ -1268,8 +1260,6 @@ def get_validation_result(self, use_scratch_stage: bool = True): package_warehouse=( (model.meta and model.meta.warehouse) or workspace_ctx.default_warehouse ), - post_deploy_hooks=model.meta and model.meta.post_deploy, - package_scripts=[], # Package scripts are not supported in PDFv2 policy=AllowAlwaysPolicy(), use_scratch_stage=use_scratch_stage, scratch_stage_fqn=f"{package_name}.{model.scratch_stage}", @@ -1292,8 +1282,6 @@ def get_validation_result_static( 
recursive: bool, paths: List[Path] | None, stage_fqn: str, - post_deploy_hooks: list[PostDeployHook] | None, - package_scripts: List[str], policy: PolicyBase, use_scratch_stage: bool, scratch_stage_fqn: str, @@ -1319,8 +1307,8 @@ def get_validation_result_static( validate=False, stage_fqn=stage_fqn, package_warehouse=package_warehouse, - post_deploy_hooks=post_deploy_hooks, - package_scripts=package_scripts, + post_deploy_hooks=[], + package_scripts=[], policy=policy, ) prefixed_stage_fqn = StageManager.get_standard_stage_prefix(stage_fqn) diff --git a/src/snowflake/cli/_plugins/nativeapp/manager.py b/src/snowflake/cli/_plugins/nativeapp/manager.py index cfc8b47414..98c3b69c3d 100644 --- a/src/snowflake/cli/_plugins/nativeapp/manager.py +++ b/src/snowflake/cli/_plugins/nativeapp/manager.py @@ -325,8 +325,6 @@ def validate(self, use_scratch_stage: bool = False): paths=[], stage_fqn=self.stage_fqn, package_warehouse=self.package_warehouse, - post_deploy_hooks=self.package_post_deploy_hooks, - package_scripts=self.package_scripts, policy=AllowAlwaysPolicy(), use_scratch_stage=use_scratch_stage, scratch_stage_fqn=self.scratch_stage_fqn, @@ -348,8 +346,6 @@ def get_validation_result(self, use_scratch_stage: bool = False): paths=[], stage_fqn=self.stage_fqn, package_warehouse=self.package_warehouse, - post_deploy_hooks=self.package_post_deploy_hooks, - package_scripts=self.package_scripts, policy=AllowAlwaysPolicy(), use_scratch_stage=use_scratch_stage, scratch_stage_fqn=self.scratch_stage_fqn, diff --git a/tests/nativeapp/test_manager.py b/tests/nativeapp/test_manager.py index f72b4ff8a5..ae74a61b41 100644 --- a/tests/nativeapp/test_manager.py +++ b/tests/nativeapp/test_manager.py @@ -1381,7 +1381,7 @@ def test_validate_use_scratch_stage(mock_execute, mock_deploy, temp_dir, mock_cu validate=False, stage_fqn=f"{pkg_model.fqn.name}.{pkg_model.scratch_stage}", package_warehouse=pkg_model.meta.warehouse, - post_deploy_hooks=pkg_model.meta.post_deploy, + 
post_deploy_hooks=[], package_scripts=[], policy=AllowAlwaysPolicy(), ) @@ -1470,7 +1470,7 @@ def test_validate_failing_drops_scratch_stage( validate=False, stage_fqn=f"{pkg_model.fqn.name}.{pkg_model.scratch_stage}", package_warehouse=pkg_model.meta.warehouse, - post_deploy_hooks=pkg_model.meta.post_deploy, + post_deploy_hooks=[], package_scripts=[], policy=AllowAlwaysPolicy(), ) diff --git a/tests_integration/nativeapp/test_validate.py b/tests_integration/nativeapp/test_validate.py index 2b67a3342c..6a3efadabb 100644 --- a/tests_integration/nativeapp/test_validate.py +++ b/tests_integration/nativeapp/test_validate.py @@ -13,20 +13,63 @@ # limitations under the License. from shlex import split +from textwrap import dedent + +from tests.nativeapp.factories import ( + ProjectV2Factory, + ApplicationPackageEntityModelFactory, + ApplicationEntityModelFactory, + ProjectV10Factory, +) from tests.project.fixtures import * +@pytest.mark.integration +def test_nativeapp_validate_v1(nativeapp_teardown, runner, temp_dir): + ProjectV10Factory( + pdf__native_app__name="myapp", + pdf__native_app__artifacts=[ + {"src": "app/*", "dest": "./"}, + ], + files={ + "app/setup.sql": "CREATE OR ALTER VERSIONED SCHEMA core;", + "app/README.md": "\n", + "app/manifest.yml": "\n", + }, + ) + with nativeapp_teardown(project_dir=Path(temp_dir)): + # validate the app's setup script + result = runner.invoke_with_connection(["app", "validate"]) + assert result.exit_code == 0, result.output + assert "Native App validation succeeded." 
in result.output + + @pytest.mark.integration @pytest.mark.parametrize( - "command,test_project", + "command", [ - ["app validate", "napp_init_v1"], - ["app validate", "napp_init_v2"], - ["ws validate --entity-id=pkg", "napp_init_v2"], + "app validate", + "ws validate --entity-id=pkg", ], ) -def test_nativeapp_validate(command, test_project, nativeapp_project_directory, runner): - with nativeapp_project_directory(test_project): +def test_nativeapp_validate_v2(command, nativeapp_teardown, runner, temp_dir): + ProjectV2Factory( + pdf__entities=dict( + pkg=ApplicationPackageEntityModelFactory( + identifier="myapp_pkg", + ), + app=ApplicationEntityModelFactory( + identifier="myapp", + fromm__target="pkg", + ), + ), + files={ + "setup.sql": "CREATE OR ALTER VERSIONED SCHEMA core;", + "README.md": "\n", + "manifest.yml": "\n", + }, + ) + with nativeapp_teardown(project_dir=Path(temp_dir)): # validate the app's setup script result = runner.invoke_with_connection(split(command)) assert result.exit_code == 0, result.output @@ -37,22 +80,67 @@ def test_nativeapp_validate(command, test_project, nativeapp_project_directory, @pytest.mark.integration -@pytest.mark.parametrize( - "command,test_project", - [ - ["app validate", "napp_init_v2"], - ], -) -def test_nativeapp_validate_failing( - command, test_project, nativeapp_project_directory, runner -): - with nativeapp_project_directory(test_project): - # Create invalid SQL file - Path("app/setup_script.sql").write_text("Lorem ipsum dolor sit amet") - +def test_nativeapp_validate_failing(nativeapp_teardown, runner, temp_dir): + ProjectV2Factory( + pdf__entities=dict( + pkg=ApplicationPackageEntityModelFactory( + identifier="myapp_pkg", + ), + app=ApplicationEntityModelFactory( + identifier="myapp", + fromm__target="pkg", + ), + ), + files={ + # Create invalid SQL file + "setup.sql": dedent( + """\ + CREATE OR ALTER VERSIONED SCHEMA core; + Lorem ipsum dolor sit amet + """ + ), + "README.md": "\n", + "manifest.yml": "\n", + }, + ) 
+ with nativeapp_teardown(project_dir=Path(temp_dir)): # validate the app's setup script, this will fail # because we include an empty file - result = runner.invoke_with_connection(split(command)) + result = runner.invoke_with_connection(["app", "validate"]) assert result.exit_code == 1, result.output assert "Snowflake Native App setup script failed validation." in result.output assert "syntax error" in result.output + + +@pytest.mark.integration +def test_nativeapp_validate_with_post_deploy_hooks( + nativeapp_teardown, runner, temp_dir +): + ProjectV2Factory( + pdf__entities=dict( + pkg=ApplicationPackageEntityModelFactory( + identifier="myapp_pkg", + meta__post_deploy=[ + {"sql_script": "pkg_post_deploy1.sql"}, + ], + ), + app=ApplicationEntityModelFactory( + identifier="myapp", + fromm__target="pkg", + meta__post_deploy=[ + {"sql_script": "app_post_deploy1.sql"}, + ], + ), + ), + files={ + "app_post_deploy1.sql": "\n", + "pkg_post_deploy1.sql": "\n", + "setup.sql": "CREATE OR ALTER VERSIONED SCHEMA core;", + "README.md": "\n", + "manifest.yml": "\n", + }, + ) + + with nativeapp_teardown(project_dir=Path(temp_dir)): + result = runner.invoke_with_connection(["app", "validate"]) + assert result.exit_code == 0, result.output From 89515c3e747815a39d89774fce29dbbbd7ae6214 Mon Sep 17 00:00:00 2001 From: Francois Campbell Date: Mon, 21 Oct 2024 05:13:09 -0400 Subject: [PATCH 3/7] SNOW-1733913 Fix wrong script filename when running converted package scripts (#1745) When running a post-deploy hook that comes from an auto-converted package script, the filename seen is the absolute path to the tempfile, which is confusing to users. Let's store a `display_path` for these hooks so we can show users the original path that they had put in the PDF file. 
--- src/snowflake/cli/api/entities/utils.py | 4 +- .../cli/api/project/definition_conversion.py | 5 +- .../api/project/schemas/entities/common.py | 11 ++++ .../nativeapp/test_post_deploy.py | 54 +++++++++++++++++++ 4 files changed, 72 insertions(+), 2 deletions(-) diff --git a/src/snowflake/cli/api/entities/utils.py b/src/snowflake/cli/api/entities/utils.py index 387012d53b..d131342cb0 100644 --- a/src/snowflake/cli/api/entities/utils.py +++ b/src/snowflake/cli/api/entities/utils.py @@ -232,9 +232,11 @@ def execute_post_deploy_hooks( with console.phase(f"Executing {deployed_object_type} post-deploy actions"): sql_scripts_paths = [] + display_paths = [] for hook in post_deploy_hooks: if hook.sql_script: sql_scripts_paths.append(hook.sql_script) + display_paths.append(hook.display_path) else: raise ValueError( f"Unsupported {deployed_object_type} post-deploy hook type: {hook}" @@ -246,7 +248,7 @@ def execute_post_deploy_hooks( sql_scripts_paths, ) - for index, sql_script_path in enumerate(sql_scripts_paths): + for index, sql_script_path in enumerate(display_paths): console.step(f"Executing SQL script: {sql_script_path}") _execute_sql_script( script_content=scripts_content_list[index], diff --git a/src/snowflake/cli/api/project/definition_conversion.py b/src/snowflake/cli/api/project/definition_conversion.py index ac7aa1c6cf..f5e7f27706 100644 --- a/src/snowflake/cli/api/project/definition_conversion.py +++ b/src/snowflake/cli/api/project/definition_conversion.py @@ -422,6 +422,7 @@ def _convert_package_script_files( jinja_context = dict(package_name=package_name_replacement) post_deploy_hooks = [] for script_file in package_scripts: + original_script_file = script_file new_contents = render_script_template( project_root, jinja_context, script_file, get_basic_jinja_env() ) @@ -431,7 +432,9 @@ def _convert_package_script_files( d = _get_temp_dir().name _, script_file = mkstemp(dir=d, suffix="_converted.sql", text=True) (project_root / 
script_file).write_text(new_contents) - post_deploy_hooks.append(SqlScriptHookType(sql_script=script_file)) + hook = SqlScriptHookType(sql_script=script_file) + hook._display_path = original_script_file  # noqa: SLF001 + post_deploy_hooks.append(hook) return post_deploy_hooks diff --git a/src/snowflake/cli/api/project/schemas/entities/common.py b/src/snowflake/cli/api/project/schemas/entities/common.py index d312f493ef..d9036d9a4c 100644 --- a/src/snowflake/cli/api/project/schemas/entities/common.py +++ b/src/snowflake/cli/api/project/schemas/entities/common.py @@ -28,6 +28,17 @@ class SqlScriptHookType(UpdatableModel): sql_script: str = Field(title="SQL file path relative to the project root") + # Used to store a user-friendly path for this script, when the + # value of `sql_script` is a path to a different file + # This is used in the UI to display the path relative to the + # project root when `sql_script` is actually a path to a temp file + # generated by the in-memory PDF v1 to v2 conversion + _display_path: str = PrivateAttr(default="") + + @property + def display_path(self): + return self._display_path or self.sql_script + # Currently sql_script is the only supported hook type. 
Change to a Union once other hook types are added PostDeployHook = SqlScriptHookType diff --git a/tests_integration/nativeapp/test_post_deploy.py b/tests_integration/nativeapp/test_post_deploy.py index 00bec42613..ea6df2acbb 100644 --- a/tests_integration/nativeapp/test_post_deploy.py +++ b/tests_integration/nativeapp/test_post_deploy.py @@ -1,8 +1,10 @@ # Tests that application post-deploy scripts are executed by creating a post_deploy_log table and having each post-deploy script add a record to it +from pathlib import Path import pytest import yaml +from tests.nativeapp.factories import ProjectV11Factory from tests_common import IS_WINDOWS from tests_integration.test_utils import ( row_from_snowflake_session, @@ -456,3 +458,55 @@ def test_nativeapp_post_deploy_with_windows_path( finally: teardown(runner, []) + + +@pytest.mark.integration +@pytest.mark.parametrize( + "base_command,test_project", + [["app", "napp_application_post_deploy_v2"]], +) +def test_nativeapp_post_deploy_logs_relative_paths( + runner, nativeapp_project_directory, base_command, test_project +): + with nativeapp_project_directory(test_project): + result = runner.invoke_with_connection(["app", "run"]) + for filename in [ + "scripts/app_post_deploy1.sql", + "scripts/app_post_deploy2.sql", + "scripts/package_post_deploy1.sql", + "scripts/package_post_deploy2.sql", + ]: + assert f"Executing SQL script: {filename}" in result.output + + +@pytest.mark.integration +def test_nativeapp_converted_package_scripts_logs_relative_paths( + runner, nativeapp_teardown, temp_dir +): + package_scripts = { + "scripts/package_script1.sql": "select 'package script 1 for {{ package_name }}'", + "scripts/package_script2.sql": "select 'package script 2 for {{ package_name }}'", + } + manifest = yaml.safe_dump( + dict( + manifest_version=1, + artifacts={ + "setup_script": "setup.sql", + "readme": "README.md", + }, + ) + ) + ProjectV11Factory( + pdf__native_app__package__scripts=list(package_scripts), + 
pdf__native_app__artifacts=["README.md", "setup.sql", "manifest.yml"], + files={ + "README.md": "", + "setup.sql": "select 1", + "manifest.yml": manifest, + } + | package_scripts, + ) + with nativeapp_teardown(project_dir=Path(temp_dir)): + result = runner.invoke_with_connection(["app", "run"]) + for filename in package_scripts: + assert f"Executing SQL script: {filename}" in result.output From 99c1915d2f17efbaaef8e2d259cb7ff197b1b411 Mon Sep 17 00:00:00 2001 From: Tomasz Urbaszek Date: Mon, 21 Oct 2024 10:58:14 +0200 Subject: [PATCH 4/7] Add SNOWFLAKE_HOME information to --info (#1746) --- RELEASE-NOTES.md | 1 + src/snowflake/cli/_app/cli_app.py | 2 ++ tests/test_main.py | 2 ++ 3 files changed, 5 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 1358d38609..d1032aa5ea 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -29,6 +29,7 @@ * Align variables for `snow stage|git execute`. For Python files variables are stripped of leading and trailing quotes. * `snow spcs service list-images` now displays image tag and digest. * Fix `snow stage list-files` for paths with directories. +* `snow --info` callback returns information about `SNOWFLAKE_HOME` variable. 
# v3.0.2 diff --git a/src/snowflake/cli/_app/cli_app.py b/src/snowflake/cli/_app/cli_app.py index bdad405a8a..1b25a3cde1 100644 --- a/src/snowflake/cli/_app/cli_app.py +++ b/src/snowflake/cli/_app/cli_app.py @@ -15,6 +15,7 @@ from __future__ import annotations import logging +import os import platform import sys from dataclasses import dataclass @@ -141,6 +142,7 @@ def _info_callback(value: bool): {"key": "python_version", "value": sys.version}, {"key": "system_info", "value": platform.platform()}, {"key": "feature_flags", "value": get_feature_flags_section()}, + {"key": "SNOWFLAKE_HOME", "value": os.getenv("SNOWFLAKE_HOME")}, ], ) print_result(result, output_format=OutputFormat.JSON) diff --git a/tests/test_main.py b/tests/test_main.py index f6936f0bfc..57790294f0 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -66,6 +66,7 @@ def test_custom_config_path(mock_conn, runner, mock_cursor): ) +@mock.patch.dict(os.environ, {"SNOWFLAKE_HOME": "FooBar"}, clear=True) def test_info_callback(runner): result = runner.invoke(["--info"]) assert result.exit_code == 0, result.output @@ -79,6 +80,7 @@ def test_info_callback(runner): "key": "feature_flags", "value": {"dummy_flag": True, "wrong_type_flag": "UNKNOWN"}, }, + {"key": "SNOWFLAKE_HOME", "value": "FooBar"}, ] From ab9183fc0a409197ed4ff30f9a685485a30793b6 Mon Sep 17 00:00:00 2001 From: Tomasz Urbaszek Date: Mon, 21 Oct 2024 15:48:08 +0200 Subject: [PATCH 5/7] Make JWT command uses config connection details (#1751) --- .../cli/_plugins/connection/commands.py | 47 ++++------- src/snowflake/cli/api/connections.py | 13 ++- tests/__snapshots__/test_help_messages.ambr | 70 ++++++++++++++-- tests/test.toml | 6 ++ tests/test_config.py | 6 ++ tests/test_connection.py | 83 ++++++++++++++++++- 6 files changed, 185 insertions(+), 40 deletions(-) diff --git a/src/snowflake/cli/_plugins/connection/commands.py b/src/snowflake/cli/_plugins/connection/commands.py index 1c35a0c8ad..839d0de12b 100644 --- 
a/src/snowflake/cli/_plugins/connection/commands.py +++ b/src/snowflake/cli/_plugins/connection/commands.py @@ -16,10 +16,9 @@ import logging import os.path -from pathlib import Path import typer -from click import ClickException, Context, Parameter # type: ignore +from click import ClickException, Context, Parameter, UsageError # type: ignore from click.core import ParameterSource # type: ignore from click.types import StringParamType from snowflake import connector @@ -355,36 +354,22 @@ def set_default( return MessageResult(f"Default connection set to: {name}") -@app.command(requires_connection=False) +@app.command(requires_connection=True) def generate_jwt( - account: str = typer.Option( - None, - "--account", - "-a", - "--accountname", - help="Account name to use when authenticating with Snowflake.", - show_default=False, - ), - user: str = typer.Option( - None, - "--user", - "-u", - "--username", - show_default=False, - help="Username to connect to Snowflake.", - ), - private_key_file: Path = typer.Option( - None, - "--private-key", - "--private-key-path", - "-k", - help="Path to file containing private key", - dir_okay=False, - exists=True, - ), **options, ) -> CommandResult: """Generate and display a JWT token.""" + connection_details = get_cli_context().connection_context.update_from_config() + + msq_template = ( + "{} is not set in the connection context, but required for JWT generation." 
+ ) + if not connection_details.user: + raise UsageError(msq_template.format("User")) + if not connection_details.account: + raise UsageError(msq_template.format("Account")) + if not connection_details.private_key_file: + raise UsageError(msq_template.format("Private key file")) passphrase = os.getenv("PRIVATE_KEY_PASSPHRASE", None) if not passphrase: passphrase = typer.prompt( @@ -393,9 +378,13 @@ def generate_jwt( type=str, default="", ) + try: token = connector.auth.get_token_from_private_key( - user, account, private_key_file, passphrase + user=connection_details.user, + account=connection_details.account, + privatekey_path=connection_details.private_key_file, + key_password=passphrase, ) return MessageResult(token) except ValueError as err: diff --git a/src/snowflake/cli/api/connections.py b/src/snowflake/cli/api/connections.py index 4aae2f55f4..1239d0d7f6 100644 --- a/src/snowflake/cli/api/connections.py +++ b/src/snowflake/cli/api/connections.py @@ -22,7 +22,7 @@ from pathlib import Path from typing import Optional -from snowflake.cli.api.config import get_default_connection_name +from snowflake.cli.api.config import get_connection_dict, get_default_connection_name from snowflake.cli.api.exceptions import InvalidSchemaError from snowflake.connector import SnowflakeConnection from snowflake.connector.compat import IS_WINDOWS @@ -79,6 +79,17 @@ def update(self, **updates): raise KeyError(f"{key} is not a field of {self.__class__.__name__}") setattr(self, key, value) + def update_from_config(self) -> ConnectionContext: + connection_config = get_connection_dict(connection_name=self.connection_name) + if "private_key_path" in connection_config: + connection_config["private_key_file"] = connection_config[ + "private_key_path" + ] + del connection_config["private_key_path"] + + self.update(**connection_config) + return self + def __repr__(self) -> str: """Minimal repr where None values have their keys omitted.""" items = [f"{k}={repr(v)}" for (k, v) in 
self.present_values_as_dict().items()] diff --git a/tests/__snapshots__/test_help_messages.ambr b/tests/__snapshots__/test_help_messages.ambr index 912fbccc4b..8d502b4504 100644 --- a/tests/__snapshots__/test_help_messages.ambr +++ b/tests/__snapshots__/test_help_messages.ambr @@ -1514,14 +1514,68 @@ Generate and display a JWT token. +- Options --------------------------------------------------------------------+ - | --account,--accountname -a TEXT Account name to use when | - | authenticating with Snowflake. | - | --user,--username -u TEXT Username to connect to | - | Snowflake. | - | --private-key,--private-key-p… -k FILE Path to file containing | - | private key | - | [default: None] | - | --help -h Show this message and exit. | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml. | + | Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. 
| + | --token-file-path TEXT Path to file with an OAuth | + | token that should be used | + | when connecting to Snowflake | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses connection defined with | + | command line parameters, | + | instead of one defined in | + | config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Run python connector | + | diagnostic test | + | --diag-log-path TEXT Diagnostic report path | + | --diag-allowlist-path TEXT Diagnostic report path to | + | optional allowlist | +------------------------------------------------------------------------------+ +- Global configuration -------------------------------------------------------+ | --format [TABLE|JSON] Specifies the output format. 
| diff --git a/tests/test.toml b/tests/test.toml index 1fcd71bef8..397c6e8bd9 100644 --- a/tests/test.toml +++ b/tests/test.toml @@ -47,6 +47,12 @@ private_key_path = "/private/key" [connections.no_private_key] authenticator = "SNOWFLAKE_JWT" +[connections.jwt] +user = "jdoe" +account = "testing_account" +authenticator = "SNOWFLAKE_JWT" +private_key_file = "/private/key" + [cli.features] dummy_flag = true wrong_type_flag = "not_true" diff --git a/tests/test_config.py b/tests/test_config.py index f58ba74f41..fef81e0f91 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -146,6 +146,12 @@ def test_get_all_connections(test_snowcli_config): "private_key_path": "/private/key", }, "test_connections": {"user": "python"}, + "jwt": { + "account": "testing_account", + "authenticator": "SNOWFLAKE_JWT", + "private_key_file": "/private/key", + "user": "jdoe", + }, } diff --git a/tests/test_connection.py b/tests/test_connection.py index 127c20271e..5b2b08ce0c 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -351,6 +351,16 @@ def test_lists_connection_information(mock_get_default_conn_name, runner): "authenticator": "SNOWFLAKE_JWT", }, }, + { + "connection_name": "jwt", + "is_default": False, + "parameters": { + "account": "testing_account", + "authenticator": "SNOWFLAKE_JWT", + "private_key_file": "/private/key", + "user": "jdoe", + }, + }, ] @@ -426,6 +436,16 @@ def test_connection_list_does_not_print_too_many_env_variables( "authenticator": "SNOWFLAKE_JWT", }, }, + { + "connection_name": "jwt", + "is_default": False, + "parameters": { + "account": "testing_account", + "authenticator": "SNOWFLAKE_JWT", + "private_key_file": "/private/key", + "user": "jdoe", + }, + }, ] @@ -1164,7 +1184,9 @@ def test_generate_jwt(mocked_get_token, runner, named_temporary_file): result.output == "Enter private key file password (Press enter if none) []: \nfunny token\n" ) - mocked_get_token.assert_called_once_with("FooBar", "account1", f, "123") + 
mocked_get_token.assert_called_once_with( + user="FooBar", account="account1", privatekey_path=str(f), key_password="123" + ) @mock.patch.dict(os.environ, {"PRIVATE_KEY_PASSPHRASE": "123"}) @@ -1191,4 +1213,61 @@ def test_generate_jwt_with_pass_phrase(mocked_get_token, runner, named_temporary assert result.exit_code == 0, result.output assert result.output == "funny token\n" - mocked_get_token.assert_called_once_with("FooBar", "account1", f, "123") + mocked_get_token.assert_called_once_with( + user="FooBar", account="account1", privatekey_path=str(f), key_password="123" + ) + + +@mock.patch( + "snowflake.cli._plugins.connection.commands.connector.auth.get_token_from_private_key" +) +def test_generate_jwt_uses_config(mocked_get_token, runner, named_temporary_file): + mocked_get_token.return_value = "funny token" + + with named_temporary_file() as f: + f.write_text("secret from file") + result = runner.invoke( + ["connection", "generate-jwt", "--connection", "jwt"], + input="123", + ) + + assert result.exit_code == 0, result.output + assert ( + result.output + == "Enter private key file password (Press enter if none) []: \nfunny token\n" + ) + mocked_get_token.assert_called_once_with( + user="jdoe", + account="testing_account", + privatekey_path="/private/key", + key_password="123", + ) + + +@pytest.mark.parametrize("attribute", ["account", "user", "private_key_file"]) +@mock.patch( + "snowflake.cli._plugins.connection.commands.connector.auth.get_token_from_private_key" +) +def test_generate_jwt_raises_error_if_required_parameter_is_missing( + mocked_get_token, attribute, runner, named_temporary_file +): + connection_details = { + "account": "account1", + "user": "FooBar", + "private_key_file": "/private/key", + } + del connection_details[attribute] + data = tomlkit.dumps({"connections": {"jwt": connection_details}}) + + with NamedTemporaryFile("w+", suffix="toml") as tmp_file: + tmp_file.write(data) + tmp_file.flush() + + result = runner.invoke_with_config_file( + 
tmp_file.name, + ["connection", "generate-jwt", "-c", "jwt"], + ) + assert ( + f'{attribute.capitalize().replace("_", " ")} is not set in the connection context' + in result.output + ) From c9ea73e4e8171fa17bc4ccac533bd93db19cc94e Mon Sep 17 00:00:00 2001 From: Tomasz Urbaszek Date: Mon, 21 Oct 2024 15:49:49 +0200 Subject: [PATCH 6/7] Fix release notes for SPCS entry --- RELEASE-NOTES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index d1032aa5ea..f347f21f7e 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -27,7 +27,7 @@ ## Fixes and improvements * Fixed a bug that would cause the `deploy_root`, `bundle_root`, and `generated_root` directories to be created in the current working directory instead of the project root when invoking commands with the `--project` flag from a different directory. * Align variables for `snow stage|git execute`. For Python files variables are stripped of leading and trailing quotes. -* `snow spcs service list-images` now displays image tag and digest. +* `snow spcs image-repository list-images` now displays image tag and digest. * Fix `snow stage list-files` for paths with directories. * `snow --info` callback returns information about `SNOWFLAKE_HOME` variable. 
From 5691dcb662d26e28e256a0b295ce97b60127e787 Mon Sep 17 00:00:00 2001 From: Tomasz Urbaszek Date: Mon, 21 Oct 2024 16:03:27 +0200 Subject: [PATCH 7/7] Allow snowflake.yml with only env section for SQL use-case (#1753) --- .../api/project/schemas/project_definition.py | 22 +++++++++---------- tests/project/test_project_definition_v2.py | 6 +++++ .../test_data/projects/sql/snowflake.yml | 3 +++ tests_integration/test_sql.py | 21 ++++++++++++++++++ 4 files changed, 41 insertions(+), 11 deletions(-) create mode 100644 tests_integration/test_data/projects/sql/snowflake.yml diff --git a/src/snowflake/cli/api/project/schemas/project_definition.py b/src/snowflake/cli/api/project/schemas/project_definition.py index 26faaf9a05..06fb22bea9 100644 --- a/src/snowflake/cli/api/project/schemas/project_definition.py +++ b/src/snowflake/cli/api/project/schemas/project_definition.py @@ -115,7 +115,17 @@ class DefinitionV11(DefinitionV10): class DefinitionV20(_ProjectDefinitionBase): - entities: Dict[str, AnnotatedEntity] = Field(title="Entity definitions.") + entities: Dict[str, AnnotatedEntity] = Field( + title="Entity definitions.", default={} + ) + env: Optional[Dict[str, Union[str, int, bool]]] = Field( + title="Default environment specification for this project.", + default=None, + ) + mixins: Optional[Dict[str, Dict]] = Field( + title="Mixins to apply to entities", + default=None, + ) @model_validator(mode="after") def validate_entities_identifiers(self): @@ -163,16 +173,6 @@ def _validate_target_field( f"Target type mismatch. 
Expected {target_type.__name__}, got {actual_target_type.__name__}" ) - env: Optional[Dict[str, Union[str, int, bool]]] = Field( - title="Default environment specification for this project.", - default=None, - ) - - mixins: Optional[Dict[str, Dict]] = Field( - title="Mixins to apply to entities", - default=None, - ) - @model_validator(mode="before") @classmethod def apply_mixins(cls, data: Dict) -> Dict: diff --git a/tests/project/test_project_definition_v2.py b/tests/project/test_project_definition_v2.py index 6587480ea7..3b4348e10f 100644 --- a/tests/project/test_project_definition_v2.py +++ b/tests/project/test_project_definition_v2.py @@ -214,6 +214,12 @@ }, None, ], + [ + { + "env": {"string": "string", "int": 42, "bool": True}, + }, + None, + ], [ { "mixins": { diff --git a/tests_integration/test_data/projects/sql/snowflake.yml b/tests_integration/test_data/projects/sql/snowflake.yml new file mode 100644 index 0000000000..2d0add5936 --- /dev/null +++ b/tests_integration/test_data/projects/sql/snowflake.yml @@ -0,0 +1,3 @@ +definition_version: 2 +env: + monty_python: "Knights of Nii" diff --git a/tests_integration/test_sql.py b/tests_integration/test_sql.py index 4d02b6d547..3be9873eac 100644 --- a/tests_integration/test_sql.py +++ b/tests_integration/test_sql.py @@ -178,3 +178,24 @@ def test_sql_large_lobs_in_memory_tables(runner): ) assert "VARCHAR(134217728)" in result.output + + +@pytest.mark.integration +@pytest.mark.parametrize( + "template", + [ + "<% ctx.env.monty_python %>", + "&{ ctx.env.monty_python }", + ], +) +def test_sql_with_variables_from_project(runner, project_directory, template): + with project_directory("sql"): + result = runner.invoke_with_connection_json( + [ + "sql", + "-q", + f"select '{template}' as var", + ] + ) + assert result.exit_code == 0, result.output + assert result.json == [{"VAR": "Knights of Nii"}]