From cc99b1e90d9cdc09c568ccebd78c88e0c4325441 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Mon, 17 Jul 2023 23:48:29 -0400 Subject: [PATCH 01/24] Update docs structure and add getting started guides --- cosmos/hooks/subprocess.py | 3 +- docs/conf.py | 18 +-- docs/configuration/compiled-sql.rst | 8 ++ .../generating-docs.rst} | 10 +- docs/configuration/index.rst | 16 +++ docs/configuration/lineage.rst | 82 +++++++++++ docs/{dbt => configuration}/scheduling.rst | 2 + docs/configuration/selecting-excluding.rst | 43 ++++++ .../testing-behavior.rst} | 67 ++------- docs/contributing.rst | 8 +- docs/dbt/execution-mode-local-conflicts.rst | 70 ---------- docs/dbt/execution-mode-local-in-astro.rst | 40 ------ docs/dbt/execution-mode-local-in-docker.rst | 47 ------- docs/dbt/execution-mode-local-in-mwaa.rst | 47 ------- docs/dbt/index.rst | 68 ---------- docs/dbt/install-options.rst | 127 ------------------ docs/dbt/lineage.rst | 77 ----------- docs/dbt/usage.rst | 68 ---------- docs/getting_started/astro.rst | 107 +++++++++++++++ .../docker.rst} | 6 +- .../execution-modes.rst | 34 ++--- docs/getting_started/gcc.rst | 68 ++++++++++ docs/getting_started/index.rst | 37 +++++ .../kubernetes.rst} | 6 +- docs/getting_started/mwaa.rst | 103 ++++++++++++++ docs/getting_started/open-source.rst | 68 ++++++++++ docs/index.rst | 19 ++- docs/jaffle_shop_task_group.png | Bin 136482 -> 0 bytes .../index.rst} | 2 +- docs/requirements.txt | 4 - 30 files changed, 593 insertions(+), 662 deletions(-) create mode 100644 docs/configuration/compiled-sql.rst rename docs/{dbt/docs.rst => configuration/generating-docs.rst} (87%) create mode 100644 docs/configuration/index.rst create mode 100644 docs/configuration/lineage.rst rename docs/{dbt => configuration}/scheduling.rst (99%) create mode 100644 docs/configuration/selecting-excluding.rst rename docs/{dbt/configuration.rst => configuration/testing-behavior.rst} (58%) delete mode 100644 docs/dbt/execution-mode-local-conflicts.rst delete mode 
100644 docs/dbt/execution-mode-local-in-astro.rst delete mode 100644 docs/dbt/execution-mode-local-in-docker.rst delete mode 100644 docs/dbt/execution-mode-local-in-mwaa.rst delete mode 100644 docs/dbt/index.rst delete mode 100644 docs/dbt/install-options.rst delete mode 100644 docs/dbt/lineage.rst delete mode 100644 docs/dbt/usage.rst create mode 100644 docs/getting_started/astro.rst rename docs/{dbt/execution-mode-docker.rst => getting_started/docker.rst} (98%) rename docs/{dbt => getting_started}/execution-modes.rst (78%) create mode 100644 docs/getting_started/gcc.rst create mode 100644 docs/getting_started/index.rst rename docs/{dbt/execution-mode-kubernetes.rst => getting_started/kubernetes.rst} (98%) create mode 100644 docs/getting_started/mwaa.rst create mode 100644 docs/getting_started/open-source.rst delete mode 100644 docs/jaffle_shop_task_group.png rename docs/{dbt/connections-profiles.rst => profiles/index.rst} (99%) delete mode 100644 docs/requirements.txt diff --git a/cosmos/hooks/subprocess.py b/cosmos/hooks/subprocess.py index 027b46e85..6fd691aee 100644 --- a/cosmos/hooks/subprocess.py +++ b/cosmos/hooks/subprocess.py @@ -40,8 +40,7 @@ def run_command( :param env: Optional dict containing environment variables to be made available to the shell environment in which ``command`` will be executed. If omitted, ``os.environ`` will be used. Note, that in case you have Sentry configured, original variables from the environment - will also be passed to the subprocess with ``SUBPROCESS_`` prefix. See - :doc:`/administration-and-deployment/logging-monitoring/errors` for details. + will also be passed to the subprocess with ``SUBPROCESS_`` prefix. :param output_encoding: encoding to use for decoding stdout :param cwd: Working directory to run the command in. If None (default), the command is run in a temporary directory. 
diff --git a/docs/conf.py b/docs/conf.py index 1a2116233..dbf7a5068 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,10 +20,9 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration extensions = [ - "autoapi.extension", + # "autoapi.extension", "sphinx.ext.autodoc", "sphinx.ext.autosummary", - "sphinx.ext.autosectionlabel", "sphinx_tabs.tabs", ] @@ -48,16 +47,17 @@ "image_light": "cosmos-icon.svg", "image_dark": "cosmos-icon.svg", }, - "footer_items": ["copyright"], + "footer_start": ["copyright"], + "show_nav_level": 2, } -def skip_logger_objects(app, what, name, obj, skip, options): - if "logger" in name: - skip = True +# def skip_logger_objects(app, what, name, obj, skip, options): +# if "logger" in name: +# skip = True - return skip +# return skip -def setup(sphinx): - sphinx.connect("autoapi-skip-member", skip_logger_objects) +# def setup(sphinx): +# sphinx.connect("autoapi-skip-member", skip_logger_objects) diff --git a/docs/configuration/compiled-sql.rst b/docs/configuration/compiled-sql.rst new file mode 100644 index 000000000..5d42a234f --- /dev/null +++ b/docs/configuration/compiled-sql.rst @@ -0,0 +1,8 @@ +.. _compiled-sql: + +Compiled SQL +==================== + +When using the local execution mode, Cosmos will store the compiled SQL for each model in the ``compiled_sql`` field of the task's ``template_fields``. This allows you to view the compiled SQL in the Airflow UI. + +If you'd like to disable this feature, you can set ``should_store_compiled_sql=False`` on the local operator (or via the ``operator_args`` parameter on the DAG/Task Group). diff --git a/docs/dbt/docs.rst b/docs/configuration/generating-docs.rst similarity index 87% rename from docs/dbt/docs.rst rename to docs/configuration/generating-docs.rst index f0f7060ee..c3ebf587d 100644 --- a/docs/dbt/docs.rst +++ b/docs/configuration/generating-docs.rst @@ -1,7 +1,9 @@ +.. 
_generating-docs: + Generating Docs -================ +=============== -dbt allows you to generate static documentation on your models, tables, and more. You can read more about it in the `official documentation `_. For an example of what the docs look like with the ``jaffle_shop`` project, check out `this site `_. +dbt allows you to generate static documentation on your models, tables, and more. You can read more about it in the `official dbt documentation `_. For an example of what the docs look like with the ``jaffle_shop`` project, check out `this site `_. Many users choose to generate and serve these docs on a static website. This is a great way to share your data models with your team and other stakeholders. @@ -20,7 +22,7 @@ Examples Upload to S3 ~~~~~~~~~~~~~~~~~~~~~~~ -S3 supports serving static files directly from a bucket. To learn more (and to set it up), check out the `official documentation `_. +S3 supports serving static files directly from a bucket. To learn more (and to set it up), check out the `official S3 documentation `_. You can use the :class:`~cosmos.operators.DbtDocsS3Operator` to generate and upload docs to a S3 bucket. The following code snippet shows how to do this with the default jaffle_shop project: @@ -39,7 +41,7 @@ You can use the :class:`~cosmos.operators.DbtDocsS3Operator` to generate and upl ) Upload to Azure Blob Storage -~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Azure Blob Storage supports serving static files directly from a container. To learn more (and to set it up), check out the `official documentation `_. diff --git a/docs/configuration/index.rst b/docs/configuration/index.rst new file mode 100644 index 000000000..4c82c115e --- /dev/null +++ b/docs/configuration/index.rst @@ -0,0 +1,16 @@ +.. _configuration: + +Configuration +============= + +Cosmos offers a number of configuration options to customize its behavior. For more info, check out the links on the left or the table of contents below. + +.. 
toctree:: + :caption: Contents: + + Configuring Lineage + Generating Docs + Scheduling + Testing Behavior + Selecting & Excluding + Compiled SQL diff --git a/docs/configuration/lineage.rst b/docs/configuration/lineage.rst new file mode 100644 index 000000000..498b61b30 --- /dev/null +++ b/docs/configuration/lineage.rst @@ -0,0 +1,82 @@ +.. _lineage: + +Configuring Lineage +=================== + +Cosmos uses the `dbt-ol `_ wrapper to emit lineage events to OpenLineage. Follow the instructions below to ensure Cosmos is configured properly to do this. + +With a Virtual Environment +-------------------------- + +1. Add steps in your ``Dockerfile`` for the venv and wrapping the dbt executable + +.. code-block:: Docker + + FROM quay.io/astronomer/astro-runtime:7.2.0 + + # install python virtualenv to run dbt + WORKDIR /usr/local/airflow + COPY dbt-requirements.txt ./ + RUN python -m venv dbt_venv && source dbt_venv/bin/activate && \ + pip install --no-cache-dir -r dbt-requirements.txt && deactivate + + # wrap the executable from the venv so that dbt-ol can access it + RUN echo -e '#!/bin/bash' > /usr/bin/dbt && \ + echo -e 'source /usr/local/airflow/dbt_venv/bin/activate && dbt "$@"' >> /usr/bin/dbt + + # ensure all users have access to the executable + RUN chmod -R 777 /usr/bin/dbt + +2. Create a ``dbt-requirements.txt`` file with the following contents. If you're using a different +data warehouse than Redshift, then replace with the one that you're using (i.e. ``dbt-bigquery``, +``dbt-snowflake``, etc.) + +.. code-block:: text + + dbt-redshift + openlineage-dbt + +3. Add the following to your ``requirements.txt`` file + +.. code-block:: text + + astronomer-cosmos + +4. When instantiating a Cosmos object be sure to use the ``dbt_executable_path`` parameter for the dbt-ol +installed + +.. 
code-block:: python + + jaffle_shop = DbtTaskGroup( + ..., + dbt_args={ + "dbt_executable_path": "/usr/local/airflow/dbt_venv/bin/dbt-ol", + }, + ) + + +With the Base Cosmos Python Package +----------------------------------- + +If you're using the base Cosmos Python package, then you'll need to install the dbt-ol wrapper +using the ``[dbt-openlineage]`` extra. + +1. Add the following to your ``requirements.txt`` file + +.. code-block:: text + + astronomer-cosmos[dbt-openlineage] + + +2. When instantiating a Cosmos object be sure to use the ``dbt_executable_path`` parameter for the dbt-ol +installed + +.. code-block:: python + + jaffle_shop = DbtTaskGroup( + ..., + dbt_args={ + "dbt_executable_path": "/usr/local/bin/dbt-ol", + }, + ) + diff --git a/docs/dbt/scheduling.rst b/docs/configuration/scheduling.rst similarity index 99% rename from docs/dbt/scheduling.rst rename to docs/configuration/scheduling.rst index d0da53768..625280309 100644 --- a/docs/dbt/scheduling.rst +++ b/docs/configuration/scheduling.rst @@ -1,3 +1,5 @@ +.. _scheduling: + Scheduling ================ diff --git a/docs/configuration/selecting-excluding.rst b/docs/configuration/selecting-excluding.rst new file mode 100644 index 000000000..5bcf42beb --- /dev/null +++ b/docs/configuration/selecting-excluding.rst @@ -0,0 +1,43 @@ +.. _selecting-excluding: + +Selecting & Excluding +======================= + +Cosmos allows you to filter by configs (e.g. ``materialized``, ``tags``) using the ``select`` and ``exclude`` parameters. If a model contains any of the configs in the ``select``, it gets included as part of the DAG/Task Group. Similarly, if a model contains any of the configs in the ``exclude``, it gets excluded from the DAG/Task Group. + +The ``select`` and ``exclude`` parameters are dictionaries with the following keys: + +- ``configs``: a list of configs to filter by. The configs are in the format ``key:value``. For example, ``tags:daily`` or ``materialized:table``. 
+- ``paths``: a list of paths to filter by. The paths are in the format ``path/to/dir``. For example, ``analytics`` or ``analytics/tables``. + +.. note:: + Cosmos currently reads from (1) config calls in the model code and (2) .yml files in the models directory for tags. It does not read from the dbt_project.yml file. + +Examples: + +.. code-block:: python + + from cosmos import DbtDag + + jaffle_shop = DbtDag( + # ... + select={"configs": ["tags:daily"]}, + ) + +.. code-block:: python + + from cosmos import DbtDag + + jaffle_shop = DbtDag( + # ... + select={"configs": ["schema:prod"]}, + ) + +.. code-block:: python + + from cosmos import DbtDag + + jaffle_shop = DbtDag( + # ... + select={"paths": ["analytics/tables"]}, + ) \ No newline at end of file diff --git a/docs/dbt/configuration.rst b/docs/configuration/testing-behavior.rst similarity index 58% rename from docs/dbt/configuration.rst rename to docs/configuration/testing-behavior.rst index 29ac09ee1..6f2b7cbef 100644 --- a/docs/dbt/configuration.rst +++ b/docs/configuration/testing-behavior.rst @@ -1,10 +1,10 @@ -Configuration -================ +.. _testing-behavior: -Cosmos offers a few different configuration options for how your dbt project is run and structured. This page describes the available options and how to configure them. +Testing Behavior +================ -Testing ----------------------- +Testing Configuration +--------------------- By default, Cosmos will add a test after each model. This can be overridden using the ``test_behavior`` field. The options are: @@ -24,8 +24,9 @@ Example: ) -Warn Notification ----------------------- +Warning Behavior +---------------- + .. note:: As of now, this feature is only available for the default execution mode ``local`` @@ -84,54 +85,4 @@ When at least one WARN message is present, the function passed to ``on_warning_c If warnings that are not associated with tests occur (e.g. 
freshness warnings), they will still trigger the ``on_warning_callback`` method above. However, these warnings will not be included in the ``test_names`` and - ``test_results`` context variables, which are specific to test-related warnings. - -Selecting and Excluding ----------------------- - -Cosmos allows you to filter by configs (e.g. ``materialized``, ``tags``) using the ``select`` and ``exclude`` parameters. If a model contains any of the configs in the ``select``, it gets included as part of the DAG/Task Group. Similarly, if a model contains any of the configs in the ``exclude``, it gets excluded from the DAG/Task Group. - -The ``select`` and ``exclude`` parameters are dictionaries with the following keys: - -- ``configs``: a list of configs to filter by. The configs are in the format ``key:value``. For example, ``tags:daily`` or ``materialized:table``. -- ``paths``: a list of paths to filter by. The paths are in the format ``path/to/dir``. For example, ``analytics`` or ``analytics/tables``. - -.. note:: - Cosmos currently reads from (1) config calls in the model code and (2) .yml files in the models directory for tags. It does not read from the dbt_project.yml file. - -Examples: - -.. code-block:: python - - from cosmos import DbtDag - - jaffle_shop = DbtDag( - # ... - select={"configs": ["tags:daily"]}, - ) - -.. code-block:: python - - from cosmos import DbtDag - - jaffle_shop = DbtDag( - # ... - select={"configs": ["schema:prod"]}, - ) - -.. code-block:: python - - from cosmos import DbtDag - - jaffle_shop = DbtDag( - # ... - select={"paths": ["analytics/tables"]}, - ) - - -Viewing Compiled SQL ----------------------- - -When using the local execution mode, Cosmos will store the compiled SQL for each model in the ``compiled_sql`` field of the task's ``template_fields``. This allows you to view the compiled SQL in the Airflow UI. 
- -If you'd like to disable this feature, you can set ``should_store_compiled_sql=False`` on the local operator (or via the ``operator_args`` parameter on the DAG/Task Group). + ``test_results`` context variables, which are specific to test-related warnings. \ No newline at end of file diff --git a/docs/contributing.rst b/docs/contributing.rst index 74f689a18..8e65f68fd 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -20,9 +20,9 @@ To contribute to the cosmos project: Using Hatch for local development --------------------------------- -We currently use :ref:`hatch ` for building and distributing ``astronomer-cosmos``. +We currently use `hatch ` for building and distributing ``astronomer-cosmos``. -The tool can also be used for local development. The :ref:`pyproject.toml ` file currently defines a matrix of supported versions of Python and Airflow for which a user can run the tests against. +The tool can also be used for local development. The `pyproject.toml ` file currently defines a matrix of supported versions of Python and Airflow for which a user can run the tests against. For instance, to run the tests using Python 3.10 and Apache Airflow 2.5, use the following: @@ -54,6 +54,7 @@ To run the integration tests for the first time, use: If testing for the same Airflow and Python version, next runs of the integration tests can be: .. code-block:: bash + hatch run tests.py3.8-2.5:test-integration .. code-block:: bash @@ -72,11 +73,12 @@ To run the integration tests for the first time, use: If testing for the same Airflow and Python version, next runs of the integration tests can be: .. 
code-block:: bash + hatch run tests.py3.8-2.5:test-integration Using Docker Compose for local development ---------------------------------- +------------------------------------------ It is also possible to just build the development environment using docker compose diff --git a/docs/dbt/execution-mode-local-conflicts.rst b/docs/dbt/execution-mode-local-conflicts.rst deleted file mode 100644 index 5b3bb61e2..000000000 --- a/docs/dbt/execution-mode-local-conflicts.rst +++ /dev/null @@ -1,70 +0,0 @@ -.. _execution-mode-local-conflicts: - -Airflow and DBT dependencies conflicts -====================================== - -When using the `Local Execution Mode `__, users may face dependency conflicts between -Apache Airflow and DBT. The amount of conflicts may increase depending on the Airflow providers and DBT plugins being used. - -If you find errors, we recommend users look into using `alternative execution modes `__. - -In the following table, ``x`` represents combinations that lead to conflicts (vanilla ``apache-airflow`` and ``dbt-core`` packages): - -+---------------+-----+-----+-----+-----+-----+-----+---------+ -| Airflow \ DBT | 1.0 | 1.1 | 1.2 | 1.3 | 1.4 | 1.5 | 1.6.0b6 | -+===============+=====+=====+=====+=====+=====+=====+=========+ -| 2.2 | | | | x | x | x | x | -+---------------+-----+-----+-----+-----+-----+-----+---------+ -| 2.3 | x | x | | x | x | x | x | -+---------------+-----+-----+-----+-----+-----+-----+---------+ -| 2.4 | x | x | x | | | | | -+---------------+-----+-----+-----+-----+-----+-----+---------+ -| 2.5 | x | x | x | | | | | -+---------------+-----+-----+-----+-----+-----+-----+---------+ -| 2.6 | x | x | x | x | x | | x | -+---------------+-----+-----+-----+-----+-----+-----+---------+ - -Examples of errors ------------------------------------ - -.. code-block:: bash - - ERROR: Cannot install apache-airflow==2.2.4 and dbt-core==1.5.0 because these package versions have conflicting dependencies. 
- The conflict is caused by: - apache-airflow 2.2.4 depends on jinja2<3.1 and >=2.10.1 - dbt-core 1.5.0 depends on Jinja2==3.1.2 - -.. code-block:: bash - - ERROR: Cannot install apache-airflow==2.6.0 and dbt-core because these package versions have conflicting dependencies. - The conflict is caused by: - apache-airflow 2.6.0 depends on importlib-metadata<5.0.0 and >=1.7; python_version < "3.9" - dbt-semantic-interfaces 0.1.0.dev7 depends on importlib-metadata==6.6.0 - - -How to reproduce ----------------- - -The table was created by running `nox `__ with the following ``noxfile.py``: - -.. code-block:: python - - import nox - - - nox.options.sessions = ["compatibility"] - nox.options.reuse_existing_virtualenvs = True - - - @nox.session(python=["3.10"]) - @nox.parametrize("dbt_version", ["1.0", "1.1", "1.2", "1.3", "1.4", "1.5", "1.6.0b6"]) - @nox.parametrize("airflow_version", ["2.2.4", "2.3", "2.4", "2.5", "2.6"]) - def compatibility(session: nox.Session, airflow_version, dbt_version) -> None: - """Run both unit and integration tests.""" - session.run( - "pip3", - "install", - "--pre", - f"apache-airflow=={airflow_version}", - f"dbt-core=={dbt_version}", - ) diff --git a/docs/dbt/execution-mode-local-in-astro.rst b/docs/dbt/execution-mode-local-in-astro.rst deleted file mode 100644 index a31b6a610..000000000 --- a/docs/dbt/execution-mode-local-in-astro.rst +++ /dev/null @@ -1,40 +0,0 @@ -Local Execution Mode in Astro ------------------------------ - -It is possible to use Cosmos in Astro by using several :ref:`Execution Modes `, including ``local``, ``virtualenv``, ``docker`` and ``kubernetes``. - -Below is an example of how to use the `Local Execution Mode `__ -and the ``dbt_executable_path`` argument. - -The step-by-step is detailed below. - -1. Create the virtual environment in your ``Dockerfile`` (be sure to replace ```` with the actual adapter you need (i.e. ``dbt-redshift``, ``dbt-snowflake``, etc.) - -.. 
code-block:: docker - - FROM quay.io/astronomer/astro-runtime:8.0.0 - - # install dbt into a virtual environment - RUN python -m venv dbt_venv && source dbt_venv/bin/activate && \ - pip install --no-cache-dir && deactivate - -2. Add the following to your base project ``requirements.txt`` (preferably pinned) - -.. code-block:: text - - astronomer-cosmos - -3. Use the ``dbt_executable_path`` argument in the Cosmos operator to point to the virtual environment - -.. code-block:: python - - from cosmos import DbtTaskGroup - - tg = DbtTaskGroup( - # ... - dbt_args={ - # ... - "dbt_executable_path": f"{os.environ['AIRFLOW_HOME']}/dbt_venv/bin/dbt" - } - # ... - ) diff --git a/docs/dbt/execution-mode-local-in-docker.rst b/docs/dbt/execution-mode-local-in-docker.rst deleted file mode 100644 index 99e3138fe..000000000 --- a/docs/dbt/execution-mode-local-in-docker.rst +++ /dev/null @@ -1,47 +0,0 @@ -Local Execution Mode in Docker ------------------------------- - -One of the existing `Execution Modes `_ is ``Docker``, when Cosmos runs each ``dbt`` command in an independent Docker container. - -An alternative to the ``Docker`` execution mode is to run Airflow inside ``Docker``, -use the `Local Execution Mode `_ and -manage the ``dbt`` installation in an independent Python virtual environment, within the container. - -An advantage of this approach when compared to the `Virtualenv Execution Mode `_ is -that there is that there is not an overhead on creating a Python virtualenv each time a Cosmos ``dbt`` task is executed. - -To install dbt into a virtual environment on an Airflow Docker Image, you can use the following steps: - -1. Create the virtual environment in your Dockerfile (be sure to replace ```` with the actual adapter you need (i.e. ``dbt-redshift``, ``dbt-snowflake``, etc.) - -.. 
code-block:: docker - - FROM apache/airflow:2.4.3-python3.10 - - # install dbt into a venv to avoid package dependency conflicts - ENV PIP_USER=false - RUN python3 -m venv ${AIRFLOW_HOME}/dbt_venv - RUN ${AIRFLOW_HOME}/dbt_venv/bin/pip install - ENV PIP_USER=true - -3. Add the following to your base project ``requirements.txt`` (preferably pinned) - -.. code-block:: text - - astronomer-cosmos - -4. Use the ``dbt_executable_path`` argument in the Cosmos operator to point to the virtual environment - -.. code-block:: python - - import os - from cosmos import DbtTaskGroup - - tg = DbtTaskGroup( - # ... - dbt_args={ - # ... - "dbt_executable_path": f"{os.environ['AIRFLOW_HOME']}/dbt_venv/bin/dbt" - } - # ... - ) diff --git a/docs/dbt/execution-mode-local-in-mwaa.rst b/docs/dbt/execution-mode-local-in-mwaa.rst deleted file mode 100644 index 4b1e02d72..000000000 --- a/docs/dbt/execution-mode-local-in-mwaa.rst +++ /dev/null @@ -1,47 +0,0 @@ -Local Execution Mode in MWAA ----------------------------- - -Users can face Python dependency issues when trying to use the Cosmos `Local Execution Mode `_ in Amazon Managed Workflows for Apache Airflow (MWAA). - -This step-by-step illustrates how to use the Local Execution Mode, together with the -`MWAA's startup script `_ and -the ``dbt_executable_path`` argument. - -1. Initialize a startup script as outlined in MWAA's documentation `here `_ - -2. Add the following to your startup script (be sure to replace ```` with the actual adapter you need (i.e. ``dbt-redshift``, ``dbt-snowflake``, etc.) - -.. code-block:: shell - - #!/bin/sh - - export DBT_VENV_PATH="${AIRFLOW_HOME}/dbt_venv" - export PIP_USER=false - - python3 -m venv "${DBT_VENV_PATH}" - - ${DBT_VENV_PATH}/bin/pip install - - export PIP_USER=true - -3. Add the following to your base project ``requirements.txt`` **preferably pinned to a version that's compatible with your MWAA environment**. To check compatibility, use the `aws mwaa local runner `_ - -.. 
code-block:: text - - astronomer-cosmos - -4. Use the ``dbt_executable_path`` argument in the Cosmos operator to point to the virtual environment - -.. code-block:: python - - import os - from cosmos import DbtTaskGroup - - tg = DbtTaskGroup( - # ... - dbt_args={ - # ... - "dbt_executable_path": f"{os.environ['AIRFLOW_HOME']}/dbt_venv/bin/dbt" - } - # ... - ) diff --git a/docs/dbt/index.rst b/docs/dbt/index.rst deleted file mode 100644 index 078c43e1b..000000000 --- a/docs/dbt/index.rst +++ /dev/null @@ -1,68 +0,0 @@ -Cosmos - dbt Support -==================== - -Cosmos allows you to render your dbt models as Airflow DAGs and Task Groups. - -.. toctree:: - :maxdepth: 3 - :caption: Sections - - Installation Options - Execution Modes - Usage - Connections & Profiles - Scheduling - Configuration - Generating Docs - Lineage - - -Quickstart ----------- - -Install the package using pip: - -.. code-block:: bash - - pip install astronomer-cosmos[dbt-all] - - -Create a DAG and import the :class:`cosmos.DbtTaskGroup` class. The ``DbtTaskGroup`` operator requires a the name of your dbt project, an Airflow connection ID, a schema, and any additional arguments you'd like to pass to dbt. - -.. code-block:: python - - from pendulum import datetime - - from airflow import DAG - from airflow.operators.empty import EmptyOperator - from cosmos.task_group import DbtTaskGroup - - - with DAG( - dag_id="extract_dag", - start_date=datetime(2022, 11, 27), - schedule="@daily", - ) as dag: - e1 = EmptyOperator(task_id="ingestion_workflow") - - dbt_tg = DbtTaskGroup( - group_id="dbt_tg", - dbt_project_name="jaffle_shop", - conn_id="airflow_db", - profile_args={ - "schema": "public", - }, - ) - - e2 = EmptyOperator(task_id="some_extraction") - - e1 >> dbt_tg >> e2 - - -The ``DbtTaskGroup`` operator will automatically generate a TaskGroup with the tasks defined in your dbt project. Here's what the DAG looks like in the Airflow UI: - - -.. 
figure:: https://github.com/astronomer/astronomer-cosmos/raw/main/docs/_static/dbt_dag.png - :width: 800 - - dbt's default jaffle_shop project rendered as a TaskGroup in Airflow diff --git a/docs/dbt/install-options.rst b/docs/dbt/install-options.rst deleted file mode 100644 index 3b4394ff6..000000000 --- a/docs/dbt/install-options.rst +++ /dev/null @@ -1,127 +0,0 @@ -.. _install-options: - -Installation Options -==================== - -Cosmos can run ``dbt`` commands using four different approaches, called ``execution modes``: - -1. **local**: Run ``dbt`` commands using a local ``dbt`` installation (default) -2. **virtualenv**: Run ``dbt`` commands from Python virtual environments managed by Cosmos -3. **docker**: Run ``dbt`` commands from Docker containers managed by Cosmos (requires a pre-existing Docker image) -4. **kubernetes**: Run ``dbt`` commands from Kubernetes Pods managed by Cosmos (requires a pre-existing Docker image) - -The choice of the ``execution mode`` can vary based on each user's needs and concerns. -Read :ref:`Execution Modes ` to decide which is the most suitable for you. - -Depending on the ``execution mode``, the package ``astronomer-cosmos`` should be installed differently. - -Local ------ - -There are two ways of using the `Local Execution Mode `__: - -* Installing ``dbt`` together with Cosmos -* Referencing a pre-installed ``dbt`` package - -Find more details on how to install Cosmos for each of these below: - -Install Cosmos together with ``dbt`` -.................................... - -If the Airflow host does not have ``dbt``, it is possible to install it as part of the Cosmos package installation, -alongside Airflow and other dependencies: - -.. code-block:: bash - - pip install 'astronomer-cosmos[dbt-all]' - -Using ``dbt-all`` will install Cosmos, ``dbt``, and all supported database dependencies. -If you only need a subset of the supported database types, you can use the following selectors: - -.. 
list-table:: - :header-rows: 1 - - * - Extra Name - - Dependencies - - * - (default) - - apache-airflow, Jinja2, virtualenv - - * - ``dbt-all`` - - astronomer-cosmos, dbt-core, dbt-bigquery, dbt-redshift, dbt-snowflake, dbt-postgres - - * - ``dbt-postgres`` - - astronomer-cosmos, dbt-core, dbt-postgres - - * - ``dbt-bigquery`` - - astronomer-cosmos, dbt-core, dbt-bigquery - - * - ``dbt-redshift`` - - astronomer-cosmos, dbt-core, dbt-redshift - - * - ``dbt-snowflake`` - - astronomer-cosmos, dbt-core, dbt-snowflake - - -For example, to install Cosmos with ``dbt`` and the Postgres adapter, run the following command: - -.. code-block:: bash - - pip install 'astronomer-cosmos[dbt-postgres]' - -Use a pre-installed ``dbt`` package -..................................... - -If the Airflow worker node already has ``dbt``, install a lightweight version of Cosmos by running: - -.. code-block:: bash - - pip install astronomer-cosmos - -In this case, users can specify - if necessary - a custom path to ``dbt`` by using the argument ``dbt_executable_path``. - -For more examples of how to use this execution mode, check the following: - -* `Astro `__ -* `Docker `__ -* `MWAA `__ - - -Virtualenv ----------- - -Cosmos can create a dedicated Python virtual environment for each task run, installing ``dbt`` and -any other user-defined dependencies in an isolated way. - -In this scenario, install Cosmos using (preferably pinned): - -.. code-block:: bash - - pip install astronomer-cosmos - -Learn more about this execution mode at `Execution Modes `__. - - -Docker ------- - -Cosmos can run ``dbt`` tasks by running an isolated Docker container per task. -In this case, install the following package at the same level as other Airflow dependencies (preferably pinned): - -.. code-block:: bash - - pip install 'astronomer-cosmos[docker]' - -Learn more about this execution mode at `Execution Modes `__. 
- -Kubernetes ----------- - -Last but not least, Cosmos can run ``dbt`` tasks by creating a Kubernetes pod per task. -Install the following package at the same level as other Airflow dependencies (preferably pinned): - -.. code-block:: bash - - pip install 'astronomer-cosmos[kubernetes]' - -Learn more about this execution mode at `Execution Modes `__. diff --git a/docs/dbt/lineage.rst b/docs/dbt/lineage.rst deleted file mode 100644 index 0298a3bcc..000000000 --- a/docs/dbt/lineage.rst +++ /dev/null @@ -1,77 +0,0 @@ -Configuring Lineage -=================== - -Cosmos uses the `dbt-ol `_ wrapper to emit lineage events to OpenLineage. -Follow the instructions below to ensure Cosmos is configured properly to do this. If you are using a virtual environment -to separate dbt requirements from Airflow as outlined `here `_, then please follow the ``venv`` tab below. Otherwise, -reference the ``base`` tab instructions - -.. tabs:: - - .. tab:: base - - **Prerequisites** - - - `Setup OpenLineage and Airflow `_ - - **Enabling Lineage** - - To enable lineage on Add the following to your requirements.txt file: - - .. code-block:: text - - astronomer-cosmos[dbt-openlineage] - - .. tab:: venv - - **Prerequisites** - - - `Setup OpenLineage and Airflow `_ - - **Enabling Lineage** - - Step 1: Add steps in your ``Dockerfile`` for the venv and wrapping the dbt executable - - .. 
code-block:: Docker - - FROM quay.io/astronomer/astro-runtime:7.2.0 - - # install python virtualenv to run dbt - WORKDIR /usr/local/airflow - COPY dbt-requirements.txt ./ - RUN python -m venv dbt_venv && source dbt_venv/bin/activate && \ - pip install --no-cache-dir -r dbt-requirements.txt && deactivate - - # wrap the executable from the venv so that dbt-ol can access it - RUN echo -e '#!/bin/bash' > /usr/bin/dbt && \ - echo -e 'source /usr/local/airflow/dbt_venv/bin/activate && dbt "$@"' >> /usr/bin/dbt - - # ensure all users have access to the executable - RUN chmod -R 777 /usr/bin/dbt - - Step 2: Create a ``dbt-requirements.txt`` file with the following contents. If you're using a different - data warehouse than Redshift, then replace with the one that you're using (i.e. ``dbt-bigquery``, - ``dbt-snowflake``, etc.) - - .. code-block:: text - - dbt-redshift - openlineage-dbt - - Step 3: Add the following to your ``requirements.txt`` file - - .. code-block:: text - - astronomer-cosmos - - Step 4: When instantiating a Cosmos object be sure to use the ``dbt_executable_path`` parameter for the dbt-ol - installed - - .. code-block:: python - - jaffle_shop = DbtTaskGroup( - ..., - dbt_args={ - "dbt_executable_path": "/usr/local/airflow/dbt_venv/bin/dbt-ol", - }, - ) diff --git a/docs/dbt/usage.rst b/docs/dbt/usage.rst deleted file mode 100644 index 36f1f3741..000000000 --- a/docs/dbt/usage.rst +++ /dev/null @@ -1,68 +0,0 @@ -Usage -====== - -Cosmos supports two standard way of rendering dbt projects: either as a full DAG or as a Task Group. - -By default, Cosmos will look in the ``/usr/local/airflow/dags/dbt`` directory (next to the ``dags`` folder if you're using the `Astro CLI `_). You can override this using the ``dbt_root_path`` argument in either :class:`cosmos.DbtDag` or :class:`cosmos.DbtTaskGroup`. You can also override the default models directory, which is ``"models"`` by default, using the ``dbt_models_dir`` argument. 
- -Rendering ---------- - -Full DAG -++++++++ - -The :class:`cosmos.DbtDag` class can be used to render a full DAG for a dbt project. This is useful if you want to run all of the dbt models in a project as a single DAG. - -.. code-block:: python - - from cosmos import DbtDag - - jaffle_shop = DbtDag( - dbt_project_name="jaffle_shop", - conn_id="airflow_db", - profile_args={"schema": "public"}, - dag_id="attribution-playbook", - start_date=datetime(2022, 11, 27), - schedule_interval="@daily", - ) - - -Task Group -++++++++++ - -The :class:`cosmos.DbtTaskGroup` class can be used to render a task group for a dbt project. This is useful if you want to run your dbt models in a project as a single task group, and include other non-dbt tasks in your DAG (e.g., extracting and loading data). - -.. code-block:: python - - from pendulum import datetime - - from airflow import DAG - from airflow.operators.empty import EmptyOperator - from cosmos.task_group import DbtTaskGroup - - - with DAG( - dag_id="extract_dag", - start_date=datetime(2022, 11, 27), - schedule="@daily", - ) as dag: - e1 = EmptyOperator(task_id="ingestion_workflow") - - dbt_tg = DbtTaskGroup( - group_id="dbt_tg", - dbt_project_name="jaffle_shop", - conn_id="airflow_db", - profile_args={ - "schema": "public", - }, - ) - - e2 = EmptyOperator(task_id="some_extraction") - - e1 >> dbt_tg >> e2 - - -Connections & Profiles ----------------------- - -See the `Connections & Profiles `__ page for more information on how to configure your connections and profiles. diff --git a/docs/getting_started/astro.rst b/docs/getting_started/astro.rst new file mode 100644 index 000000000..c14f7c94e --- /dev/null +++ b/docs/getting_started/astro.rst @@ -0,0 +1,107 @@ +.. _astro: + +Getting Started on Astro +======================== + +While it is possible to use Cosmos on Astro with all :ref:`Execution Modes `, we recommend using the ``local`` execution mode. It's the simplest to set up and use. 
+ +Pre-requisites +~~~~~~~~~~~~~~ + +To get started, you should have: + +- The Astro CLI installed. You can find installation instructions `here `_. +- An Astro CLI project. You can initialize a new project with ``astro dev init``. + +Create a virtual environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Create a virtual environment in your ``Dockerfile`` using the sample below. Be sure to replace ```` with the actual adapter you need (i.e. ``dbt-redshift``, ``dbt-snowflake``). It's recommended to use a virtual environment because dbt and Airflow can have conflicting dependencies. + +.. code-block:: docker + + FROM quay.io/astronomer/astro-runtime:8.0.0 + + # install dbt into a virtual environment + RUN python -m venv dbt_venv && source dbt_venv/bin/activate && \ + pip install --no-cache-dir && deactivate + + +Install Cosmos +~~~~~~~~~~~~~~ + +Add Cosmos to your project's ``requirements.txt``. + +.. code-block:: text + + astronomer-cosmos + + +Move your dbt project into the DAGs directory +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Make a new folder, ``dbt``, inside your local project's ``dags`` folder. Then, copy/paste your dbt project into the directory and create a file called ``my_cosmos_dag.py`` in the root of your DAGs directory. Your project structure should look like this: + +.. code-block:: text + + ├── dags/ + │ ├── dbt/ + │ │ └── my_dbt_project/ + │ │ ├── dbt_project.yml + │ │ ├── models/ + │ │ │ ├── my_model.sql + │ │ │ └── my_other_model.sql + │ │ └── macros/ + │ │ ├── my_macro.sql + │ │ └── my_other_macro.sql + │ └── my_cosmos_dag.py + ├── Dockerfile + ├── requirements.txt + └── ... + +Note: your dbt projects can go anywhere on the Airflow image. By default, Cosmos looks in the ``/usr/local/airflow/dags/dbt`` directory, but you can change this by setting the ``dbt_project_dir`` argument when you create your DAG instance. + +For example, if you wanted to put your dbt project in the ``/usr/local/airflow/dags/my_dbt_project`` directory, you would do: + +.. 
code-block:: python + + from cosmos import DbtDag + + my_cosmos_dag = DbtDag( + dbt_project_dir="/usr/local/airflow/dags/my_dbt_project", + ..., + ) + +Create a dagfile +~~~~~~~~~~~~~~~~ + +In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and create a new DAG instance. Make sure to use the ``dbt_executable_path`` argument to point to the virtual environment you created in step 1. + +.. code-block:: python + + from cosmos import DbtDag + + my_cosmos_dag = DbtDag( + # dbt/cosmos-specific parameters + dbt_project_name="", + conn_id="airflow_db", + profile_args={ + "schema": "public", + }, + + # normal dag parameters + schedule_interval="@daily", + start_date=datetime(2023, 1, 1), + catchup=False, + dag_id="my_cosmos_dag", + ) + + +Start your project +~~~~~~~~~~~~~~~~~~ + +Start your project with ``astro dev start``. You should see your Airflow DAG in the Airflow UI (``localhost:8080`` by default), where you can trigger it. + +.. image:: /_static/dbt_dag.png + :alt: Cosmos dbt DAG + :align: center diff --git a/docs/dbt/execution-mode-docker.rst b/docs/getting_started/docker.rst similarity index 98% rename from docs/dbt/execution-mode-docker.rst rename to docs/getting_started/docker.rst index 2d24380bd..73db3be7a 100644 --- a/docs/dbt/execution-mode-docker.rst +++ b/docs/getting_started/docker.rst @@ -1,7 +1,7 @@ -.. _execution-mode-docker: +.. _docker: -Docker operators ----------------- +Docker Execution Mode +======================================== The following tutorial illustrates how to run the Cosmos dbt Docker Operators and the required setup for them. 
diff --git a/docs/dbt/execution-modes.rst b/docs/getting_started/execution-modes.rst similarity index 78% rename from docs/dbt/execution-modes.rst rename to docs/getting_started/execution-modes.rst index 4d8130e3b..b2b8ce8e2 100644 --- a/docs/dbt/execution-modes.rst +++ b/docs/getting_started/execution-modes.rst @@ -10,9 +10,7 @@ Cosmos can run ``dbt`` commands using four different approaches, called ``execut 3. **docker**: Run ``dbt`` commands from Docker containers managed by Cosmos (requires a pre-existing Docker image) 4. **kubernetes**: Run ``dbt`` commands from Kubernetes Pods managed by Cosmos (requires a pre-existing Docker image) -The choice of the ``execution mode`` can vary based on each user's needs and concerns. The default ``execution_mode`` is **local**. For more details, check each execution mode described below. - -You should install Cosmos differently based on the ``execution mode``, Learn more at :ref:`Installation Options `. +The choice of the ``execution mode`` can vary based on each user's needs and concerns. For more details, check each execution mode described below. .. list-table:: Execution Modes Comparison @@ -62,25 +60,17 @@ Example of how to use, for instance, when ``dbt`` was installed together with Co :start-after: [START local_example] :end-before: [END local_example] -Detailed examples of how to use the ``local`` execution mode when ``dbt`` is installed separately from Cosmos: - -* `Astro `__ -* `Docker `__ -* `MWAA `__ Virtualenv ---------- -If you're using managed Airflow on GCP (Cloud Composer), for instance, -we recommend you use the ``virtualenv`` execution mode. +If you're using managed Airflow on GCP (Cloud Composer), for instance, we recommend you use the ``virtualenv`` execution mode. -The ``virtualenv`` mode isolates the Airflow worker dependencies from ``dbt`` by managing a Python virtual environment created -during task execution and deleted afterwards. 
+The ``virtualenv`` mode isolates the Airflow worker dependencies from ``dbt`` by managing a Python virtual environment created during task execution and deleted afterwards. In this case, users are responsible for declaring which version of ``dbt`` they want to use by giving the argument ``py_requirements``. This argument can be set directly in operator instances or when instantiating ``DbtDag`` and ``DbtTaskGroup`` as part of ``operator_args``. -Similar to the ``local`` execution mode, Cosmos converts Airflow Connections into a way ``dbt`` understands them by creating -a ``dbt`` profile file (``profiles.yml``). +Similar to the ``local`` execution mode, Cosmos converts Airflow Connections into a way ``dbt`` understands them by creating a ``dbt`` profile file (``profiles.yml``). A drawback with this approach is that it is slower than ``local`` because it creates a new Python virtual environment for each Cosmos dbt task run. @@ -94,18 +84,15 @@ Example of how to use: Docker ------ -The ``docker`` approach assumes users have a previously created Docker image, which should contain all the ``dbt`` pipelines and -a ``profiles.yml``, managed by the user. +The ``docker`` approach assumes users have a previously created Docker image, which should contain all the ``dbt`` pipelines and a ``profiles.yml``, managed by the user. The user has better environment isolation than when using ``local`` or ``virtualenv`` modes, but also more responsibility (ensuring the Docker container used has up-to-date files and managing secrets potentially in multiple places). -The other challenge with the ``docker`` approach is if the Airflow worker is already running in Docker, -which sometimes can lead to challenges running `Docker in Docker `__. +The other challenge with the ``docker`` approach is if the Airflow worker is already running in Docker, which sometimes can lead to challenges running `Docker in Docker `__. 
-This approach can be significantly slower than ``virtualenv`` since it may have to build the ``Docker`` container, -which is slower than creating a Virtualenv with ``dbt-core``. +This approach can be significantly slower than ``virtualenv`` since it may have to build the ``Docker`` container, which is slower than creating a Virtualenv with ``dbt-core``. -Check the step-by-step guide on using the ``docker`` execution mode at :ref:`Docker operators `. +Check the step-by-step guide on using the ``docker`` execution mode at `Docker Operators <docker.html>`__. Example DAG: @@ -124,14 +111,13 @@ Example DAG: Kubernetes ---------- -Lastly, the ``kubernetes`` approach is the most isolated way of running ``dbt`` since the ``dbt`` run commands -from within a Kubernetes Pod, usually in a separate host. +Lastly, the ``kubernetes`` approach is the most isolated way of running ``dbt`` since the ``dbt`` commands run from within a Kubernetes Pod, usually in a separate host. It assumes the user has a Kubernetes cluster. It also expects the user to ensure the Docker container has up-to-date ``dbt`` pipelines and profiles, potentially leading the user to declare secrets in two places (Airflow and Docker container). The ``Kubernetes`` deployment may be slower than ``Docker`` and ``Virtualenv`` assuming that the container image is built (which is slower than creating a Python ``virtualenv`` and installing ``dbt-core``) and the Airflow task needs to spin up a new ``Pod`` in Kubernetes. -Check the step-by-step guide on using the ``kubernetes`` execution mode at :ref:`Kubernetes Operators `. +Check the step-by-step guide on using the ``kubernetes`` execution mode at `Kubernetes Operators <kubernetes.html>`__. Example DAG: diff --git a/docs/getting_started/gcc.rst b/docs/getting_started/gcc.rst new file mode 100644 index 000000000..3f4bf7d0e --- /dev/null +++ b/docs/getting_started/gcc.rst @@ -0,0 +1,68 @@ +.. 
_gcc: + +Getting Started on GCC +======================= + +Because there's no straightforward way of creating a Python virtual environment in GCC, we recommend using Cosmos' built-in virtual environment functionality to run dbt. + +Install Cosmos +-------------- + +Add the following to your base project ``requirements.txt``: + +.. code-block:: text + + astronomer-cosmos + + +Move your dbt project into the DAGs directory +--------------------------------------------- + +Make a new folder, ``dbt``, inside your local ``dags`` folder. Then, copy/paste your dbt project into the directory and create a file called ``my_cosmos_dag.py`` in the root of your DAGs directory. + +Note: your dbt projects can go anywhere that Airflow can read. By default, Cosmos looks in the ``/usr/local/airflow/dags/dbt`` directory, but you can change this by setting the ``dbt_project_dir`` argument when you create your DAG instance. + +For example, if you wanted to put your dbt project in the ``/usr/local/airflow/dags/my_dbt_project`` directory, you would do: + +.. code-block:: python + + from cosmos import DbtDag + + my_cosmos_dag = DbtDag( + dbt_project_dir="/usr/local/airflow/dags/my_dbt_project", + ..., + ) + + +Create your DAG +--------------- + +In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and create a new DAG instance. You need to supply additional arguments in the ``operator_args`` dictionary to tell Cosmos which packages are required. + +Make sure to rename the ```` value below to your adapter's Python package (i.e. ``dbt-snowflake`` or ``dbt-bigquery``) + +.. 
code-block:: python + + from cosmos import DbtDag + + my_cosmos_dag = DbtDag( + # dbt/cosmos-specific parameters + dbt_project_name="", + conn_id="airflow_db", + profile_args={ + "schema": "public", + }, + + # cosmos virtualenv parameters + execution_mode="virtualenv", + operator_args={ + "py_system_site_packages": False, + "py_requirements": [""], + }, + + # normal dag parameters + schedule_interval="@daily", + start_date=datetime(2023, 1, 1), + catchup=False, + dag_id="my_cosmos_dag", + ) diff --git a/docs/getting_started/index.rst b/docs/getting_started/index.rst new file mode 100644 index 000000000..7aa42ca5c --- /dev/null +++ b/docs/getting_started/index.rst @@ -0,0 +1,37 @@ +.. _getting-started: + +.. toctree:: + :hidden: + :caption: Contents: + + Astro + MWAA + GCC + Open-Source + Execution Modes + Docker Execution Mode + Kubernetes Execution Mode + + +Getting Started +=============== + +Recommended Methods +------------------- + +The recommended way to install and run Cosmos depends on how you run Airflow. For specific guides, see the following: + +- `Getting Started on Astro `__ +- `Getting Started on MWAA `__ +- `Getting Started on GCC `__ +- `Getting Started on Open-Source `__ + +Other Methods +------------- + +While the above methods are recommended, you may require a different setup. For more customization, check out the different execution modes that Cosmos supports on the `Execution Modes `__ page. + +For specific guides, see the following: + +- `Executing dbt DAGs with Docker Operators `__ +- `Executing dbt DAGs with KubernetesPodOperators `__ diff --git a/docs/dbt/execution-mode-kubernetes.rst b/docs/getting_started/kubernetes.rst similarity index 98% rename from docs/dbt/execution-mode-kubernetes.rst rename to docs/getting_started/kubernetes.rst index 8ee7b9b0c..9a500a047 100644 --- a/docs/dbt/execution-mode-kubernetes.rst +++ b/docs/getting_started/kubernetes.rst @@ -1,7 +1,7 @@ -.. _execution-mode-kubernetes: +.. 
_kubernetes: -Kubernetes operators -------------------- +Kubernetes Execution Mode +============================================== The following tutorial illustrates how to run the Cosmos dbt Kubernetes Operator using a local K8s cluster. It assumes the following: diff --git a/docs/getting_started/mwaa.rst b/docs/getting_started/mwaa.rst new file mode 100644 index 000000000..29d72f27e --- /dev/null +++ b/docs/getting_started/mwaa.rst @@ -0,0 +1,103 @@ +.. _mwaa: + +Getting Started on MWAA +======================= + +Users can face Python dependency issues when trying to use the Cosmos `Local Execution Mode `_ in Amazon Managed Workflows for Apache Airflow (MWAA). + +This step-by-step guide illustrates how to use the Local Execution Mode, together with the +`MWAA's startup script `_ and +the ``dbt_executable_path`` argument. + +Create a Startup Script +----------------------- + +MWAA allows users to run a startup script before the scheduler and webserver are started. This is a great place to install dbt into a virtual environment. + +To do so: + +1. Initialize a startup script as outlined in MWAA's documentation `here `_ + +2. Add the following to your startup script (be sure to replace ``<your-dbt-adapter>`` with the actual adapter you need, e.g. ``dbt-redshift``, ``dbt-snowflake``, etc.) + +.. code-block:: shell + + #!/bin/sh + + export DBT_VENV_PATH="${AIRFLOW_HOME}/dbt_venv" + export PIP_USER=false + + python3 -m venv "${DBT_VENV_PATH}" + + ${DBT_VENV_PATH}/bin/pip install <your-dbt-adapter> + + export PIP_USER=true + + +Install Cosmos +-------------- + +Add the following to your base project ``requirements.txt``: + +.. code-block:: text + + astronomer-cosmos + + +Move your dbt project into the DAGs directory +--------------------------------------------- + +Make a new folder, ``dbt``, inside your local ``dags`` folder. Then, copy/paste your dbt project into the directory and create a file called ``my_cosmos_dag.py`` in the root of your DAGs directory. Your folder structure should look like this: + +.. 
code-block:: text + + ├── dags/ + │ ├── dbt/ + │ │ └── my_dbt_project/ + │ │ ├── dbt_project.yml + │ │ ├── models/ + │ │ │ ├── my_model.sql + │ │ │ └── my_other_model.sql + │ │ └── macros/ + │ │ ├── my_macro.sql + │ │ └── my_other_macro.sql + │ └── my_cosmos_dag.py + └── ... + +Note: your dbt projects can go anywhere that Airflow can access. By default, Cosmos looks in the ``/usr/local/airflow/dags/dbt`` directory, but you can change this by setting the ``dbt_project_dir`` argument when you create your DAG instance. + +For example, if you wanted to put your dbt project in the ``/usr/local/airflow/dags/my_dbt_project`` directory, you would do: + +.. code-block:: python + + from cosmos import DbtDag + + my_cosmos_dag = DbtDag( + dbt_project_dir="/usr/local/airflow/dags/my_dbt_project", + ..., + ) + + +Create your DAG +--------------- + +In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and create a new DAG instance. Make sure to use the ``dbt_executable_path`` argument to point to the virtual environment you created in step 1. + +.. code-block:: python + + from cosmos import DbtDag + + my_cosmos_dag = DbtDag( + # dbt/cosmos-specific parameters + dbt_project_name="", + conn_id="airflow_db", + profile_args={ + "schema": "public", + }, + + # normal dag parameters + schedule_interval="@daily", + start_date=datetime(2023, 1, 1), + catchup=False, + dag_id="my_cosmos_dag", + ) diff --git a/docs/getting_started/open-source.rst b/docs/getting_started/open-source.rst new file mode 100644 index 000000000..8930bef94 --- /dev/null +++ b/docs/getting_started/open-source.rst @@ -0,0 +1,68 @@ +.. _open-source: + +Getting Started on Open Source Airflow +====================================== + +When running open-source Airflow, your setup may vary. This guide assumes you have access to edit the underlying image. + +Create a virtual environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Create a virtual environment in your ``Dockerfile`` using the sample below. 
Be sure to replace ```` with the actual adapter you need (i.e. ``dbt-redshift``, ``dbt-snowflake``). It's recommended to use a virtual environment because dbt and Airflow can have conflicting dependencies. + +.. code-block:: docker + + FROM my-image:latest + + # install dbt into a virtual environment + RUN python -m venv dbt_venv && source dbt_venv/bin/activate && \ + pip install --no-cache-dir && deactivate + + +Install Cosmos +~~~~~~~~~~~~~~ + +Install ``astronomer-cosmos`` however you install Python packages in your environment. + + +Move your dbt project into the DAGs directory +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Make a new folder, ``dbt``, inside your local project's ``dags`` folder. Then, copy/paste your dbt project into the directory and create a file called ``my_cosmos_dag.py`` in the root of your DAGs directory. + +Note: your dbt projects can go anywhere on the Airflow image. By default, Cosmos looks in the ``/usr/local/airflow/dags/dbt`` directory, but you can change this by setting the ``dbt_project_dir`` argument when you create your DAG instance. + +For example, if you wanted to put your dbt project in the ``/usr/local/airflow/dags/my_dbt_project`` directory, you would do: + +.. code-block:: python + + from cosmos import DbtDag + + my_cosmos_dag = DbtDag( + dbt_project_dir="/usr/local/airflow/dags/my_dbt_project", + ..., + ) + +Create a dagfile +~~~~~~~~~~~~~~~~ + +In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and create a new DAG instance. Make sure to use the ``dbt_executable_path`` argument to point to the virtual environment you created in step 1. + +.. 
code-block:: python + + from cosmos import DbtDag + + my_cosmos_dag = DbtDag( + # dbt/cosmos-specific parameters + dbt_project_name="", + conn_id="airflow_db", + profile_args={ + "schema": "public", + }, + + # normal dag parameters + schedule_interval="@daily", + start_date=datetime(2023, 1, 1), + catchup=False, + dag_id="my_cosmos_dag", + ) diff --git a/docs/index.rst b/docs/index.rst index 07ee7b336..4220f7668 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,10 +1,14 @@ +.. _self: + .. toctree:: :hidden: :maxdepth: 2 :caption: Contents: Home - dbt + Getting Started + Configuration + Profiles Contributing .. |fury| image:: https://badge.fury.io/py/astronomer-cosmos.svg @@ -21,7 +25,6 @@ :alt: pre-commit.ci status .. image:: https://raw.githubusercontent.com/astronomer/astronomer-cosmos/main/docs/_static/cosmos-logo.svg -=========================================================== |fury| |ossrank| |downloads| |pre-commit| @@ -33,11 +36,6 @@ Run your dbt Core projects as `Apache Airflow `_ DA - Utilize Airflow's data-aware scheduling to run models immediately after upstream ingestion - Turn each dbt model into a task/task group complete with retries, alerting, etc. -Quickstart -__________ - -Check out the Quickstart guide on our `docs `_. - Example Usage ___________________ @@ -72,11 +70,18 @@ You can render an Airflow Task Group using the ``DbtTaskGroup`` class. Here's an e1 >> dbt_tg >> e2 + This will generate an Airflow Task Group that looks like this: .. image:: https://raw.githubusercontent.com/astronomer/astronomer-cosmos/main/docs/jaffle_shop_task_group.png +Getting Started +_______________ + +To get started now, check out the `Getting Started Guide `_. 
+ + Changelog _________ diff --git a/docs/jaffle_shop_task_group.png b/docs/jaffle_shop_task_group.png deleted file mode 100644 index 52dc46228993ee47821c5af0f874cb59749fd147..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 136482 zcmeFZWmHxByD&0%$0}6J&^H zFdW={A!{))Wf?Iskg|)Tg|(eI9Gvv)I89V7wZ2Dby3y}MMA1Q@r^nP`V{odXR`59l zos!ZB!FZ;if~ofs$dfO_k?q!t!c0&l)EkJ>5%<`47lZG?3bHNBYevq-BkFgE+77E3$&dR2QV+^`kgf=I` zy+4^GG1Ivtxht25Vd?z=A1E%C@_G2jil{FRTuV4j{4%0Q?7^zqQ8(LthA;6Qa8C?q zy9hD#o_1Mdv}iiulS4xqkl`M3GHXV|!|h-!6C$_VFL=HcG$r$iARJEFQe-k7s>mT^681EL5iOjIYvV`8t2|TDbb2(DL}$F`ouf8ac9YQ>)AK>9_zB=Qv^8T{N?Hd!IMuTs{~%C9MSQ1x)F5wFoO zu{?BS8!2A(&OI+Md#uOmRpNmRjvAVhe6$B^&vWaC!;-QzM9Ex#Ldtg7NFS zdhz|oY##%>);{(J9jtcH6W^U(gRWAIpHqwWNuvs~jiEBVe*Q_sg!b77s|ezywgzr8 zc-;W4RA=g4-iFA%(_orlkB^ieF-1|o4N)1tU(ToP3B)QOFBy=SiE8X9&B!Ar3%n9> zFL<-{9i;=_^Oe6=1Nrebigq2FgSN(FVe314WMMtbT+->+Z=yrr-}G?_59dsOoeNPn zYrK6X9IiDQayUpvbTKV_6}o+7OnzJav<`-dE$U;k^83eW!7UTfqaZ$HoQ-jSCjaM_ld(so!l=2J8=(4 zq*zl@Q}p59wHiK%D(8N7c!rWSdzqSihYBY|gMxw*>AUt(5w2nwap5+!-IYuJd{Y^D z>Do9=T{UCLNn0#X3EkZ*e*@9f2+MBCI|ahOk8%q_3x?3*U`Ka~KlY=ZTf<|0IDm8{ zf=-JF?Vzu~vP07Cz^WkLM#WsBjn`2S3YJVw4aScO@;kw}rSHwN6Q;gpE3*?813E(WV6+?bDsx z`95pIT7*pgU^?e)!Qbm@$1Ok-dnwo%(edQXQ!|`AJXy4d!S=yo!R-46o763oc(0qp zZL?5Y2x4AqeEaX{G21diIkb2Qu}bt$N}ujfq=NLHy2l6= z6cl|Z;3-Bb&M62}4_BvG_b4LKY%DrdUt!k{vXeQbka~Zyys@0ST)IqsXzs#&!7dSF zqHvJ;Nv(?Zh}M?YJ(fN8DweybsK}xyt>{^i=y+q1t@>F}n&yuBO+j9+MgC_E;&SR2 zx>SAhN?O`)v1C_H7qYJ;x>IMp8CMHis)Gf$i>COU8wKM2>mx z!<_C(`iYq=cb(#h$ghJNc6?Dn>1}cTAq%AnV|5d&R+iQY)_W5Uqr0PDN1VrH#uD?? 
z3SH&%#Rx`gM^@HX*O|xX%YD?&Mp?6sON?I{M}&-HHuG(_E;tDW2to`B7sy(Pz0;2g zFP~pdUqVlakI5e|p?IJ?Ls7%zdYtlXowQ7F&WFd*Yh(YWYmX=^#3Q6Kg#CW~<2s^K zVnX6qk6*u?AWG%hr0SUtUOF zW}fQkV4i(m`e4Z*-az=k=?6j9{15Ddql21B+{tI!AGF?7ey~=tb)Jm2>#G^4(X$5+ z9k#~RzK9-2C`Ku^oba9?*%IB-*}CFVCl*eo;~M9hw0~VEKWjI8Qm47C%+K8{-z?h< zJuPT$ZvEol>UV|q5p5VZ3wNB-fwzFSf&_!ahLn%gos^O!f`pjFFtX-xBAd%auPbFc zZzJytkG*>nx2&!DR6&-CLbd__EbmbYLuz@mvi?i`)dr5H7MEJjhYb!+tviyQ#6Rjf zN~d>m5DhLWdxOajcl`sdkb&1f>>qmSL>r#?J1-Idaj45uK@5_v#lBQX|a zwY5(7Rmvpn+1F=r&y-B0x`*Bf5=ccQKd5Kl)%O*-9y#|x=RzL|UKKz2bnu$<^yE%1(P}P>*5Szhg*3r;F|pM&noqeo&nl$lVwr3b?F7Ix%N~`j z4dQdpx^4T5==`Fc`sxIz97PRG+qkaZImXRM2fX)qbpA;Fk?)5jo|6uaLF+8*($Sqf zmZaMn)`tCh!t?tJZ&nlY*w+TPQ@WFC!CY+o_8vzhN4(vxO=}?o^UtE#SbP@tBorl9 zBplah1|8|S=+kuPPUL!7YM<8N$@G;XFWlo|{zQO5P{-t=UE}e!Ue$l2n=QX8yox0P z9KmwSw>in#j>RZrI8ry}w-I9g$vn%PJK>6HQKd}VKx?7aWFY)9?oC`S!(v%d&8Fxp z+wRZfvEw7-q_z|j9A&jy&2~Ep?ulCt>L}{Fg={4{>L>n!0XO5Ax5S%VMD~oc=hiV! zqdS97lDCRWix}!~>L1vFt2mvv8W%sbkK3_NY1Hj(d3-&MB(3Dvu@SJb@gUmO(}|dn zwe|6MyXbE2xwQM4FSKdU$L%b32iYgrCw)(D-qSVU{B{*_0MQ+l30s3Si=4hxYBPWG zjMI$Ejf0xIjvR}BZf<$rZ9VMA`yDBTC=Iev-nwI+6|aK>2>vUYOQmq7)3kn`@o|B| zi-nEK{POxwH8rW|!mhV=`_rRqT5F0aXS@N&FDGAh5hM~2GdCI*Il9{l9yibKh0e^) zZnF63IkbES=N9_8T^$trYbxpAYw!{1F1`r@U0@MYilguE_bd&6muXM%8Q`jU3{Yb&$CWJC`*s@;u1I8&(5%jU3V zHSwrATd-45GcC$z;YRfOyQ^bgdQyN!YYiB@&vMtUbM^Vuy*;{x$gB1I?QQaoqy8e- znbeuEZ^G5?(bWj{CAo*MnZLjp-NxwXuv+`Owvr2*tJ)3S^ZF~qMnpuDvS5b02OUNd zqoeR%t)y@^Y;bN~LfLQBQtp-kiyE%&54hN#naQ1*^`~w(Z!PG880s{3o9E78ygcag2@%^;AZT_ z5jS@=b+LAGvvzaSp`V%bxK^E8> zmS@bYEWg(VrV7BG@+n(;ncHcLTiXLP1NIQ&VdD|_WBmW|=C2+9HdE`bna_CGx&A)& zZ?FFQ)EBPiE@F=Mz@BbGfAQ<*VK}-J#rJx`q41+dYT2|VVsj>U6&9c?g z#1i)L-1JGe^mIt`*Aye^2dO~E(F%E+Mojr{%z`}K`#0#( zf)xTg*zAty9{(G&08vBz4^s9ALH}3O{+nF=uc-Y^HU3xB{+kK=|BY4qE|nqv8ZXQ6 zm3CEi^*vv3#o3Ahv(0_|M@^rbza1{kJ8ADbc8b&Q44Vz>>}&Do#XL6udsY>_XIF_PaXjN=LxmMlfXbZ zkvl2YXU|md@bCr_wxpeGQL)Jw_g~4#$YdEJ>o+9G-52|Z<*w(P!21pjjBal|{{&@e ze!m8|j+fR#ph2Ep=9#uFe}aO%7m^RVq*#TykI#Jj`ud{d;>>TAMa7g5d*{U7sxZtcY_ 
zzyw@^=Z$|7XPpQj1Z-=7GSITIop4?D^~W4EH#qZuz>NO^^*URKF>P|BcWh)-+iEOnkax_j zQ4`WUjR630Xg>fBzq5sh7#`^m=Sv0K)9w>Z4OA?1=;6smmAPe}nb|{ujDa6q+y5M1 zKv>bKXyV^(#zhYlSmdgfmL=RV5$RZ%nN218QUJn%{0LOR$Vx~63Jhd{-+7V9B%=}a zSzNo7PXAFk@A4MMDNKU?W|4mCqe$#MD<|!Ci|`&d`QUh!g#vYQ2@02I03?g*E=*6T{F+0N~5SK6E|Ys&pVr`_to%rTW)d{6bX#WU zGl{4@g;~1SulrWdjkoT`!iybtPtZ;-7XmcxBOL<6&C8~zaj`)AxLTRM8A6i$=Y0n zWa;H$e-3!agI41a)49Zg{|T=4;Ur0ZLz$%k1!CwMuGxUW^r!fr1?Zu-9`k3%WC_rx zn)4ef_4MO5eN)(XZvA2LargK%j|w*VM>S*$N6ZCJDncJdeLf|DaN&dgiribNE^fvV^Z(xD*`ZF%TTSfk-an;-DpT z7#6de&yrb`-%k)e`w0=G7gt)?O*#HnaI#QY)upJ$YQA%td7$6GwymGRhvS6SWPhJ~ zWxX!7)bA-3<;^=iKZAQwKg7~L&@#=sarC&i$(?<(*)ZOC-i`8GMMT1oi{h2uO<>}Q z>CtyT2^1*-wBob(y(3?=2bmqi6HXPVrN+kEe}#nz1YdalJoe`3n@h_5e9k_AA44?@ z7+(}mo@)1gc4si}Fz?~-X+iAaEROH(DCXnsXSweHJqIcLGCm-eD&)s z8aeNjqsPh?)$vM)vvfRmrc6jq_vB*O%%O7K{hT?X-Sk3^36t+Crl+;DP*6{I;wU#c zYWfRykG%I%wY3wgtZ!^&W~s^fRphJwkTlncy!M90b?Q zxF`2!XA8O3xvwmB-mslyPnus6;G1{NJS6LJ%LYW>r9ZKCc)#G9K3ghz8m zU*QfqZd=LLY}c(=ZX*}HCaXsZroX}?$FsNJiU@%{V@nJwdzoi^$!5Pj*qFYOHN#odC6SWvyTb!^;pQl~y;W8v~7o(VOxb8f_Uy;J&{>t}yDB`d?)Kt)S zlgoOJf4llvwzUf3#@*|vI;xNplG zE`{|Sc5#y@i40tMJ+DK(PxPcm3zFSV7SFf5y-wp8W9WbW(edkD^9}lje%h~IAp&L}Wz2ZF=#Um_6rJ9$|G2M@J(gL=m3Mcoa*PJvgLLR~`QW`1| z?rk5{susCvH*R%Sy68!1!rcrYZ}t7x=vlDLWJ5TiD#u9$e_A`K1In znh5$jG;%Mg6JR!5BoOfU@S@H{2xwjyI9lG`6|a8Uq64OPhDN-nudbFO`pUteTA{KS z4&KAZ+HL2a+B|nOQKm{!7djqxi;Sj{_v3G_PQ8vnWB9c+>8}}qQ%tL!?_F{(d%=65rK9TJ~cV&81_4@POKVkvHX%b?pZ*t;wynDi9^Kn?U z*#O_Q&C_T@B`oSKyZ7nC9b+4o8DiIK2JG47p?s9s1wpNor73Izts$uoM=gkQYbL@B zG^s9Wb0=T4ge{%Wei`mCeZY22+3Tj=hh*Ua=8j&wUjp^u&)YR73jqyTek~>mqpU{tG_`w%G+&Et|bIP3p!F z_LHMiA7 z2P3T|y)3cX`<#&8INJD$U)|SYs`ha7ES<0CYW}!_FjB)4u;N4uSUtpC591_ITkWn0c*#KB~pI?BJo6 z&Bd9}X&r~HbISH&I`v+Zk7QoUU3ib6!DC@Lw=kVYzx+LD0Vp#3DD4ItAYrhG7-@Tn z$%=q>k)orc#oOI3+zsh!pX^dskRPjZQ zj}o|hy}sYN3k0Rd(ZXwkP9D~@-YX|!srDMbY(2ya7w|_3@vFSJXsJLzu+B(_4rzcr zOU_$;{>xU1zz0BbJ#mSQ9Q4{%>l}1wwkB^W0|pYm;ks;GbU)NG@KI4zl5LJ8(DR6} z%GhkPijq3JAYy4gHP{$G!#Yz#Ft{__Jv;o8ns6@AuFzaI@_qeB?uj!^vO$DQA062m 
z7&U%4>gxF>r_HyNn8f$Yt%}7vc3b_k=`1l-D-5sCD(ZvC{KXWvy~}zUo1QJNx|)AQ z(A*oNF-W5yEWMy@MU*3rKCA5uSgso3i{8_rl&V*{Es}w z_ZSb0y<&g(4V@1s$Tl9vt(^Mno1KJ93^h_Net3eJRO3>&6Q`L*x~x*4ya#@+xaDtM z)^Rw6mEVgvyqtFCdJ^7V>f>G;!QR!mSyAyj;1p>A%157dBaT|o9vA?z<$g$PpvWBT znSRVs$4`6xE()gPVoeEYP5OBO&%RpH@%0#cUp|jAc+-ummA5nf#BZ4H{5o8JX#HvZ z{9W|0CA!<1;gRL*AP^IK5GiR9{A z`jGSg3-Md2l4$Q+A|5)VEmXSew2&UraVL^7QLU0E6IP{;^c&#K=RuHboi?}RJVv|U z({k5Ll^^T2`OQ{B?BiH;t}iM5zR(W1$>T(o-EpN{SaFb(!y)K1llgKoyF%lFvDtG3 zs3m#R(6Li3gV*8@{G1yZ%bDb%wJuM6ila5(Jv(8pO;>jL(zACJoHpYLn$&}?L)DtA zcbBOG0tNN0bbJFlL6kyHb$@fL5SmD;jY zRLewobs)&fa(#Nt>vF-(Sf3HPwLX39x9)ZB<~?-^E`zU11y85Dh5*6!9*HTJhpy~@ zG_CJF8`ayHy7_3k98fR#O@*5HcQC+sFIHCO-OPA&&hry-USA$vplo1z;SA;iO2V0d zF+qDqhw(Q9^Jl~kQU;94N+^C>47#bFvGyc8%X^5JB)>$tcB2-G!TdCU{K5^&>yY>+ zJH2{U6g`g??McUl)~ksU9SX$q&1I7E7xk5I(#|8QTiuWN8p|dthw6r~d3Q&pSd8o;uekU8J7Ryu`90gW`WquOa^d2Bhu&y#bP1u1^d17zgoqxJdg`m@jU976nS z9d{S_M>LHJcUZTlv6sVUFiQyWS65TBWr{I2d+(keIVWsxjq z)qraT2IRl8z?=~X2XNIQu{@~oNCJRI66BfsnbyD~eKIsEJmSD2PhrQ-Rv5oHj>s_{ z%^q+nm?{3^L)Mel>KNEB#xcNk{kawQS&Lx>S91 zyjdXiG(kBF=mZaWxOj{(nl zf}(Q^OV{fCrRsJ;V(H$RvI%tL>!!nCCuFxNje!sVz(S>EoHK5(;9toO>$ zX_{%|G}yuAj!7`pbhoR{VY)`Lto;+GWe#8?wk8=9lFNC%Hr74=5zlrwmzy@GKL{kKO+Wpgt>JySEgF2;_yVy!<#C*?nTcUyly z!G?2ls(B#a)2L8v9Ehy?iJ0pH$};M^`YfS=%~GGjMPPZUQ)HyLZ3Z7x19J2gK?*k^GwU#u=bjT;c+R|332T^AP+N? 
zsKEXW`aJ;j1}h6PJO~90bUY0V2~Zsj^kzRW-!IVhjQTz{9yX3MIDaD-1`nJ~mAv2w zAFf=7l3`vezIR{kH@_#puk%jFI>U3UlIG!ZbQtfXp%LM;xEzY#CJzZX!;%!CR7OV1 zQU_#5mN?Re5=Rl(-D%U+@#jWiR1ob&f5u@UH~lht_x63Fi$!~-MUKO7 z)ZgD*e^){NCO{)PTD9bY%vNT{TVZ*5lsWeBAw9uGYvc0+^pg73V;~qZS_@IoC1KDs z5FVShfYr`^T7DoOkLHF43Y>s>xUUhpg%joqP>v@6-plvbm|vRt$CMU8CtF3P#Ni-; z051{qzKbK_I02EH+oTcc?^XZHj6YAeKw7}EMk@_i5FTLkn?c4SazKIL zUa$h-uCxGV8qq5PmcbJ^^7O=<_~+Bdp9ewzQv?oi)<^iX@nn~s-^~Mb48#W3Ca2vN zg@arJ2pw^MzyFVTe^&vZA=6E@l>DnQ{apii=8&9*`p=k@!$EQl$*?H?cMqV7|6LVC zM<;VEmtaV+mq2Q*?Yz+;_S+BtlIK4(N5lb;lsW2cpdNAuXu_=@4^RUU0>sWQqXJgw z{_Tzbx_bi$dCGzSRCEFTugWk6PjJK$6R0Dv)pbBPQ9Cgg`d0PV;K{l6XpFVl$_CDLF5r@A0OzMuj|Q*YrA z!1um%u{jb){#A?o+Z|3hLar)ajz=w(wX&cJ7Wq*8_V%_{?ouPJNCvDhf{gUT4G9Ow zTk8<-Z?pbZa|g;m{VRAY=Z|s+Z~r-#SvTJ3?EI?&`O8fGv78pLMGFd0B`*Q&TZM~W zs0azH9_jq!X#Kx1=hvj=Cjf$|_nU!gupxk;?`s~whO__*)G9Ic&j$=YyB5%3I-|u~ zCI*TGM*nCy00U_K14|D9lRJPI0;uRaK#$DZ#8UuV7XT<1DD3`OZU7m8oyj9H0Nn*8 zB$H2YdVtZ+bLRqlzmm$Zd!ANu}6wfsr`{GY@ z*_TDU>(1!@FB^)az*0NyprZz~U}a#5;13U8i3p&Z=@PCCydpd}?iFD8$DPs60e^TL z)b#{7+5&*xNC*uAumnWX+<wd9V$VC_s2Z42gab5EkrN#aFP|Khp0S zK<~2i8x8{6Ibhk3p5%#zJ`dkMzSy2gr3Wy(_?pkO?a|zM?P(;e^6SIJGXo z_K%4813GcD1S@h~0|mRwm2V&fwB@IOU?kkX`bS$G<_KXfjd_|W?n}IXj0RF>1w`j1 z*qI9f=#>LnWQzX_gZ`4cHkk4k!a|E{fZ~^<-yQ?|{MN94@FHQhz`_j<{$ym33}9}M z$qx!x1IU|pPY@^XbNYCNXtrFur<{$!u|`@^Kr^hD3DPy%dH?~Ixfyf;QM-zrNsX-< z>x!4Z4l7RnVsFi~Ph95PnyDQpvl4zq;BE3^0IlxnHnggZ4Y4f}|1VTc)Bpl=&Bo=6 zX3zrMiBL_b!VNP9V0rAIS1%=3LrrYuBra2^&-ns4?{eY)NDq*8$7J^;^ zn^8767Z4%f(gWg=i78f#6Xpi&EA5#1ak5-{UUj6#!CFQ!hr#?CCmlTG{2`CkNrO(4 zM|;W4WB$&S_b0Nt!sK$VZ&gr%CaQCSsR}0j_>yU*a=ANslHbQNCW-)dt4Xn_q8Ys~ z>{31ugK&}o38|beyHtPJPbHI_XP(O@7-CZY9?XXH5mW)TY`SjrEhl7SyI+sp$8@r0 zl1y!$`;t?D3)tp?ywSIQzr*26MC9hM>c5DX1+x$~$&uldfA#ic=>pH1W?9N-CQDK% zc64K*)sxDrLp#xp?lERNa`=7y{l$*~xfT(?pn57}GJP1WcY`LwX!te11HQO^W$vku zSx?$}@?x1st-5u98E^V#`bWq)UIiaAMXr57&JfIL8W4aZBJ8?SmB>o7mG z6(%jYYJ9O#K3`?)cpMyGTa+gq`lvBLC{K5o?I_BR&2j^Nx{1L%jYc!O0uh1(>?hVo 
zK3kKM;;T+rXh_Mc*~eGDL$2Xy`@5zJ2bqF}kUOU<8#uHWfIws^=gK3Q=mFF`C2FuM z6yI=>myl6BOo`fAI+Tb$9d+xIoh9|b9>fPuq~717dQr%{qO?m*cR~c7v)zL*Ch`U( zn3+lVD~Xd*H$y8QcKbB-H)=@pnxB%%%%<-g*x3OQm0X6})`*MB8b%rET`AM8XFFr< zYOUGt;|-jDtc8aF5@lNMtW@Kzp;7Ox)x_enc1!CMkoc`F!X2O#h!%3FKRh%(>oT}1 zc6h~R>$QPq$X8nkdeQH5ig$ILz>#=MEcl6{Wuw3#7>%POk>X~kjA})W#%s)K;^lmm zy=O>|5Z7Oh#sr9a**s>VX`W{^02U%bF6jz{BL>Lwv#}2o*<4NSH)y3!`< zqGK6S*9FEhIT_5>^7nXVOJK7y@d~^*+@sZNL)7Lb^a(A5V z4l`PwxmyO0M{j~~ND+3D6QZF?i07Dqw z8X)4KK>Ua*rS9m3?b&d2bhiAZ$Xw+{UY!gci9$ESZL3bT&DJwNntznde3iU8Wk*_@ zcTIfw;#4xANTd6z$87d zRD6&=!mwBL+kGn%3kZj#br<+Wa}K9?k9LpGsms*$A-)R_d)Jmc7hpsoU(l&EN*Kq> zTKeer_l@5xS=zhSQ5s9Eos3`j?K~Acs+hakr1ptX0=g%HMas(_F6U_Q1~`{@GwS+u zY?`;0<2-et`wIptz~M@&+A0$uWx;}nrkRKt3LIQOJaE6>e^n;Fa@aT0sDPZGGLSlJ zJ{z2n^2aF#aKu&M++{@few)bQWQ&*I>@KsH5U)xy_W|!W+R5uS+d8w}Fl-72b*7@a zOg*G1~Gi& zs=4#;S5hvSF3;Y*x$q7BgXTyqPVzC)E+>g)WCOY$dC=`wJ}+6`G5qC~C}aWjvyy`P zK{zh}^i{_7)E_~5VL`0d=}YjSONIceVU?_jvzO zQV>00q_VRT3#brqVY2XkP&5}O%nmTO ze+2)*+5p}Im>oN~XxYG$j}IC-e@Tb(eSi?%fdfo9VJff~$6kx<9wh5gZyj5I;yw28 zORiYMU|@tqnA}$&%mISz5f*Gz+%Rik><0$}vR}1492^8W0|X%7|D72O0RU#-Ap^9% z(SlRP#z7_cfyt(HnjsDvaQ^9n^WlNbGP&@$8s@c$)CpeWb`JCl|*L?G{ zfB?Y(hJ`$i{f-kx0jx!9!&n65lz%RQ1PiEQn*L~AZTJwWq8p(CBh_#8)%<7u79EwC{hEnJho63$G=Wze?{I|&w)LO zbYAmO;AjIYzaJ+XLxB_k)O?ea*I%856b~M80>*{|1)Zue2p!qq>RLZ@SrIgt;D*Vl zpnZA`Y@5Qt5%v0~FzZ~e;A%Z;=k)|iP9(4-5(()$EMfDXO{4jlECEp)uvhu%sx>a! 
zD_}b913E+q574pq=2nvCf4~F|SOHOtK}8CQ0%qi@W`brwIGg|`Dx>OPjo1!0B_g0j zUIZ5qqDRkIyD4!xfp@rbJnERgLh!$iR(N1&$Aqjt!$pG`r~-+Ib^!$FckZ=FAjhKw zlAPo~5m>hL*>U!0`KK5It!V&1#uo&b{+4=KBK*;9*>v6H)xIqa4QGc;dFcUFa4V@a z1c((3xz8HeLukRU^DBV&pE>r{~<_!ak?lR9A}g7=-e!vFN!gfA()2{lme(gq;m9MErnJc+}+{P*PB;vxsKS&NR%62_jCF9?^LQ_R- zl+V<1)v6^W!2JD@Y!JWDk=(wFZ+R2=jt#1c_R}@JX&@5FO6QHwz|olX81>>3n+>Ae z*5WxTU!hXn`FUJy2cx zY0Vgw>7PEg(Uglu8OLAQ|7O=WG=z1o0~Apq=des*?{sYg7Y*jiXzM82dj%_M99LNy zs48mWzOS**VzAp_lUR^>$+s`K>{ddspO6U49$e)d;Azb;>D4^4ksgK3mxEse=3+Vj;l+oKsd?2K}W28*46s6P!^P!xP_133h z8kmi8XH_C{gyh;UGr*vBH#v&wU~ActH6km|}%~^8KezX3d*Jmd|~(cBk7kPWR@iU)a{Z%UO`J8iN`!)qGDjz-(Hhp(pKzOFL;vMuWo5qjDm`4&o5 ztBzc#T-ruOMRmbAIyrP>J!^J0F|OKZp(ewm-}xhUds|-3K|>=Xwpd+PHlcXad2aG^ zrysxh+Ui5qO7AX#^ubB@OM`XxSZ2fdCg!!Z@LjptN`up#1x9q4zSY|mdYz+*ko~oc z@}_km1Za!d6j1a1Qw2W341!$^r4mGh2QUa-nL~oOVJ?8N>b2_-L6B=+CDQW{E{}5C z^Nwwm8<{MPCnay+u_Nf|pCN6|=?bD8D7_`|uOHB_l{C0LrMa*_(ZSd77hFei-A-;# z=npGoUykD3a(%HCjn&hOpOC_ktLowp4wyq6?)|LqlVck}eYpG9PjD!`ahZJMXvC{} zug$f&k7=F{OhSHiV@G&;z=40tnX7O!AmK8ObxScP$iJ+ov;xN*z6#lHg4zsCT5qP6^0U zum0HZP~(f|;XXl%=`I72!|m<9{tz=}Ra)X@Ys(dMFDo;<)#{74@m03@I@ody}ZbKp%(j%7Kc7C*!d3#_e3&1ZRW5 zUKVs^3_poCl%R}{*0kNNb#w?qVKCj`l6HN(kzM8AT~6jHOQ)?7DY(M7;dE+GEi#H@ zdrg@pJiL9cUqe8&jPN$Ol`a%(`#rf333c(^tp{fC^We%lDqj*P(Z3* zx6|rN#EGlZ?VHNOtNQZZ1$|<{aBFWprH!c@Mf~MyRR5&I4+B@;v15*3UktTxkowdg z6&mO0*9&g~3wqq6OU| z;ClVZbnA}cT1pb;Z2twf;=51F>WK%pINCyY>`|9Rch-JsY|o2bzXmWs{jbz50#1Xa zz;U)*F{}2&F{hT8HEf?#gg<{7ZTCBK&^{kU+PXPUCkZ$uJ^ayw&40ED{^4akwdtmP z4E0%^p4-Zw)F@hzj@=)fS z>ub}S*ZHn>UGEJaO*S)8e5(y|R8xu9sy|)o_f5#fGQncMtKlF=kD|kbv{1u^O|?V7 z*3F3SVOR7+7mY2-mBBlGi#_HqmLcPan|TK@E7cbPYv9z70K<8|pbe%18bQ7lVnWUv zDg13e|DHP+{WUTz#aaZYX$jDhNs@LmP)ydDTpvN?&$mxhOk z#iw=ensL&c46>5Jl8SS9G+FlkVSu9h(Bzssw)BaRU&#@}xD~}MBGj;?qbkj6xCi*x z08fatW+Iy3x0bJAI?8lqH%%uW?#5X5$JYkPjL$Hc&0cm}w4b>XWyDwb8*E%BHd&R+ zo}HG=pL;c)drS@^RQhB2^UEOizFzgcS=C=vq0#>4y+>2xfP0 z=mXz0fvr)at!Husr8wZseV{@-ag4s8qIj=o_6Rd+8Sgnly_zt)IL8w@;qvLC)bVuj6UPlS0- 
zsop!Twn%>z$+a|8!1^WA&Wp2w+%=gkJ}K$GWwYwN&-Fs@w4pBE?Inb~^3kQ=1OxOB z%2V#HxAGcfG%6-mdZ|6Wx(tO)3V2tPdV3Vm$tg{xr!l|XBXSrbbzhNic|R`p?8C}V z`fA_2#+N*3AyaZcC4qZF$O{H&d`;_NH+e)#$ABO!CUwQN({8%U(JXn=)ti=Ugs)_A ze9MI)Zo>g5LFghU8KeRmI0bwUYJXyZ5%!dVeYyS@_eHN2Mm#0+>Au_ zygn5jH*&7$EZK?B$l&w7WON;9-EHc^VszI>9MrtjHwiHstlH(a8hL=9R26Y?tHiWPFQEUmOOLjGsj;eDQpRZ^OM8s7@X)g! zj`iI|-h8PKq*_Uc6Wo}}EY+fe)D!RWX|y4Qr>o-`x`>OD^ebT1I0 zFNy_xs(4YmD)7)DxugQtBJQ0Edr2TAI*vD279V#M*WJ6+A_kqZttZ}9PHQK!?RMh7 z`EZIbCc@N-QNY1V{$y1(`5q0^yyao+I3}Un(~K^ z!*Xp$sAmjH@)d^_xX5keBuu?zM&wjmZ1(o3hd|LN4}RfQeVgOZJ?aWK>}R^^bDDhm ztm@Hf&<|gnFZR-*HO`3g(>qP`W+ycyCLcQo-UJ=Kdgx-);AIvXu@daM%e4k%QlrNj z;|rmi#Sy0i+aBH0g(fE!V`(Wf42Q)B^+~2P%s1p|TlY_FfDXk9(eXrGBlF^ z#JgB(XD6@K&1x#W-Am`uHR}+`){JcfkX9^~%yc`VVNKO= z=b)JHLeC-+pyn)V_f7eI*6l5ZgfUli?)%Rnv2&+*N8hgwkr?mx^A~ESnU<&l>w%)70ikzAGvU zzShj7)igAX$Rxz1b}zdXU&-s7cl1ithZsvByP9{|5lfELpk`K$CHaWs+V?AleJjD4 zPn+wHIO_co*EvKe7<=A&P%aT4Y4h1mEqMD&zWX@RtC^s&UEIPex5rCql&;G-l)z46endMjW4)suJwBOWcZ%o7#-QA;M8v_S8-*Z&om}fP#rs?pV=fE6 z8A0f%we@D@vqI5G37DD)OoLNa&ljTxT$ZrFq zy7-2$zW24@9_N;a&xYik)M^r#ZV-Mec?h4^qqovRZC1_a-HafGdp_39)ooY`+1pl! 
zJ8>J%T_c1M?vOh6NGASMuFgWbj-A;tZpR0t%o;wQZG1MaV@}z(tY>DAuHpB1;0r>s z%kT4z#4Z_lHupYt6^<=mXzI3G5O8@>W?4VXux(iZv=5xwyNX7VKCv(m{i}~494MAm zd9^kYBUDlWxv*SmUo}Ws9sym0g*lbQ`Xt^?yPZ=EE60*xG_~=qFytjl{My+jiysU0 z;%Rz3@$M62&ZQ$VtQVo0wQHqzrkD}dp-MW2JCM`;e8WCs;pKsy)6p-GA=ccWQr^qY z!%4OuOVW3TFLJM=G?WJ9B;T?>|E{9JGFWdh`2P945~tuG=8)C5*ay1pK^ag7{ODiE8Pd7i+6DEJllgET(wA93Z1tSjqI zMQ7UZbI$RZr&R;C2L%5QV{aW6)!MxQ3nC#Bipl{=DUnh_q(MX)lE!;!&PBpf*r;89_Wv%aw^S+VoANao)IWnGShvlfFdv%I~$J zgix8aG+*JSwQBgI93RMla&}IZZ}VARThgpHjAy%1C9=p^7C2B#(O_M<_Nv{uddYVN zOuL7DmiKXaJyk3mpx*w%6Qtx;y%8Js5y zMiq9kOZa+bJ9A^JNiR&K^weS2W3ozchf%+2BW(`7G`8x<6ZT^Yxke6U*RLmNOL&qacdYUT=$6+MtJL&SqgHG|)7=>|z;^*bK`gj$rNZf2~Z`8eF;`Vl{3!?a1;@P+Z@;rHr zTVNx3eA#EclhxLD-yf4ov9I%)&64iHe~&VMkOxMd@Traj_u)NY zEmqnyFv~8^qSA#kOoOfEUdhEne6)m;-@1Rx%@9-n;RoURnp^q+8DqS50!1wr`~hs!N-b=+D(}xwuY{;yQ{{gk9TmPC8;y0bRr6 z=rV8ddYoB$k1l_WkpX}+LwF3d1}hM8FZD)vIC~f3d-p9i z^*h@Y3)cmTI(G{-JD7zaG^Gtc0K4@&=bmg59JXqDQnr7ly+wGh)Xw?G+6vNazf)GO zVP!c*Ka{ETN4060&x4<$QFT|``#Ze3>J?@z+2nmUDwKk$RcPJ68k<-jHS#s>(j_cA zr#6*2OseiJ5I6N&mIlBv53W69qMVwrPW=QPRya_sL2d4s+#QR+-dS=4}HNBe0WC?^9 zqHmITu*)UCGOU*nVV&1Me4GvcVSZ@GwJKnSIdXntX56tbb!*UH@NoOvnjs~ic4;-B z>@dC(%hdHeQ!>sf!%F1LK)Qs*ii(m%?YP5zWQBbVj*Hp=CPlIz9LMNLSa-^Txc#oo|7;i8lLGZHo2^LWRxT*QKNSwPVI+d4AL7I*X>&KE!n-U@Uo17EQ$8v^aG!e{ zl7N@cd~@>O-td37Z9vBo*bFi&0Oxswu%rmyJ4Wuifch56t#rI&9RL?A%rTTWGcX~+ zc&+$%_}L`5A*fQbG6hbpo9@TMLj*n|?-qu z8|WS)ttE9?1!Okq;=xhD$qH=;FN`#wLE%wp^RI&Cw7{PQyJQpOim|==G|r!|^|kj(for=Bk0q*a)7H5}wGlLb zO2JV#*aP5(>3~1}z91o3bkY~3WD7ktq33nIx9$^E|BILtMF*YI-(Dy1_bB!>)~%c8 zIJQ%gsNk4rb@|S--7QGrAZT&AT_%A`=fL06>VgSj!PXDTb;3D0{a4nSHs>m29_j2$ zIp_$Sgh|0u4Zyj*xrxS0a^kYnn^PdS!J#ig7|_9~-Hu z^}LoS?($e>O44n!zQtB*_j+V)Xg_3^4*A;AJZl>kF(y*wyli00-OFBX7Pmnr^aJ_q zu(`N&TIRZr_c;5+rD0OuCWIN7;PKB^niGf=twoOknI;N0NX5h^K%4p{@YCg>`<=sp zfVeMkM-I6){(dHNsNsIhZ5@@0x3!NBZ6F%^y93z>;<=^Mm$T#K1yg2f5S8_xv^qA9 zQYe93Dde4thLK+5$>tYgi7wMsE2jj7$JE9ZEA}IU$T@qFrA#m4r8eSzURS&%inqTW zSuMA?%(@C4c601}uj9wGpo`hhhNnve_W8uolY)Cc1tpT)Q!Bnh{^psIT_b39`T>1T 
zswc)N5T0+yj)d)?-KU4p^4s4Rl|B_OjPM zWO=q2&qV0A_q8fuCA{{&Spr|SuirvY{z}pt!+hRPwJ8~THd^p4O4{M@wI3!s{bIM~ zbUWgx*V1Ua#Mi-8qTqL9$I^l2Ncjhd+(hGB1x#H#t&XJyP5G0Wx@=*A^_rNEvCEtJ ziK<7(Br}kqV`5IEFW*!zMc$PI$f`EGF}zqdvEDQ^sh-Zxq|Wp!&9TYSv0hM$pvhbQ zSBVt7NwgQf`VDSSpL0F`iqQ?UYVT>dgtC{?imzCYo@XENnqS|CE5l3s4~51j0+u6{ z5z0IosD)LrE;GwcDJ?`VTVFYaGfs4qU9S^Sxl zb7uNiIiwkC;~wdPe6W$YMqoZ8A(4DIr7WEE*#Pxuq)rNAbOG%Wm@?DuyKy5fOKXu- zWx&8S7LebcEe=s4o{4=PGY+wF{dayk`{q9Sz*KsmsSWuxA-U2~8K{UE*7n*2z4iYw z=p8Q5Juw~J1h&Fk00VaZECq}k`Mb40$ZLG*VW9cNY-;vx&8Sz*GdxWLju9D*rHwX# zTlHR0i&~JKp|=^#V&^qQi&ofWhw}oTo5t0zf-=$^hMTSBM~aT!WQtnfKouV?Sll#1<9dPaBx~u3 z?lw*^^tiOfQ+kS6@5^t?`kVfBVb?|~Y*LpptC+roMbE$aG>lrw6}rz*vz+YG&yfin+wTwq}9FnlBaXrNah3zhVh zZ+Y>%kc8`ubklP%oItN-R}!L5SyVGN2RS}OSS?d-D8g*%Qyo|ML4tCq^iFS<$jA|z z(k*%~E{=+Li$+*Tt4pqc=ki2i zOM;aP**7=Bamwa%DWeNUWJ215uj@q<&8A|m#b2A zQ^@B(K%zaIF^^m>ysUIO#-S7>L7iuhWhP_N#bmaJO z36B&ufe^4iwlA@J>{g+Vv~8?cC$L%_iPx_|To|RBd+XTb!UNz3gQ9P&x2hq0k;u8_ zeou*qHG`K9!KQ@Z;Sc-ZcD}+-=n&`aWHUp3{uockP2iPRpww?4)$W_RrjasYhBTY9 zG*L;G`bdYA2N*%NdD@wypPyFBu4OAj9cS;S())fNS^TRYbhiSQ$}<(m%fzR+qAR^U zdp3KM&%XeD(`aXPM(sX}?%TQ*lV>57{!T$&IR}>5?Mfnvs>V?*GBc8z7X8W}BhoEj z^-P33JDIH~(bvk5xo&j)QSnX$P(3b@k>Lkhg$2(P&$Mz9Bvqv0RS3UW9`P4TKdldaS#X}lt!~tRTvIsiw&@po zi~7+6_o0QC?zy^}Vz-O?7_DcvjOeq;kNd+{3yhwLRr zECuH$o!2-Zo$&+7trc#^Hl5lKbDPziu>K>jljQ*gIJGh}A|E5M-x6)Th@Sw6SUpQD ztL2eO-3F41)H=@v!Ho)JNZ2S>$o83if+!y=Y506a6U=1#kw-^q1_ zq|I!zTGX z=a+CJC1GRTWAx4<$-g_4j{%}xQTzgU)p^iPH3U=mg6MMRjE5+>Nt;<%BWj<-;d9&?7 zQ(5<4FATS~{PL;j#>8)L=AcJ54}U%`gXCKa={cgfUhy0jXvD4^U3<_my56j`L9YSp zGU9Lh=o^X2L1(1ckPsJyVL?zo9aHCPgUqAvU(bwqjJ#rqq_A`yE%(&)RzP;Y>BRW@ z=(@vt!4`#M<4~qxf#Hh1l}AjQ*0;5$3C1^bLhiW-4Dkj)N%C>+M;}F))X?o?wq8=;Sc#xXpvp<^AQFYsms9G7z zgNg2~jr%zwX&LZcBgAOR8a~(L84_({CNa{7x<91bZ&%L7kON@-u5!b;-azyFXBC0m z5gBPdh@2*dM&xlt4dOo=*ON-qb#2sIRq2Uj!{S@#c{o3fD>{=6gNZnY$xs3T1*By* zJbIp}x>r0?d&5@ff$v zdO3)-HE^FaHWdkpp&pC@`y98$_zAAj8483!b3Ru$z)1Og7K-TQZ1VQd){JN2BdtSc zjKsVI*|Y{z>08UC|MvfUsQ~Cgv_^a&QW85xu&vJd7Ope41x~l5_1E=r;agX;JmG0H 
zL;=g4Gmxd)&jShds`R3*Yr89=;Z+%8=X9s$NwT?FjA7_YG(Q$K3MglDc#m6*0%rFw z**G>bZFf^;j5cV0nw6HmOXBDDy^cwLEZ=mD7^4hPw;>yJu2^|JqK|0Gc9kt4u+ScM z{eC9C;jtMzJ$L+tid!pc&DCh){MC0MsynlZd_I{Nr|k6{=}H@NWK*U6b8UyMG5LNQ z->k0BbA?8bBh~XEA`Jo2Gm8mSztGh5&Kge(OuprO>xE|n_f&F)LAM6d#}IIwQd+B! zPyx2wqanA!Ox9PNawd5et3P6Geyw{}EQ78ju@6n(*5Bt#pW3KuK-PW@%DK*(HF?Xm zsB}#?h>1&_qj;z+`oe4oM$uBf&0!*y51SO+n+Hr_CR0zh;rq^{6)WWKWMS?t{SDqF zqYVc^tR!y3W<)F0C|}4nL3;8+&OqcR6~>ppbz@?YJ5R457z0l0uL}-W43ZMhgock&xK9il z=R|xQ_4ntP9z&ioKE?J$EU0Y=Y%^2POU!)qoT2cS&npMV<+;CwptDmjqr};=aL8ZT z7OUhnv<-9C-k$ZaI;$?>MDYbut6Ckg>;S!e4@w1O7~RM3HG;f34P;jNVwe4s&tp%8 z_DP}!l~qSdwL)9Owkq$4hQMdit2MLrT?K4-6d~WAYY)^=q7w7avpD%^%gw(#FZ$`V zCy>(t@apbet2xtc4{_t>f<&PYE(`W|8buq#@W0@MK7q!2i*!&TsmUUA|kHD zmAHJM7-C_oqm#P-v&WMK$FNp32oNL(f!gC z{OI6Q$0wbieKVwNUaEZn^{NR}Zs&sUhi2wr37lRoPE2H6S9+XPFVa22YS6PRoWFL) zstRGc(NdNNM=0vr&JKR@nPOeV)EWL25dhae?8_I+4?bJ-{V`j8k=}b=d9?rj-W}2}0H#CJSEftIU6eD* zR;?ny_Lp5z1U_tA%r)}WfD+0%ucNrwP*~B!PVcspg+TPkg0?RC4jVZ!pkw#1Oksvd zxlpP{ygL7$PJM|z4u4xe-s;IGti)ksu%YYkbb(!dd!=_Y8z0bbXg!|NIt1N069T*Z zi4P!Y+kxWf_2>*lI^EAj!iJ0&t!ZR@gZ0J}Td$}9D3nx6HpCLwJrQ#S9*&TH`ejzUW-%g0E9@{A_V-gMukGEkfNnq!;bpNTvt#>-InpQHL$HiBm4xR!Ynu{yQ4tojG`*oCEE8h@?68 zK!Ih$a}K<_dcc@?ldMkPrI?;w750Q1LU0xJ-(rs_VP^sSyp8lDd>RRkTy0km&btiP z!^^!;YIAHl5Y*t%dGG$y|IV+!&H$1;PK4ww@^`%8%6qmC^1~#V!JCD&-1-q3D3&?S zkY`OouQdpo)c&3m@EJ6cUyEm6W+3+kNa469ab{!a{J{N+bk~WRDg8e^^|8PgnXB;K zr5J=k#Ldi1&1J#8jy)7|zT!krTD?tc7MM>SVDa3}An4C|jc{}4mAMf7g%*jdnKmq~B%Z4k?M)y^g`(3Q0#lBaV$~=|9<2>MIz{MSc zC_iC1lyG!$3j$aW16IKm%fDS{PdD3m28{&J@R+A!Q(3BAdX$!oPmaDCfgBoRr9@2bbRYZ|^Sd2GWibm1MZd)b55QCTT%u<{gw!`1y0DYY zBF!fALU9N}`d`_}2|r$H=Y9TiDfAK^KTZ&wwT%jwd?)!*`;eoC>A5@D4EtG&1 z56Vk#B|LDb1fo2*{v~53olj{>bwm6rP!uD$B;-<52Dy_ExUb#izrGEXDeJjGITyv) z2>wwtIbjeGmRxTs_!8bGP=E)u0l3ufx}?rYF#BfWCZa7L-tfe*nb?#HP5y!r=!Olv zQu?QF_yLcZ-;}y(Js&MZ(CTX;`uhreJ(OCIBUb5DUVkqlR^8Wd2^Ed-!LpZvo43Vg z(%{`l01tfBMuv1LwZS7qOniya0!p|Z#8QGw$jl9H?rK&73m>&|6dN5CIS zE(LsHeN(Em`HRn+Fz5+*^{Zmw@Q4&po*Bh2CtzaFo=fRC%-q9>r;#VOeZP;#5d9pF 
zI-*`Pc4MTXQ`D>*M<_x4)lAn(hRh=W+OW{@dgq6^$Zx-R(E3LPz~tH}av3;}D8cuq z2m(u@W3MxQ!mfjDkS|Zui+^Rn0b&9JWzYepYtsFK6qiT|8xZp~6=UZ-ZXw2IuEfHO zQ(E~m_}nlqr3(`+YKQeNW;BHAWNB?LU1{+AHhW}LwN>}qZP;-mWTRrT4)nARhi`Ae zQX21Wh>uY!-_d~i&DiMA{BpN#ZroWj+DIb?4}6V#R-cW4=Zsd}pJ)~|UODsmR*!7= z!o!_BQaenWg!P9pioFHS2s95SMYr=NRaz6{?#ci)8Sk3&lfR=5c-is7uO@@R2F2nn zyyR6Aa*O_t)~#uoaO#olI;{!12fBI!Om-nY_#h~87F72B+`CuJ50K_3L(a( zFr5N;Kc3Ey?NiHzu8bCO^I!yIAK6K~oN-;?#{DhN>;~K{1~$7IjsH8=kiU}vtNAO8 zoe8ud69QRek~|p5t$?K8iC-~814o-A@*wus1|^%)9Y2CAm?8acX9;{j)d8gxxqRkX zS)M41(FSYeb=}5JuZjYEDn8DQJA7P+54{|!mGqYy{npQ&L*CpW%q1(KqZysZEEuMw zkCv|Up=!PSLDBJGsO@{E&EK!->WG32@KXi^sMe&!zIrRztmpND=Zp# z3D4&S@XV(aXaWyYq0Njk(=(?W!wR>x$!0tHx}Qv;7DpQc3i8+KX+p%JH3)GCul-xBUv2l0zoR=sPS$UO(C_Eje>yC{ug|;B0Ei;ZbS3l@{~SK#HvgJlp*>tUe7>r zuZSL}@xUBX?$VmIJnw1H37u~;FOPmUw>cC}aMJ;VoPg{@^`%+GJ6=mhb&T3$4&D25 zi>%^*8=3vb|6^qHJ|N~GyWY_~Y0U$Rs1kAQ?CqNltdUVLU^W%1E(V;;LM&>dw(M=? zLvOFqjc%o%Hq*?Mb}~iW!M@UGayf~W$9b+(lXRMe56r)okV0>Dd3=k0q-^{8;CL<_ z6(^t{>7C_#8HZ3q8Q57w6Gw79mvp+Z&RyeyX*QX!ESi+8vIf@&vEhqE1)JXfNS7(f z^B?K*V$Csc8!I4++rVMjuv%lhdVqt=4I*l|%HU=@?+--V`t@90sPxH5T;&LJDlB+{aHfokRR3nIr%AVoH9*Isjgh zB?GxE(4F&5r4VqU5ZK(a&}v_y2{|G~f!Ny-!&$1x-m8Jazfj27qy3?)Ys^4x zhS*juVoKG|o>Y_Z*hIkUILN%Px|Naat2duhG$Q^xX$Smb0J@2K%zO2Z`1TtLq?)Mw zB{*ewPYuyFCHuWfS$XmD*CaIa{>is`V`k4?8up1Ycst(>Y-Id(_j!|rNcXC5tcK%c z^%LmwhO5Pl*3q|SIxu1YvyxWPG4knSX-XNh_M>f&=GR5!csCfxrEsnDM$JtI^jx0J zU6Yqb8xU|}oMKLQhNZfB?nRoA<<$FaKgoQWQ z1kT_LOqPir0`kara;VMInFz9Fk=Mu1jl?)6$}}UvRNpHNHHywE)gN!($}=a$t{-cF z4Lf^mimt5M9l`j@FHFgKxW`=Y(>9{_@o6DGFCRtc3mcf~|+ zyw|*9s5knNyNILzX_Th_D@t64yQQ}N0h~s0T(WzKx@+0BLS-fI%eUcEvbtxf!&A}m zG+1~jKy2h&W>>+;BZOW4AO)b>`%*p947!{cAL%?2j*snn#W~&1R_}}}#yfOwchys!`nmZ& zppw)e{P^)*;nZq>;^+&Ur%a632ls~3eR+~AS2;o43>`75yjo)u%PKalZJ;KkhZ-iu z8&N{QJ1P6FhJ_pdr1|a8zy_|}p!v!W-3oL=MB@`e8t@zlD*YL0Jo#$Nc&$K|>rRe* zav^mF9xE2W1%M9+zc`*oGnM-~`hAk2O*iHm81K}%TkXgCb7;3MBk&=k)5ydf`{Dk~ z6~C(9{BU`~8|5#Wgx`xp0zdtRSY9s^vn@6Asm_`s=RB4Wk!QiN$=aOj#5gqMgdvjo 
zWdt^7Q=Kjy5$X}XFPeA+e7^n4m^{^-TydFgtCXU`njAoPmjSx_1SAf)!6bplz`))q zZm}*)yRStH^_|?5zPSYp_PywN#Z9nbOGCBRJydJto@eFrpak77uUN!7nx98o@EE%~ zA8*`yBrc%&_HLJsf}Ov|=S^Po<%P`MmIE_;Gsm)Lo!hbki)v@Zg$_QKzHQ5%m&l3Y zvaE2@99}e@ynzg~4b9=a?+Ums^8O7J3Q)P@-$|h@3pHIOPl}|8zK!z6*JvzYyK2el?fJ!m?%w2`tDO@IC;lY_5*8EG9 z2)yJs@RE#7SpcTe06^UyoPr|_h6NKM2C1|(1AT>b*a#8%CkWTSBkA8+{Biu>;-o8w zFTbn^w=nBqTvYHC*QXXQ2BkJ|Y7g7FM-Jh^xHHU%3}8GhhR z2^4W7NpkJ`>nq2bj!XiuQ3rBk9BVD7Aj2LIQzQb_-2v`Jf)4|wfj^M&-1#aC%uweU zP^aR3kT@cVtdg>F=2@hZschJ7AYi1K>>ETa0PI zZI?mNLQpKw0nic)K#RgtMvGD&f&y_)D!=a&(d(HHenV7jy1-)K`P4T_?saAeo5?z_ zs-tVq^3&Y&2E_TO$y`XcVxo*Oak9|35JMK)da@n4in)We6n3ugrbSfOFLsQ@mGYs; zppAPsYPSkcT$Jt%j4y?MC0P1Ywki0#xbX*qoUzCN=amHItIdrRP-L`VfA{Q`#h z0;aO}v=&r_`M}c)Z@iGB0_D|RPU$D|rhl#yo{Z%0lK4F^fDZZNGJWMbb0^xU?3Sb&)yO4E$ZwP) z`AIb3U|oKw@x_Nq>l98Fmal3Cz1tCc;aFK3fp)x+Pe8f!oCsJoi&OiK9I+xakmRyb zK6Z(~^8Q{e$#-}M&9L}x#lzXA7g{2^W%CFHeG$L;?JUULb*;4PY-6y>k#b$TZ54@o z!1t@uTyX+)ZoHn~>l`XoDIX2CXj%Cdg4!&w!CAlsSV7^R3fxEQ|Dysg6htqZT!)&< zC|djt1O66Pk1Aq}y(Wc}0#Olw#B)x%5e^OLL!Y`C8=Myam(HO0mLL zJ{2bxJ%i#@T99cpP5g1ef8Z{|d^{-0Id0zoh9LpM?Ae>Aw+SU%LGd&cC;NDNvpsg% zq(dHB|E~ssGRXaKy~p6v=Inn6p{1zYhIX~isT5t8D|AB(A>0h)wzyR`%-xIQk9Jo(tt2+DuUvvKEa2mk z-;)K7NlI7)o;U|WgJ0%dCOn2uzzYQ4@1$2@onWT|{_IcVd-|ajdWbMT$r&KfI3p39 zz1I<%ZW^D+kBL??jb;+7shzJ2r6G5^ z5s>598~bs|MhlkRJHvK-Fh72z9{g-TqK&G!e{# zXbZW;75`7kiM3yb3_Niki$a=yB0EQkAd9EcPnJzV_`F6TmmeUB3q)jnUP^TzuN_OM zSbowV2zbg6ZC*&AUZfX$T3w)1y~Q2f=+kUS6-%4V{3$xCK>^Zlae;UMlC6iq;`tA=`jP(fB z)P@zlW-+qFe=00shC3TJ7(wG@4QZwyN>7=OmJR`xT;-+#fO74V$>1%*T0r#-u>@o! zlRGhhr(X_y0%Mp5Lq8Ea&$^pwI5FBGFs`cq(OWE@rRrIffUDa3?q1=DQyWs#HA+_A zuUH1B6X%l`VQn=~4*)kb28-DIEt(W$6adw9+0)+8Pr(9ZbnwHY{?q`u6D@ey#uI2X z?K2%#^9d?`yeHV@B%%*yj2;B{AfoldCkvPdRv2i7EXp#}Dffp9KKYl0_-zu+z~vV? 
z(sqTZgDe`KL7k--T)>qvW{Q2}?>xa9ypDRX7$_M8{#OUFM| zJ7^B5jD!Vv4-hJsOs#X;1VDr!3{dyZQc$asREVa;C6~kEgQxyi9{3UB$phsc4%@2-O& z{i~HHnNNNsN_e7$*T*1_=uhRS0Funj#ceei{5`__7^LI?ASAv*_$e^f_SkXklWu~0 zJfCR1{qjPgL`N>@&nRyIoU7CQ^`fN{A)=Ou<#K@J2vB2YWC}jxyf-k^@z7`-Ne1x6 z0O-PE|9Sj^nY{8pPJs1kCt|QN`gh|e&YtS9dYn>;fuW>eWrFD}7^4%wMenIRpOFMa zgIh{Jd!=PE*HSKz=;uAnC|k%&(EBseg0DdVNrZ((-M0L}b&WRx+%>^2D2uGnZ<^5J zXJ=ZBGy^kGQS^>(FseQFjDw34OXU4455pF?U{U;w^FzrX3xl8-DMMnL2U%ky8U&<-t)Vh6ntCZ)VK?v2)#xiv;Ic% zB{phkHBxcmwLf8CVUYogu~H<=i75|jS983p)qWoT{xA2=YyU}}zcu`u;uHkMem$eY z!FAkztSeX=tlq*)0dZ$-X>|}(=~(7iG@y)!00DGss=)Q{yZryxEw*B4x4KnLtcWhD zEOo;9Lxg}JAivVP2MvDF|E#1XRw2@=9pp}2Kq0o$vR(fuq-NXred#qab(?@c?Bk6- zxc$tHyf0g9?Yw1+1FMYWSAj9zJGe(2@CCe(Qg1Dv%|A74L8+lmBsJfx0up5W|7JAu zcm6=*zoMK)!^usl$X=LVBYy`fL<_sx;rn<$u)0`T{q9Nl1;k2T(RsG;JN8;%fHoH7 z|5qCn;F8hrdIF9ag$ak%Bl*)B{f#T|eBx*(ERSsp=H-*WgW#@>#$|bB-n1TmtgE2# z0bX1Cc&n@T662+S9k37~p-f;j&}Xd1$Z9~5Vss|^;bh?tNx_ddLII3j*WpPxY3xlq z(9RpLO%3H5FVIpm9j8;>WgJSHY4mN|36TG0-;a)@4G3FuE0JVML~&%MH~pZSHv zy2(Y){{^4Lz+DT4eLw|4B_mAG&N~#8M~}M;(l?d=LyM>XY;}OL@h4U-2PWnI7OU6t zaQ@V*1MB=#Q{9AgHi&0ojLm2)bA=I~{J<-6k1sY}t((63EJyPy@(S|tNr@yZ6%Kq4 zsfz}+7~0Eob0yK+I<}l*I?RU^!U_sD5myh774ChL;Dpo^^TN_J7rFgue(WdHh*x&K zPP1@%;d5VvzpL_KO0DL_bWgd~YiN^y^37XxhxcIVFs@LvJv39coTW+2w>x)SBR|JN z&BK`OP&{{@_vfGxDj zx4Z1G(=TJ9=wrn~Dl6!f_2@ay2BR{V`tkKIm;kA4dP8Oe$7=xX|k4rv%v% zED-Xj=hK|n-7b=56*k0x>m2OZ_-xq6l7*VmrhDAaYL+V%lVZiH>bs$$E3kbTa(l!3 zS3Hj#teH~hNJNl3ep|v@uDZzB1fCZb($~<_x=;5}%TRS!E*#5?X_-q~zM_tC@ zxt5YAzV;1d@c~GQd)5LQtA%@rw^7R*f{p@9Hf(jL=WFHj8^|TtF@4Xu;RgI@FxjHD zj!dpU0JXQ66V<&QoUXHcAOoY|=3hSzWAwSRmh~>>I`n#*0+{({3shQ5$`cqU`4j{` zrVp?lM)Gvv3!lG^Hv3z|K~pWT8$<=sLw`Es8^GT_meJupLCYNP#$6PAoYT!TjwkI( zvsfu3MkIHg-On3UJYuiV-kG}Gq&)d(=WXcHuVIG=^jw%N_e7yx91i_wGug3@y?^ye z#G1h)YFAj++)m|=z1vaQjTLkg+#$xtjqG5H!A zAPPLWRcET^9`b!y2j{vi;v~u;pg?n)nZi^xOctERZLxD zDGg*UGd3>y4%#C>|3QHaGQa$_?`)Pq9p~cQJm6m>&9OQ|z`%^a0id&88pBTf074Mu zL-5DAIVW3q`Jo^Gq2{qE;*CDhV=0}j8FX0V*%xcpc3~2I#LkNW#kjGia2EM}i{W*8 
z^L$3qDz^rUAHwS4S2#GUzGzfD8lV=NHPF)(Xf#{Aar|@id-gkZ+^r!<3$!t%NzH^) zwc*z^?CE6|2=GEwJe?NfCWR&U zq~$zPmcyhfiMGbE_Mygy=a2WNU~t2zh1iso`y4Bc9COXCHGa%}Q-=Z@nuz+_l$1R` zx>eunWoyyq8+0x9KP(|b%`iKPx@CA3oj6Ugs4Ap}*k|R==N6`5(pWU7kEj`~jJ5Bx zR+aJ-pKX%~+^H&iSW#g;JzMVE@kISqzN|Elpk)v>-~G5)HRg*~k(|HQb=B+ht&p>) z)s<^n)Y9(sTbYfejq5!xTvi7^I|)(@pNvqxKU`1_w^@6?Ir8zb^d}c!x;@7`0O;;0 zpR;py|1%2!B!grURR9d=!J3G-{z|l)Yq?>%#>QX%KRC6Y5UhoY7YbjuVaYXabF~X5 zS0t<~daoqm(D)cVGjz-J*(|@*6dm8wx7VSv`=s59W(^7MN3+9vYM2yyyND2BIjsJzSWo&E~EIjq4(R47}cO{owWZ^r`N_pXW zvnT}%gK=$vn`%#7|8d*_tzUz#UC)MK&(znrYx0v*IY#O?BN) z#2pj2Jd`|CJyfSh`^_F4*qA}Bvi+FV)b3t*Eit>I;Z5`wRPCPegkTq0PtR#;IXgCq zuuoDxR>BAiXavf`iE(4t039xBrUyPEc3nOJ}lQw>m|5(lQyd18=sC}%^E3eH-=GX43L}a+DFDsk?37v z;ne)PeQsa7Io0^XP8tP1ulu=Bx4}915sIie2;Q_kquQ*b(sIBKdAQ*{`_an0csDch z&P8*NL8NAwqyWc$Jsb4>Zj?SpOMSw@UVORfvEWeo@TIzW^aso>k>^U*yfv=M;Vb775 z7tG2#O#gD}J^0Kd;`Iksvj9mqpwUc3@)!fy$@~!EpY8*;NF6#U`T~@&n+lO<9GUW| z)qet0av@v^B`?h>4;ZMYMOSGZuOr}bW^h| zkZ}>|0V^)W%SRmtES#g_kv8vj-LEyB(hs4>^DlAtNLPqE#<^6B9kU#f%jw%<43` zH~~|r&2w(Hwy6eT6mwPQY7_PuaYf6n9O_4<-p6Gh{F+`PH@ASEhd8@&n-n1G%JYtm8CxV9Erd5p{GgOVoS4Kd2 zN4?s8Ju*tK9sSibY6Z*iSjc;7b?+@j%v!`F>s@S7tX%s4DS{Fcm&jRd}d&>x)>;ebyvI z3)FwM8beNHDK|>6hnU!t$a*ryYWvLr!4oVv~+0bUG z)=3a(`XkAj0^gVc(+DK)D;J7;dX^h_#t~>H$Nr}yJ&tJYmA`;0w{W9ZwK6SfIlU6K zGNE03?i9ABUM5HQUi&d|eSTikh_WZ47qI!LQlH%|m6aQLmwCX*7ztF!Rn1#V^ib5D z+ui6xV)MQXdAbThp2iJ|7mANWe(&D8X*aHN;en5z zm{J+s19j$O2QWwUzv%2u98qujdE{xiTm1lc_1cf|XxLen1;(sM6!^nenJaszv_*aA z2UOl+DLz|gT5|_qV>$?yiZ1Rj2VapjpFbvhKgBb|&Dqp09h6dxwMnI})2`%{UVp6rk080tIS14#q8my~Z`p`b_?=QR}2u z+cBy}wK`}!LyK%kSfFeGxdWj1TlZg#h;t05vsfkutX*dAj${Fqn%%3_zwl^OK|=aP z{#}ajF}BH~wzkyY!s*AoD}U5jIBXO;4=)sTPZwo1J<2SfiP?@VTeEq_J<=5@YnT0KyZ&9iy4^)+E?4bgk;nSw43##hy7hzj;sEI4)Q8<}Pm5OQn>9(f2 zIi|LC>kC437UKTPHo6zX?9a$@*f!8Gzb+6?>!oOrO&q+kNM$UMP;C-P($Ro6z(|r}uUz%Mj~1 z+n^NR8Cy$T3_YS5VrZ%2)2dXDE|amGMk5#MpXr2znU5Z>QYD;h76jc+=xsibd3T_L z#ozzcKQRE?4;o1;V6ow&?x${&a7bgRM>8C!&K7RVZ@27!I`7= 
zL}Rj-u|@(9hh>v}cg=!!vkA(q&b+f=?@X2~@tFI35vhhCf4f$-F0XLt=GwWU9~Hk> zkU04~#Q2cM`g>W65X2SfsA#^f@A(aY`Mz?>+1(g^!L&T48|`X+bfdo3Lnk^^d!acX zT63!z=$LRX3ZFGW`kL=O^qHYR^qyao&ohA$Qq#TSUivpY+QtXEHwP5IqTcM&kqgA} z3lzCon^vW4Q!dIT^Kr&AYqs;Mfyzj2S?}32oj++)N_}L+FCi%t;u_OI{7~L zTkhwOjYcND&$WEAE=@E@?2vxXdabH2i-aSmJBw0wt!u^WSNy=8C*V69hvnOI(TVi0 zxh)FTtB%fs__jWtTg{n`UMDimyKl^on!}bsx~SI;kB?Sr;L0__y|S7_id8h*1gh!w z^4gkmAiGW^lF%QfxCt#h;S;;z4^X|7z>aWLYoQ&H?vlm40sD1bkp92z!957SFoDb_ z`YisT6vI`ZmH_GM-=s$hJ3d}3UMo2vj!4B4`f9Fz^C4RVfMJwB9b_!>5tmkaGb zmw$%&yfoEHOm|a&W61C33bH#YU2KI>cywRO5Wl;WBP|}3Fp7_glh|{h)5Fe(?Wz_F zf&=;xo^|1PbB!TXR>k(A4pfWl7N&zOs;6giTjxgkDQ~67BW^Hj(pv|VqMr)*25dT4 z|E@$4mSy<_nnVbmxmx)#&XeBFsMvtzs2Vzv(S(`0XDXr_4r_uh!@CxXY?0rN=p{HC z9`PK>IgS;Vq`38`7wV?1FC=}9TSAOG^6z^ko959i(M*_bb*w3qT2-OAd_!mNqr6E4 zH{OlVg9<%{q{v8YLoFRK9@5CE8idgSv`Ob%Vf!Z_*#kR@q;Cq9?8VxVtRp2fvooBm z5%>T2ttrcMqh}$~Pfw=XM7n-31>&cfTSSdsQ!KdLDbx1dpnJAOQz6`cC(oB@bf_Sn z_R4W+A6!?xV4Q!SEk~n-6S7tjd&4Yb-l1Ep5@E+yWN8n01%uc)^#X2v`N*FcxMo^N zRx=N)&m;B|&j1H-zngYa)`sYZG^Sea#lj0SyS}Vvef5k2xzOo8p;E(0I`MlSoI!EG zES4!H=YFF9m+2Z$-o)}SXx2d-Jw>^9`@xp;oajk_iM)EAO+xsE5}1h3*SPPJ3^rI3 z$xOp`TfRHvxNId$^d}s^Q(&OxRcopl?@B{$kVt8Ma`a7$1DWiom3Vn5Y~XhS@KGU$ zYFX93X}tg{n>kd&wg{n`#<716)P{g=WUR5q3p4(Q3pRd65bzFUW&98=UmafepX!#3 z%`<=fCE|kZkNAhC`Xq|O#ne&v)b{w%CNFP+jFt7v(^Vz}>wWjBEJ6B@mmxK4`X$IQ zS_on{kozN`@s}ndsR$KIKFYb>0F+-`#&sRzkCg|k+?rfI+}Hn7bRod9PBelFQVcNw z0Z|B@gwy`+CB?@d_+1D;@L2kz5SHV_je*82%VcHf$A=%V;SP(RgxXEl4%v=Tcb5M@ ze0^nDlu_5NA}T0Ki3mtZ2@Ieh(jgtvH8e;MDJ4ULDAGMhD=>7YG=hM%4&5T%-3**P z`sVkYbAHY>FwcJWUVG(zua(_!M`f&0cZ{e6JTOPb`2l!fD;AN&8&G1k@F4jhFL0-* zRRKKCE`9K(-!UepxqQGSh9iLir9LS*M6(hi{7UI~{_S^&o)dgPhVDOS@u{Zi5tCgawW~{=-R3A{F)74>w(R^{6;8F8m7~7L}wwrbuv$;vLO@ z771`NGc!M&s&v$A1ezJTsyL7^C^NHR;(b5=K4nmo_8Q;JQl43$jP5sk|KG>l%#B?U z32Cx7ee_+%$G^gT>}A5y#)SE>$!W4Ncy(>Q05daZx5azFkwU;VmU*H`q`I1o0pbvNeSzqETN|65J`Zu$CG)zXsINjKI{NnFHp&0m3 zSed-oL{V3`AlE`_(P1KIS~uFmC;uh00L@AJOJ3%^ldGv_NT0dIfofsYG#96!=_{?< zEu60FR%0MVEPtupCa!q4M0&Whyz1oKh=Jcv;R$Gwc_8*R}g|D 
zA9y`rQ?&UySUfupk$!;h07k2Y;9yoB@}Daw!IjXF;S?x)v?r4duB*bw(^hbBWOsu% z3$MLinI3H`Tl(KgXT;QJTVQ)7F~5H6zWzI3HSp``h=5ZgS+MZN6k$*Txgcfy&qEL+ z{GrO3>>;yC7L6mj)DR28k`P>>JQ+|Ji;_)8y2T)ZuX3>yZ^Zq&bX&7vly55H?!|id z?p?a~A2`i1@Z4AM8Fqo^1jb1rsCZc#N--sYf_o(R{}yX8f^9|y@iA#4+Lh_}J1wY& zQu)kOItVfIcwEF#(iaIG%|3`q8Yq8o?}k!feFl!(`@TqWnn(xmlNW)LjXn%5C?C7| zf0vN#7zq@abKhnD{%kJ${Hb@3waLNVyKlh%)e3kAp*#Z2(j~?(Un5B4SGRHvn8ITr z7Y4i3uacJ}pzUy9GAisx;Qh#6d{MvS=Y~0LkurdDh4r{y-i{{zkGt>_yCPOSMeq5W z=%EoKb9orE6=`&ynDBXJN&CX}7Ebn7@;7hAz-V=uR?bhmC{>Lvo9>cNVIgt=(MTwD zWZrE;{Tn1wQiw+IweOr~P5c|B};YX6%ZO6-sOCwKOu$DxlznllMs3Kqv3qCP^4L2Z z^yfe3kUp#9UPw7bib$E7OP(N(~aua*qG5NqZGFVq)wUC7VZ8i$^NLADYm zyE{?zYOJ3u%BF>^J0l<6F0^DcTkt|1gs2{6R-Ys_ifqBN0JB$ZNk7GClEH?|X3FXl$k1TU^R1Baq$O9(>9#e z0^8H32F z`0#`J4>qD~S9I*%LeA4U*8U!~f4O!5jd`9T*|}SUd;TbB?=!w|W6@x4+^aChqpxBo zOMxB?0BRlA4hBri8d+}P@vTWHMKx~6+l-Y_L{atfe$&;J>2?dT9Gc@pO6SpQB(n#1EX_wQuo4=6f08EW!Mr)# zsxw&qtFp5aSpd@Gm825wufKkRzqe?!84&cwYgxj+N&Gv^T-Umjj~BlY|MTsMLVOXi zjo8Ta&-4uP^^C)7{yz~!yj1{xW~I}5tpfqzXJ13#mRV`}N%Qj%e7NVwb!OI8B1}d# zL`u=;x%eCHxa<&x`3eVd1;q(<+++BpjD3h?c&PyNgPPE^Z z+w?;zo=tu`iJ=eVI>r5O>Z;+sXeZ)>5B-W=SA%181I&`Kqm4yrg2C&m1SqHxd;m#c z`I(=lmvS>UdG4wi?$Mc9>M^kO*UBU;sp2$`u^8HU=5ODUpO+D)cDr%?$@_PxfzX+V z&UOxIZ;|#Vw>Ir-zaJYKqAv~A?~O4q=10du4puKli0D4xT|E#!{$^K+e3252lBxXS z*-EG)-)1+X^_UOdrkZ~7YROlW06UMOxbjA&TBb&fI$xGcB5haqp>m~GwX<#bdDYwp zp&)CVfH1IUHjgQF30qL5pFf)rI6#2^Y$7h+LG#N7v-q8#7}SFo7fNZ>JX2=i#!#<) zbp9H>tI3EZuQ&ib-MXXwnkHL$&|6~SkSgvW;yC#6mJv&Fe6F`h9zKB@GW-<%AqVdU zotY=WcOg}J5B&RGedv=imm?MG8Z3FuXjRJYMaA7(CI4b{L7U#PnW>N|cigcw>AV0OfK1&-3Os)nKqwY-8dy!M=}mQ7s4 z!00!8tP3JuSkD)|>WvQ=IQz^asuPwZh0d39`0A7NG=A5)rsID9eiV@w{m9GE1{t3t zsOcUxzwj_p?M4d3=gT^0Cm4M!>WIgOz#b&k*0lK^SD#Vvov@piUUkCNs&nb_4hj69 zksdh^lAoVmQV@I)2m8GBLW6o-A>}ysq(tj73iwxDIGGQ+$qsGo*TYZ&_M!1Av$wGN zXiMy@Qz3?FYsq&Rl$B7bF`3e5I;eEQCTilx>*3@uJAShNv za)+4(!SwxeDb2fZ!|&1BY1l$NVDdW!~6Sbh_< z>FD{aWa>824_1v_ohZbt8AbOv0irZhX@bB3!_5rJQlBi zZ{UJA_w^aiwXtWw&jv+sVSP>px~Zc;Hb5*IZY%&M*%jp 
zjpkR=C^!X-gTtp2Wt=KOnfr8COl5hoV6F?&#nW`~nwxLQJ@W5%bmr38Kkd$IVjW!f zI7qF{8Xp-QeH$4T_O?Zi_N{1X>1FQt0Pn=;ebK^_&sDg*I?YXA-Cln~`0DHIUf&*V zf23DP2WgdzcE@mt0IrDGTI!Zx35b;oj!V+DW7UfCfIoRwaKPRiU|&DV4n$J^nafi~^Q5lpGgG-@)r7f$$yA zDDyeuH#o}CtK*&-6=CUz7V#LmQk>m!6s38H9lLBf@l3U5mV& z?Z7Vw1HPhhz{si^mLfoDNh#H(ZT2r?S7e~L8i)Qt7 zKR2(Tyklame%&t^8#W>rJ?q7}Fu%#oDyV)I@AYiQOC?`AR^+3Cb}G&hK~8crQ}GPb z*iBch)Xu{SyVga=z6Jas9Wp-*aaQyZ#(UAO>6-c}jdAn$sjx~xY#4goWjw&Q8~Acw zHy!?7d^56)nA#CU`$1kj!vih22uIx88ShPuZ$bl5SVJh7%oHshL!rJ!grdzjT>XA= z=ON&Ed1Bu<%%i&l%XU{kp4)BRojv->M!Bp)2#ktv^A6>}Lhxr7q)yEWBBoKF_7XmO zHJ?!e`Og0~4z`3*@9vh_CJ*sBJJHG5okJ*>Iu9LF{U4-X0;CovJldeT!{HDi}F8=CO=EUWfZ4^-9zn} zj2iUVyWPnUWVB>M^(=_a{pmhkq&z_nEM=B_T20vNz&}rM-V%89<^7-10*xZhD3ZYt z=Z(EGA!0L(QYcWg0O(T_$y8U`U#~C<>T7w^=S2!%WN6>A){F`BXhza-_eKB38YN9G zIG=Ug2lD;g^==h8LhJ{xo1x^ci_zCk5l#I556)Cl4t54k>2^=!&koouaJ~hI zf>?3^H6R!tXi)ZC`uQd=QOw1U$JmpKqCs6|PE4JObL;&-L4t$xcv2VLDMy9z7G5if z+Sg><=!$QdBH45{k^i(9rkbfRXA0C1;`i@YZ|?X`i(SEb3MkZ}WbWkeeLa9+G<5V` zn7a>t+?nQLP=+OCanFz5O)Y0Vs^5#gw|U@mnTp-gri>vC4e3_Joi6Eb{!9b(u+DI= zp0a&kq80_f-a|Fmj;+fbY#nt>Pcs1Yomfs>Isp#8A8X)9%pCt<3Co=PZ2*F$`Neb9 zFrBl~z4qSx61*^5$~`ODtL?2SS4Qf{5R4q|-kEx1bfSR4$>fiQ2bT10I`HTYF^<%L9|r6EJXYH0mOZ5+9uFPhbFM`tV=*p6*$S# zggG9IlByYAr1?roZz$s=^T)bTSVX_uayy3NYRQ;2*#_<}eV^ysuJm;vRKx0}$m$vm zp>W?fUPN7UY=%-Hz}?D8_T^B?J@V-lafg~Kp2m;JDe|33Sxx0ej9C0#wK~@7(JF{7 zwOg0EXYb#VHl%@2Qd05X{1<%k?jN9G!2{B>B+CG#42FxD?{f`b*<;2-EbbM;$-gZ7sMkYXmOcfv24XTdI$Tr(v_K-3;#xI%n@dwW~` z=U1;X5C0?k7~O;~;J!SR{M4y%BwHC=bDnV8uF))4pX*9YfJSU{;nOcKh_`>eL>VE4F;Z+mw9s2Dpu(fsO zE2wg>u?rjN;iA}UhF*Gv51D}$H^F`OWt&CTb{w@jQ=f+4dDyUSVs#aFH{Gc_5ml+M z;?15H%qK#PII?(%SN1&k1r6sP+E2Bia+Ru>qCwP4-7g}XVJfV^Pj0@rFvS1Nsgk>w zvlrTL5DD^fiLw^@hrZ7&QSJ4M_XC$_-uuC|yUtdtV%t9d_C;T4BRFz_{-fa?vIrF!# z&XgHCnYJF%fdKX^p|atkvf=8Fj#&5@r;BlT;{x7QX@Z>iCA!t4$AoLl|>4sVB1yRZ6g_#x9C%27}8Z-r% zv?9UU5e0FRqE5x0juCx#E`buE6ZsstRtGW>a``{y)#P8OQj(zgH!0E*@E zJ~Vc}sG;BjUBRw3Y~NkJS%VYaTuRBAsW3pUIV)R^-?`6Fg9*SLu>3;z>EY{%jG9`( 
zA@ZguNC5JBKu7@nFYB}}Q3)jFB)*zKl%Iuxz0z;;KWUCU8l%fHw~{8Gd`tfhV+{WH znna$js6R-oGFtv3dyXwe8yOo*Ka|JNX?FaUPU`d?1Ti?2JG_b^FGCc=_p&o+oWlcW^3@k)W)4+@+we!#W8qmmx+hlyflrM&NrThDyx z8TIIIErjQfB*Xq&lH0<4i@jI~<4Qwt*)RpL57<}F;yX_$sc~Dr4gsioI0cd>62s99 zdg#@D*N*@ko8O!|Z~r?N27(oKRsW9ZU+F zKpyL`-$GpkdX*LN^RYFBv39#ZIN)Z`A42|;HwxrEF&W(zX2=-R*S+ihCHwPVDX}Lp z8S8|c|Gpsig|HeW7|ztCIBry613N;aiM`(K!`02Hsj_WDYP{0<7Q`O^ktyB=H-`C@ z4;ScQ;|a2~e+okX*G-U2y^_wk8wv(xgDXOTvux|3ZkS+3cJ_f5VnTh*{mRv9;KbIl zz1ka0_SExfLar9flTRGPulz{R%?`r~x_WvfsCfK+yx2nZN;1 zT|qma|FL{G+)<1E<;1ZJ)tFy@lk0~~Ovm+%RmC?!w*F**|IZn%wCDT;&Mn|Bq0}&l zhc;6Bma<30s(%Sf-zD(=OBYpb6O{N|DYTYQ|4SVOQlQu@+#TI-vu>L15mpk)Zm;uJ zR1i}~5Wh!u4Lcnymig6Xa{x~hiJ>nrurFo&{%jl_@zej-9Ab1vJ~Ot(K@FYj&%XRE zqzYWEbUwxu2xNQ8Dq?~*xm!4E>#gqm4_k=*KGIGxlhRIMShhRLBAo-}sKkMtUxUojPS!ewHOzpHf8t7*1h?=|T9EhJv4bQFhjk<=Rj!B!J0$g~R{TXcK>9*X8H;Ve< zSciklEn_nF=s{w}AQ%jPMcd2_xF};YqgXF=7pT!R~09 zC%;$X%~rAs9oHiL9526r_KwA22PO|@CkORDZAY88n-@kr^%2ozV4q@v9ND0?&Fg*A z_=d}ieZ)~*t6Fy+_c_)d4JMR1&8e<%?ce7+nM8f*7$Yd0rNQw0B%*-P?G%nv!NXF( zZ2t!?tV3-?K{!dnui^fe2ycDs#5?!z-~R{M6fd zg&F4>;$(e!W`3{Ia?7iop+z+&ID!i(>DLlaekLkc<6IAoxG&E6V5W+<-^+(@iuihd zMsP&wxFjyqGWC!d*YFs>>J`n9zoJ5>KV5;LuF*u{(2pOuuIppsUll35aP#up|^Fwkbdd0b!}f5Px0A zzYXmNQY?3QYHliXZ7C+;kt{o8k^CpxrUjcwAIdl${U{Ds9<9m>=1oB$)f*y@rEiC1 z+Gdn3;N1>G-*ctvL8JthA-{hq(&d_Rn-*1#j@~DrgipIX20`f*6Wi)gT$YRV&A|?F zxU}h9su#&&Mh_1Hrfsqw;Wo%yZ9n*P>zU(S`Eqxc%$l8yy8&i#w81*>y8@7#5+`Kw zibM#QNW%Xl;1rcg+g8GA&xZLT$q;RA@5<~ubjoes7AUH+F?(Ll=un={w9Sgw&HqYO zh}LeZF&(tkxp<8{l6-ek=Fxr2YBDdIo&gDklf+-EdoE+JDco5w(sOKOC%h*Oy$=9L z)0EcqYz}PY2SUo5m;4npTdIQ2mbxx~n&3qtQjk|^kwXZ6Ze`bW#zn-ENOug=y zK|e8I*cbU`iT$}`5_|rs$BUWN-FTp0xv~K@{BxjX9&*9Rk`Ax#?&Rt&VMo4XLqC^ORbL)*ciQoV3W94kMcZGoN%smoASUyLkAD>EX_f?9BIi z7YdiBdj;(-=wBC(@ha^15(*v&?=8H9qtD>xb%#LYKR;QQWaen)%Q(E7QrZ?+}v}3_;nC@Yu8oT?;#>-{eG!H1t z4&T)1oKwepNliWm-w9W0mTUb!KMD**j?KPy>>eal%Xx_}8hBqiYi}Po?RKBzn_U(i zpgfj)Ie$&wG(9_g$GKMCVYXHaul)$uG!eAhK5%ULARt7aFP-&d62xd%qFCxdRAyPg 
zMZ?04+xMsNO@NDaXl6{j;FK)mfS0KlU`A8%{q|LyQDBcY{+RUhN0G}dsvJzwkL{hW z5h6~nJEOy@yU1DtmnS~rbVuBW#OThP%8YZ{(PX=9Jf6GrG#8%@LVY^V3U7;yI}{Ik z&XBObGZNB=$iIS=AzTkgml~XSBX-@sR59e>^F?oCV{#4$NFUlL9jvd^n2FbC45|_He z?~)wn*Ig@v%rku zC_bO)cQf_Bq6~568H7c=FOISX2T9_KwX%=?fqXx$!phiPh~2LLFWTB;zRB7NBW?aY z?tFFzh`|0s+AR^M@VKm1dD+5QS__ZAlpHNqr7s*HN@0}qYDD-b1fYKf31b2(oNuYV zUPuTqt-@3wMB>W2j`NKbIL3cz?Ect9+R*W)1kwoVlAN7fld(zJVb+{4Im)i~x)xJs zPETQjagNg`OS;-7Pg_-Q%jXlfze^m-Ht##In^2VtU3h*ba;mhqbKT3m%U1lRthBf? z14VZ;3p0GD>B1y9ORdf|@{K*Y*`kN*>YLs9dHdeAA)mAI(tWEH2s1+rf4JA=OD&(| z!O}uc=ZT?V^m--5{G0wJi2F)VM|WC38H-#A)id*X zA(4%nCnFWqyn=^*w^{o@3kV zz=9vcv& zO1I_r+KvRL}M)qlkA$B}YbzzZ*DRF4j%;?FSx_mRs*FwaI2GI#X1V%P) z=p+=^Y2Q#{fDe5fxWv6W`<$~CC4H4#k zPcBsqppDPH>pgec=ATqYyA4e_%qehs9y?h70#dCr&28mXkj(8~U$dUO`Xf9{em+D? zItisHTP44R)(H;2)x%&?O~!?eHC5}tkO&ehDdb@XiP5PlhJ&0r#TPa4%va;|1m;$K zGL1GLZn)Tvs_#6tMOqw4%*~mp=*rnQ> zXcB$(^_JZq(+a!o;z&a=t0A+%{GnX8)bU7+}z(v?I=*Q!>Z#f4T z_#RQ>FZ>c_SU1x?V=@H}1Xs^2_=0v9_MpE3ye3-MR{THU%P(>0$*4s&^AYq-8!U*z zy4&;|O}vcg50V*&W(AWMh4s6Rs%%aaMXf;qwg0i4WAEhi(`$RrO>SiI4-27v2MY|Q zOpblfo{54-y+>IDdKbqHi{%>czYF$8DzCcjop$#yC=DGP<&Tj0?_itao+}%xjy->Ih}1#NqBMcSnISup#E9x6clvmx_<|-?0pG=5ajutxCBy>HLnF zYQtXeA8n8@6 zxK_M|d{Z8S(NO8}&$?c(=Nm0$Nu}`_iFd{`U9;F9wWhjv+Q_%bwe@)0Ji}fIx+Kno zz0XfvAz%KC&~Es>P~H0~F{z(TpepCqK*|NP ze$EZoDr$uG@oheEVxEk;VeK;c-1!r8W-rAAHNGdWq`_IEbJY`2(`=WF2P~m~@-V-h z-mU8;@J5O$&{cF-{DaEfBOM3E@1A#5o1S_go0u@XD68qlw4y%2Ev`dn&owaBc}+P0 zspEQ(!W5WrfC2lH39s_)jb8-nNV&}NATRAx-t77Gl7+6;=tErvc3z1?E)=5Gpo+*}%z0BX2!Kw-A=TXs>kI(|l^H5u26qwdx4#O_Aj z(MQ=4;i~VG76W}yp)aQE_f&SF)EH_c-&=KLuEn8vuPVB~72Qj3U$6leuK~mtv52x@ zOcc0VTrd2mmH=`wc0n{$n#F+g4U@~LmyF#JrIEu1Ltb{Lg8d&?E+(6=+dD{y zF_YZxThSj9+Buk2F=L?Mwx-_3>t3__qAiuIa{9R}Phgk}GPGL&OVB(k{;M$>yXt3Xe-^#fF@? 
zTUC22)B_FVGNyR!FJO7aS%wOiZ_bz0f>__LK~Yy>t=VC>}*_U0)29%~vyEXCX#ViyV23)h|k` z)y9$9)Us5D=8to6-ZRFKf)<6N^ZN3sQ#dNus(%dWTGT~u2z?vH->tvdW8rEHN5`=C zjK$PO5+%zy=gp0Ggpy6Wux_#?f#wPI7x5?#4`>VOSm^loXy0NFO3ml;%RagK^Kyd` zeLcO${WH^D^%?~cB4{bV$!~16C#`&0h(6va?atIy8bC8ibQ5C@codCe3sTtj)m@bzf z_Mh5c93HtTyQG`lOHsNL;OXu?wufs?Yuue^es~a6V_`Y1~dai{%A9O)oezPr^8+@f3R{lB#&qHmv(mcaFr;i(P zYuIMLA+B`tU6H_e%Wk$yO_{f@LCrM%;l^jQz`_6zOJjJ0iq&MnTjw>dmr|)uWyYVe z=_~3iguE-MxXm>+g5e1xOP|5I`>si{+2$cUDyczQgzZ`!RA8ayvYfQMXvvFDanXCn zS*FGD)WauwK}7LMyl=y1v52-ye3RQ&E|=L-I}%mZp-cvEOIiRdOtv6Da~hAZ zxFhGZ<GayDb3FtM!tS>4rb}KfrQV+R(FlVb`5UXV7y8Kbrln8$ zrhp!|O@N5Mgx5Z^)a0Mj;Hk2hU`gQJ)Qj+Xu7NJBO=V#dNP!fFIRPe(^WonX6y!&7 z<4vR4H$(nFUx3;5^(Dp`y)1G(%hOi;wUUpIBX4LbgK2;F?~YH!*hCsoO#jyTS!d&1 z2IOi%HPXPVQ`Y|Lp)?exGUndHOZ3lP09kvu<|G{zg$s|3SFD>_<1Km++1WkYktVP! za*;?zrld#3^(E?#YO*g}dM>(CqBWS*#;5d8ucC{UglM=l3**)dd}bz#9Z)G53j=N4 z3t5*hpB+6_S7jL$?{CkCX=`&6tM-0aj~$X}JlnIS0>=I~h2u-62=-?U=ofnzt|s{X zXMH=#@AA0v%#3v-;Dhx;EsmY-Y>tauW*)UG^^JY*S)UP(abe=JE!kO~tTFeC3kWDQ zF4@FyeC$TTa+mXayHFwq$1z}Rp^nB5#OJXq#{>DzJmDJxioKPdQ;nO|A^>J|7+qSh z*l*p74C&?wwB~p-U0&P?k_6WQlY}VA+9v?X%o4`oSh4C>Kh|j?VERoDnirmfW?Aw% z!FL)qfxE-*&;Nr*Z4rmtZDG%rg7p5K;1lcJ9#L(wvmd_z*p9feQ16Sbp|XqYV#%%I zl|st6=L?k-rW?`yVcJ0{^T9XG6UQn_)`bQ?y1?yL3gq|vb~-FB8}+=;;OP z{J!*W3?LA8K5R9^`67Zy4_0hI!|v2ZGUBgE6Q!+sN`ByLv0$!cxSduxsNoM2y)*z}F z)*g$~(}?CC6mPSheXqdT|B?2rZ(R~WqrpW-D$FUhd+SzVLZ)Bn-h4+Yw?(|ckL+wc zyZR~?fkA_8Sid5ri8)kLs$07s*rWwX)<{Y!V%YB+|p0WW7d@ zWC~tFVPvCDzvA96pXal0zP`Aa3pDH8*AgQ_T=bQA{^cQORZRtc|4za55&gq!u)Azr zAbahO+4}`ty3J_KlpCY@g%0}NP*{~+Mio`dZa8U`#&-)PW%;g6&=}%jgpTT)0gf5n zh@p!N7&S`fA>a)vhc)qUY=s>x6T~HATVN9Um{|;HN#`LE9=n4%^`b*>z}x+kEy2Qc z2n)Cm2i2z#(e&H$Ozf1uHzqb%j6Qoi%qu`oiQ-PL|Gr_I8?UHQ=D4e$fvhc!N21;S z5q`4b$j2>xR%(pa3LQ(Z*k67n)3=z82x{>7W^|GP+EJ*bx)}^V= zKG=Mc{daMf$C?S;sPzalWU1a(WD|PgWxmb(Ea}gVJl1bE?=$F9m<~qU^$NzT3#uy9 z9Qho-tLL&P)t1AZ8K`rqI9$auO?JocCKRgcTFc;I8wo*8q=hZmzj@HT@w_O7c1rhI zTzpo4w6rjK+;f?=eFC4nd+!423^y=`y0?)JN!Yib;&TQmLx9HTIy&5dvV#7Er~))@ 
zD%XA}hiSnRgcSNP1+`Fg@KS8(s4HM0`||XyjUIQov+Piyd5B6bRS}$>nLV0^n$n0p zo4kt(;f~14+XsD2$UGFDA6QCH0SYjJbclK^hJo7~NdwAP7jLB$9u)8Th{rVA1oxiM{(^r9)_$7ufcNQIu7&Wm>^JU<#F7j5M1)>` zY=sAFIbI;2IFx+uy2o)^K@Oow}+&adC8pFnp@K zMgPGkHhZ=-QDK$WUm)4KCi!LdhNX~j%Q-wl!M`olMJ z@#HtpOqZ?wFns%TLfl1Kr4$d{VNxrdP2@4;S!hCG_I$1)l&CPyt?(|kJf--{%hFp2 z1>CszpG)t9))sm+_KG=sRU%te{h9jl=sN@ZPPVpTFYB}Vh&`HTO%`&Ts0Ow!B>-m=WoNm$Veq?9L)xf83lH@T$ZQ{0pz30_gth7!SME zlz?M=f3pw2e%F(UnI&^P835xNXd-E_3$~O8Z%%ilxH$Ru2%ZiIz7kD5q@q8a<9r0b zIQm#Ph-rNRNTBFTT>4t0mG>pIeg@=x8|YkY!0$ebpZQVjXD7yigOK{J$WYbb)hp@# zMRkGu3-GjS)~1wfW~E1j-Lo6Bkxvat@+xKu@juRrYn*fiUNW*&Wb+H3yv_wP)5BWR z9>esk`?TTK7Zw+f8iH=kx=+YT%df3NR8r%Vye}#RFUDS?$BobSBQGtTRHfDYkwg|Z zXSiP+VQCyNUF_iLSExH_H+W^Uo9B6vgtf9jS?t?pkI?c>nSzg~`L>=-#gvuh>D(_c zV5+Wvs(~Mp_w8%j(Ny}L)yoofN@hPb+?2fD7}E0>;V8j}aXC3+J9)eEj7>#O)J~&c zcenYn$<42OUAQig^Bt609GtS~t<5M3Db56{ldLl{#|cyrG}>QA?3~xx#j>BkiMj;i zs+UVVsJM<^c*0CI<@RDkuX<6(CHnh$lD5KB@S2NnE@O*`+BI_5Y>&;Dlw64Gcl?1g zZb@aADa=6(#cv9Uk*s+bg$r&OTMbwl=H})Nifyn*g29;@uD1HQ<+bYw2- zrz%+4MEoTAa~y7}wHt3T>6x;FiDE|Bk{GCA{>bqW$IF?psEmHf;zV9=8YqAG-~i7b{6VuaWV(GxBp;9B!Y7eikMU zr)qRuI6>um={Dpre^u2u)gi{>xHz(YepM6qz2w4zRQ2K*3pB4q*3gsgsQxGcK4sW4 zp2hst@#pv*$MaSfgO^fxg?2Ec3cV?EgyIBo?%P7ym&_;G&f#NPgl-Y^E@foz=^Z4#90>Sfr_k;^j{i-n5H zv7*Ct!Go@Nq)VWIR2<{=ZvT|Eg(;@KpKX#EQNPmzUd(_in`b@aB`n(rKQfqsRh{Q* zH9)4$sFCs7LW*dRYBSq-1V?`+w%HhZr~%b(r81k)=Bc+1?yc&&Yf1p2^nJjj4>epS zR};&5CW9ef1E;;1sJWYypEb~kjIu8j3qb09jmVVpJN0M?B(Z`Rp*qy&CTw`8jl7O? 
z@^m~nh%Gj~M?UVe3h3%r1K;W>&Rx~5c}mAKc5!>Te!4%Nb)4*^TX60@+f?q|`$6zD zd&&{n*#Al8O*?94K3P_6f1Qnqih<{$NXwVZv%PP7H=BDbSgFDn- z48{Grmc*ljycT#=kKcsy4_6&!xb(O7+c(G^u1)^jvSh8^hTmf&QIr$JG95sydBqD6(SyFu%j#HD27$?)J(V{TP| zlsh~z@YdE^oqY~b@RsM2NkZ*56Ci(~4^~DM+taw@HdFW&7K`=dWuVu}I{NGgAS7jm zr1h+~v(WNY_Q!2f%DBd7RHrxj*5`E@=vYTwHjJ}_TK1vD3q$Y=%7GzdB>n;}|558s`g&<_LDykpU@~oB znrj_n==hPh=WLO2tpc6Y4Aa~9Bk)gWV}v~2$wF=XgnZ$9a2ffn1z zq>Tl=`yKul?^E|lttN8QROXM#zM@`drmp>%>3D7Dxw!ujiuYHtMhKn=%W2)Hp$$e@ zH}hKJ_#@vwS-FLUD|Y28=ZpJ^V63ARRuBZrPwB`#TDe0S&N}?tie_Sp2705cx4OO} zT@^nmuch7L2Nqo&bm*OZPlF& zs!ggNs}x(DJ+D&!qHvJg@~LR1Qn#zZ$s#Nv&P>e~tO09#thJxAxO zqlQz;U4wenmInC(l@9rpIyY@Af8@)_yaZiTE9#UNbFp_T$wyS#u#0ty4-0jk6R>#Q z)SwNH`=y;em7%}uM43{z1-;m8ibJwH>RC$%(9)eYk+U zu3F&{qple%^PaKAe96TIA@n&s%!rJIZ>xIms6I$}1?^%6M!A8z|0B}(z+@ZRuw-Sa z;TcB*Z69&x1O(K#SNzSMJ?6TB{X5e4&_K-`k{jBZaFV84Ky6w`mD(j%-Nd^aZM5Rk zMyGq)UcY6V>ZA}htb2RQ{aa&0QXax#MC1FASyJ^4p{#l1ZuMLcX!~VR`FFUN^BgiUc_iKx( zb-TAhmX!X;o1}MQXbDV!gZ9+m%KJcHEnNCCBeD%;+TlP;mnOH_7vx{dc4pS!UR;!r z!K!u=U9|dtvF$`Jaz;xOe=s6T5TK#D-mS0e z9m9NtH{!S9^oyDEyr(YHY}(*aV)pODu%_?Q{RVcMci^23GbS9qEL?GL zXN(bvL1N9?7_Ch1K_PLr5LOIbi7TEYgiu_1}@97TamB%;ECRC_d zr$eKs{*fk&E+K^E`du8iBp`gI!yE?J{Y4h14(bLz=mOFXm z1B(tk(i2H!zC>T~N3zDZJ31swuPzKc<_B_O*C6;;i6*byVLvA>(O&y)_)ZjbM3*tM3kfbZ$ z%y8U%F(-8&7d%97WeD`*LiFv1y4w2%$Q|Z}9?CtT`s-!B^X0a2uc*S>y_Mv!E>VIP zNM-w{hdhMXX)1vmyX8@&40w6*w@~WH28d=}((SLD1%^+_{|{YX0aWGoy)Af;rCUTmLAs7~hk!Il3kXPeNH^a;UiIGJH}ju4!;B!j?_PVwv!1>7 zTH>vEzqSDl307ENHi#_(bxtk*{Kr0| z%6a|7a)^eCF5gSX1F}Y#S5CvZUKhMaj0d8ip1}_Z4bn&~>3q-t?QS)Ga z)G!R8^pzpu8}#R(RPYNjg(lBnXb~X($zcm2dhy~T*|U;Bi^i~|&16yN(P4x*7DPY_ zre3Ha|9u4CY?{6E{c+(1gY?{|e!o0&Y#2zKE%wXNuJ4OL(b7p-C}Y>b0&uSH8{U8+ z3nE$IJ#XpLx^;Rcw*x`&$QKJ%5djU&i_`R%V(3BbT9{ztQF zn-EZLH;)c2Yh6dc)FW7Q#KeG5kycd>g!JViy(ERDMc}Vv683`5^ZpQQNoeKk z*NQEe9Ay4@%kC%$*5A31lJ9}?vVB&NNOc#J0t2ZXDXIVqPG2Fk0=CiX?(K?}gthbQ zB>4`!A`S(!&6xxibGx|=2BwTU*tPKbgMMOoBTN}Z#pI74O|+V-q@Q~P zC^h-&T0S1HBqUsad7HF%c3p2SJKt2%zqlA&k>fByV3q;8ef;o+_vlJzWx3e4C#qks zq0*`<=Ri3 
z>k0gXxuh{}kld@^6tlmGGtvW$;f{SDY@ETcz^&rKHj= z2n2`#m7Ys(_2>g(;~(YlgG6F{8}C1}e|b1b&{lrlZvn3OjDhH|tO%4xj0$rvFh@hz znGq6p957iRB(B{^&$Stw$=ZHmZ5pj!WgTkc=MkSu2Y#tGQzTS)mQmJnBlm*=>vtA} zPK{r0$XC-B+jhD7CTh~IBoW!(miO=>Y|cwNqFEndOWEz-S#mla$66jd?;CtK9tbw$ z(zO=*E`B^RtWw=yL>CL%j3WlMxf!9-f}{HKQwoebwAW!52hOH|9Y7hl&; zUpeL|6mdB%FOsjyE=*>odSCK9V^&0Ki26@8}H^kzg1KyCqeX|hDRx~T#) zy39eg_?VX9GUOnMr^wFzN=Y+=w&oQ^s~8R3Dx?_J#EFkMCQ{x@@ggO`Y!+tn;1U(4 z!$DIlzZtF|6DD(Z*V{M>OnaEawUtc8fMiKfs6gcDnm=-$d{&oFK$=VX!t$*`4VCa) zQBK_W(2TyPYN6X+Wl|iN74cnCWJjj}%&A_f7}6E>%Cg*O0M@-bco~;r0$Wf+0e%rJx_w7NV4* z#N6xTSv#V&FgVNjcgwU9Py}!!!w*naO&R8iC_s*2Yq=!8I3UOuxtUUhCv8@1VB~aR z^bp{YO+4;>&XS-<-e_b3yXMRK@T=KTXZ*y63QH}~msPaSwmA+Tex{4fc>D3z=V|rT z{N?AB9F7%Ei4Xf5BjS%C_cs$5jAME#Tt59a^Jh}I9uQ2%w62Kh-_qm7grX!bG!r|v zd%rNc8)(X$&q)huQ~-E=W-Igm(j|TY65*>7{f??xQvOir!UriR zS`2v!C5TTT8oeA(<%Ez>#L@?vJ~k3A#;9IE%Dm6~{At5uZ^P2cfs*+?RYJ9L_h#Jw zvxm8^?2kc{BP9w38c9d#=y-gelQOn3)one?_62vdjsTDN$#P#ddG6Mfd4j&NDTL@4 z$xuv-$krP{?e&bL4a<`^dKD?#aU~6m#MZr40Kj-?P^sVZmt_Bimtfexc+cgthw$>q zhU9l<2&e-wyMxq03VeF4jw&rAU>!5eylo^LQFUwbC>$WBhEJwuuhE1ll+=+Vwp8js zQ;flLImU^yYWxI{g=amb$Tm0VA#_drxLQ*crK_pR!bz}`ENAN{8QqPk{9tDiJQem9Cf%b&qVFX=<4N3PqDeg73las^-}Jr3K6kMVS4nb;gEP=m6*(;fGDuR^_q}U0ya(u{>^H(1Wk4 zs><@U;z}Ai`?+hg$*hFi#xeG&^b6UW*U$2O+^@+#P0zEkDTM4y7PiTIXykbhpfKp% zBqWyUFmOu-iB&pMqk~2>GM3}o9QDUzf{dg5F1C-JYDq@*>#I^AXg`*T?#5n!lgTPx z)e>_PO;gmje7Rx@>V)cW6UWYq#0Vj86(}TUwClU{H#kHN3rVJh~n985tR!DLMOhUyOj;3w;jcQYCCfXK$NtPHeJ+g9Sg@N1K>g*vs7|;m)B^ zRV!?w7>D?RY>EON5+6_VgkL9~ljE*y=}EELTr><_~ZI5==r5$?S^DlgrLl04x+{vj;t z*{ArjJ)*f`mXQQgkusZ&?>yNHl5)UfzroS0sC$ZELO+rP*YQ0uh?Am7vK(!DTs5(a z1`+Z?RK(*Mo0b;yR!vgZ3wNdH4W)c_9OMfMu|*N*_BapV0@WctD3EG#TZg^cz=b82L*Aq4x42686mMpy0&n&cAV zQ_8dZgX*X1^gG5rY*N-&ingOUZfndv_QAX!m2WujLx`xME^awj)y)#syM;EzM3Y$} z@8vJicJ_>O^RHQ+P++3NYm8TEBYoolXDN&WSR`;;(pFNa`Y--XH5McMf0l;cPVn06 zfgI=`uNP?gbPtZ^%*50b;d^FzZnbuy6_aFo)2a`F8nl!|KsLVWk#|8)L4157{#P*? 
z?ZQW&?1UG!plEbsp~szg1`~*G1VxWNrB8p1m|iN^;9*;Pl`X}B^jO@`O(ls{+3q_i zR~rBW&o}oL^=&zhT4J7PMTPD=r>tl$A;7OBahDq_CD}}AP;S*E%e$b9nL@Wf(xTeb zNW$fu=sY&~D=~rXeFr(*Rzub&Ud8AZWGM?NkPgAi*I zp&$Ukbi7mbS&7er?yD5>yN_Qg$G%IXVo@!xB)7$2SLiqrY^2bpeQh}PE>yY5JXd>k zweq+L5t}UeD^MyiQZ)+aylE5wP8NWR_ zRXMv{C($H0sdVboPCXqj;^_+^@yJ+qTeo6DR_Imcgps$KNdKGJ*PQ8%qOs2$x zq~LcdyiWzU#ZR7PiO>>=l-yR4MGJhfG{C?2+yb!~bgFHp3|o3%zjb6N{Z2hz<^H4t zFla&_X?tW`wVCv4k4PJ9Yrl;z7%hP<;OjzSbEm4gV=*`@ZCBmq_c$*16#wIMv4KGI$n~-u z;WD|;EmD4@4}OD8fc7RZdcz9wzP|>XXjiy4z;1#d5P0IN$TECT1=*!|us@4faV3>( zZKd#Vd1Iq)Z*Q+|4;2=qZ#C%bQ6tCr3+}vhCIW(Nm=eHH1Q_Bh(f5rd_&DdI5kl4{ zQCROf*pI+0Wz9@<_tVj{3-eq$`8C&qktUzl)p)W|^j&DbD4sSFjb#BS!ST8ysXHOvmf+)w`gW|G?Vj9yr2Hm@!%==2Rn>B2aSw2iEMG$kxRrav zbM2dYgf_%AC3GjS#ls*Tpa`3(h~VEIZb_a!UrQZl}xE^QD|2NCI13`nI=+^ZH7kVt+rJy#T zkpg&;#{i|by>$GTZ2$P%=iXUWeZ57Qd1q{Lh-$oQj*FDnj0p-Vsvg0XAQ^(ZwMC3! zb7DpFHzvsopT0q`je)7ut$-Ww4 z_)kFPN&)1NQ+TkZFc`X*7@G5PY;2-?@L)hHnG^C9{)uG?4K>YZe@b$)oTOxMs+;)d zh(J><*#KOE-S~rYDsV9BJ~1KZC|CH=+(gdIwY2!mBbqtmvN^hjp>5yn7mxJ=$_Q=TvfA^DUM5?Fz-i%iT&|7St6~^E0;U9 zmcIIU6ih4l9vN*~Qv?N)xOG`N6~J^GsE?U1TFodxjh1w~uadw7RqCcbg zm6Xg(lV?w{QU-U|4|zTugugATRN__K21nB zBA`L%|Mg&CENq9H25VcFio0{4*z=_dQf+By1f}CuIV7C0lBVA>78EciZoOOW52h#>>MiUcUfkH^7! zfI_tiLLP+!YAjzO(OZ|<@^Hnp)0fGtN?5u4l!!5P=iWsVnT?U@Pcw!o0fW~(gV;1? 
z7yK7!0%9Rnp8;(9rGBtrfdgkS%%K9|pZ6%@ZYvs5aHFmM`AS6hw#AVT;@BVkVY8KjtP1dVu6qp`hcMAPU`k`9ij{EweR zK%wa2)c@z=b8d5PPKutJZk!tM)V_(EQ9JO=q$MmlwR9Nhfv=ih$-q;~F&wD8H(T9r z%S0`u$8x@NV&@PA|Hn3AZA6;&Lx0k0UkoaJimrL`<>1E1=Fd;Ba16=iZ*_}vq|sy& zmRlP*wTga4x%W6C(8!S}Kn2r$efbD2(6n9h-DPKAxy|s=JpxCscNBF^Qrz@0CZbb= zIpt0p)~Xui<2+A+3yTz3^0bVyuv`)Yjl6!TLclkieZ1pE(XZ`A94DuE_`fBOoqFd| zlc(tdDOrIt>mG!{5M)`FYgnI10!=%ec1y1Y_xW=BX#=)s7th3Vj0NEh*2oXT8Bs(8)t4@lA7{ zIuhfLC)kJi(rx37Oya;CKh{JLQ33!jch77I3_z8ae)+}490?4bzTCnv=xJm#C}8`l z+GEL*Gy9(DA$3_zi2!~osjyPqVw97OxZC=ObTqL~L`fvtzY2F3Zp&9bL{K05V!x2< zS=4WE0xW{o(5LiZN`$p=3 zRr?z!`TMUSLt}seg&9sgCNBl+L6Sd%J zG7&u2wx)L7HkS*s471H0X1)6fWwEvX}=|0gSn4)XO>x8`!r1Ts*z-Hf@Ld) z^pEs|lv*z$IZnRxEVw-|7X;vPDv*N|Ng@Dv9bAecNXYOI6n*9>;0PQxF^qwWR(X-C7&&UuIEYlGZ2^?nBf&KMJ-Dt0mPs3sm?kb4)CICfZ2_YwY8 zvD##qxY43=9X9N}KNr=VxVAm|<3zY%&phJ+ZnHmx2{4*qT0-(o-DCaNh8vGZYIh!i z(@D?{H#V~+&8*bc>Y3NZM~~;88}yoosoVND2BXVkmCl4dE5>i7t?K`0hm%B@1xT)= z3dCB;+$IL>I)n87=$D8H9bjM2zxMj7QD{zk#Ddz!x3vC-Q(V;2R-jHSke!lG)}-wq zN_JA#L#xT{#a6Sq^;{CQK-F#)w??_qDhr6BR?~vc%jYBJKd^os0Q@!>z(E?=cgY}! 
zO*Cbq(GDMNa(!9;w?)1frJ)ZvI;zBxfL0z-09?TZkbJqNRP-1)*Mk^~ImhP*u1v3> znqFKc^m8l9DJHN|lVQ1>OnR=j?c>c_lYzN-ywjMjYE~(-6PlX+``ms3!SuSPy; zcSr4Gb+we0mDQrtLMk43RbXcFU2zwCm~A7%1bOoE@^UN7*-DcQK1T&VIrjbGJTIbk zcH^HI3?(Hf@vGR?9&6`?x>O36p(Zo=rBb8jwlG{{2uWRnDQOD_JAU)bG{Fr|lXvc2 z%W}-1a~$f++r0HjQBSsEA0YD`Spit^!+&>g9~3*}c=>$*IXmxZ*ii;6~o^ z6hKOz_r59~6!E}i999iaiSp3J-=3!06Y^__nwgUpVwGjssODfZb!GL@dp$AqNMY{O zKYRIW_8=!X9r^3KCy~x@h+pBcTzhCN2LGtyzvK0HG19=nc7$qD0ZZG(?gb2%A&4XQ z#Crnxkdp60!2Z# zcGr9xd@DbOoPsXidC}ZqHFbU(KR+}x7nanSW-|J&K>@NssO2~~jx+FIYcYjy9sOT#6@I5bUJx|YrEC>SK4uCx)EWu|ZQs5M0GBiXIxgAPpdrp*=xAe7+R;e|K~TEN=%B2HIpIF(3@=hbz0(@2)+d|bqwQJo=}KE1=gffu zO?`dJzRI6{OpG5Vg#>e4wgfDr4eN|A?`xgUFithh%td)BEE-KYXo%r4M`d^i_i1Gc z&@uUD*y<)v_zWh#E2SQ2q{ErcU9@hgoi@9edM?OH$Ws14p(Oz3@ff>dQq8dGmzYe6&H8uU1P|4FSi}`xv zxyNoQK~6rj`{Vnd7MD4WIv%7vK963b&(zb?W9E^MY^)?p^H4iRO_5*L zu6ap{&{rm%CEx@quzlLTT*iB?S8YFI$74TJS`yi068YWxGAeiNhqc;VtaVxKaIvw% zJHMX~Qumj|C#bqcem=nF!6Xr{{a$HTV55Clp;fZuFHcj{-exve6pn5%PdmQi_4H7w z35?sa9qj>`>u*K81A39QW?ERL6X+*BBl@RG21NZZX_%Y*C7sAF{&!DADfoa%XSZvC zgUyO9!6LwEn!24=(?{tGM^b7|q?{?~ou&x)k)B3!vg!M+7%V3d=;P3gv1r$7;@|FP zdw^>U)G@+x1l$ImyLc6iri$`u4+^KFGtd0Nl%_W>Z`01!ua%Yqhf^RnW~8H;h8r_q zwGH%$UmiJlEFAa5&vNMK`)w%HtsW42r>&o}6J#*O2p&V{trzewP?F1j$d(lXS}c5@eeBnx*kimW;#rjYv?>#8bMsL%6R$*gM)ES*w9_$aKlt8@e9+9 zu)@MZiGRkzBZYt<@+grce~AY}o||Sx)wKBrQ#!+uMQ3r)8-#g*!z!lB(yPkJEVwbj};j($7A@y7NxUOA~P9|6+}VPoE*1_MTv$G~g)5)*0qc!&;X-5;e4{r4ml*36{HupmpG1YnS#eB-oh!J(!JVfnyGLnwfMyAwIu zX@|>fe)~xLWMgKAM+1-6W#6}DR8o}Y8F2li&)Ujw{e2Hs$DSWIF^zLqr7Xi;qD_bw zXE@zhoKJGb6uy{wdH$_PA~-^W(tM5iEraas;b4V^6&2^tIJ%V#{@qDytxlQj)bgbc ztn8(}}Dr)2FC%Z`+4*;vMF{P~<(<{+iYlxX^0S zsdVg>$fS&v@}2b$?CRhQV8CG=&kHpS4>c8-U+YJQkAwMm+4|(>Sz66PL7L1|j-D4r zJ_iAQ0uOP>ymi?$n`~3b$i^8po6m2G9GAPP`|nvP^ERw2u8ie7Xcr0a=0Jt6%Cnpud5A? 
z_LVqARvBukt3L#!9{P5ai>N|WH?Qz>o=3*VW%Ir7EY*k-5gEG&=H;!dtv?!0;#bHu zh(4DyYY2e(m#B&xT8*4zubhmlt`>RU%ToRH`>uZ|lRE+TAl{vFTc~{V)`jd@rgwvI z{{$CII!N)QW^~g_$9MgO=cBFd|0$Y?97+Gd({`}nCrD4A>)AX%r4!shc-RtqbGc{j zsmZw4uA$>sRaNmiXx7tt^kmDXR@ziUql(b#Wa@cFvWodTn=R@${qN^OyT72PEob$0 z^@_&`|ELMCFWoF?&ouPdOwH?FPPrYjpUL`6wK3d0Sfq4Z_C-ay7@-dkc>@tu-^&I* zF{=jx_6i-LFR7(J&R)p3T6Xkc+Bx|Kk(|2eR9M%n@iwto>n44Qwq^c-<vs+0?>~s3Mm(+3fJUHLdOygZ{INUY(jIjO?odu{>ItJSmR3HI%P_Wt>tFbdikWm{!m9cICv+1QoLnV3EH^5 zDZhLZdUs~i9t|Sjl24=F1{(9+2K8QVBJ?1P&c|jFOzcn=N?DIzKknJ`d3zfmsH&~k zEY0Js=a*UdsC#XG-#$h&CiRa>i%>j*+agntD#_LFJ;z~5;x+4H^>Hba&nExN(ulxt z&yb9QAhCx_blW#jjrLD@+zmin7unbW95u+vOLRgM1p3d`MA!NCib7XXoP?6#5V!47 zlDpnHR5o5j3x3ZVZ&%gpvrAo>$zhqVF(9`bt&S>Tegk@+VT?9$u{Cac6AJ$nZBgK{ zpQgI&N%qm;kWQ0l)ksTKmuhx;Vu$VQw-X(?1f^>c^;3a4*j1{R2tWoaG3tzWaWaOw$NpbTNq~HY5+mpQ`(&++qA+If}}$-eubHZRVM1a|fYphii-3`nC-px*0q(k53vIL;h>yw}Vg9(B2f z9^YlVy8G-~471H<+ea`T+u~I|wUjOBzwwOL7ueqa5T7(#o4+xmzcR#cT>SL%3{}NT zbMVF}3yGBnAi|m*CafT*l7#B3cD>g-- zW9(dKbeb4_UxMVaM{NOD5@S$C@Ih_F>Eq#kXk=f8z@FA|+_u)wW=pTr=2xa?vd`bZ z3T^&?SZheJ(O}}v0#X!GioTA|_8*d#;sLxGt#09Sd6Tx0AI=^AT(XKBj=va%ze%1$ znqdKLjzo*k#Yy$4(Pq+}JNM{hB}CN+%HF?M4zch;v74{fD_$@tYdK&$=tV6H7j7$W zEY`ih?vy`A8;e;8QKkthTzRD4doRd+ZM93wk5cyXQ5COj(_y&-oI3(dz}CA-7;|Uda{U1QUPvCuqQf00)k-0 zqB%LP*@jabc{z>y7bb)k4V3j=3*|X|D}@ndNu9!1`D&j$2|-9N?3L?RcNu#*c%Hp- z;`co_BEG?GFK#1qD7J5~MYY9snVouhprCAhWA#SunKmur327E;cHs_A0(pwcTZ`1x zB!;5rrExQMje5;zM@$tnB7r%D`$Jv3DZ=l(LMpA2s2L--U%AChuDvhS^BNsLsPVcj zjKBD_e%Z-!0L3|~#OC+&TVBZeR%Mrd+Cns#Q9kNO!$~uDJITaW#o9^3>E;a|!T zlusL|e5#`~dRxhZ7AW9)4(nJjKeCIf#;vzc~U!lb@p z%b72%=<$z^xCi);i;GMVTVf4g1P=Yc*~rW0f5xKuL9gOuvo=d!eXZk~IFj%-a)TvCz*LyZ}4 z&mxPps=m-q44J2U<$ibUQW$A6=hg9`PfcnmiPtH+EqZ$UBDdr`EIYLCF3xU}(W{WH zl^aN(d|AEbeNxY`@7%j&Q?*73Q~Kcr=IL%BtAu=t92_G9j?_Od5aH*vw4!`rthnmlvjSY zi2s#GZL0sVWSWtAWstZZAaN+QmxvF+9D+a26h;|E%N_YJ|AeBg+|NRXV8gBkBu>oR z2Ci_b(S}dkmxB_USXBh87gfYy_Muk2>x>S@%%)6NvSQ-% zwR-NYV}}AkuhX&iX>C^Cx9W?9DAYwKo7-6)-n-)k#@WZ&9_%Uz@||aWw~C3!Mfhp8 
z71QJtwut&ht4*==vp*ATE^m<23i96h4aKN-*~OpdX+hcFJEoP%Tn2sJd9bXQGVc{sy)r{Gk%?HP@avw>Do>KV6mlV$_vg zWc3sbrMWy&iX^}RcceYi46Lx)D(eNZ~=ODUkoeTP>KO-N((E>DNoZM?^`5Q4VHR?Vz63aa#*gq6d z+)|c50Z%1bW>6z~f7{8zM-;O_nl36uSl?M6le@`)CJsTq%9Fg=F=qRlD!Hm=aFE6a zqb7$x^oQA|N}e8*p6C64iXi`xu2k>EQdr*dYgAj9FF7R8=S%brZ2479`W99l;w{n)u6nEz|Dq{mv?p03uxN z*WVpp>i+oCVjrX;_;KCbkb-Krc=tY5OeH)_S>7#oyF_a-=~@v^bC@!p zck_;qk4HgnTY+3%Uo$W>KP9Fsdz2DiK4fX>CTSHkmLTrKw?K`l64wFv9#ZnS%cjE{RYPjN&$4)`>Z4|czp`UkSxSi9f&BioT0jO#=H>maACJG# zZvUc~JB_ZbwMdbE zH1I=v${*l_SWw3jx6RQo=0jp;iqcTi0T()<&^N(|F8UQJL)W%=qwB-b8RvuU=ofZE z&FXn;IJlVl4{Hes9?-e^u1zNTdX{tsNk#faT20v6#f3tU{O|n!JP7u$x}zx{me?Mw zX?bKQXwPkcl=2z^=2yZ0^U;tMVbk>NU1NH_A85FNQn-!-X-vBVk>}kh1NF1KEClW9 zf#9E6K^0vVZ8bwW%4m@`5>@p|wB+Q%5#H1fRikedWG~mWpXS(3Myv;lE-@d({$2DP zxNJ-){f{`4W^`RBcnU;_g zsGT-zNCD{NpDelWYIpYPMvuKK(8OYZRlRA+7T-f7T}HSfU~up3^9v$&ZlbR%lZv0Z z#lau>vqO<$Ka>{Wc@%2{HQGiIguFh}l!d>lFRWTb`Mq+m{#}aqw~a2d;-j&;P}gjD#?@(?ww-LfV1HE1{% zc~q`WhHOGw+&?d=XDf21d7J!|y}yVCw&AIX&g5VXCR5XfZccoVd|0xE$E1lsc`kNIf#S%ca30!{gSbVUd<-BY;wn2 zMHKNO;Ill*FVarKW*OI5S^AGSN`W}GO?lcl9)3;P9N*9T!hM|&h5DnQpy&oX@Cm$w z3jd`raBFrb)PuryBw1yvrYr#u_BjZH{!OvFAN%GvI?=4o=<;0@tBs_md9C;V##6v|1E;$VLK&HjfIT4e@1 z!7tvIGLk?4y-h%$9q^38^vBI*Z%%BzqEuD98A!D)qeyMwac{b7b^$B znP1O2rEM4?lDv8Ov?RrT)345MXqI8@hDMfQwyh6ETB(u2k<4hd zDAXCpg6+Y*K+~SIH<_8zMYmUh_xH5yYg8SoTya%egoDS{O==n&Q#}aR=y%5CXy+2~l+5&u|+11DTlA3NFfQ;g1c0opLr*~4Kz=ED5@ALucD>FfBw z5eYI7#xCX)V(t{!`DNmr4Kra_95BL zwEgo)!wF6i9GJ=j{66ipudgpI8Chu*ILVX_pE9rFqM~?@6oe?D1~tf+Q+@Uf-_+dP z+A>=rl2KFpaG`=VVNpq0Zc(dOM*bl_*P&xY0ib{1nKBI4prPOyP*$4vy_l*rim>Ba zKj1HE_$0F>Y_k}Ugh2H0uLQqK_y=<}y$K;G&_$ekTSXKujOeKRJSm3hj}F~IqKCg2 z!ZQ0ghR~+E^UDBNXuzRtV+W&Vrk;hAgoI{g^bntEoD@9djflFT3(7wFn4yV@iLzbk zPi8_vN$8kJGM0)tO2=y_Mz*vg=Ypz|62kt25ZwZ0Wu!l`;s+1>1)_lfz5aVg3{UIb z3zbiZoq7ADdG>W47@2k4Ln76n<^;Ss6V?rDZ~Hf>$b&ht{? 
z?q}mM$|V;peH|rk=+?aCUZLdT~oCvK#`JM&l)Nd01oO4iq?nAiv z_dl&ty>Ijsg>p?c8&`pEt^oBezv^fFPW9+fteBWsx*4D!^=Mdl`W77Q#WoG_GP z+V5@mEHND&9qBna)#Qm87D2K!809z8(&v{L=4Ie4ShfDO<>_}bZ~4-v^!b1A8pRK1 zpb8{~PL0etG7pw|vvLF%;>{6_rXF|&Y5jINA{tm{l($<@yl0eoFC-3%Tg* zg~1A@+jXd-z+-15LdJkU8j<+Qze>|GnfI_Spj%l(t5U$}1zRWSyfr5qKCAV(y<`<} zWjTZR--z#jiv4J-f*N#R@=`K3rTD=JHcU^lI>`6ct5*#4`QHMgi*i}1dA0iU>%_z~O_%W$%KLv6-Uo6t6y1MR&JQ4HL&_&A zOb2ctpi)|!yxXuaq=RD?{M=eOxj~O!hVE)LVy!vF*2%6Is=LNL^Lc09YWfv%B}^W^ zbF`WVZ^u&MRmL|*^0zYAm_q|^6rR*xIsT8RL_wVBFpuQ$C#?PE$#$&P*}Q2<@#jtE zU?BYM#;q@qjfb$Y2E`9Km~3Qgf|~Eb!_#wdjpcH&@P?NkZ~|<%&;&Va?A`aLe}O9R z(>*wtTU^ZSlf1AvN3S`U$sQcPsPviu<;tR;rA)=by|Vp#(zhP;{-f%5?f%MGf3QZY z5&T8UhuXMQGMgu8J$IQHt>sq`!|a0d&6(m#ia_Efs6E4m-tNJ&K(T& z*e%{hb=etB2^PC_GjwM~Hgfecb$?h-B0C$~LUK z7-HLK9n^4C3ei8O7`Gi8H1m_|{o3-xJ{KdC=BEe<)&D4!NGFnJM3OsJ;R}+eIl0@Z zVb4;`{E~1A^xu1yp(Nt-g>#e>Xic!;+j*iB?i|ND}M6KU>jdGd?H55ic+s878UL^J);Q^oB7%F09L}|K)r|M}YM3b{R zJoS;Jx1V)C1L2ti4c3R-YhN^+7V^%$-aCF*zQq)BquhXa%N<*Gny$aT8%Y0mOK<-Xz?C^fAtz#xt`NxobaYMnPI$4nu58P1Shh*coyp)pqcG ztkn766iRAm)M>LS=cB<>{HTHq(yorg zai%&m&Ut505k<^LU0t%%ejXWUn6%@;zX$&eQtpJh?yk#2%9k!nsO9d_h62WD-H;H{ z1<8BuzNll5NUqwf$pZOs(i8#X-)!hXjQ?CiU`@+%>AE;n{h9w7GN$~2slNeg8jIcb{9=fX5cijAcb$}`@5%a&u^N_6=%Qf zSgyZ8lq<0>Q@;K3j<@cxkPt`4mN->l=1{FvRwoYS zy74s%0q#XZ91ua{4mJE%A|iJX?%lHzi=CD0J|cog5X(1-4(!MENj?gRAMoHyAhHK&uabc6IRDZtZ`0;P7j1(8SZR}3d zcVLTp#LO(8{fYzpRWFcEPiL!kdNemTt5%w*ge?gPEpAi)4E|9E(Y&+7hR<+#c(_84 z`Fx|GEe)Y>-mLlQ&peEtlDdP5Nd{)B>hCij{h-SmrA7^vNGYuMcSO6R_+_WMz`Rax z61E247P8$_6a;W44lAH zZwa}sK)}cx{NO|9xPEl5E7wDhxfbVjAjj^838iWEONAhwD@W1$S*g777Rjd(FZ zuzX2S7DyEUK$L$wq)`j1+p_F6UE%S`gx%!mTD?pbF|=--X;ZJ=voOgtZ*^gj(gfEmx%Y6lqQ>ZGi~b}bvSa&T1QleayVXI_r09BrU%}( zidCBEk2ik5iL{=H2hzKv!nCk7sb3l8gL{z1Fkl{R(@&WZzzyYAut5Nm4{r*$B+@sa zKak&HeV|WS?D?EL<7`#8!jp3BgXNpfY1ClUrRB~R)rlKD{Bs$Y@%d+nSX`FkdMxBz zFcZ$#(j_UrI~3qupFp*oqEOG+C>m1gV}7qCA8*;U!0f}bNMXnaANQfzdAyZ=^V`@# za317k#?AqYZ0^#+CIS=e#mSVylQ*sOqPhO~Y;e09q@ir~k@tZ6@BljJIhSGdg+h4+ 
zCiO^Rr#HTrH#9x9lA&;8mswkk{W`aziR8E*z3o|H%ECk=EMH#CB5V{So^3Jr3c7uv zB1eYh3{YtbsHocOI&XpVvvbQ%8`I;{FA}{2hq|}W84myPtW7V@pNy#qMNI46`ICLO zq9Uy~J#;VRWtFa`{FnS#22GA7@Zn+Wd@Kf(i8@gk5wKK&{=BHk9(7Yk*IsUY$9(M^ zs_9Ctq@KN@Dqd^l>6wRkD;Yv|FUcu>#Jfk`Q_SrZtNCMZOv*s3`Qcq-4P*+3o6JA_na+yG@U!gh}oB70qC=?(?})WpbE? z(q|1BqHIk}26ot6Ue60MWMZiOV47?71XH>(5pbjH^x_vy%TVY6cl!; ztO+gqvi!B+>9p%tCj;%T_=}WSHzd8HdL?-!6923Ft#z>Um`;GYaD9^!bT%TH2Jlh@ z+93$@(01^1teQ96YYKc$HZ{u}tE@c8)>SNKqk zB?<|)Rf(Z@8iGn*F5O)wR^29YY=A-q=$TmrS)A?26r;9qbs|WG(#7|#oaaW91I`2e zb&%24A4nNrNLv|~Ya9fQpVbZf*Rp59UX$#tWS{o2j>^uxlH!Qid%p#b=|Bay5{?>} zgYzMa3G77~B}iqhtFckx;7+mjUE2=k@tiEyLWCcFZF~M*`Z&$C7+3e2*!g>0+^jEJ z6Gg~p@z+D|O2i3y!ndfsNK72b~g0EX-b7V-C`+j1=zlXiuwAQG0$-g+7ju z47R^j%!=|7RC3oi1pMiD(bnfz-HqN(wm&PDve9c?%~8^2yN5@@YBTRdB-0?LTqd(|-U zwNO|nC2gYWHfgN+vNLH;U z@5LaEdQbs!w?us3(?FBaN?ZF)-o;gvm*sXQM;JL(FW?K@ywYcQYF|~&iNQi7qs;r& z5{v|Uhfju|+|{siKCWp%c~5zZo>R*pv0_cPhAOUnZ4hoR$+-u?!lVxt*{lA*wflMT z&|oRZWrfJdG~AFGzIRbouf#l$ne~6IunTG3zye%&dcZjb4~O{(+`UL%ow6>*g}jr3 zN%ZPU_hL7lpQdcnaa6 zr@s+1ym$2}Ie$yafSa=Zgq1RF67|E0LVM+T*-W}9CxKUnj*cx{7#K#2%vD4za~fTm z8}%kWLn(rn1{_gDN{hh!v!FP*!Cd6>WaMu`M1%8Hn0WD4zUk+!nx% zJ&v5e#|3`vKAgcziny2Dn3T$4hj#+5JZ*C@A-{U%E9dRd%k= zf8=~?{mpDKz#DP)?s>mn8pC-ef}##TA4~{r0dL}P81hv((OWNO3!E8+bmWNzN9v8} ztL$(K6`VFEoG(mim^-*A!u&-TAe+w)^Q6B_ezh_&Ss67TrqHw!K_gojDm4$9`Sy50 zXhQ!-XmM3XYD!h-R0`7H#hBs%a#+fXRsCd}v~me_Qy%3kVop63A{DNcRP;SkU{jNDR>))N$S*YlcPi7xpt@*nBbLN5(ITxKkVZx5 zcPdDq1}3wDx4iMj3I)jHNuX%DRmF#_9-DJkG%tSGwvGM6+z2GLMq1W%r;39vW9S`~ zK+$xi!1MdQW*KAd|8iY+`sX;d2DfS7J5Z%(#nIooxK4*`&gU-uKepZiD$1^H8&(7) z3_uv98zcr)8YCqIq-$sdhwg5qV+d*K?(S|7Vd!q892)5u;=8!-=lS0ETmQe7Yt}4v z=9+6?=id7~kMlT=Ef>B<47kNj)V(GHRyznT5ZP#`QaEpMf$&+bpM!<~Q@FOSqUltE z(0RE&8G`%IRx7NEGur;kD%MzZc;gYajPbab9;H4jQ2q?nu0Q}Gdib|`?D(MblI*y4 zPkL9!ZT=*_)0Rooy#^YZroaIG?MbkNF))C9z$YBv;-Ec_0j|5fL{c`~2;@{#qAfM_ zzW?3RakkOmUkn-CQ_USQ;v3)(b4VJ zw_!1~TV}-nyK(+CLb*nW+<0f!-+E(U%UHL$S90OlfFH{KP1v$X zhdHK8vwlCx#R;Ljg4wvn@x{xWkPFYP=+=!Oo*fZ6Rf@V7o#S*`VRFEjJT#{oo(p%h 
z09QTI^@AQA zpV@9=Y>@ynZvj|{PwU8f-Gn=2Q3oj@tf}`@Wc(FUcw~<2Fnj{#1v-a+BSxl3OVN z$1~r}sMcN6t1fxE;9<|QjEGiNW-@6y=_{Y--nIor^}9gpz~IRkumEMK0JkO6>nDoy z*9>^q>gJwW#}k|&_c0{mKNHUX+a0fhYHP!RnF;`p$iBYy`*!gV#GdY<1P|NU50Q$` zp+Zt8#-F1;kks$UJz;HK%C$JNo>2`j!R#E*U4RXXbWA=xEjMJPOU%QLa!itu0yiF^ zv0ZDCIipT*{j*a7?ELCaU4a{U_@nQD;%2Rd!xG-6^@9S~shc16OhVJJCn&lSz`X&M z0aTtR)1`KZtm2vMs2KsovCbqc{s!k~>PrKl3*zDa?h(e1cp#CVuo3{hKtb)q;}#Le z6mBVb!&~#@pN!%!>O&7@X~cW6_bDj;fI8e(xWoM+AktcUq=h?5AD-F!QCB^okNpUF zV0nJ2gZypGYFu*PG2qvD0_MkvXI;^K_6Q{pK!nF@v?2olp^(ht6FI6!N8^mIW%hC; z93J&bpeua+K@os@9o3#LAL6L^TM*?2#nQ=&D%W^Z*}#lERc0V_a1Z4|_7M%RA~mG^ zv$DM_>Vn6^2^rA;qIZsu%p~00-1d)*M0$v)q`#yFaEJSLZA$WB1OlO~tD6><9OG}U z{=9R%)?Kxpea1}Nh-f?0nJ5QS*KJkoZUf;CT2wpxs+`x2J0omw=vKqD9X~_1`szCaU=bh%ot z`o~vzKLBIB_=52~GPBq!c}GEqM(voR^F0sG4_|)>Funv--&0e;0r1co@ni#FpkNx5 z4>cj>V|L8=?LthBi)u;Q#A{jpvER%Gj^Cs0i?+pO)G4}v6_wXLP#2iw77v{vN7h|uYBT;Wm@RK-*?aova#hN?rCyqdb2;fHd&g9r644GBu+YpPy+Wj2;6bk??Y zZCB4O{X`y*<>SYXA*Iaw0gJw*1K?kIH=?#yUotP;607T-HV9t*Jv*WiN`HiP`kdDL z2+CFgn;rbz^66`2c(yCh)S?S;0~QP7hx=^Vnxha9o=Y^v2WfR`|(rn{n;s} zfUV@&=RT}MWc;D;gga6m)*<_qN;4dNV5n>Pq%z-VI2U z3=V^H+PU(^2FSK!Q=f#4V_cY-+M6f}eMfhGL3rR{S?YaGI+P*M_g>mO0ziFym}omd zaoFviK6X{`3wu734PcRXoncCG-3&a(@2sM&e9tfB_x@ypv7EBrj&gK<7vmC%t`Mxp zgLV}#Tk(t9336y0=uuUm)Cje9qB;< z7PFhLem|@J96^a7K0Zi6Qx|V@!+5(I_vvF26c$L}Q?Z!H%lwxfd34Re;%|h25ZW{j z1X1d{J>0~-&buIhNb>Oj8EhM=G~@H)#QGbwZICa z(_1PSX{D!ZhnIs?hoR64XP}~3SP~I++%sV79W;GUN-~>u*W;G$7N>#` zTgkko0&Ke^O=z5Z0l< zvlT~C&wizl1t8MN<#t5HW#>Du86!uENtbD+4W#)azK)#+gn(Q>xzPK_4f$^W#C->f zS^1~PldJ!SmD2%1z58*?lq%qwF(`FLY8UPTU^s(r62Ng?58Mv?R+g9t>;l?@0|L>v z{vsGD{T>Zi9-`E}#xQt+_nRG@{iIlHa(HMcH!SS9(Wh>!BL@>Bo_@)Ng#?x9V~2$= z)UJyt>0(M5PXiwDq;O``oY}8@-0~jZSo3E{HCXlcQ~GoAB!F+kjD{980GzLgkpQ$Z z6}3e=KA;}l8nNyR*hiEd&ZN?x9sH>ZAd#&|xw;d;#{=cwda=kf0Gy91ttVv3-++;L zWl=@Qs`!w~T8HeM)Hlr+D^I!ZC;Ka&T=({A{7sB2_wasUrC;o3n8I^cAQdO&0Y()zym(y4F0yfx6DzXBsA7(vi%cGeWnV)* zZ1OQ3vTE?|&XH zx5AE3>Ja7Ym{BM#~) 
zQ?@;wec3*M>FKyD7W%x}!PO`wk=UM|u>A+e)xdOKen$Xae0WWnszfQs@)@~VDc#$`jsm$X%bmC0N3?%ALol0`1 zyggOd@QSC*J}1aAx!Jb$NTajydBJ^LfaPw!JG*PQvzWhS z-_mNAjc^5=kco&y1OzmIAg1#Ck&fu+NHLfeLa#B`U2*JPCuGq^yt>w;;2z$ZSKvG9vU;% z6KkS8bQEz^EWkh1nN6W<&X7xtHSsU1YWc(@{fhGYFjmOC{Zo>3N6p{0_s@ou8w}G_ z97+pA|8r>*9;F3%;>|N7CO(oGuc=I_B}$fsG`VtTJ3mF_EtbO zs7UK*qUmub^=Eh}w#7w6L#HA4M#bPg6tSrsLmYqbY$PvdY%W$sdSo~=_`AND3OKOK zH&n;10cy8Nnxk-|X!n6CWUZ$~YpP5t%3v89@ZqSFo|RX%uAlH|YZRw5E6Ijk@rFxZ z!;?pI_=F!X{ZoHp;$)s2cvWI>CCk)Ej&+~UF;FyMH}<-63?G;DtD+T~t0WbakzR9E-LGICC%JlmIUg_+@1Ot6O9%a?@qky(#~gl=u( zLJ=FA{hs0SvI!RkrN3-=53Bf~X~^Lef{Vw8g`2lJN?N+zoeeGn1PIESGA(!}GDCQ8 zchw%jNCo!W`6;F-aMTMp&q&1rDH~UKS3#}I-{P)u@b)RZ2tUpN$w{-xl1rPGBWnB7 zUrZ%Cc=mb&VT`O+C8sLRpp3*D-b*kqRBNWYy5u`VbdzTc)||vToWB)UGyna4lVOBS zfLG3oAT-@4WAsgE*_^+JC)itZj-;5{m8kG2*5d%9ciM`oZ~THvBXyZ!n7ThV_=g)l zvEt2lh@L#fh_rl8PlKP`Fx6^Ax_E42yfCIMl2N55q-?C`ppB<#Bx0HS+LfQ?1ptF6 zh*6{oe-nyK$DK|94kS7|QTuP=VI6QmNNGLaa5y zl}&ojPw{rMJE}2T?3lfW&&NPVBkduvNj~h=59ri45zL-+L9zBBk2wRUFSzdY4z{>qqQl} zN%CwnbL%^=SR7>Z_T8aZ-W2lXZgb88>)uECNMfVzz}^~DZ3hbO}m#H4cYuTgKhd*(pe7FK8+rP zs;b{v?zv)KfS`Lhhw(iC?WzX4ji@m_Vu+3Ju@)eFs_T5QdNF}UxCCZ*eMS!JrUPv4 z!A-MwdP4^52zjTtb!RdU{xb@8@unf@nDuP!p@RM=|BWb8y&L{)rx+`T-qe-obQ|pU z(;x=Z`#*Y&49v0--785B4Z(q7-b-bFn+{i8Yp!S1rB?JxaQp#3De)!AMeEhT(4~ta zSJWVJxGQa%Ku&n~E*amptffxGL3Q>;IM??W8o+@48UB(hs!vsqj4rCX7-P9hqQ@P_ z5c1kI{Bbk(*Lt7xu8!d`{8x%MN_~ANuS`ac)A_*o_<3~`Q5k2xN|fZ5*k*`@}p|#kOb57yA8@pO2^Ea`UL<=9x4+fY6tz&9cSl5}=Y{9W#67#V|fY0SH_z zMxJH50FC`0gy44osz*TFE2OKCUEMdiG$2RY#RXWMb_ao_4kO(5A>u8amYSmuRMUkSsPL7#gREqs^xUe=xL)4Y zj6Q?eeC~)Cs2kPcZxVSaxbW0vZNUBLf;Vf76ez)~1%a+gTfQ!MlN5i*`-z6HCXX$k zLp=)ko~rpO$Us)1V8y?$1-)HySmj}@VOCo!evn7aTW679&+W4%KT#=8DbCdUf4jW4 zhCz=zCi8>Q#?&G~twa4;^YMlZP$HPVo8noM*OKX2L@Tl3E^o!}>+d*sD*90tzBs6Ufp;kB25e}`wHT7FAZa^6VJO8x$HlEw<`!=J;KD{@2T1X_UI#ZuC9 zScuRvSd2Q=q*1ma@$`rrWIuyWfP1ixKw4*=pY58=JstTuF?-QIejbip(x()6NHnh( 
zTFkV+XFm8V1b?z1aeALbSucO87<_1{y0xo*W23ssi`4wC*H%wbPRcWbg)WUevax1fRu77VFVn9jKS;ojwAY#Rg~T9dMpK;2Vz1N%F01hsjN70J8$~c;{g+?@qHXN?>f{?#nfx^|WAabmgc9r( z)ZJwi5IRKwu$5A8-%W9P7|ILkC3Dl2*13+T`?cQl-Rd1auBHX}emcd4kND|hIC^Zt z*F{dIc+KgT<7(Ab>R73Q>DjrsK)dNJxle2l51*|GGl0RsFfPbS_7%pB9%c6aoMeTWct5#~Tc5@`K3f+OElX!VkkU9?-%b}GHRm=8-JnCH|C9PL}2&8_H!7aP`w)aZ&_>qq8gZDa^&$>z1Z;(fJdX(D^j}_Hhh&x6B1$ zpUAyiwrirl`$zMF#b_J6_@e!)W)H^vHMl-K_KZqPQg^O3X1eSHZmWL3=t|H`WNw-) z2^KT8o&oMoS-DqEHsTdDp?%Sty%LQ&h~@}$5_we*^%z7W3phhOGNTe>bAmPPs*r{w ze=|24wQd&R-OgiiXWoBM`)W(ee|f02Lbwt&T{Lsr_$@DjLW(g_BT2tsb++jcO7*R( zdDOnpl;h5?d=pai5_C>~en8(yj>9S?*^X>Xd4fGyoYthLn%(EEO$nCbT`!f&HvL@x z0)OjJe(}$$#tDacpLpbAA*O}d!D66z`vt1KQ70(O+hTzRY~2`!xID9r$>CRVih(2H z)Z9S<_s!vE62bLy0b0LjD-^C*2KBa(oXI(#xOWeB3$)hJtH@ysFFo$7@w0giM~Wl0 zg8izZpRCN*T1Oh^_b0LJR6%K*X^j#}+pU(RoI{XNe!9;-(4RLU?5+E69+xJMnRgL4 z*q%WKhKI5>cDbaAWmsDp3`wrn;;DVYkR-=DPrDrVc(2(1M&P(N!`y?HF3RvgTOBZtNE6{io%`9n#VkEGx1`V~4BpI<`r8|sspF}D z^KN@Pl44(*T{$!;`!G$);;hf7^o2%>is(F9`62y%hQc=AS!_km&B?AiwAJ<{TgK?G zBz&CL(p0OJu?aC!=>lU?q3$d>;SEP0&1TKM;cu=*SY)E5FXe>zY9D;=wEN{hQwW_= z98Zcu8-v=XXLc_T_Pd_9-6-x*mC*Cj_EBV(r|r|Mm$JT=T?$%a)~Ts#o(Br+EyP71 zoR`atJ-Dm^>8=q8R^Vp67XE4N2~7@fo~9G}AhHnet@@RdPdxa~T4w|vRwFK~hH&K) zcgkW`F@-%^9%d{^9UwB7dHFLw<5og|ajD11IB(QCeI>85eKU6`DUMF-%ia0aeif0H z-Fd^Mi0vP3+Dh|TPT&wZUYpg7N;ai@A8|J9Z3Hg4PEerU1?!aMattI`pyBLgWx~a; zyV%!oR?|6ID^sL)8ZvNcuZbFHilC{UZWp3H(CE&mGXeU<)XbIPxw*snhL>l2SbeiT zsw0U_oOoNUKCdiBd>7vZ76y)G{g7J}IQ za9EMtKv3v!-Cm$~MuR6#4>&YtchAyivsv~}lPkVncqLtqalbb|n|S$){jtWL>B$!J zxWV6uPEXSTf^H~py8E2RG3Q>z!0CtKP?osRljkqAIZY9 z-yj6i)cElgCIQ-l)NH(!gd{H`({YR~i3T>yZ?3spn@xBtF`903C3#^+WlLf&J?~** z)iO)dj%EWq-V<8-!`^>EJIgtl0s8$YZ(%9d_U|+tn)l&zRWIFrr;*-(A)#7h*HL|X zoTW4NJI4x~8J-q!&w#B!Z*p2whv?tm>Vn^?!CHY;0dJlH>&}V(m?}lt{VD&o48*nS zM?b5z>YM#%GE)pIA3fN%{qA(fv3sxipuKZLJGJQ^%*CGGh+2ot3asg;bU&)z@*>N2 zp=0}cca%C!$F9XP=?#*veyKKFt6S&vSBJ8^i#1=dRUF@(E?Qm zyld_vyP9@LG#+tFz)nj#A@OW$`4u|1d^hk@?piHcU!z`;pCSCW3%AGJ>h^lCT4h8H 
zhp&ENPl;wY-^68qyT<2a@pfPufKOtlC>f;r7i^FN)+R?KSu14MTt(~t?8vQT!l(T? z>}YUUS)zocd$@Y{FXm>`+wIiK@hNbm(oO zaSK+!%X-i>rM@PL5x{-6(D=MdNT^|>{r*U*NVhp8^P>HOFZ4E#%=9LQq)>iljZ-yO z27>N>U-9^K7WxtV`lK;4}xhv&4%cR{C;j;cJk@L_exlnR~dRJamlFft(HqEA9lFi_y z{igKxyzLJ4YTj1nWNw`Ecu(v=vQ~M`K&oXGl>+w`{enNI+U9wN09s4fOxztAYkFk?Q09%x#OsWT%OzGS()vh zcLHaGrucW#_`1(covz%G)X)bzYcOFvOp)V}lnkmMKyqKu=WW zu#L{IFv-%Y z;IBDpuMn!sVtdE2NL63DnMCBU>i7~^E3>Mjz}G7*Sr31DW=tH>#-tco$|Jd`Kzmor z<>84)%UbF5WcB`>lBhwOQDoPe;ktQiU*H839KSwkRu8?quVVOMr{yF9U$=YB?MgLe zH7Q@i%bNFnwD;`YGyLf-c=(=g0yKgUkKvHVRAP!hx0>^eAnV}d*91_HH zxo?MEhQ6|O_i1gZKz59a!sYx5X1<$$CBTT!jXG|7|3Ik8heTZfT_x)$mKvP$H!p;g zcd|TgeBZpydW_~jP22wa_3?9zS8M|z#^h^Mr|IDnBG(KVPK_)x~x0lVj4fAiRe!tz3{T&P> zh0q17?gA&j>B$mZ3`Y~S&xD{x@_Ow$LrFrH%?zeQ|S5% z44MwGbeIlESfo3|j+h8`WZxFdoXD9jVR&2ENc|j<9)j^|Nf)_%Jud5V(cHuWa z_f$) z_T=i-zWNVAHv9F>Kh=d!K5dJl!6<}K* zK#402ouiHg`V3yCe;9aB5yP9TOP~U-o!|jNlyF&1(KRQnLQ9E%fR^iMh>doll&$r=PDMD7cDzz zmX_DQnpX11<^!yGWGgJj9BFmzCir)1m@1I-4cS^-gO%V| z7bBrm{z_#mucAfGip6?U!P~oDvZ~8##^N_k5@T?l0Rgj^^N*$*Wb7*TvTyd@tv%lH zoTJff3w_f$p5|y`H1BF_W%N`-6*Jz=xnj>?TD_s7_Qjr6JlK#|+wO#6G7G*r|3P{_ zMu6mx8LJl=0SmL;3S^|V$`lAZy5Mso+77jZ+>nIacc!e1kI#vm{Dz3)Z7{WnIY-$1 z^tZ3vg9%(Hd;rKp6X3t|)tJkMz_7zCK2ve>DSfKt>amslC}Kqnr8~$jxlE0A@9X|^2ax+Sd`dJ1*Hlb-eA9s+>htclK zO>mqY+b>ac!HU+tKu9QcWM6}I1gJmKE2S^c%?{BmCs zKu)9Rne233yX8shakavkcJ(~n+Ze8%O}^-ocB-C5$lNU$d!|OO4P+@TeWaVJ9yS(n z9`EZ~LX_Q(=T_D7YkPX0*#jf?BUHCQrBMQh#g)}!sNSe^x5xK&GiU9KCDta`b%ci~ zL;*RF1q>Kn*ICns_oTAN9Ck9uTa?+$ZP*mR=0#J8wT41QiuTx#Z4-y=Ar}kDT)WVy zXfWs1GkNpQ_d?vsf}SmY7TG1#XRZlL*Sws&4wb}eudq_-AXe81L(L=mO1cIv1(pT^hL;@4@}gM-*2=XYfup{a!Wny?H4T^A7p!Zjg-@p zKI3NgvV8{F0O3Cn34mPqMxY0)1@=g5=%iA#P)dA+p+ucfx18cgT|Xz6kY?8{A{U$S zc9K|Hpwib)DPq=gTpIHG;${S8#R`qV7-%`ZS8zZmJ`iuLnt_)H3QfOnrkFnzD>jDi zHkql6=u)Tz<&7?9`vaG~-FjjE$vEqQ_Y6NPYv)?Kua()pbWT@j zmf);*g^V0C_f!A9v-ZxSn6}Q0(wT=>6C?|2e^t!Uu3&;mgWdvNo&FP=W=<<%Z1N`6 z=X#mNc*(lW*LDW=Ip@C1U3OzM&UnoQw`RR=20+NLIO!}PP!0VD#O!*I9d#@W92+ 
zPXu|^=hZrh2BZqgrllVbGQ)%`JDHap480?C$T9EnRM5o0eD<9>*Us-5wv3i0U88?3 zaPZEw1TM>z$K{};6XU>v!W?wBQUI+Oz}AOb%@L zV#>4YznOpRi2Z)HeM&`N76a-^aTzZc(LYlbqdxN&t$}8odtw%~ukne+wGzCgWV~I* zB`P-1i~qEWkq~-P?k@k`BX&KfNq^jL{B`+!=f$=W;H=xmm{2i{$8rXWr#Va!@$va$ z_NIAemJiHG+oG9xQc$2@i0*jWrg)a>2#``)qPO}6Q31CCNoFr(i8hAd^;f>BA`Kn$ zC!IDPo>-4LKq|Ef6Enk@v zwKn~xFtu!aC?jRCcUmNMck2?!Gs#C}vo9wdl5CN*O$9p4rD?$WMwfFu+pO#PIW%hi z+r79PL;mPNjIDvJ*ElD$F~3&D2D|JG_R;r`h`_Tq)D4%BxWg08&Cs9TvaEI#{m_WO zTU$g9nsM#lw`Rrm>+t<*JNuAct!3;N9%UQf>%cYaEP72lN=>H1@)}0BmS=a}S)Syz zp!jx2!z|$6*bZg`+ZuY?pxm%CV}JNcl4yf5pei#174gm@HC8>AgUaLyHSm(>x~jgM z+-tW_GvmR3ggX|Pbw-yy(eT(%C{|>kY7&Qhes{OQs$@)GRl&VFmetB8TXD1L7vkV> zF-|RFJz$1slnC+!VI}1139ZOCfuT`{!`!6xDRo<)XeS{Sn65EA-Qy#}MDcwWWjX1~ z&6-@$sB)?1Ob+uMx^V)Q@@+F)5M4U7@a68kN6_@mb&^i!`*#PAhFmhY#Ql#VZ{)P% zKQn-Cnx419u62%L+mhc0h(fQn3uGX-UY|Yo*oqX}TYFtn^ttO9UX(;#oh4o1r8A4d zQzTu{;GxUhKzZ~P9ThH(B4&?+g9-thA*w#CE?q3=U!TE7^aAxY$lcP^34L=B`$R^2 zsi@}`cajZ0sixHp9l_W5OKVJ65#T%b#k}ZO?9ngpnEYk5;5V~L&rSapxc4FuuNflL z?9>5@tdSqy;e-6&&K>)IHsccC&4}mTqj?1O8*Pzo+tAS9GSgZ2Z)k9UZ)akx63?j! 
z&`Qc5&;j#4T?6i9c-osP%JFg;@3h|o`3KoXP4xrpc6J4G-E0Fp9$u>hobTlSi1ZU55S-u*dTBOR_P!69JV z-F1hljl$bmqp2?sX2==5_4Hs&Kr=57rhEVCkEcU7#9-{Yj?H+YJlR>g((`QMLfJ<6 zn>a}uTVLCK+#qr<&(=W$;dK02NiaFqueDldBtKWV9I+gK!2-)@(Y>8zl6T5Lp6`u# z<1~}~(U<5{(o}IG00xs{g!8VwYeg zzgUG?=wC|?{lV(0vg`PSWIEI~+*liyj zefW-qZmQ*%e$iFJ5r_P+Nf)y@OMD#KJHOvd;kamt|qQtIsl8OJqY51cs1MX$XJR*Xe9^u z&rk?gmhQK^m?_VN5NTk#Gk&?Kk4+%59y7NXLgzRg2ztZQb4>RMCM_oo0D^d*9M=E8 zd%bGK2Tun=z+p`G0NDXQ$dCZDz1n##%;3y_Xe;7@z6}s>0cvxS_dJS zFUt+9W*&CF*OW_VV`Ew$yy{}AbdL$F()Ls~h>{*@ypY&g$O`#fwKZ`UGNYmxl2xGA z*%^#S-pHp#=H&%<+5QzGof-AzM1 zpj^^4V&2bbRQ*SxSn+P$ofGuBR7)j+*yaN!t6T{ILZC;9$fq-U;(h@EG_X)KUmC>9 z2hVu?EBdo-3F|P5-ibHr5bv!w;cm^pvJ#eR1qIA_ra)HOytKw&2ddQ&H+^5at6wVK z&YDyr5GEZm;?r{$S66vI*tL}Y{MnrNNz=C-HFvUkRWiUAuhUXhYjQ}qJUz_fX})Gc zJX*wf1#>r<>1XHIkIYqa*|ePl0K4~pGTK)TUoWZ$N8bjQid^X;(zEODwdr?{xB&ud z;&%Tr3z4M1G+F^rp{Diy2SA8rm6g=Kf6Qs-Om%gA4O_uRqxL2M#{c~xvo|MlTRS2J+Eg~t4dyK7N%W8*zN2*MM-9Ny1P2-52R41sc z#ruW$3{HD_?}+uyw!pE4Y%BAk`wB19NKNeXdt@EZc0NTWIGbtI5(e7;=T zjSw_iZ+uO;d=5zczGN{gp&s?UZU=*G&pOU;8cgfb^pIyQf}i92R@sAlI@_|3c-a4gisgx&Gt-lXL(m{n%b4 z`FG@UTOZwI?aJ%&#-|U~IO>V&Z5O#eQ%bR`?T%}zQRYOWMi^CAhcMMgdzN)nqrK*8 zQV(u+B*C6r;rStGR0Ol{zk^YEKiKu_ZtU&|L9l)74{TG*I&_v;<3rqCH3xb5$mwG|R%@hPk-wnBg!NRBKj{)6yhe z@geTVXG5V&X)J3q;oY-yT+yRJ{_-j9B{h|b&!0y!>LS~V0;ANI?U&un(X~xHNB*|_ z!L(hN&n*@YqN9aewx|bJ?+HTC4$_}!4Hjkm$sVV;tB%(FagiWB-|FSuSF5D28o=z| z;WlB+4n80<&+~Y}klt6=g-L}IiTt~(bRjau8KYk~t({g3^lusrwM?gX3AH{a>y%XG zQenrDLLj8b-)t(ax>||D0EtMR+>&8^N2_`HRSr-8Hqb-GN#8A)FXr1Bi2u$%GicYO zUwDnU+UYYAiR$GCsqmKE--0!(ZHkIm1K>3|6Pii8ePl~nx#g~Qn5xCfdq%K1k8>AE z%V!`vjnUc{P|N@VT$ZbrUn_#`I-0FTM=7!(5r>sM0-7dLtQgljjr9L8FNwwxI-Pz z%a{DD#-L5(7Xe5}QZ1JW=I@ToEX#8%nmk#Ki|7?UA?mz7Cn7@ctu|dbtXCqhpEY;p zD*NhhE46GbX8coU8UC+23veZPo_;b%(?w0^v%_U2`LXZA7U#t`U?5$Sokq#@ZYdMf zyy}0Y=5y-TEgE8fY^Ch!8gjgCn#fLgk#|3oA~{erG*9qGtF_i^k6?}t+sHm(GfS`{ zxW@o5$eX;mS(G}yO(=r7^LSq>I|MB!lCZoECv}!=`?mY3G_#oJg2&OvpT;U}_ZL{6 zb@;3l?G|ssK_E|ov{d@igNCjw$%9y9!X0L2X4zYf)N8v|{la=KRmXEI^I?(l7Pgpa 
zV_&;GyKK0vIrGS1ing2WonBX#9jw=_@!%s3IkcjW1q{p+=reBMf24u46S({BH!tByCR zd&}F|*G05Gu#fCN=^fYdQP>Rm&~S)1e4WDHTMWDiy{LCS`I%CKVUc;=6N&xH4gBFn zxuNz5E7y9(8^O)|71O)CR1mVmDsB#X2s+MGUF8O5OJ8As_LFHpqs&><1}x(C1v`r!)O0OpQw2 z(@%nzD)Lf< z>^ao$ubC{hRW^(?!-qf#7L~I_NR#X1#57I4^I@3w^}2G6;~ZhtHktB;fNLPmjwrD4>dHa50r0NnOop*@ zW&7`rkE39Fv#1;;?P7fi)k!VtaOLmi4FL+D0tvB(fq3wX-H;yI0lXPE1Bx@GdAT5>@`vZ=#rwoHmIMWdow4|VSHpnwJ=3%x92 zm1nF3;#(d5Luyg0@p5KL4P~o)JB{^>_+6-e;oq;ud7q+Kxf%R?eQDmXG;yJ!Yuvq1 zaD>}KopT?HWG0zU^;VN-?=u@KOhj~VA1{QKF2E-kW~Q*?l~@Qa43%zh2R;kP<90Q@ z)gElG`3|(b88Q0`DTN}t40p^xKNbVQL?fxQQn=LVBds+rgxZDDW*se()6^#_%vXTE ztri~xdz=!UJO+20m#Bk@@SRU@JA8=~Q+g80x0%HjYn<7f+_@gQ`aqfr>rhh$k`aaamtx!A-7Z0s*8^8YO`GBlYwrr44azx82 zT4)&S`E(8e9~IC~!^9a2=mRLh8jWDT{~H*TX$ccmWbp^AgD8f(Nmo63q-8CROabCO zF1_OdQWYgwzb$(6omVxdNg-jRvrUagUkV>cQ@q6<@jnjzR&H6E7cQ=2K+|Ev#NEl) zUlNJ|o~@&c6xCxN&_N)XmC1N0K#ZtPWU{w+R=m~E->c(D0p zv%02G6@i|WQ&TrAq;M{bxQ@t#8N#>tag&MuCs_^=+i60ow&T5a5km<-?xH-vAw?z{ ziv6Pw8girnlqoY_hP6K_Z3GYAfNu`aVEz8nJJM79i)K;AIbu%ZB8-|lh{*ydH?hlH z>NQJ#FOb?lr{g=O{3p@S_-~>Si+U5VN$hP4spt2@Q#z6DUA^oas`jd7JZ`317CMs(+D&eyMwMA#dK_Z=)c*&!uw03F0^GDCZe)O+ zS_*R1lRgg~)@upJ8zGP0CDkT(e8)J}0$(mNL8=J8m~~+u<66Vm z(ylO4`D%R3xVD5}kze`wyGGMYNRTlALeA~48Cx$W;UO z1wg+eJ}B4|WpA95yWE3K&mR?i_V8IF7qCIkQ@#`X8JeD!EejJG`_pCo@z2-XffHiY zKmW2Yxck3)d9e~;)t5w|gPz~^%$aeqej(e>Ksg&uofTPWH@k}dx#e!G+^r@aA5XA_ zC(9%S@FM6QXYpdF0H?LXv3yTPjc`~59M<1jM$*|_&$4QA7$^1K7%phj443YiM;T9w zEe6go@?TYOwE|uj<9~}O#>R(3fg}|qb-Mnbs#deA|8EMxi1|aOVna6lcJ($kHIB8E zd2V2w1O-)QW^e7gy3luoyj-IJ>T|1wE>6Fjnce#0``muu^ct0grrNEc5OoCrchcsg z=X?5C@+S>5jY`8X2wRm{J+BG<1#y-_g9+^ z2|vPwXsM%%b7yN3ez3R+x##X4JmKG7{Br^R!|yQoM5W%=9o8~H58Pxt<-9xfKV3b} z|915rjM$`Fq%P=F1?!SUd#~cuL+n{vsQ~3opbqW8AS0g^Xxe+ZGJUxU3nTvLN`IHH z2e8`#8WVl2XXh5h7ImmYBSXiRj<4^_Y%Kx-LB_hjCgpDn;9FB=CueT~;rscWzSFWe z<<*U8(#6>X2v#(<{Q|2{@rv-4h0oD;tptaq+TW&7G#j+W0*9x%#{a4ZU_43GPWVCawQA1q-&gRfChp8-LV zOI42+^Zqe#x4_=JghvJ0(Wk^hVWqEdof@eD^kSB=9l_`-&4^bIP9%v(3tNlvkt})q z=jYsvv@8SE!t6XQoT@Eo$b-KWPwR|I2_LJos|U6vC3 
zk?n*4ap4AJ@R(8`0^I!bX3ih40i7}MpA%?)KBCl50Nyo2TDV>T3K}ag$VeFFq|Gff z`qi)rJIvIPQ=FqgLB?HaV{(TvHo;ac*=~d?ra(1+pq%c1De3<8Dz14KhJ*RyMP5b* zr6p71m1%5Rauz^7^ci?a6^Pu#{%0YighXH3Flm4DP~AYuk!gQ7nb}82?#%`~+e>f{ zif~5-5dFkX=5OEO*zf}5#3~u~Sh#}~c;{5|af-xlL$;q3|Ulj8WyWIPq{K^~TWz;$F{t>?U5Bza;b?wnB z!vdteHDCMjh~F8YUR_@DZ~&=M(A!KUG(ZtsLfLSi1o)u*!*o&x^wJtN`zx*P44a5e zrR;dZQ2<8yd+GkzH@DB43iJpi@IeS(q2(9i^9>Gs+JV;qP=`A`Ox?w%J?!~fCoX^| z783Z`Wr>x>dH~S2$u@`7Ho!W3t<&=Xo9_bInamg13GQ6?A9oW5C-Yw8PxzWSx_^>! z*QDjY!s)gAQ6#&tBJ`bp`64sE%%o?@cQe&_QBHAejKkv+Bj`aWM6jeWQx6~fl>-k9 zi+|F3Hm0&O+vpc2boQ&d#;8}bX)jb}t5cGht*F_K4tN}rg#Rme;8~XUo`0Af4o*b? z2vaKC+HG$A5Dh4_+XEnvFkU}M>AS6-)1#xXKIpqPg8_a0EFIvHZg*B*J{~BvlAH9L zZ`5D%z6Sm)`8^fHZNka`d_bLnD#MvZ(?v(|yhjqX5a&O+gtXhnSG%GW+y7fq(3Xyp}TZue9~DDJoDy2_X#T8&N5OKiV^>>|Li}e3gZ6LwTL5NDOe&HW@*CxgnTi(TpVhBcjsM6d_eSVuHo4^SZr}GNm^6TzeEY72Oh}if?vPS zH<^dHW7KtcfkFp_D`qF2FEm7m{0(4qmjSSFS1TE`y-v<8RUir(MGCEV<$xPvP5gP! zr?s}mShqcE%N@YnOk@dNJ2#gZ?;SuE`$Kk{UJPlq!R%BTV?^#)Rlf9v&bhDsAG*Ff zo(k{(KR2=xDr7|@n^FjwWnO!4DzeGSULi#`S9TfK-h19#GOv~G8n?2t_qx{ac<=A$ z{r>%-M~ZXLIj`62`Fg&_IcJq0V9L>z=RSDhdwX}1CzRshdBojtiz52km_KZ% z)ZBbZc>;rV^eRR_FGuN7fRmVPbH|Qda$z|izY~CCf-Lj;N)!z{LfMaU4+-P1duIkq_kuwv*tWQ0E zoF9IC{ra^gx%qsKnx}mztT$<39so~$I_ zbeM5ot;1#NKx@Y zyezn7$uI2+{X<0(8vc)HSL^X^BlB7(66?z+6qGPenz_iwrCj3oE5R)efP(3H6MKW3 zTDC~nWVq|UAyMmGJR8UxxKPr`2a43VkIrvwCmm-~yoPHi0o)Jq#|$XhOMm}pkj2R~ zusob(cUkJc6qcw{GWrH^o9<;VsIi3owMUmTjQ-y(=ywgXoZTsK=XdBNN5YCD7TxJ zcPesVCQI`M1k!s6b|+Lj72Ekam7=fY{rdveaQt|`w^N#JZ%gVd1iI?D*AtV|(2cS0 zlMbRh8t!lg+|W(JetM0_j^Wg?&Uy)^=w6H=0wm$KbnC{bW*hG{M zz9AQH(_V_pqVc9r=cJ|G+;SfxK99GN=V%jqQ1o~hrwq!27 z4{;}i^k%g%9PaHi+Zi&Ow0!5+(R#zO@8Q;uDS{e%$e3N;kk(9*&nPoAK+I{ zuLNc)U(*#H$UD6eV7@B@9;**ffc*IJ1Dm5wn#vwOV-a?{r1Y8adj;u0U*}#O-mb(M zFP2+*5r$r)&^K$m0ok7%7{X;4KjIN^UBvG7Kg4b1kT6};GUkv)7K6j|FoQ}7$8S23 z<90d1{p(vHS%^JzeM;}WC|i@mU>ACq*c731>0Y`iYvC+>n9>AE90R7oC}Ktb*>nj+ zmclZLa2-YLf~p%_+H_ z9h}ma`)%6}+RH0i-5y_L?U~5$U1m7dqdgfPY3v*1jK7GYGI5vGE-{{4MAd4UxChOU 
zi`#4Uc1p6V>Z{ZBpM)w~bL5?*5&RMm9;`hkZkX-9G%6FSy}z#)K3H*2w=Z#iC|A+3 z%w|1)`Hl)~xdidqo7?YwsbS5F!1wfgy_1&)6`rYQc6XQ8gMQ{l=4*E%61>(A#0#)J ztX7;3ej9HT>woB@jFd~x9@#`pI!)2PLK*C}Xq)OCK0$3QjrIRX=#n*2jt%;XTeADf ziI&6Rnyxmbqg^s`fS*@^*&lg^I}~>kRznfo*k0dif{NJPb_tPpLohiKR$aD{GPN2r zkZmC+aL0k5{1MU*$Z$#s$#gx#VWH$N*~kb2vXksxvG)^xt!gqe$W9+8Jy>_o3$i*( zeN-tquXL6vR+P_icQJq=%Q3mxQ%}P*&%x3!7g++HfX%kgyfRR^dSSf!=Y74smI~|- z0nxUu?9oRoovz8yr^c|AiR0MrvmMF7ym_@K>kyjw!*OTk*)Msb>IKHcO`_ojCS2;; zcVzdAqYl$2OYp;&JP|&=1y7!|{?cenlF?1#u$%I!tXvXSU$SAoHyl?2pP9O(5)Car z4SUjh``0MC;fHME_P6!jL9FE}gG>DWYZ7K$*d1|WastYLU$gTu=+RAlzW4p3$CaW^ zFHQ<%Ec4sZH{J$h(W-F(nJsEQy zYByxHwP7v}gY9%t`2v?2gcOryL}k*6bi(Tmo6WN8WMl*8I31eVuH7+W>FkupT7F}_ z>8mh*`oKhde=~<69{=V^rViHBJ|&O>0>yu*OvhB}0(r+oz^JO_k1U^@?N4(ppKW*B z4S%Y0BAHaRxuz+uwH~yvr%A}=W*hT9xZ;r;jnXEv{1biH#9 zo?n$PS?qsgEL`8tf$)6#;oT&<@{_y$8nX_yqz|g z1X2VNK&Go7@u=~zvlZ*i$?a8;CMvZ0&KS$XZ=O#RoJ)y9sw>fo{Mb)l?hDjxCn7GN z6IU_JiiB%EovGUI%iUV(^hx%ONrz8mhZ^9)3=({p5Z;Hm&3;`bFiq-B_qBs7hbxC$ z(QFBrO$Vz{8^d_yR{63?oPD6f$g-**-DI=Gi{|^%n6!I9BJq_O|7mSRvQh@fD$pO_Nt8 zy0K?ePpmHH+ps+ObZ3o>Cm_Zd$c|4jOFKQk<$C_sU@xxKDeX9zqGS-Z?rRiuAl(7Ds3fjSbEi# zOcx#C=p-1B9jki%@dX?k01Hl$Dc#$A&-D+*JsWnLq&!KQ(KKBW20gTG9n!0@-P}A6 zYjp;Ecc#Vjd#Ik_;o!bD{#d+c)su_OoA>3xB;B$Y48N~{aGiO$8vBuOey;=*%bMP; zBIr=@8e9^EC;T8R+N*8!9+g}l56nbxYrp;S7b1x9)eSdOJ#B3chsjaR_0%M^>j4Sx z#7ds$-VDbolpEn?mGkxMC|`)THD-9<_v!9?R4o2T=cq<|!Q*E{6uTs0;zL#99`ow* zBF;#TQq5a%qWcqjjZ($6+dg6UIkYXB+>+_W_;hF`g{0cf?CZ5m6VdrKioPA#i>Ze~ zR$J?`s6bJEik0k9F>daL%L%!^LP>?ShSr*@x&r2w=o6i`8Xd*x=0&NkWsuGOoPDK* z31|gyeujBD!TWjfU8d^l2h@f)n?C?D$+EvAEL5{AJ`WSLAD5Of)DZLRYaDEim+*aK zzCP_UTRq{B&=yXc>@~N}{&d*#GrQYy+vH6Y1;1Tkl32A0Cd_Q_r{1%}6{u`q44vRl z8qvn($)Z>;;|hrqrvv|c^LIBKc#pNbuFMS&e0+Klh0@a7Hyg7cqSumWaN53Vhc@N+ zDBb1TZeZwPhT7r5mh|Ec_DW)NbJ>=!RTW7SQ-Y`R?1%RKM3?a&==Ilx+x2+3;Pz! 
z8J{ktcqCKcl9lq_t+aTpctdk@UpBm0JF`#^U_}Gu^a|JTeY&%4Gl!;FX20!xmo*2` zt6zqWT0|&2NgXg*4#sC4wpL?}Aws2^8Wq#qeWLIT>~e9{eY3Up{l#UsM-7r*#%8Vf z8C=AtTXX5=%jJvO0duk{r2RP`wRZ2^o;bxTA`%+TfWL3rU5p3+Wj>1d7+%c9pk(Dy z6m0T{33TaM*Flpi7g?3iq;d1B_|Tc^lS-4WHwN_kJC_2obENZl!40H@Tw1E*b^B~q z<3FBt_VvV+V5$hVDoe8uH)=>!;$6)GvQ`tHuE{3%UNq~!anyI?0hLSXRp4!iIAkNK znOD(Ov14Pp@7}$;w~xdG%nj@!=Z;{;z?ovvr+eN0k#wb3u>v{?F?Z>Jo}BK8@3 zQ84ZDk)p=H^xg#f*1&RKa+2w8KDDOu;oR6%LX-ybSnw_4GQpNb#zN?F$I*N=hkXmi@rM;uNufz5!}objd}loDX6N|leSol++WEgNSMjqHExxwaAA_?O?I;-p z%o*)Z-|9QrUWfnGGPW~niS@~KlB5+O;IjKzTN;KOz~c&K@|Orc7jNqbzc{MR9J44= zT)#`5rTPm1mC(uTgF$mNR zPe}Yao-H11@3lf~Utjv!+;b@LDI&Rg`*^npnMI)maS6kF!X4 zZ&yy|jZ0X}Mt+5_XO_JN%i-anQ{VcUfL)S_9bP;<5YPl3y&6?v*G0>79oY8sl7}~M z)Hko&PCMqv>guPtaU8uB_@UN{FxN7#6RInJwd<+ZV4Zrrv8p)URPjtEP>;Ad_B5~y zvG2jRvOMVVkxtBA*S=}_1@{e~&-v-_a1B-!XwAr+Uk;dhgp6*!SXJ(*a>>$RUSC)0 z{v^D_tDF@6O4yU7)Fn&fM+HQ!LxfhdIs9`#(%%hwqV$b_Y4Vr=Qdso-_blh=fG5yJk)I?CV#VPOWhXy@DgH)5!$@NlduX z!wGMfM{z_Wl*FG0!3SkmJd7=!a@%pOu*YrT$d}*_?vbrHFg{`D+S^ZsH|jm-V_kBu;-gOiUg%7 ze$G*a{j@2wA6-J_jwg4h`D>4lzW*Nm!Nf<$hXe*X2;SVZklTVc51;kSY2+JEN!0U_@d5rx1mR zE?|zhDc9wwMbw^6yy8pfpGZ>-)~vlaqo z-edstN-0|LUz8zwJp-wWViaVpD5%& zBJc+x0u!~v$^lOSblU|0x9R&%^g%%-9QmI8R4u@WwIlnXbE4hpu@2u z;u@|RS=D{&8c{$yIDM#k)2sA&9fhBcV4W%EAvIRoy-rEl8LuxU@p{<_lU#mQs!HePXs>Amog)EC6(u7feE-y-TZNo{xj z*O7}tGbbPL3k($jfp_iogvXr}@uK}z_w{SC@TXEa{MmBZK3L^apP z^r>A^91Q01i$gO|!jiz&L{LXP*|q;H^@l>_#apGCnWW+#uij3t9AtU&VMl7RtQv68 zA1wxpy^! 
zl}I6q8?6|&AB2T6*8+e6m54pg%*I!Z97(yxjRwK2`B>o&LRlr?;&2K_(^ zMn4WKCHcL7t!kjWrIp8(Uro4TGA^m1{XI z@46|x(ypG)UPbTsTp~d+RDG0rJnh$)zH@S(yDVwiRed?(EOAp zU-xw1I?qT%nrMqS%W8E5D?~3`mEh;M0|R+ZvHSj>_Oq-E-Y>3P!+=EP{DSW3k;ytL zHu6zXx}eu4m(|cit)-znBNoG9V$E$?U*qPTrG6Ds1Fdg@IZZq>RsHICRUudCVB+Ej zL0l;f9fn~|NOcPL&8h{npCqGlPEIkoR%Rv#M<^_tnrrvWmYMIk z65iw8XK`3~fify&_6Kec@2V$BRMuza!U&@dE*MMlV9_469kVc>;+y?E?t5rU7u4L0 z$6~o7@2Bi+!sSn#QFIJEHQ{;OMKpST170)JwnE+<;x12r2|@U;kCnQ-u{97eNao;| zC*~1Z`eEJzr!Pn?<|Ttbhv#~rVnaF7RV&S&X_+*wmipL;I+II=hi|Pa(8%(vT2)dO zr5_o>WlpK^F0PBvs`X!M=36sIH3mkAtvag6dX-BNybSNG4lhFuG`{5LIV|{kr>_|A zH!a3^-pCgiVpAO)Fh`Q`{e z$<{71^BFxJCze*4v#x7A`Ao~V_{N6YO{l3DZr5bKML&Oc!W)%3A8u`%WF?v*i7e12ksqBP``kr!GDVW3)FD zu;E?eRc1zA4-H6`pP>AC=EPHUtA#CE6gu^rkxo?01b}SlF>(pZ#NbE9)$%lT9}R`> z$z9fKWH%b92rJs7b)aQ$rMU4bC{AKjyUeMFrBj`?6Z=?2 zR+-9@Ir{L3T;1W4@bbNY7Ozp%PrY@`JYI0RUbOK+@Q2NJBZDw}1=Bp5;xv&um$pHt zpGf@z-&}Sh-oy+r>oN!40P7;75^T@7lA>-M?_#kSa8aqL_9L0^8Sa?Bj5 zd>SQXG4_z6fE)(r;|Mz5O=g|_F0kx3dK>;A=2YhqSL%^TP9#KYu3}K7(=V-2PJ9>M zPO0gkoGttjGPFxP4eJRLFC}6hzNcl;_sOk8y2a0;SYvg6N6K{3N^R*evKiSMPHeWC z!P|q!lo6ZVna!J26J8$U7>cQedkOI7xrSBgL8SrHmdA#+v3pz z*P+^Jrcwg7hw6S|SKh{IU0$DDdh;A6r0|M0t%=`hk)qEJ*Kx$Y|L~`$%pqN2C8v+| ztE{Z5Ozgb(cS*Of;+Qyk12=tojn5cYL1WHH(UaV)tRX%I4UHnP-Ld;yiy`UBkK6X) z2yPvfd=EK4Q;ej?&T0GHXfU*URE_g-hzUoZijVfComx$d1Bhq0_iLb-Q=eI@A|=*{ z^atrBX?8-3Z8o#-5Te@Vp@t4SpS_>b0>34oYB(lsx(K0^1{a%3ft=_UsiN#u(K{7l z9{L!M%~IMdjglYO`sg#&!JGMuq@27giHk869^td=Xr}DL_iXtqEdr2FY-G_NxE$#l zpxz~?S8v*gW>;?MHgG~C(zyv}l3ppzWI+ppI8KCOi)EtcJA~~*$Bw1Av&|(LpWvQn zz$A@kvccV>?6F$=eQ>JIts?4MOp=1+8;ugm^B#%!R=-(u78{jVwGt@W-JTx{2xnn$ z{@HR-10l)^G*8PQQjjwmh&4aG6 zc$i$meY&# zL;=}85-8+3*&G1n4N!Q~g6)99$_mAlIdy)0J?qN|jMc z`2yvWxtKCHZEjPfs2xqaJm`J)V|Q3m_(8bJaIjFt@Y1QgieUt*)Zig~A%KjGfH*R0 zBk1GN%x*))?61V{gq%wBb|s?AH5Gf^CSIuKnP8_asc5q~nDrS!rB3Jx z!3_bQy&?BPujL$Xl5{E~ra-J0?6})_))EfVMkRG~>C!$QduI zlm8+-d8;y)6yff4t8@bsvGwO&0F@ zd5cqHt5w#TuK6`cD;93a3jq@3W6lp$Uft~d 
zmTpANX1Ap+G87_o9@|pp9iM$OXdANO3aBi6HtusfF?ZI*OV@QPei4sXIeyYY(e53_e$A>ZLsMDOQoZuMGv1z{$B&}T zjz{JcjJrE$blRGr!r?ADjWsIafll<%Uby)iDQO(P*8}Is2MXXeDCHBbX&a ztnj5|GB&YYvtCI7?-tj`FY;Bk#{3$7T9Jt_Ef)C?XzI?=lAoM3+UpZ=DP8@1s9;K+ zvLA1Z*NkkoLz%TUMi$c0q@!h*T$@^~O&D#jXRXH$E*qCwD-;Qbn;moejh;6BOf~rh zs%MTBs#Ny|F@Dkb@ z3uz1`%ndw8x~rLR5iou?2}lZxB}sOT+D3*`Wiz4?yQN!z9dcG#*+dp@+w#W8sJbk}5<=9vWj% z#g#0obY;a*>+tIQ>ZS0BE(z{J#o{SdjI5~@obVk51Qk`=+mOWU;V>IHdreEOUxB-Zs`gZNs|5@G1#`JEv`-q5VKv>8WW2z@f z&Hq^)-VM7&bJKiTq!e;T5@&Z(zUtvM)%=(0+@jcoNPhvmIIMD*_$zxuMjI+0tFoMD zRYo;XGh~MW$16)4T$nbUBtp`ZdZ*^Yl}oh}*u!gl>&po&;=HK*Y{GD?{1i`({_6Ie z%=F=^lkWt+x_+FE>-ZJX5P1a3z8t;K87?eOK2W03Xl(B2Se9EN0hICLqCKvO0a!;v z4p6E?M*+w!b_r9fv+9L~7hR+4V!2eR8uv&V@{e)Qdwt&t&P8-Cr|&*Euc7O-&cXdx zH(ornf^Mz6^Wi%}QRUMu-Jca+ob=L+Kf#4*$-VZ(4=`877za!f2pNsH#m8_8T_EOl9rAlI#5 zXdfpC$^!O2GjZAyq9XSKZS^iRBA)hi13vSG4qQR|o2KJ>-?Sbu-}T+OHc zspjo9KESHYFxBRspHL3o>Hs!eydkpWf0`r%5t$*DEE!6zj7)v0F3LQzRTrDKcC3;v z%DJGzFpy?5u13mvEp{2*gd31;l=ShDgei4gO}sL-CgbXgnjUuhCu!6!dqcI^+Ven!_J&OWn{WosYDyx^R$eY+nax2B|T=(XCeU= zTF0;y%u_$Bzv#KC+s^tD)ck%&j*T*ZBJN@5J&wrh%lAwhb9pIor6fPz`H|(M)|a~T z>Y)|NZx}o`#l_W~w*W+hzIu7u2N(9)%V9C`9$qNXw)tFRYVAbTQU9Q>3dZV_uo%#h3mV(!a|f`arH_ou+27rXb!pRIcn!n(4g@T z6Vf}eJDbOl${Q=c*$aFTH0gbYC=d_0`WMbI;H}^#1i^e8*$wO4ARH;0edHRMZ->=A zZkIKMa?2{`^T`mh+UtYs{sWr@f$k?|GwAJ&XlP4oX%qWnc1opR^4pyd6Vd zC8sDkQD#@@>Qcxq%#u(bs>ph{-WW$|u4AC${VddFU0fM-X{y`lIRIa=^M=EfMjGs8 zlL!cX(ikk5$zsksztEabemMP7%%c-Gxa|>~eQWW1?qHaow+^^7B!`OHXNnnJVT%^w z=mn2V{;tHxbqGhLX4vBJ5F`8@zx)P`B?PV(%TL0p<>$G>n+_T8_>om~1lo+CEbjZ{ zx|i0QvMoL{zuf!kJ6`uRV9M&5=(oXxEc8Ixg82X_pAwFWNvxLyBL_^j3kK~!0h|eb-1v$28+svE_i%{%2t`&I$aU2&e>l- zYnBxg@LLTlofAs{M{aWuO5#E8goc}fR^oiml0lmDw>>KdPB_BB2R)w2w;YRD3|3Ee zi&4AA;Pek854doRvG#H)?NzBBM+*g=Rx={jK)azVPKN-{N?>!pxAT*R_xB-+$IS6f z`}y?)#a5Mdods@YU7g#(zxB&7mf}+OdCFc{H*&C#^K3KoHHOVDyhz#?(wEO0eX=BI zUDPu_EJ;(d!cpe;D7z)b;JBi+bJmjM!Pqbno6=t`Mr4y zO8G3hIWEH@3vCPud$}yGF`3=`ibsd9ZAElVr(dwt;@)g(ZhnwYYqd61&Q0ya6X=gV 
zZ|A_dUHWWVw%lQYuYfYoLTQh|v0~OL!2DI4P-kCf)R)(f>~LR1pUS~IaxLF|lj;Q2 zDkfz>^$YW{;8cND%c?D|rztf(J-4Hy{sh@J8#OVVQ*LJA$J0j8(+rQZKT{=NA zu9zw^@AS69AM^-(B#$X?t!9sQCin1;*{4So7XBEq5`jLA&skOqI>WltQ2qvUyW}?c zQEkHcST_dlD7P2JcO5FOK7!CEw|A_!4c~LiEzQ;RHEwMZ2rPXq;ZFcAAY|v`kg;!E zFS=lrchp0$gXDYk$S@`*rcl`P)A!0a*rp^Y=XC)?%C`hzap-#_g}tcm?zr-Dz5*ZH zF)F}59a_lGv-m*EE-d(k3_ZAsIa%n#Y7dS~&6N}S75;Pjm&jES?iv6z%Ezsrq!6Wk z;2BY8aUUVPbMed=ep{xi-=w~#W`dLE%>R0|mTD*Rh20iQ^`tz-+j6xYdeDxzO)4bQv!5EDcc8}KX zC#GB9t6o4hqvbQ26`}I-@60T=oB2E*ZY!8BzQ1Y?$`+xZ=uL4oS>*I+(}ZQ_m=Lgw zSimlLlg-CKOUv(SbHfPvEf+__y|V)xwljANlplT$cizSLAPQ^t3M&pm**jtOJ<-%q zzLQbL;uXh$H&x7YWwgr%!v$XyDD=w#N|_R@OjT=Y_a4=v+b-8_xea9aq+6q?^CTGY zVP$1ySFdT@bFHT_%9GJ6XH?Y)C@sC4m#KtLdxt{O-PgDI`t=RbaS9EPA*f^vy8w9s zywCmMmiVf$=D1W^s$b^r5!sJ9t}y+9h~Oe{nMU$-xR!|KO)?(%wmIFb*HB;0LE$B1m zLcg_V!n&g@gj_~~GnaA6`T?@1dePENHyEFT8}Fn=hZ*3DS^?eg4x(~DHqjplZ~P4O5NIE@QLe6VN#bVCU$-`%=LI5%I;2WBK<(efh zVo;^Hv^-2WM#KrNGD)#8-+Z*Vv|>xoT3VU?MGTiLR~sb9^qHIkgiH{(Eh1KHH$dBsb(ke2d&}+C97&>nZBTTE$z>s1@>`b1-TXGv9H2 z^AVeA(eT77QU9J@?vkO5^1IVVC~o|NSwysbH4V4d@WEZs)q32Q>cGQtZ-e5MM?a6*KK2i8`$gKY%5=2S&zM>E;TEv_DsdZB+=nWs z%-l?Rqyzb3L-xVb=aLeH=d{??J>{n6n-FPD$xak1_KQ>+l`&phCp|7qSxW6P z4sPLjf=HCk4m@Kk?R)rxq=}%f%;k3LUnEU}c%{-xk-McK>5?1lX=-)>du_#}McrCA zK}*-u%(+ABsqzz8yEU2$CCoLdX9yg=Kc7hVR=+YPkmXFgj;L9 zqS8YD<|8e(CF+F3;A4FOvT5>Ybf&D0m83an_#J4JjGMYsG!f6z-g{LWjra2czzS9I z`za3%ZyFmvbpHC(hUcbdlS{p2Pz#@ouw@)lJrPaJtu!s29jx!KcRSlWJ1F5!9GNFc z>|Mgq*IjQ*+1?S=oK(d9RP8I&>?=0Qs{;_-v-Or`*Cd(tCBI{w!5!*=f3whS6%uGsQ zR%Thv2)$O{K}TcqjVy^kwAw(K$Lk?^o-?w1)@3*khG0b~iBsf?ka$m)gMO?MVWr(>b?A_gK#n61h=u_14Tf)}0h6lcRK zHywK}4%C__Bx!xs`;+SasbA+(dP7#4az)@e1XcU%rIot4y1u~M{A$8*b~gyQljpXydO3d4 zbosWl971{1Gd(OUOxmaH{F8e@G%uX_gc_6st^&^Z^QG1E5mvw0$_m-+S50s}`>sKr zROr(`gUf$z#SDS2GCtJQD&?i5omW$!o*$+ zaqDwW3vco8LWQ2PW9d_gIX zJRpjUZJ4jCbs|(Mqh$KmZ29=$EV5TCg9YK#v)TuJR^&Ndd)f65zWaZ+^*|TtBs_NW zVp|xNC_Wg;r@8jv*7ytIcBoE=b6Y`4z%Gi$TKkJ+Eid;%H_i~@kn`j$4`}(qXuu@ zH?%hq34!|Kzx9XlPyKl-aF|by3u}&<7bd_>2hMF{O2aeAU*%lx7REam!MR_jN>?f9 
zS#>a*jbGbX>P^`B{~`3hwCO8_E9Wrb57H#GnfUG3cL>piqc?oNB{FMh8~< z*^yxp4`PIfg1UkamCZ-c$n&rDN_-?Z*PH1$ z)s!nETQ@f?y|6__9RsM4>rJqiZ70gvk!TAWr(Z|RGAksEO$j${E1b}DHh^DMsxpYba z*5%hj`4hUu^>gcK1u04Y800UFY2-n=l#YDGg^k>_1+&=&0}a!vL2z(w!9ZvU$LFI< zCStKu$&`!yh5|pttkCr#v{jD4cl}GC{=DbywE$1I=dv$u>Ept1JPa(agF_@B<=tNs z(sqmN10*^+I-_~0+|#1iu!DoJ`wdO5Ry(O(Y%rz?UQ_!=iRAK-XA;QC{O<<@6<-+q ztlhK^$`MLqffpbV0G;WN&a>RV&vX-6oX8-~vbYclwe^lM1P#1buO)vE(9Z*F`Pagc z$^XFF>Bf{>OQ4$j)h;8N02kCE%_XL;&;Ho=bYv7y!N0P?jJk`mDHOu#_HVcEr}T}wBw}~ zI5kGK{MNAczXRfa{LKp&S?7iq(iMf5nvvfsoizfVQhd%rIb=LQ2%c`zd7rCkTjs+_ z>$VekIH+jrZsN;{`!R+h!y;4}+W%hm1)Lf@uAIQt->U!+==4Qk#Pz^*mtUj=NOTb2 z{W9R?5Kv^}3kydz5AyaO4ZIu3?z^#W^gijYipM`joxEx)tZUQ!HwB3*F9&rt?kXN6 zXb)gCl?6(EL5bqP$_O%gxapn>d&b1BdUf-Xv$A z$$7-xS*6kRk|qDiKY7s~L+9q51NPi!EB>N8xxJuLkUt@BLAH?x)~ixicLVbI7GMA; zN41XQZso2$ec0goeFj!heB-&3Feki!PwxUw4uG5yfolQTc2s-dG|1nk&2gTeglv9! zfjykzgVLIVQ+jvg*-6Y&1iJY6_|U{U84CY3p3b>_tHs`g4ZWZ@EMj6qUDLfZXP2!( zaPR~Onzh>yH{d&3X`Cs{se+Xy; z@tg64TjrkhzIcGBxQ7AVIQQ{5f%;^C?F=)$E_)%sDL|b&)3C9K;0iBs_X*bgJelJg zPd+-y3mDJt7De98{}Oui`GiF;y{Fo3Rp*f~?G(Ym!L1xnure9u(Ff6>&g{FewJ*XqPeTpV0FV!*EVez1YgSKzWL zFV6g!0)6{@HrT(2iT&K7jEhnY$Xt-sv-53glB*-xx5U|pJ!H#P9xre!E=J)|Q014Cr>s0`H z&3=an_(gNzrMsA>F==P3oK9OE$|q6@O0_CU)oVZgf7WMYy4{rsoE_gE6s)ZqDKmSS6Mprk)b%Z=K1m!Zj>|G#(fIuJ3GqPx<$-DS+9# zKst$~egEhpG2ZUq+*jDHZfbl<@t>em`rBaPOQWFSS}+3_6kUJ<>U))f{ZR&vMwoE! z$1|^Lu4pY{_;QuNZ^vJ^{XrIu0JP%)KtSPvMTHu!$$z3^8IZ?wh6oUm(gS=H zxhpniC)DNC5s!$aTs=DYo^SXc?wP;{2eoJlX5`0kUPyUDc;0#jtYOa-95YA(ah1!} zF5*@$Sv|PG`t8g+Vza>osx1GOr@z0vDRxL?clb?@aK|rs(&Sg$)_=RC-;#?oS3s+Z zXh27YoSKQXUekRY9<%r{&MseGLfwe+#@Q{5LP3O2lM74!e-JS#17N#Gh%zQNR{$EMv0&_W!qM3g?5Iue091jhY|`IAil%#Km9Y z5Pl%bLYyQjRA@>#ff%?G<|81%P{ggl>M%Tsv@Z-5agm17S{-?~U7lGQ+eDwHn z9i;{%p{a?#H+nWstn=wV;GNOsoEgj=#`hvf0c)WgbovRmfn+3NcGOZs?mGUxXgfB<&fK&72L z_xDs{qa%Ed4TZy;bM9+AXZVKxs`UKLQiI}vb=Uhg{E^bYzH4o5&1lv=$Cbb1gx^b! 
z2RrDUH1ESU)x z)<_Jn5r7ypq0`&x%T^sj*Xb&1*M{F_0{SK#G0HkR$Mo+7N|XXKD5m3Id@64@a%-&q zSNtgmE`s9~$O#C6Vx4zoT?OGk__vWkjVg0^Don_viS4j1(KG&{%rw(#z0uFSr91$4 z|2stjYLE|%Su{;Wtm49Y4b-mwnE)UAf9<_zRFg{=KPoXw6s1@w0)ikS61vixBA|dE zA`p7$BE5GoHk4|C&;yEq^j<@=5R?ExKzc`d?;Y+$1<(5)?_K}9?zg+%^Wm)V;hA}6 z_WX98y(b`c#ts%AE4`rhGM1kDOQ%6)OBN=yNg8)g?GuizWkJod82)eAA;n36+*y}k z%}y_2G-sil09M4C9rE~HSc^0>I0XIp@#An~Lw6@q5+ z&otP`urd&nuXMP-sLX$VP{!xpq42+$je%^)^{aEgB^-U)K_43~RAQX%fpGb%DnNw8 z74DZANqoT`ZKzdlSkh4csEx_e4_$JsS^nt#R0$1Yff~LnZ2z7mijq2;LGP&&;{g7c>85 zzoB5iop-rGLYeK2{sF(VmB@ZZ7!4tKo4hV;ES zLkR;%qWIg`p(Pqnp9i=a*5vp7U93csAKG1nZbO~ttji;(i7VZHT>YR{=f4*$jXe5m ze|IE60^Q$-tNG5*T_A_v*~0jIfA;L+wmbTGbm6K!XEDh3`0oKJg@GoST=%?+9Cuf{ zf1SAqDUh{*nNoo$B%F%IIU#3>v07$u8O8?48FVn|JG@;xjiq=Af{80K zoW(#B{u&4Ah5B3!+do#gy?sQ$CwYwI*PqW)UgYF_{qXIbuU~n>meP$lIJg$2GkQ~u zfC*B6T7WS<@?!?bQY=~%ru&)ed+ckMax#^j%6}&8&!~Utkfiz2AkI!^R(2uMB6-Pj zm41hQywUhQq3gq;eLJ1?=)3c>3X6w-9=;43e++6r)EW2_f)5CXKzYK~dB;NzYj-OM zS<_<=0Ud87kJJF8#HJ?lpUgrm47TY%)RfvC*;fFANZuJcCDSQ72JUy8MjkuD#+Q?aw|@1NsoC zLH>wYZf}_?Merg0hs!brU2XMY%Ze4>vKW9vEK}3NdP`?@PDb$$jVaq9(^0sa;TlI! 
zJvX%dek}8Fx0VE%&yvi+T`ze<$*Sg+nJN?UhT4KC~D3}8ZZZqs@C73=+fBHnC?EUl}a!hyL z)h17L4v(;=-tQCUx2Uov@Rnl0p>=GGdiMCcbjY1nee?Cipv+O zaqM5$IDBcAYxg(zajNtv`bblSR*d>8UO{mhNE*e{f@Un+@(zbhhf%QostND{|9U2yJy&ByBL#JosyR_zYTYQ&4N#eD9b~_av=Dmq8*y z-Bl;nX~FAFdhY;s{5x0jBgRAtEWf(vQ?`Iji1RC!cLvFB0ylc9{lGP!7++ z6l*jv_L}zA9Q{p}7F}d}tZ#TgX4$9Iht8m&s67&r=H1%~2r~QS1j3^xOq&XJh$vKx z@k>-B6u_SLpuKz`En3PrHAcKl##YN=`YHcFp)VB4X>sEBpv?V@sFCBw!*5rfYlIQJ z=D9nt3+5n2>$fa>;0bxX%O?}%GH#oBo)wFrwpJ3d^|Y*Za5DXU0>c(SG-xG;Zg9aK z100%O>~-D<{H++z{BXjnoDsa&q>NMYX<_C{j@%QxVBEcYz_2OZt=O%t3Gbiji$9(3 zDgj84*vd;j$M zB=h3%d)xx)TKk%b_d8Q3r5F-z<&%3t3>9_cJWlq@Gs%Js<(;KBISwAX)c;tgITW|+ z9+f{S)vDX^Mr}RU!nV-8i=MMNxz{4o1)L?%YoKwWEvvB*Q$s8Lysz82?siB-;MkQ}D}ZNFZ4fcN5JosRmJq->%{` z>1Lgc>C#SKuew*)xziNP zvr3r{$tG%SiWeA)M4tlmRVa9ScF*GclehHm(>sq*(i_*u)H>m-SgG#8y z!ek9vy>X74o68`GR5Sp}2ZVt??lkpD6?o>Xzms}9L~3V{r*BC4>W+A=KRiYKV_eUw z`)$Ayz0d3OiI-um+|4|Dl;A@dmfv}&Vf3z~%|OCFgQ2UUwyb`SsoeDYH%5J5T!`fs z)GTw*Ns3YCylm@v-}F~F<>Q!wN{V)y4ANry0|+t;6A@5$nhnI5iARAD*3TMb3%RCW z3A0~8*k=h-1IlQsREusjw@O@7dN{L}QpbDr81@Dk{NyDFo`)%#xyCi~I4-k^{rB$6 zAs3{~ZZvam*H3Ge=#jyB+$6K}W5|DkuFbgG5nv6I_wEAp+AW~qT4sc|WvgVsv$GE` z+g$;X;|o|J9{$yMQ1z6k7{~XAhYpai0{idGeC9W^pC3vfTVo;Ghka#=AZj>Nr%nlz z1F`~HmbP?+gbBcS<3PuO@b=sZ`HuWSQb)z;{0Z)!O}%r=^hKZ^ejiu7uYj(3IYVvz zGe!ZUCNY4bf0?tMvkVZ$*^WB`kPpP*#FT5q&bfXz-~{q~IFsF3t`q*HcD=cti-L?I ze8xNLg^jiy-C0$>*r<{NAX55U`yBu>>+oFt8;hS$u5$KISH1ccR^d4brq>X~d5+Qw zto(zE{5S9(xUqJ&qjZB^#Qq+(qMV>8`TqMM>Z!NWTBM2#1M`BEF+hERTlV&&oD)Mz$`+_fP$NB zA+O$gs(q2)JKt3I8rPyExRvl%j1VPF)WI!cGT5I_fP3AA8V2bMQR_+K!u4eh{;Nbw z*<7(622BH^F}qi57F8s8!-UeZO`TO7t0+yAO5#++Gl^={-suy3+LoDsgwJgdZXY=y!P{HlYW$MvP(* zgsPC?QZC2<$xd)<p+3Vjt=fbq%t@075^M2r^)ERm2%5iw5E(;p7!;`%0o_wGX9RBk=wXv@Nqu+*4q z9sU8_IzqPSv%k3{zQhj1Gi>7QAD2D=w>+w8-xg6JgMV+z)*_{o7$3N8zGAU^1nym1 zUrh-ky6N%7K~4~oJ^i2)gFJ3i^dbs?M>YkN*Fg-On+(I(qWP~A{icwm~ev=d$Y6l?khh? 
z(tLz0o7F(I1LwpQ7XHo&T9A+Vi-R8s5fT4J;djQoF!h`hQp9^Ac7%+-v`J~88TceW zD+3<_`N@CLhBa2Yst9mf#WHzf8Y4KvVX}9h7~@W}VL#vYT?Rj5?Q!$|Vz5R{`@Rz` z4hqf2Z{C{tWj29hFqOk-wAzOiT3-DYngDPYJ>1*2;WQO5@tpD7Ec3K{6$O!-r73b0 z&w}h>3hof5ZtcWTXEmMs1ExF{nL(vWDQ)j+?Opbgjk&o5WDhw9*>g% z7v1Z1^$2>P1Kzipw|6)WZE@rdnQ427_l{ng%gttPOGN`N^!{v`S6B{m_AIN1Ygf-?!R$|EP+$P$@u>5Te?ogms6NHxahCn(9%i)rz@@T)XJcC z`8LtTsm3bDQQ4^h5Z;njc2#8NVV9xc`u+t!*5QS6~y`%MeEx3 z#bk;utD8Uwrz9VWxKyTO-FlUR-(L;kY1ul zOyYL-yv7VNY+^(zG|6}hvW7O#4}H9e{VrGi`KF9@XFDiBJD;e3G9|-1r7b(#4m)bM zciI(rE*^p(c9goUd^7660V$)K*RS4fp{I{Sbfxt3w;aTGhzXbuV5}_O9zs(bOR){4 zzs1RTbV^i@xmhE&4KMC$%KAKb&Ih^vsr8r$TK~KreRXYP;dAn?(YmYh%ig(yw*=$9 zyw<-p^dqUaAU1_N9v^Cv;c02%&U>ZQ_*H%y)5*r^MEeS=FFcpR*nK4v4W^0?PAGVC z#jmd9&fLpP7Y*es7-ab|BH6o(1-BwJ@u%Ot0BAAoD!050F0lS}Yy*-%EV`b6sA@kk z;6+022f4#M^pQiv*(p3mvQUSD*Dl@pcc`}=*>d74Pr^3$ z+%#uB|MwLCXtT51gNkEWxw4cj^lcEMmke zwKlzxyS$dbO9Go?mL46Ae1PY^!U_zOpOzlTB zJ|tG%L23oQtX}ZUAw^ToRi}Sua~~X=j}!CQl7CXQutrZYL=;)EcJT%X*0ndTu+P`EbEj%#OKeyCg~4cFbF-g^ z=;GR%iFS!7X=DMf0Hl7Z@|~{@VhYXpmltk)_IB6$M7nZTfkI16%js%V?XnfV%Vj!Fne#&UyYF~h z^AY?+muQ}2HI3?OVYwPd=UL}0ydp!eYsXwhiFMa1qXNA~a2s0As_pT6UwN0FFyFyt z|F1cnQ}1k)!(6jOu2;t7dc1Y9!aBDKd?eu@LRi%h*$lxi`XZAob%G|PRgeOPxI~C=Sj}!ECKydwI80osjcYd zRwV|<43I=PYnFzPbkoapbbsw4m?khYWK&~1G~ZJ;+nTMV=Si;ARza5FtD=47+q|bG zcVW4x)8cjuJUh_GpRZUdB;~b+^N!EaaQ&H~s~AI$bSuGHQuF942fK}Him+qR52_s6 zJ(N6;9D7g&ZnT|V_bIZDRF0k%i&z=n{bX=1oA!{44R-b3Gp)yw@;-!R?HU{|4Qom(LtvhK54zP_Dh;S+&K@H_^utf zlFXtM=)1KWd=IIGZ%LiCJx_FD0Z80O0QqY%j)i`(6JVXq$f*_6dg zaX*AGjtbgwkE-+)gqFAKox7SeXS&0=#T;di;xG19*!pe^mBFn8Ck=d8IL3E(CRdDc z(*aB+f)exTVFlhpbdgk3G;G11h80XkksY`uyfOT*1q^ooebDdAp`Sdn* zH!_ao%<2w>frpWHPdGv+DYM!_gH6QPXP9Izoyc1rLYWa|>{$`7>1x z0pUo{sE1Nx0Z-(tW>2!(#Xyr5E3I|CKjrKazS)b$-am3Ivc3_uS7t7H1>l<~!OHjXcM_!_~Wj zev0loY*p*#jFM~b)^|+9X0Ypyjp$^0o>{Qg`zrpAX_dR3!I3VPJ*hslI6B6whr&!N z+%|i)VUBIVk1QNH>Yi0C42VFt7A=cfFD{Y!VZ0(6>>_r)G>Ug?{{&!S++ucqoV|CA<1Tj=1zBcUm! 
zxNYU_-Q%=xG}m?QF~SC_{_zQJeG{vB4vln%4yuxIHF6yS-EXIdeYU>UWJDhk#iLBu zwQI8yX8f!XQ9gpJh9yg)rt#r+z-ofmgHS3-kybh zXQXyh@Ioyg7vIK~_mc9Z&8|v%8i3#K5^fmsfvrYFu8pLLlA;X=t91rHJd<_LOa?M@ zV|?in#WTCK#Mh_P$8iiMG^@y=wV*li9m=;C)oX0+y^1P5`2y+QsP@_~dl$@Bk|}Fx z(352}d~dHT@02T7XB=7BzI!%#xUJe*&_`<7gNeo$db>q)htc;G<<-ski$SPirmcA_ zpk;B2ORDGMknR~ghq)w6?8XcE7VygigfasJQ1Tx)#KcgB)IifZ&RU3}4xOSza5Gn^ zQIu7+w4m~n&wc6M?Z-GV?}qx)WKq4*y$nfy0czg$we(@@z!7P|#DCF=hswh48-idrZ!soz_dpYT5bxQ&L3sm&siE(ZVPlexU)LxZea(rz4 zE!=oLq-mzpC*JHf`1glVhUrluudUuF*T*WmD@P_R@l|q9_yl(b6}#@og>7ar^j7<* zNSLczQ=7pLF-&t3iUS`MM|{^dWxzR~trJk+-BQ%CM5#jQ3E2J{yRz>!FmGtA$M2S= zWT_$PMxWX3TDXtc!@toF49|{8r(JxqUBb?j#+6Kebs6iB%{08gz$@Q>HnG;5Qb20R zCum_oJYhChe3S1Oi*7JoZk*&9^v>FA!FRAWqp$62y9K_RsjPXghq`HoBfr@zt;up+ z>yEfZlgBa?0`+I!$*L&h>hW@N`Jtolq;0+4g|lZ|MYho6S!(XQ%jvcWeDwBu_E`ZH zKt&4z<`K7T1*{r9k)>H`Gq@AZM#=^bfscFG* z@=(b6UdN?PHu%n`lWjFN1x;zz40o3$FD7+VdK@t?=0ncd9ha0uo+RV9pt99qIQFtnzKF4wU{otLzrJ*Li@TSp2i?c{ z(`I}iy$X3mi9nm5-KotJY(=kg8eDyBp0(TlQkwN+A}sRlsoeu-vy4010U9>)&DLMO zV1>MD^PR$I-*#{zr)NE2c`T&DW%bec?vHp51(CUpkJg^en+pzks`{}918{U9R4!%H z&GL}=i0`NKuxAe9!$*eY;=ER-7NT;_kH_gb{?L`e$dr*eL}+qX7sSNLyo1rFg|g&d zq(8lUof7f*nk+4X+=b=Ab2g;_IvsmmxZ!4}uc*=5J1?%B$M-`W;um!KJhglH=GWx~ zr8dAw+=|fgm zn{{V~o`9du5COj_r}otSaab*q3hpl^Vq*YC~^05N#8~aSAlss_$etEA8Vag%Q#+5Tr5M&-Ie@6#mj>a+?37W zQaa>tW3!<&jDBUyJ@_K+fI%qixDn2XL5Ca`9h)3(Jsi(!W-n)V8g(n}4%mEHpmcUl z89M=uhtBLz(kF7#PLf_;+8xZRC_8g>Eb*)Z*o8;ad+zA0a``#$687PKfU*S0l}LY+QIwi z@oRD%W-P6}4BIO35_-j1>Z|rvnGQ^$kJjIF;=XHjRL+MSsvYI-66#I!bGWAux1~4c z!s)8A+}^!ik#x^B6u5_8ai3QeN`BZ%r|bxLEtiz<+)L_5|QHLS=~E=b6X@pdt`pXwXdif;SS5j}k3 zO+$%0N9)>iw%dyn@52=9wgO)iZduj9jvHIFzL)X|&q`)M;O?Y_|1fo=S2Wax<_!}l zek9(W?RMcMRUmCb>#12dO8Tygyv+d0yd!y7v@*=RZhJOCV8lPtXt)Bt62vT+IbJ5t zdgbAS!yLepLA1G>Aem5rshL6Oqan_-KG&?ZI#P#tJ1^frndWI_RXa-4Z@!y@V{U4b z!_&G+`7Ri(cX%$kT~z3n&*`>IW^ydw(|(`4&GCCpj1OgpVjaNgupMURVLw+evvN`n$IefNu;DcDmQ&K)PzK1zxiDkzo zD+vT-z^Dwhp|-n=iT#whO>gWDA3@_e_{nr_FO8k;=i-iw#_l6#CBcJCKk%Cco{S{Y 
z?oUmzlf^Tc-s5qLRz*gwh>~8|fd0$yi{3YU&R`loQTI(&^}n3KjYe zMq}0LBrV}o&t%;kEPOC#p~NxD0=$7DATpgT-(j?@Z}qaXnm;q#OcIxyYEa=gTbE~! zuc<-!o5OK=BneJO;X@&(CFhnI3$)6IJqDe-1!RS_ABV)^mUR!}FAWvt^u+eA-baAD z_auFtJ$j#{g``xdBRkTp@9G8yTq3X8+_cDC&8b?5cw;OR0@2jv?{JZG5Bc5^djLg6 zNr?!SX2)KK)W}F9-No;Izxk8yU;`ReP#P#xcRACcb-;?G>hxO^nB#rD2AmCwhwm1s6&WU7hW|wMq~p+Rc^kMA{!GUo2zYR>!;(w z!nv(!5a>9eGpo3;(Tq$k9eQJHXUvuDo(p+wVnLH7hta@GtX^JSz|w!Z1nz<9y5}l2 zGc>fkeYtHDf$YG4$+s0CL757i?t)R?V`t8`N*)4PA=*b_aGj>LJ-SLGrJ-Q=%>*vd zN0&CDOhwY&YCmZthKt`{agQ1<)L+ifkjCj=y2r=lC)d_{2G*LpV0=9sYiacY;* zThO5HtoMMd<)!-U7`$*`Avdln7Z|7mCQBVRL1@zXxbOLFF~23>Egp&%w9E0lL9Kk# zO`PX4YqU!%dOUDDdVAmlmwVmrz-VyYm)I;iba`OkC11DO)!7P2t!lxIH!!%TeFTF6 zKTn^^a8AqQF6J}1fl}sbWB_oWxjll}c%y(x_-)En^A@rJv_wv(?o+*AnCuj4IBI#F zE-bv2XS;Kvg^Up)hwb&AZe0ojEL@wVU&0p~^C{Tjc|0p7nI?}iAe&2)=VLz5W_6!8 zGgP{x2isvV-vR&E_upui^a`8Dh$)oxE*$%4;4^}AnuuzhE)n-jmwF-7s*D;=UN{lS zRAO&#eR!B&->#ES5og^XP~B_qWYNv!-CcUhP@0Dgf;ZS~bhY%m?)g?Phz*0MA&hcj zuv%BL;gEpjQFEyZVX1?;x-97)qu2Shf~$3kGXgC#Ml zQT4@e(o^%@jS<@#rcX^<44fHoIkLzO?(@&LOSz^eHt{^xifw)6Jk`hgnZ7E*By=J!yN_9rF-63$8cPr=x=d}pGxO{ju!v}CbbrOiU zY}PGK`fMz`UNBd*+$EiB}l57Pz3LhupWYozk#4`myd-UhV2 zyW)Nme{s{{_9_KXBY>xBeZ3nvEP8536x&-}?#;*TF0Yf`SG2`7MH;$Y_0d3LA`qT( zNcdMel&>b4-kt=-#UgjN@+xxttRvI-4+Py2wpXbrs&Devsv#bDjUm>j4x(z zx=wGrGVw_ep5a}=7J^#UE#d+=5=uCgr-Z*M|5(FbFPTa2f}kW;kdW=Q`)%05AW|w| z6yb7o_U#VN9DyqYMnEvv$2i29RX-j*&FFzMis+}lVNID8>qm(aFm9`LW=X|e!z46f z`V5K9l5^t)Z` z!lWY2^=RU%3*+#4-h_rcyHVEcDVHrdtOu9+u{2OhYc-_pU*X?YE?n34(=aFan5%5` zAD2gCgJ)L?+>hb%G{Q&d-qs|*;30k!A6q%HbrHVi?7V@1c@3woSYO~h09qtt&|AOl z0T|^Nwu%{wwpSgF)U1}pNj;?@xCC17DZYS^u7>QuSQfkIw{u}M#yZBaIa~@N*@3Hl z4FoSZq{59UalYL=*7utK=vBG#QwSV97xtsVqwVEw{@~|!f(msP;k73nBGzO?l+y1Q zMYbX`3v>q=_$G7g0xh2*+09~Ij=H?Xu$D57ja$-Pn~vf zrqu%lecKU+TAA6+Yh1Hk^^=#LJ~0?_HL|V+BVrqdC^pE^12?CQw)TViCh5m=Ie4~@p|d4OXOFE&PIF93F{@PE!f_@n z2(J6Oy9l+PfKG3#Q#PDg5cNxXKUrccq)UM;^luFL=@)FAWGJ{rf}ZIq5dyV)?o(@KX4yOiYoRK9UDm-@}fc zjDFuR5%V;Jroc$7EC*Vgs~1=hdfu|g^|h%}XjM_Y#VbYAw{pDh2X3}W%(fUuHCf$k 
zH%iu@c_e&c=1JNDZD4#!&S$G1unitbXZIfYqEY)zigkU?hcgSWVyFte$LPf-W+KN{ zBZBCBt0-rxr+$nP@C)!RmTW7*%-OGb?!KdAfissBKWcy&H%(V{wEppcD*l<-(I2w~ zxF&FJI>=j1_ z_sb&WM^s${J~T}!X-r(z3t-{+!iWdGV%TiuPxnc)$e&A&m~0Q zx7M8>-=JDKEpRki@NG>{PTf>6X>v0vWjx=Dzb!6YFz`ik92>f|2fb+_y36UC<7A}j zt7|MCzWCWo?};;&(Tjl{r}kY~s#CO|s~n)%H#4tTL)V)aJtqsDJsfR( zaAt6<=$AykG1?!6D+$&)@Cl)!so9iN#DxMiyDqgSE;G^6Ve|B>_U_d7$++uweZsHH zn)w#D-B0_i+YDvLs31k)65qYW4xxK=I$j?@u|gn*u=hFIV`XgJ);jPPCWn>8*Nd+K*MNeCt$0maJ{)5dYV5no77Ad~%2@ z-3JnITN+sc6Z_6awWnBGz^V^_^g059b*tYouIoV0CxO7cph2?pS%^0kuTKcC!x-K! zRh=b!DsDKw<$N|nD5jAd@`)A1ATSk8cOVCet%%G<%KfdB1RSz?ELHU9gT^Pow`fi?HO&K`nk_s$Jl;hA*n+~2JqHxKA*vv7}pA;@5j{kGfi0h!EY)d3JabnP?a)3gY z^}gr4f#8F$y8sD8q~@W(dMGR(oNyEt&V!P{U(~Cz1623J?&f2Dc2ol$W42q=U0RBm;IR#qhvdhPKSt3c)h^e1Cx0gT-1odS2@7l zR|XH1uSv2?$EN@)a0CSHca$N9(iX^DynZf00W9IRi%v7R_ANTM{l)`n8qksZ#S}>5 z%K`9fwGf_pgBgY~D(W*qRJHn0;Jwb$rf*6r}Kau1?<@WJOTJtzN zSG33vHQ$A(rSBA#mLuh$rj7WD^zI@3WlVHaQ&^h$rHToBSJRBQpINUbpE!|rLk_lIo|W2b#DA({rNqRj@|5kn5U6McDB@EBqDle{owc~+s(+xRsG*z-#*>$ z*1$Gi)4TI6QXx5+BALQx2L=9oEV=4*xKdIM+oukf43I}?2!s#W&`dh?sXidnlHM3( zZ$qI`Fe_c5NX){Q0)aS&?n<8R4(xSlW-th2; z+g|!7H0W%II9!cuJ&AyU*0uLy>?m1g=4@SP?AVDv!yhf{|4QG~^t^+EByK0KEooA6 zDf!lF!OLG<@t!-|#7cZS+0~mZLv;Q}q(X%c9?5{u%X#$50a=tNO(-%C5f%T(*m1QJ@e8Ys{~02iDJ*qIzgUavp|)Q>F(4Te9hzZ zeUte%L^tmWqp+}EQGN8lNdO{)I14`1696Y}CkkOH)H$NtXi;M!xCb9;d6YUQuG#Ty zQKpR)A$Oml>$Li(Y9v|9mkNCKG*#--1L@GUb41BdNufuAS_cC*6csO0A&7e_+w_wA z(ViHHPpOhM&r%qT6A{DnyKK~w?wI?r|(SL+M>mMc?A>5-%?QTsP0_;tYY6{YEkm@MEqoM@r(|(iE(u%t81iwj%afg z&F05cT_cJug03IkjAush-(2>#oCLykNLgDf65})aA#YfRM2P5MOj3W#j!!}=baJ6*+6QTwgpGHUqVYGoSx?Y&^OoGut{@L<*Oei#FR zw8mJwH;m((wUXgXhKItHf6h1ch*h6NmN zA$~yQY?D$X_fVWw=yGMO%j2L(0@ulYBSwAYq_7H*ZJ>}>)YSB#a@WD6Ss17J_p!4t zB4A6**+%0C(V-vDomO!3PHS^cQ5ek3z&-2>f+hTi@6|Iv5>6yQ@55^xC#h$aj($Ri(v z25a{^ckCWDe(yX{l#>B7l8iR`2<6yCU{%~OA%#HCfY*aqdiQqXpOXWH2!~;m@4;bG z`lV4}2rE%?4K%op?}Ok^LmO;?PF1_o18I5i33(PVpq0QUtWM4Uv@!lUDiMjOL_RPZ zb?~aXNDUROA&|Z>*9ygbl>XLCpj+cLU`ar5W)P?-VCP?Eh8zmq@SSh+6;W!9ck1-& 
z`;b3dR)?y00?IYzEE89ymWjR*uAj8=|5}UUz8ltf{NOnNjViGfb@hLV!O3+ju507oUr%kE&pR6!O{4di;AL z{?AsU1i?#!!jde&!vquN3IT=W_kcLCVlx)|%iG_miw9qQy56{~c7EcifdFaa31z<##6Idt%keA}lIE79;9pK+asUIJrt z=7>D|z59Qry!R35|AUi(KUl810aTTI>874IN&zfc!HS#(cncQC{=pjiq;FIL6j!Pi zgg`sNB)Z1ez#V32&T0v1%u2|9ps z0i9e#B>`Ddsy=_d?jLut&89QDa7MJnNs9h2B7>4v0zhPJxrFlquO?{e9%58MLW#WF zKF%Ha7wUwA4{*Pk0(DG*K#4v52yB~DfFBo5Q;@p9kHx)40x;bf$|YT^p(B8QW76V> zjs(Voeo(O>#52TC02W=9k;K~+0ZMDA!ouUH#avk2G8+4o_u#xoEHG7SK*?Wzkt!hb z%P-_m*k#aFq2RV@kP{H66zuZ1m zq?Hw%ON$?E)5RYv*yqYSEymkZ9PrmQ1QOkX&!XMKC&m&0GlxW^vhXTEnne=IHl998 zaQd1V_GcCQ>-v8yVu-*ntHj=iY=haZ$fzAL>%c3=`%z~#*z^^;?MU;#}26U@910V$}aoWhm<4cvI5YkNF61c)qP zELq`)K-s7^5D7Y8+kY7x|FO@={6m?=Cj1~})cT;N$^&UzFmP|#)U5|qssL7R_65d2 z8vlQhVTpiFY(W(NL5^SK49o`KKcGUFbqDAFhc+%;{Jk@zpIK%ynK8w2ia>2 zqF0hKu;f>I2y%a6d8nKUfQX0pjQ^23zz8CmS8GoGKk8KCgcUDhshD{|yd>t$94NE2 z0r=@r=m`1u)T+k7p37$u^!QLEfDcD6jk!TsFY*csqS4%X+SrAxo&{tlxPF=HuHDza zvHs0n`Wp#t3)e3832J84y06izuhXk!|ah6@cP@_fKHq6~Q#A z@n9Ij@XoPN*wRfP6xeTkr=(>=v8@vl&|Eqi*dJXpA}<7c&Sf(s?zm?Bdk|>Ukr+>0 zZoc6%LOYK?5!j4o*q?Jr0&_`%3xc`a1V@7~?p08V5m(})lHB9{7l=QoVxe(cNo0Tb z^*P)4+`uP`U7yzs)b6fWafmliE-PVbrcgO~q=N&0P{`;qqr>{vanI(4azDAzK|PMY zEzTK|oF08698>hRkjqRn%22Cj=r{?45kQ3Tffq0j9Z?I8aHe+mi{A4|<@K)le-$vY zJa*$HoJ|~)?DyN~@;Hsri9ZGY-hQHtpF|{0~Uv<^bnVr z|G3p(s|!-xzKC@VQwbAeJVVelZAb$1ZH%eA-GrUBMY}g?lKeoE9_zmb0mEt`9lUOq zZoTAe_BWNkAjlwW$9zj=Q|Q_wP%768mR9cG5q$u47`2cG7Xkto9GH{RugszUJ)r}r z>Wh?y!za0QU$&+*Wa;(3k@0K#@Q9RVD6#Yc(1m#qsuqL~reUfs!ag@;&&#W|S#^fH zOfR;nB{Ys^{*E~O!(;&xil|w`F_m5y(gPEb#x9BGn!zi@cFX&U=>Uoj#iodE(U)@T zx9fBoeUoop)aY=y=67i1KV)RU4d#cniw%gV24Q!HkyN82?GaiW=`rW#0XLlm)R%Zj z%!p}#xLQRj7g>vhl7HnAGw#@r3;Y?Yit>o~Xzt0T>-t9vFMh1O_R$t3DzN~@48VYr z?0X!%&KDE0Mas#WE=tLlGG#{H}XcbaKj3)b`3#Sm2}>EdpEu4mss{%-%kWxZPNwpiZC z){IC!(YMC3@~*|hGvz%qqIq$S^|xE}d1ID&1#BB4nttHGPl8Zo0WuIj__6(c5wGBcgqQ6vg3Q;p z|N3BHkO!G@Lhq;5Uljb04iSVLRbR7NAm$sHfR6QL_`V;O{)bZA$FcMw@Y12ne*ZRx z{_*60BR%*Zo&JX>;B@?tTmB~t{)dA9W8H$ Date: Tue, 18 Jul 2023 03:49:26 +0000 Subject: [PATCH 02/24] 
=?UTF-8?q?=F0=9F=8E=A8=20[pre-commit.ci]=20Auto=20f?= =?UTF-8?q?ormat=20from=20pre-commit.com=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/configuration/lineage.rst | 1 - docs/configuration/selecting-excluding.rst | 2 +- docs/configuration/testing-behavior.rst | 2 +- docs/getting_started/astro.rst | 1 - docs/getting_started/gcc.rst | 2 -- docs/getting_started/mwaa.rst | 1 - docs/getting_started/open-source.rst | 1 - 7 files changed, 2 insertions(+), 8 deletions(-) diff --git a/docs/configuration/lineage.rst b/docs/configuration/lineage.rst index 498b61b30..49d96289d 100644 --- a/docs/configuration/lineage.rst +++ b/docs/configuration/lineage.rst @@ -79,4 +79,3 @@ installed "dbt_executable_path": "/usr/local/bin/dbt-ol", }, ) - diff --git a/docs/configuration/selecting-excluding.rst b/docs/configuration/selecting-excluding.rst index 5bcf42beb..9e14a6272 100644 --- a/docs/configuration/selecting-excluding.rst +++ b/docs/configuration/selecting-excluding.rst @@ -40,4 +40,4 @@ Examples: jaffle_shop = DbtDag( # ... select={"paths": ["analytics/tables"]}, - ) \ No newline at end of file + ) diff --git a/docs/configuration/testing-behavior.rst b/docs/configuration/testing-behavior.rst index 6f2b7cbef..7a3698d82 100644 --- a/docs/configuration/testing-behavior.rst +++ b/docs/configuration/testing-behavior.rst @@ -85,4 +85,4 @@ When at least one WARN message is present, the function passed to ``on_warning_c If warnings that are not associated with tests occur (e.g. freshness warnings), they will still trigger the ``on_warning_callback`` method above. However, these warnings will not be included in the ``test_names`` and - ``test_results`` context variables, which are specific to test-related warnings. \ No newline at end of file + ``test_results`` context variables, which are specific to test-related warnings. 
diff --git a/docs/getting_started/astro.rst b/docs/getting_started/astro.rst index c14f7c94e..f3da7df80 100644 --- a/docs/getting_started/astro.rst +++ b/docs/getting_started/astro.rst @@ -88,7 +88,6 @@ In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and c profile_args={ "schema": "public", }, - # normal dag parameters schedule_interval="@daily", start_date=datetime(2023, 1, 1), diff --git a/docs/getting_started/gcc.rst b/docs/getting_started/gcc.rst index 3f4bf7d0e..b9820e48b 100644 --- a/docs/getting_started/gcc.rst +++ b/docs/getting_started/gcc.rst @@ -52,14 +52,12 @@ Make sure to rename the ```` value below to your adapter's Python profile_args={ "schema": "public", }, - # cosmos virtualenv parameters execution_mode="virtualenv", operator_args={ "py_system_site_packages": False, "py_requirements": [""], }, - # normal dag parameters schedule_interval="@daily", start_date=datetime(2023, 1, 1), diff --git a/docs/getting_started/mwaa.rst b/docs/getting_started/mwaa.rst index 29d72f27e..f5182e1ee 100644 --- a/docs/getting_started/mwaa.rst +++ b/docs/getting_started/mwaa.rst @@ -94,7 +94,6 @@ In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and c profile_args={ "schema": "public", }, - # normal dag parameters schedule_interval="@daily", start_date=datetime(2023, 1, 1), diff --git a/docs/getting_started/open-source.rst b/docs/getting_started/open-source.rst index 8930bef94..61b42f64d 100644 --- a/docs/getting_started/open-source.rst +++ b/docs/getting_started/open-source.rst @@ -59,7 +59,6 @@ In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and c profile_args={ "schema": "public", }, - # normal dag parameters schedule_interval="@daily", start_date=datetime(2023, 1, 1), From e132f97be2d347f054f8ae8a369adff86d6fe5ed Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Mon, 17 Jul 2023 23:53:02 -0400 Subject: [PATCH 03/24] get build working --- docs/conf.py | 13 ------------- pyproject.toml | 2 
-- 2 files changed, 15 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index dbf7a5068..69110e5d0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -23,7 +23,6 @@ # "autoapi.extension", "sphinx.ext.autodoc", "sphinx.ext.autosummary", - "sphinx_tabs.tabs", ] add_module_names = False @@ -48,16 +47,4 @@ "image_dark": "cosmos-icon.svg", }, "footer_start": ["copyright"], - "show_nav_level": 2, } - - -# def skip_logger_objects(app, what, name, obj, skip, options): -# if "logger" in name: -# skip = True - -# return skip - - -# def setup(sphinx): -# sphinx.connect("autoapi-skip-member", skip_logger_objects) diff --git a/pyproject.toml b/pyproject.toml index 9c8701193..aca16781a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,7 +87,6 @@ docs =[ "sphinx", "pydata-sphinx-theme", "sphinx-autobuild", - "sphinx-tabs", "sphinx-autoapi" ] tests = [ @@ -177,7 +176,6 @@ dependencies = [ "sphinx", "pydata-sphinx-theme", "sphinx-autobuild", - "sphinx-tabs", "sphinx-autoapi", ] From bc0a4a23398137d90fe6e2b9ec5ce9b1b8895a83 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Mon, 17 Jul 2023 23:55:15 -0400 Subject: [PATCH 04/24] add requirements.txt file --- docs/requirements.txt | 4 ++++ netlify.toml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 docs/requirements.txt diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 000000000..f159d1674 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,4 @@ +sphinx +pydata-sphinx-theme +sphinx-autobuild +sphinx-autoapi \ No newline at end of file diff --git a/netlify.toml b/netlify.toml index 18bfb5ce4..bbf0ed645 100644 --- a/netlify.toml +++ b/netlify.toml @@ -12,7 +12,7 @@ # Default build command. 
command = "make dirhtml" - environment = { PYTHON_VERSION = "3.8" } + environment = { PYTHON_VERSION = "3.10" } # Production context: all deploys from the Production branch # set in your site’s Branches settings in the UI will inherit From 810336b9d1d5801dfa5a827419b54a83de10142a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 18 Jul 2023 03:55:26 +0000 Subject: [PATCH 05/24] =?UTF-8?q?=F0=9F=8E=A8=20[pre-commit.ci]=20Auto=20f?= =?UTF-8?q?ormat=20from=20pre-commit.com=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index f159d1674..bb86f233e 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,4 @@ sphinx pydata-sphinx-theme sphinx-autobuild -sphinx-autoapi \ No newline at end of file +sphinx-autoapi From 02428b87910071682f77883354e4a27a68470c31 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Mon, 17 Jul 2023 23:57:04 -0400 Subject: [PATCH 06/24] revert python version --- netlify.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/netlify.toml b/netlify.toml index bbf0ed645..18bfb5ce4 100644 --- a/netlify.toml +++ b/netlify.toml @@ -12,7 +12,7 @@ # Default build command. 
command = "make dirhtml" - environment = { PYTHON_VERSION = "3.10" } + environment = { PYTHON_VERSION = "3.8" } # Production context: all deploys from the Production branch # set in your site’s Branches settings in the UI will inherit From 826f40489fde0771109c57a0ebe689c25e2f00e0 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Mon, 17 Jul 2023 23:59:00 -0400 Subject: [PATCH 07/24] fix broken links --- docs/contributing.rst | 4 ++-- docs/getting_started/execution-modes.rst | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 8e65f68fd..95238f482 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -20,9 +20,9 @@ To contribute to the cosmos project: Using Hatch for local development --------------------------------- -We currently use `hatch ` for building and distributing ``astronomer-cosmos``. +We currently use `hatch `_ for building and distributing ``astronomer-cosmos``. -The tool can also be used for local development. The `pyproject.toml ` file currently defines a matrix of supported versions of Python and Airflow for which a user can run the tests against. +The tool can also be used for local development. The `pyproject.toml `_ file currently defines a matrix of supported versions of Python and Airflow for which a user can run the tests against. For instance, to run the tests using Python 3.10 and Apache Airflow 2.5, use the following: diff --git a/docs/getting_started/execution-modes.rst b/docs/getting_started/execution-modes.rst index b2b8ce8e2..49cf93111 100644 --- a/docs/getting_started/execution-modes.rst +++ b/docs/getting_started/execution-modes.rst @@ -92,7 +92,7 @@ The other challenge with the ``docker`` approach is if the Airflow worker is alr This approach can be significantly slower than ``virtualenv`` since it may have to build the ``Docker`` container, which is slower than creating a Virtualenv with ``dbt-core``. 
-Check the step-by-step guide on using the ``docker`` execution mode at `Docker Operators `. +Check the step-by-step guide on using the ``docker`` execution mode at `Docker Operators `_. Example DAG: @@ -117,7 +117,7 @@ It assumes the user has a Kubernetes cluster. It also expects the user to ensure The ``Kubernetes`` deployment may be slower than ``Docker`` and ``Virtualenv`` assuming that the container image is built (which is slower than creating a Python ``virtualenv`` and installing ``dbt-core``) and the Airflow task needs to spin up a new ``Pod`` in Kubernetes. -Check the step-by-step guide on using the ``kubernetes`` execution mode at `Kubernetes Operators `. +Check the step-by-step guide on using the ``kubernetes`` execution mode at `Kubernetes Operators `_. Example DAG: From 3850088d97e47b8e77b7f00083f1fd4b26ec8ae4 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 19 Jul 2023 11:11:31 -0400 Subject: [PATCH 08/24] more docs updates --- README.rst | 2 +- docs/configuration/index.rst | 1 + docs/configuration/parsing-methods.rst | 57 ++++++++++++++++++++++++++ docs/getting_started/index.rst | 11 +++-- 4 files changed, 66 insertions(+), 5 deletions(-) create mode 100644 docs/configuration/parsing-methods.rst diff --git a/README.rst b/README.rst index 6c3072562..a722099cb 100644 --- a/README.rst +++ b/README.rst @@ -45,7 +45,7 @@ You can render an Airflow Task Group using the ``DbtTaskGroup`` class. Here's an from airflow import DAG from airflow.operators.empty import EmptyOperator - from cosmos.providers.dbt.task_group import DbtTaskGroup + from cosmos import DbtTaskGroup with DAG( diff --git a/docs/configuration/index.rst b/docs/configuration/index.rst index 4c82c115e..efd476058 100644 --- a/docs/configuration/index.rst +++ b/docs/configuration/index.rst @@ -8,6 +8,7 @@ Cosmos offers a number of configuration options to customize its behavior. For m .. 
toctree:: :caption: Contents: + Parsing Methods Configuring Lineage Generating Docs Scheduling diff --git a/docs/configuration/parsing-methods.rst b/docs/configuration/parsing-methods.rst new file mode 100644 index 000000000..696219583 --- /dev/null +++ b/docs/configuration/parsing-methods.rst @@ -0,0 +1,57 @@ +.. _parsing-methods: + +Parsing Methods +=============== + +Cosmos offers several options to parse your dbt project: + +- ``automatic``. Tries to find a user-supplied ``manifest.json`` file. If it can't find one, it will run ``dbt ls`` to generate one. If that fails, it will use Cosmos' dbt parser. +- ``dbt_manifest``. Parses a user-supplied ``manifest.json`` file. This can be generated manually with dbt commands or via a CI/CD process. +- ``dbt_ls``. Parses a dbt project directory using the ``dbt ls`` command. +- ``custom``. Uses Cosmos' custom dbt parser, which extracts dependencies from your dbt's model code. + + +``automatic`` +------------- + +When you don't supply an argument to the ``load_mode`` parameter (or you supply the value ``"automatic"``), Cosmos will attempt the other methods in order: + +1. Use a pre-existing ``manifest.json`` file (``dbt_manifest``) +2. Try to generate a ``manifest.json`` file from your dbt project (``dbt_ls``) +3. Use Cosmos' dbt parser (``custom``) + +``dbt_manifest`` +---------------- + +If you already have a ``manifest.json`` file created by dbt, Cosmos will parse the manifest to generate your DAG. + +You can supply a ``manifest_path`` parameter on the DbtDag / DbtTaskGroup with a path to a ``manifest.json`` file. For example: + +.. code-block:: python + + DbtDag( + manifest_path="/path/to/manifest.json" + ..., + ) + +``dbt_ls`` +---------- + +.. note:: + + This only works for the ``local`` and ``virtualenv`` execution modes. + +If you don't have a ``manifest.json`` file, Cosmos will attempt to generate one from your dbt project. It does this by running ``dbt ls`` and parsing the output. 
+ +When Cosmos runs ``dbt ls``, it also passes your ``select`` and ``exclude`` arguments to the command. This means that Cosmos will only generate a manifest for the models you want to run. + + +``custom`` +---------- + +If the above methods fail, Cosmos will default to using its own dbt parser. This parser is not as robust as dbt's, so it's recommended that you use one of the above methods if possible. + +The following are known limitations of the custom parser: + +- it does not read from the ``dbt_project.yml`` file +- it does not parse Python files or models diff --git a/docs/getting_started/index.rst b/docs/getting_started/index.rst index 7aa42ca5c..6f0325128 100644 --- a/docs/getting_started/index.rst +++ b/docs/getting_started/index.rst @@ -24,12 +24,15 @@ The recommended way to install and run Cosmos depends on how you run Airflow. Fo - `Getting Started on Astro `__ - `Getting Started on MWAA `__ - `Getting Started on GCC `__ -- `Getting Started on Open-Source `__ +- `Getting Started on Open-Source `__ -Other Methods -------------- +While the above methods are recommended, you may require a different setup. Check out the sections below for more information. -While the above methods are recommended, you may require a different setup. For more customization, check out the different execution modes that Cosmos supports on the `Execution Modes `__ page. + +Execution Methods +----------------- + +For more customization, check out the different execution modes that Cosmos supports on the `Execution Modes `__ page. 
For specific guides, see the following: From b2a6da905e5e0144f4f320cfe00f23e6b0635e17 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Sat, 22 Jul 2023 16:10:03 -0400 Subject: [PATCH 09/24] address some PR feedback --- docs/configuration/parsing-methods.rst | 4 ++++ docs/getting_started/gcc.rst | 6 +++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/docs/configuration/parsing-methods.rst b/docs/configuration/parsing-methods.rst index 696219583..b5d6113af 100644 --- a/docs/configuration/parsing-methods.rst +++ b/docs/configuration/parsing-methods.rst @@ -10,6 +10,10 @@ Cosmos offers several options to parse your dbt project: - ``dbt_ls``. Parses a dbt project directory using the ``dbt ls`` command. - ``custom``. Uses Cosmos' custom dbt parser, which extracts dependencies from your dbt's model code. +The ``dbt_manifest`` and ``dbt_ls`` methods use dbt directly to parse your project. This means that they will respect your ``dbt_project.yml`` file and any other dbt configuration you have set up. + +On the other hand, the ``custom`` method uses Cosmos' own dbt parser. This parser is not as robust as dbt's, so it's recommended that you use one of the other methods if possible. + ``automatic`` ------------- diff --git a/docs/getting_started/gcc.rst b/docs/getting_started/gcc.rst index b9820e48b..3242885f8 100644 --- a/docs/getting_started/gcc.rst +++ b/docs/getting_started/gcc.rst @@ -1,9 +1,9 @@ .. _gcc: -Getting Started on GCC -======================= +Getting Started on Google Cloud Composer (GCC) +================================================ -Because there's no straightforward way of creating a Python virtual environment in GCC, we recommend using Cosmos' built-in virtual environment functionality to run dbt. +Because there's no straightforward way of creating a Python virtual environment in Google Cloud Composer (GCC) , we recommend using Cosmos' built-in virtual environment functionality to run dbt. 
Install Cosmos -------------- From 7b6e05ac8313022763cadf7b50ebc7039b9cb7fa Mon Sep 17 00:00:00 2001 From: Harel Shein Date: Wed, 26 Jul 2023 10:22:09 -0400 Subject: [PATCH 10/24] update astro docs --- docs/getting_started/astro.rst | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/docs/getting_started/astro.rst b/docs/getting_started/astro.rst index f3da7df80..10b380cd9 100644 --- a/docs/getting_started/astro.rst +++ b/docs/getting_started/astro.rst @@ -20,7 +20,7 @@ Create a virtual environment in your ``Dockerfile`` using the sample below. Be s .. code-block:: docker - FROM quay.io/astronomer/astro-runtime:8.0.0 + FROM quay.io/astronomer/astro-runtime:8.8.0 # install dbt into a virtual environment RUN python -m venv dbt_venv && source dbt_venv/bin/activate && \ @@ -68,7 +68,9 @@ For example, if you wanted to put your dbt project in the ``/usr/local/airflow/d from cosmos import DbtDag my_cosmos_dag = DbtDag( - dbt_project_dir="/usr/local/airflow/dags/my_dbt_project", + project_config=ProjectConfig( + dbt_project_path="/usr/local/airflow/dags/my_dbt_project", + ), ..., ) @@ -81,13 +83,20 @@ In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and c from cosmos import DbtDag + profile_config = ProfileConfig( + profile_name="default", + target_name="dev", + profile_mapping=PostgresUserPasswordProfileMapping( + conn_id="airflow_db", + profile_args={"schema": "public"}, + ), + ) + my_cosmos_dag = DbtDag( - # dbt/cosmos-specific parameters - dbt_project_name="", - conn_id="airflow_db", - profile_args={ - "schema": "public", - }, + project_config=ProjectConfig( + "/usr/local/airflow/dags/my_dbt_project", + ), + profile_config=profile_config, # normal dag parameters schedule_interval="@daily", start_date=datetime(2023, 1, 1), From ba28a83547dcea5e47e1cc12de3ece91f998bb40 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 15:07:24 -0400 Subject: [PATCH 11/24] add dynamically generated profile 
pages --- docs/conf.py | 4 + docs/profiles/DatabricksToken.rst | 82 ++++++ docs/profiles/ExasolUserPassword.rst | 120 ++++++++ .../GoogleCloudServiceAccountDict.rst | 76 +++++ .../GoogleCloudServiceAccountFile.rst | 76 +++++ docs/profiles/PostgresUserPassword.rst | 104 +++++++ docs/profiles/RedshiftUserPassword.rst | 110 ++++++++ docs/profiles/SnowflakePrivateKeyPem.rst | 98 +++++++ docs/profiles/SnowflakeUserPassword.rst | 98 +++++++ docs/profiles/SparkThrift.rst | 68 +++++ docs/profiles/TrinoCertificate.rst | 98 +++++++ docs/profiles/TrinoJWT.rst | 94 +++++++ docs/profiles/TrinoLDAP.rst | 100 +++++++ docs/profiles/index.rst | 262 ++++++------------ docs/scripts/generate_mappings.py | 80 ++++++ docs/scripts/templates/index.rst.jinja2 | 108 ++++++++ .../templates/profile_mapping.rst.jinja2 | 56 ++++ 17 files changed, 1458 insertions(+), 176 deletions(-) create mode 100644 docs/profiles/DatabricksToken.rst create mode 100644 docs/profiles/ExasolUserPassword.rst create mode 100644 docs/profiles/GoogleCloudServiceAccountDict.rst create mode 100644 docs/profiles/GoogleCloudServiceAccountFile.rst create mode 100644 docs/profiles/PostgresUserPassword.rst create mode 100644 docs/profiles/RedshiftUserPassword.rst create mode 100644 docs/profiles/SnowflakePrivateKeyPem.rst create mode 100644 docs/profiles/SnowflakeUserPassword.rst create mode 100644 docs/profiles/SparkThrift.rst create mode 100644 docs/profiles/TrinoCertificate.rst create mode 100644 docs/profiles/TrinoJWT.rst create mode 100644 docs/profiles/TrinoLDAP.rst create mode 100644 docs/scripts/generate_mappings.py create mode 100644 docs/scripts/templates/index.rst.jinja2 create mode 100644 docs/scripts/templates/profile_mapping.rst.jinja2 diff --git a/docs/conf.py b/docs/conf.py index 69110e5d0..e916de843 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,6 +1,8 @@ import os import sys +from docs.scripts.generate_mappings import generate_mapping_docs + # Add the project root to the path so we can import the 
package sys.path.insert(0, os.path.abspath("../")) @@ -48,3 +50,5 @@ }, "footer_start": ["copyright"], } + +generate_mapping_docs() diff --git a/docs/profiles/DatabricksToken.rst b/docs/profiles/DatabricksToken.rst new file mode 100644 index 000000000..bee5f16de --- /dev/null +++ b/docs/profiles/DatabricksToken.rst @@ -0,0 +1,82 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +DatabricksToken +=============== + + + + Maps Airflow Databricks connections with a token to dbt profiles. + + + + https://docs.getdbt.com/reference/warehouse-setups/databricks-setup + + https://airflow.apache.org/docs/apache-airflow-providers-databricks/stable/connections/databricks.html + + + +This profile mapping translates Airflow connections with the type ``databricks`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import DatabricksTokenProfileMapping + + profile = DatabricksTokenProfileMapping( + conn_id = 'my_databricks_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``host`` + - True + + - ``host`` + + + * - ``schema`` + - True + + - ``schema`` + + + * - ``token`` + - True + + - ``['password', 'extra.token']`` + + + * - ``http_path`` + - True + + - ``extra.http_path`` + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. 
+- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. \ No newline at end of file diff --git a/docs/profiles/ExasolUserPassword.rst b/docs/profiles/ExasolUserPassword.rst new file mode 100644 index 000000000..951271a26 --- /dev/null +++ b/docs/profiles/ExasolUserPassword.rst @@ -0,0 +1,120 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +ExasolUserPassword +================== + + + + Maps Airflow Exasol connections with a username and password to dbt profiles. + + https://docs.getdbt.com/reference/warehouse-setups/exasol-setup + + + +This profile mapping translates Airflow connections with the type ``exasol`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import ExasolUserPasswordProfileMapping + + profile = ExasolUserPasswordProfileMapping( + conn_id = 'my_exasol_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. 
list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``dsn`` + - True + + - ``host`` + + + * - ``user`` + - True + + - ``login`` + + + * - ``password`` + - True + + - ``password`` + + + * - ``dbname`` + - True + + - ``schema`` + + + * - ``encryption`` + - False + + - ``extra.encryption`` + + + * - ``compression`` + - False + + - ``extra.compression`` + + + * - ``connection_timeout`` + - False + + - ``extra.connection_timeout`` + + + * - ``socket_timeout`` + - False + + - ``extra.socket_timeout`` + + + * - ``protocol_version`` + - False + + - ``extra.protocol_version`` + + + * - ``threads`` + - True + + - + + + * - ``schema`` + - True + + - + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. \ No newline at end of file diff --git a/docs/profiles/GoogleCloudServiceAccountDict.rst b/docs/profiles/GoogleCloudServiceAccountDict.rst new file mode 100644 index 000000000..0fae28d7d --- /dev/null +++ b/docs/profiles/GoogleCloudServiceAccountDict.rst @@ -0,0 +1,76 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. 
+ + +GoogleCloudServiceAccountDict +============================= + + + + Maps Airflow GCP connections to dbt BigQuery profiles if they use a service account keyfile dict/json. + + + + https://docs.getdbt.com/reference/warehouse-setups/bigquery-setup#service-account-file + + https://airflow.apache.org/docs/apache-airflow-providers-google/stable/connections/gcp.html + + + +This profile mapping translates Airflow connections with the type ``google_cloud_platform`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import GoogleCloudServiceAccountDictProfileMapping + + profile = GoogleCloudServiceAccountDictProfileMapping( + conn_id = 'my_google_cloud_platform_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``project`` + - True + + - ``extra.project`` + + + * - ``dataset`` + - True + + - ``['extra.dataset', 'dataset']`` + + + * - ``keyfile_dict`` + - True + + - ``['extra.keyfile_dict', 'keyfile_dict', 'extra__google_cloud_platform__keyfile_dict']`` + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. 
For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. \ No newline at end of file diff --git a/docs/profiles/GoogleCloudServiceAccountFile.rst b/docs/profiles/GoogleCloudServiceAccountFile.rst new file mode 100644 index 000000000..c0d68044e --- /dev/null +++ b/docs/profiles/GoogleCloudServiceAccountFile.rst @@ -0,0 +1,76 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +GoogleCloudServiceAccountFile +============================= + + + + Maps Airflow GCP connections to dbt BigQuery profiles if they use a service account file. + + + + https://docs.getdbt.com/reference/warehouse-setups/bigquery-setup#service-account-file + + https://airflow.apache.org/docs/apache-airflow-providers-google/stable/connections/gcp.html + + + +This profile mapping translates Airflow connections with the type ``google_cloud_platform`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import GoogleCloudServiceAccountFileProfileMapping + + profile = GoogleCloudServiceAccountFileProfileMapping( + conn_id = 'my_google_cloud_platform_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. 
list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``project`` + - True + + - ``extra.project`` + + + * - ``dataset`` + - True + + - ``extra.dataset`` + + + * - ``keyfile`` + - True + + - ``extra.key_path`` + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. \ No newline at end of file diff --git a/docs/profiles/PostgresUserPassword.rst b/docs/profiles/PostgresUserPassword.rst new file mode 100644 index 000000000..44e55edaf --- /dev/null +++ b/docs/profiles/PostgresUserPassword.rst @@ -0,0 +1,104 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +PostgresUserPassword +==================== + + + + Maps Airflow Postgres connections using user + password authentication to dbt profiles. + + https://docs.getdbt.com/reference/warehouse-setups/postgres-setup + + https://airflow.apache.org/docs/apache-airflow-providers-postgres/stable/connections/postgres.html + + + +This profile mapping translates Airflow connections with the type ``postgres`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. 
code-block:: python + + from cosmos.profiles import PostgresUserPasswordProfileMapping + + profile = PostgresUserPasswordProfileMapping( + conn_id = 'my_postgres_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``host`` + - True + + - ``host`` + + + * - ``user`` + - True + + - ``login`` + + + * - ``password`` + - True + + - ``password`` + + + * - ``port`` + - True + + - ``port`` + + + * - ``dbname`` + - True + + - ``schema`` + + + * - ``keepalives_idle`` + - False + + - ``extra.keepalives_idle`` + + + * - ``sslmode`` + - False + + - ``extra.sslmode`` + + + * - ``schema`` + - True + + - + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. 
\ No newline at end of file diff --git a/docs/profiles/RedshiftUserPassword.rst b/docs/profiles/RedshiftUserPassword.rst new file mode 100644 index 000000000..28e8ffe20 --- /dev/null +++ b/docs/profiles/RedshiftUserPassword.rst @@ -0,0 +1,110 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +RedshiftUserPassword +==================== + + + + Maps Airflow Redshift connections to dbt Redshift profiles if they use a username and password. + + https://docs.getdbt.com/reference/warehouse-setups/redshift-setup + + https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/connections/redshift.html + + + +This profile mapping translates Airflow connections with the type ``redshift`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import RedshiftUserPasswordProfileMapping + + profile = RedshiftUserPasswordProfileMapping( + conn_id = 'my_redshift_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. 
list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``host`` + - True + + - ``host`` + + + * - ``user`` + - True + + - ``login`` + + + * - ``password`` + - True + + - ``password`` + + + * - ``port`` + - False + + - ``port`` + + + * - ``dbname`` + - True + + - ``schema`` + + + * - ``timeout`` + - False + + - ``extra.timeout`` + + + * - ``sslmode`` + - False + + - ``extra.sslmode`` + + + * - ``region`` + - False + + - ``extra.region`` + + + * - ``schema`` + - True + + - + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. \ No newline at end of file diff --git a/docs/profiles/SnowflakePrivateKeyPem.rst b/docs/profiles/SnowflakePrivateKeyPem.rst new file mode 100644 index 000000000..020220c07 --- /dev/null +++ b/docs/profiles/SnowflakePrivateKeyPem.rst @@ -0,0 +1,98 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +SnowflakePrivateKeyPem +====================== + + + + Maps Airflow Snowflake connections to dbt profiles if they use a user/private key. 
+ + https://docs.getdbt.com/docs/core/connect-data-platform/snowflake-setup#key-pair-authentication + + https://airflow.apache.org/docs/apache-airflow-providers-snowflake/stable/connections/snowflake.html + + + +This profile mapping translates Airflow connections with the type ``snowflake`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import SnowflakePrivateKeyPemProfileMapping + + profile = SnowflakePrivateKeyPemProfileMapping( + conn_id = 'my_snowflake_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``account`` + - True + + - ``extra.account`` + + + * - ``user`` + - True + + - ``login`` + + + * - ``database`` + - True + + - ``extra.database`` + + + * - ``warehouse`` + - True + + - ``extra.warehouse`` + + + * - ``schema`` + - True + + - ``schema`` + + + * - ``role`` + - False + + - ``extra.role`` + + + * - ``private_key_content`` + - True + + - ``extra.private_key_content`` + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. 
+- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. \ No newline at end of file diff --git a/docs/profiles/SnowflakeUserPassword.rst b/docs/profiles/SnowflakeUserPassword.rst new file mode 100644 index 000000000..7624f29af --- /dev/null +++ b/docs/profiles/SnowflakeUserPassword.rst @@ -0,0 +1,98 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +SnowflakeUserPassword +===================== + + + + Maps Airflow Snowflake connections to dbt profiles if they use a user/password. + + https://docs.getdbt.com/reference/warehouse-setups/snowflake-setup + + https://airflow.apache.org/docs/apache-airflow-providers-snowflake/stable/connections/snowflake.html + + + +This profile mapping translates Airflow connections with the type ``snowflake`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import SnowflakeUserPasswordProfileMapping + + profile = SnowflakeUserPasswordProfileMapping( + conn_id = 'my_snowflake_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. 
list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``account`` + - True + + - ``extra.account`` + + + * - ``user`` + - True + + - ``login`` + + + * - ``password`` + - True + + - ``password`` + + + * - ``database`` + - True + + - ``extra.database`` + + + * - ``warehouse`` + - True + + - ``extra.warehouse`` + + + * - ``schema`` + - True + + - ``schema`` + + + * - ``role`` + - False + + - ``extra.role`` + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. \ No newline at end of file diff --git a/docs/profiles/SparkThrift.rst b/docs/profiles/SparkThrift.rst new file mode 100644 index 000000000..9aad1276a --- /dev/null +++ b/docs/profiles/SparkThrift.rst @@ -0,0 +1,68 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +SparkThrift +=========== + + + + Maps Airflow Spark connections to dbt profiles if they use a thrift connection. 
+ + https://docs.getdbt.com/reference/warehouse-setups/spark-setup#thrift + + https://airflow.apache.org/docs/apache-airflow-providers-apache-spark/stable/connections/spark.html + + + +This profile mapping translates Airflow connections with the type ``spark`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import SparkThriftProfileMapping + + profile = SparkThriftProfileMapping( + conn_id = 'my_spark_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``host`` + - True + + - ``host`` + + + * - ``schema`` + - True + + - + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. 
\ No newline at end of file diff --git a/docs/profiles/TrinoCertificate.rst b/docs/profiles/TrinoCertificate.rst new file mode 100644 index 000000000..96d94ff84 --- /dev/null +++ b/docs/profiles/TrinoCertificate.rst @@ -0,0 +1,98 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +TrinoCertificate +================ + + + + Maps Airflow Trino connections to Certificate Trino dbt profiles. + + https://docs.getdbt.com/reference/warehouse-setups/trino-setup#certificate + + https://airflow.apache.org/docs/apache-airflow-providers-trino/stable/connections.html + + + +This profile mapping translates Airflow connections with the type ``trino`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import TrinoCertificateProfileMapping + + profile = TrinoCertificateProfileMapping( + conn_id = 'my_trino_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``client_certificate`` + - True + + - ``extra.certs__client_cert_path`` + + + * - ``client_private_key`` + - True + + - ``extra.certs__client_key_path`` + + + * - ``host`` + - True + + - ``host`` + + + * - ``port`` + - True + + - ``port`` + + + * - ``session_properties`` + - False + + - ``extra.session_properties`` + + + * - ``database`` + - True + + - + + + * - ``schema`` + - True + + - + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. 
To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. \ No newline at end of file diff --git a/docs/profiles/TrinoJWT.rst b/docs/profiles/TrinoJWT.rst new file mode 100644 index 000000000..e1c81298c --- /dev/null +++ b/docs/profiles/TrinoJWT.rst @@ -0,0 +1,94 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +TrinoJWT +======== + + + + Maps Airflow Trino connections to JWT Trino dbt profiles. + + + + https://docs.getdbt.com/reference/warehouse-setups/trino-setup#jwt + + https://airflow.apache.org/docs/apache-airflow-providers-trino/stable/connections.html + + + +This profile mapping translates Airflow connections with the type ``trino`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import TrinoJWTProfileMapping + + profile = TrinoJWTProfileMapping( + conn_id = 'my_trino_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. 
list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``jwt_token`` + - True + + - ``extra.jwt__token`` + + + * - ``host`` + - True + + - ``host`` + + + * - ``port`` + - True + + - ``port`` + + + * - ``session_properties`` + - False + + - ``extra.session_properties`` + + + * - ``database`` + - True + + - + + + * - ``schema`` + - True + + - + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. \ No newline at end of file diff --git a/docs/profiles/TrinoLDAP.rst b/docs/profiles/TrinoLDAP.rst new file mode 100644 index 000000000..e57dda14d --- /dev/null +++ b/docs/profiles/TrinoLDAP.rst @@ -0,0 +1,100 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +TrinoLDAP +========= + + + + Maps Airflow Trino connections to LDAP Trino dbt profiles. + + + + https://docs.getdbt.com/reference/warehouse-setups/trino-setup#ldap + + https://airflow.apache.org/docs/apache-airflow-providers-trino/stable/connections.html + + + +This profile mapping translates Airflow connections with the type ``trino`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. 
code-block:: python + + from cosmos.profiles import TrinoLDAPProfileMapping + + profile = TrinoLDAPProfileMapping( + conn_id = 'my_trino_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + + * - ``user`` + - True + + - ``login`` + + + * - ``password`` + - True + + - ``password`` + + + * - ``host`` + - True + + - ``host`` + + + * - ``port`` + - True + + - ``port`` + + + * - ``session_properties`` + - False + + - ``extra.session_properties`` + + + * - ``database`` + - True + + - + + + * - ``schema`` + - True + + - + + + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. +- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. 
\ No newline at end of file diff --git a/docs/profiles/index.rst b/docs/profiles/index.rst index 54e2cbca9..bc7766ee2 100644 --- a/docs/profiles/index.rst +++ b/docs/profiles/index.rst @@ -1,208 +1,118 @@ -Profiles +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + +.. toctree:: + :caption: Profiles + + self + GoogleCloudServiceAccountFile + GoogleCloudServiceAccountDict + DatabricksToken + PostgresUserPassword + RedshiftUserPassword + SnowflakeUserPassword + SnowflakePrivateKeyPem + SparkThrift + ExasolUserPassword + TrinoLDAP + TrinoCertificate + TrinoJWT + + +Profiles Overview ========================== -Cosmos automatically translates Airflow connections to dbt profiles. This means that you can use the same connection -objects you use in Airflow to authenticate with your database in dbt. To do so, there's a class in Cosmos for each -Airflow connection to dbt profile mapping. +Cosmos supports two methods of authenticating with your database: -Each profile mapping class typically gets defined with the following attributes: +- using your own dbt profiles.yml file +- using Airflow connections via Cosmos' profile mappings -* ``airflow_connection_type``: the Airflow connection type that this profile mapping is for. -* ``required_fields``: a list of required fields for the profile. This refers to the field name as it is in the dbt - profile. -* ``secret_fields``: a list of fields that are secret. These fields will be passed to dbt as environment variables. -* ``airflow_param_mapping``: a dictionary that maps the Airflow connection fields to the dbt profile fields. The keys - are the Airflow connection fields and the values are the dbt profile fields. -* Optionally, a profile mapping can specify a ``transform_{dbt_field_name}`` function for each dbt profile field. This - function will be called on the value of the Airflow connection field before it is passed to dbt. 
This is useful for - transforming the value of a field before it is passed to dbt. For example, sometimes ``host`` fields need to be passed - to dbt without the ``http://`` prefix. +If you're already interacting with your database from Airflow and have a connection set up, it's recommended +to use a profile mapping to translate that Airflow connection to a dbt profile. This is because it's easier to +maintain a single connection object in Airflow than it is to maintain a connection object in Airflow and a dbt profile +in your dbt project. -Because of this, the profile mapping classes are self-documenting. You can see the available profile mappings below. +If you don't already have an Airflow connection, or if there's no readily-available profile mapping for your database, +you can use your own dbt profiles.yml file. -Specifying Values ------------------ - -The dbt profile values generally come from one of two places: - -1. The ``profile_args`` parameter that you pass into either ``DbtDag`` or ``DbtTaskGroup``. -2. The Airflow connection values. - -Any value can be overridden by the ``profile_args`` parameter, because that value always takes precedence over the -Airflow connection value. For example, if you pass in a ``user`` value in ``profile_args``, that value will be used -instead of the Airflow connection value, even if you have a value for ``user`` in the Airflow connection. - -You can also specify values in the ``profile_args`` to be put in the dbt profile. This is useful for specifying values -that are not in the Airflow connection. - -Secret Fields -------------- - -Secret fields are passed to dbt as environment variables. This is to avoid writing the secret values to disk. 
The -secret values are passed to dbt as environment variables with the following naming convention: - -``COSMOS_CONN_{AIRFLOW_CONNECTION_TYPE}_{FIELD_NAME}`` - -For example, a Snowflake password field would be passed to dbt as an environment variable with the name -``COSMOS_CONN_SNOWFLAKE_PASSWORD``. - -Profile Name ------------- - -By default, Cosmos will use the dbt profile name specified in your project's dbt_project.yml file. However, you can -override this by passing in a ``profile_name_override`` parameter to either ``DbtDag`` or ``DbtTaskGroup``. This is useful -if you have macros or other code that depends on the profile name. For example, to ensure we always use the profile name -``my_profile_name`` in the following example, we can pass in a ``profile_name_override`` parameter to ``DbtDag``: - -.. code-block:: python - - dag = DbtDag(profile_name_override="my_profile_name", ...) - -If no profile name is specified, and there's no profile target in the dbt_project.yml file, Cosmos will use the -default profile name ``cosmos_profile``. - -Target Name ------------ - -By default, Cosmos will use the target name ``cosmos_target``. However, you can override this by passing in a -``target_name_override`` parameter to either ``DbtDag`` or ``DbtTaskGroup``. This is useful if you have macros or other -code that depends on the target name. For example, to ensure we always use the target name ``my_target_name`` in the -following example, we can pass in a ``target_name_override`` parameter to ``DbtDag``: +Regardless of which method you use, you'll need to tell Cosmos which profile and target name it should use. Profile config +is set in the ``cosmos.config.ProfileConfig`` object, like so: .. code-block:: python - dag = DbtDag(target_name_override="my_target_name", ...) - - -Available Profile Mappings -========================== - - -Google Cloud Platform ---------------------- - -Service Account File -~~~~~~~~~~~~~~~~~~~~ + from cosmos.config import ProfileConfig -.. 
autoclass:: cosmos.profiles.bigquery.GoogleCloudServiceAccountFileProfileMapping - :undoc-members: - :members: + profile_config = ProfileConfig( + profile_name="my_profile_name", + target_name="my_target_name", + # choose one of the following + profile_mapping=..., + profiles_yml_path=..., + ) -Service Account Dict -~~~~~~~~~~~~~~~~~~~~ + dag = DbtDag(profile_config=profile_config, ...) -.. autoclass:: cosmos.profiles.bigquery.GoogleCloudServiceAccountDictProfileMapping - :undoc-members: - :members: -Databricks ----------- +Using a profile mapping +++++++++++++++++++++++++++++++++++++ -Token -~~~~~~ +Profile mappings are utilities provided by Cosmos that translate Airflow connections to dbt profiles. This means that +you can use the same connection objects you use in Airflow to authenticate with your database in dbt. To do so, there's +a class in Cosmos for each Airflow connection to dbt profile mapping. -.. autoclass:: cosmos.profiles.databricks.DatabricksTokenProfileMapping - :undoc-members: - :members: +You can find the available profile mappings on the left-hand side of this page. Each profile mapping is imported from +``cosmos.profiles`` and takes two arguments: +* ``conn_id``: the Airflow connection ID to use. +* ``profile_args``: a dictionary of additional arguments to pass to the dbt profile. This is useful for specifying + values that are not in the Airflow connection. This also acts as an override for any values that are in the Airflow + connection but should be overridden. -Exasol ------- +Below is an example of using the Snowflake profile mapping, where we take most arguments from the Airflow connection +but override the ``database`` and ``schema`` values: -Username and Password -~~~~~~~~~~~~~~~~~~~~~~ - -.. autoclass:: cosmos.profiles.exasol.ExasolUserPasswordProfileMapping - :undoc-members: - :members: - - -Postgres --------- - -Username and Password -~~~~~~~~~~~~~~~~~~~~~~ - -.. 
autoclass:: cosmos.profiles.postgres.PostgresUserPasswordProfileMapping - :undoc-members: - :members: - - -Redshift --------- - -Username and Password -~~~~~~~~~~~~~~~~~~~~~~ - -.. autoclass:: cosmos.profiles.redshift.RedshiftUserPasswordProfileMapping - :undoc-members: - :members: - - -Snowflake ---------- - -Username and Password -~~~~~~~~~~~~~~~~~~~~~~ - -.. autoclass:: cosmos.profiles.snowflake.SnowflakeUserPasswordProfileMapping - :undoc-members: - :members: - - -Username and Private Key -~~~~~~~~~~~~~~~~~~~~~~ - -.. autoclass:: cosmos.profiles.snowflake.SnowflakePrivateKeyPemProfileMapping - :undoc-members: - :members: - - -Spark ------ - -Thrift -~~~~~~ - -.. autoclass:: cosmos.profiles.spark.SparkThriftProfileMapping - :undoc-members: - :members: +.. code-block:: python + from cosmos.profiles import SnowflakeUserPasswordProfileMapping -Trino ------ + profile_config = ProfileConfig( + profile_name="my_profile_name", + target_name="my_target_name", + profile_mapping=SnowflakeUserPasswordProfileMapping( + conn_id="my_snowflake_conn_id", + profile_args={ + "database": "my_snowflake_database", + "schema": "my_snowflake_schema", + }, + ), + ) -Base -~~~~ + dag = DbtDag(profile_config=profile_config, ...) -.. autoclass:: cosmos.profiles.trino.TrinoBaseProfileMapping - :undoc-members: - :members: +Note that when using a profile mapping, the profiles.yml file gets generated with the profile name and target name +you specify in ``ProfileConfig``. -LDAP -~~~~ +Using your own profiles.yml file +++++++++++++++++++++++++++++++++++++ -.. autoclass:: cosmos.profiles.trino.TrinoLDAPProfileMapping - :undoc-members: - :members: - :show-inheritance: +If you don't want to use Airflow connections, or if there's no readily-available profile mapping for your database, +you can use your own dbt profiles.yml file. To do so, you'll need to pass the path to your profiles.yml file to the +``profiles_yml_path`` argument in ``ProfileConfig``. 
+For example, the code snippet below points Cosmos at a ``profiles.yml`` file and instructs Cosmos to use the +``my_snowflake_profile`` profile and ``dev`` target: -JWT -~~~~ +.. code-block:: python -.. autoclass:: cosmos.profiles.trino.TrinoJWTProfileMapping - :undoc-members: - :members: - :show-inheritance: + from cosmos.config import ProfileConfig -Certificate -~~~~~~~~~~~ + profile_config = ProfileConfig( + profile_name="my_snowflake_profile", + target_name="dev", + profiles_yml_path="/path/to/profiles.yml", + ) -.. autoclass:: cosmos.profiles.trino.TrinoCertificateProfileMapping - :undoc-members: - :members: - :show-inheritance: + dag = DbtDag(profile_config=profile_config, ...) diff --git a/docs/scripts/generate_mappings.py b/docs/scripts/generate_mappings.py new file mode 100644 index 000000000..b11eebdb5 --- /dev/null +++ b/docs/scripts/generate_mappings.py @@ -0,0 +1,80 @@ +""" +Script to generate a dedicated docs page per profile mapping. +""" +from __future__ import annotations + +import os +from dataclasses import dataclass +from typing import Type + +from jinja2 import Environment, FileSystemLoader +from cosmos.profiles import profile_mappings, BaseProfileMapping + + +@dataclass +class Field: + "Represents a field in a profile mapping." + dbt_name: str + required: bool = False + airflow_name: str | list[str] | None = None + + +def get_fields_from_mapping(mapping: Type[BaseProfileMapping]) -> list[Field]: + """ + Generates Field objects from a profile mapping. 
+ """ + fields = [] + required_fields = mapping.required_fields + + # get the fields from the airflow param mapping + for key, val in mapping.airflow_param_mapping.items(): + is_required = key in required_fields + fields.append(Field(dbt_name=key, required=is_required, airflow_name=val)) + + # add the required fields that are not in the airflow param mapping + for field in required_fields: + if field not in mapping.airflow_param_mapping: + fields.append(Field(dbt_name=field, required=True)) + + return fields + + +def generate_mapping_docs( + templates_dir: str = "scripts/templates", + output_dir: str = "profiles", +) -> None: + """ + Generate a dedicated docs page per profile mapping. + """ + # first, remove the existing docs + if os.path.exists(output_dir): + for file in os.listdir(output_dir): + os.remove(f"{output_dir}/{file}") + + # get the index template + env = Environment(loader=FileSystemLoader(templates_dir)) + index_template = env.get_template("index.rst.jinja2") + + mapping_template = env.get_template("profile_mapping.rst.jinja2") + # generate the profile mapping pages + for mapping in profile_mappings: + with open(f"{output_dir}/{mapping.__name__.replace('ProfileMapping', '')}.rst", "w", encoding="utf-8") as f: + docstring = mapping.__doc__ or "" + f.write( + mapping_template.render( + { + "mapping_name": mapping.__name__.replace("ProfileMapping", ""), + "mapping_description": "\n\n".join(docstring.split("\n")), + "fields": [field.__dict__ for field in get_fields_from_mapping(mapping=mapping)], + "airflow_conn_type": mapping.airflow_connection_type, + } + ) + ) + + # generate the index page + with open(f"{output_dir}/index.rst", "w", encoding="utf-8") as f: + f.write( + index_template.render( + profile_mapping_names=[mapping.__name__.replace("ProfileMapping", "") for mapping in profile_mappings] + ) + ) diff --git a/docs/scripts/templates/index.rst.jinja2 b/docs/scripts/templates/index.rst.jinja2 new file mode 100644 index 000000000..69407b08d --- 
/dev/null +++ b/docs/scripts/templates/index.rst.jinja2 @@ -0,0 +1,108 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + +.. toctree:: + :caption: Profiles + + self + {% for profile in profile_mapping_names %}{{ profile }} + {% endfor %} + +Profiles Overview +========================== + +Cosmos supports two methods of authenticating with your database: + +- using your own dbt profiles.yml file +- using Airflow connections via Cosmos' profile mappings + +If you're already interacting with your database from Airflow and have a connection set up, it's recommended +to use a profile mapping to translate that Airflow connection to a dbt profile. This is because it's easier to +maintain a single connection object in Airflow than it is to maintain a connection object in Airflow and a dbt profile +in your dbt project. + +If you don't already have an Airflow connection, or if there's no readily-available profile mapping for your database, +you can use your own dbt profiles.yml file. + +Regardless of which method you use, you'll need to tell Cosmos which profile and target name it should use. Profile config +is set in the ``cosmos.config.ProfileConfig`` object, like so: + +.. code-block:: python + + from cosmos.config import ProfileConfig + + profile_config = ProfileConfig( + profile_name="my_profile_name", + target_name="my_target_name", + + # choose one of the following + profile_mapping=..., + profiles_yml_path=..., + ) + + dag = DbtDag(profile_config=profile_config, ...) + + + +Using a profile mapping +++++++++++++++++++++++++++++++++++++ + +Profile mappings are utilities provided by Cosmos that translate Airflow connections to dbt profiles. This means that +you can use the same connection objects you use in Airflow to authenticate with your database in dbt. To do so, there's +a class in Cosmos for each Airflow connection to dbt profile mapping. 
+ +You can find the available profile mappings on the left-hand side of this page. Each profile mapping is imported from +``cosmos.profiles`` and takes two arguments: + +* ``conn_id``: the Airflow connection ID to use. +* ``profile_args``: a dictionary of additional arguments to pass to the dbt profile. This is useful for specifying + values that are not in the Airflow connection. This also acts as an override for any values that are in the Airflow + connection but should be overridden. + +Below is an example of using the Snowflake profile mapping, where we take most arguments from the Airflow connection +but override the ``database`` and ``schema`` values: + +.. code-block:: python + + from cosmos.profiles import SnowflakeUserPasswordProfileMapping + + profile_config = ProfileConfig( + profile_name="my_profile_name", + target_name="my_target_name", + profile_mapping=SnowflakeUserPasswordProfileMapping( + conn_id="my_snowflake_conn_id", + profile_args={ + "database": "my_snowflake_database", + "schema": "my_snowflake_schema", + }, + ), + ) + + dag = DbtDag(profile_config=profile_config, ...) + +Note that when using a profile mapping, the profiles.yml file gets generated with the profile name and target name +you specify in ``ProfileConfig``. + + +Using your own profiles.yml file +++++++++++++++++++++++++++++++++++++ + +If you don't want to use Airflow connections, or if there's no readily-available profile mapping for your database, +you can use your own dbt profiles.yml file. To do so, you'll need to pass the path to your profiles.yml file to the +``profiles_yml_path`` argument in ``ProfileConfig``. + +For example, the code snippet below points Cosmos at a ``profiles.yml`` file and instructs Cosmos to use the +``my_snowflake_profile`` profile and ``dev`` target: + +.. 
code-block:: python + + from cosmos.config import ProfileConfig + + profile_config = ProfileConfig( + profile_name="my_snowflake_profile", + target_name="dev", + profiles_yml_path="/path/to/profiles.yml", + ) + + dag = DbtDag(profile_config=profile_config, ...) + diff --git a/docs/scripts/templates/profile_mapping.rst.jinja2 b/docs/scripts/templates/profile_mapping.rst.jinja2 new file mode 100644 index 000000000..f27404bbb --- /dev/null +++ b/docs/scripts/templates/profile_mapping.rst.jinja2 @@ -0,0 +1,56 @@ +.. + This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. + + +{{ mapping_name }} +{{ "=" * mapping_name|length }} + +{{ mapping_description }} + +This profile mapping translates Airflow connections with the type ``{{ airflow_conn_type }}`` +into dbt profiles. To use this profile, import it from ``cosmos.profiles``: + +.. code-block:: python + + from cosmos.profiles import {{ mapping_name }}ProfileMapping + + profile = {{ mapping_name }}ProfileMapping( + conn_id = 'my_{{ airflow_conn_type }}_connection', + profile_args = { ... }, + ) + +While the profile mapping pulls fields from Airflow connections, you may need to supplement it +with additional ``profile_args``. The below table shows which fields are required, along with those +not required but pulled from the Airflow connection if present. You can also add additional fields +to the ``profile_args`` dict. + +.. list-table:: + :header-rows: 1 + + * - dbt Field Name + - Required + - Airflow Field Name + + {% for field in fields %} + * - ``{{ field.dbt_name }}`` + - {{ field.required }} + {% if field.airflow_name %} + - ``{{ field.airflow_name }}`` + {% else %} + - + {% endif %} + {% endfor %} + + +Some notes about the table above: + +- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To + see the full list of fields, see the link to the dbt docs at the top of this page. 
+- If the Airflow field name starts with an ``extra.``, this means that the field is nested under + the ``extra`` field in the Airflow connection. For example, if the Airflow field name is + ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, + and the field name is ``token``. +- If there are multiple Airflow field names, the profile mapping looks at those fields in order. + For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping + will first look for a field named ``password``. If that field is not present, it will look for + ``extra.token``. \ No newline at end of file From 30a2d4f3416d97fe02f9398f549b5f454b183874 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 26 Jul 2023 19:07:40 +0000 Subject: [PATCH 12/24] =?UTF-8?q?=F0=9F=8E=A8=20[pre-commit.ci]=20Auto=20f?= =?UTF-8?q?ormat=20from=20pre-commit.com=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/profiles/DatabricksToken.rst | 34 ++++----- docs/profiles/ExasolUserPassword.rst | 76 +++++++++---------- .../GoogleCloudServiceAccountDict.rst | 28 +++---- .../GoogleCloudServiceAccountFile.rst | 28 +++---- docs/profiles/PostgresUserPassword.rst | 58 +++++++------- docs/profiles/RedshiftUserPassword.rst | 64 ++++++++-------- docs/profiles/SnowflakePrivateKeyPem.rst | 52 ++++++------- docs/profiles/SnowflakeUserPassword.rst | 52 ++++++------- docs/profiles/SparkThrift.rst | 22 +++--- docs/profiles/TrinoCertificate.rst | 52 ++++++------- docs/profiles/TrinoJWT.rst | 46 +++++------ docs/profiles/TrinoLDAP.rst | 52 ++++++------- docs/profiles/index.rst | 5 +- docs/scripts/templates/index.rst.jinja2 | 3 +- .../templates/profile_mapping.rst.jinja2 | 2 +- 15 files changed, 286 insertions(+), 288 deletions(-) diff --git a/docs/profiles/DatabricksToken.rst b/docs/profiles/DatabricksToken.rst index 
bee5f16de..e50acd85e 100644 --- a/docs/profiles/DatabricksToken.rst +++ b/docs/profiles/DatabricksToken.rst @@ -15,7 +15,7 @@ DatabricksToken https://airflow.apache.org/docs/apache-airflow-providers-databricks/stable/connections/databricks.html - + This profile mapping translates Airflow connections with the type ``databricks`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -25,8 +25,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import DatabricksTokenProfileMapping profile = DatabricksTokenProfileMapping( - conn_id = 'my_databricks_connection', - profile_args = { ... }, + conn_id="my_databricks_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -41,31 +41,31 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``host`` - True - + - ``host`` - - + + * - ``schema`` - True - + - ``schema`` - - + + * - ``token`` - True - + - ``['password', 'extra.token']`` - - + + * - ``http_path`` - True - + - ``extra.http_path`` - - + + Some notes about the table above: @@ -79,4 +79,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. diff --git a/docs/profiles/ExasolUserPassword.rst b/docs/profiles/ExasolUserPassword.rst index 951271a26..52e3b1059 100644 --- a/docs/profiles/ExasolUserPassword.rst +++ b/docs/profiles/ExasolUserPassword.rst @@ -11,7 +11,7 @@ ExasolUserPassword https://docs.getdbt.com/reference/warehouse-setups/exasol-setup - + This profile mapping translates Airflow connections with the type ``exasol`` into dbt profiles. 
To use this profile, import it from ``cosmos.profiles``: @@ -21,8 +21,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import ExasolUserPasswordProfileMapping profile = ExasolUserPasswordProfileMapping( - conn_id = 'my_exasol_connection', - profile_args = { ... }, + conn_id="my_exasol_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -37,73 +37,73 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``dsn`` - True - + - ``host`` - - + + * - ``user`` - True - + - ``login`` - - + + * - ``password`` - True - + - ``password`` - - + + * - ``dbname`` - True - + - ``schema`` - - + + * - ``encryption`` - False - + - ``extra.encryption`` - - + + * - ``compression`` - False - + - ``extra.compression`` - - + + * - ``connection_timeout`` - False - + - ``extra.connection_timeout`` - - + + * - ``socket_timeout`` - False - + - ``extra.socket_timeout`` - - + + * - ``protocol_version`` - False - + - ``extra.protocol_version`` - - + + * - ``threads`` - True - + - - - + + * - ``schema`` - True - + - - - + + Some notes about the table above: @@ -117,4 +117,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. 
diff --git a/docs/profiles/GoogleCloudServiceAccountDict.rst b/docs/profiles/GoogleCloudServiceAccountDict.rst index 0fae28d7d..b76073f2a 100644 --- a/docs/profiles/GoogleCloudServiceAccountDict.rst +++ b/docs/profiles/GoogleCloudServiceAccountDict.rst @@ -15,7 +15,7 @@ GoogleCloudServiceAccountDict https://airflow.apache.org/docs/apache-airflow-providers-google/stable/connections/gcp.html - + This profile mapping translates Airflow connections with the type ``google_cloud_platform`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -25,8 +25,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import GoogleCloudServiceAccountDictProfileMapping profile = GoogleCloudServiceAccountDictProfileMapping( - conn_id = 'my_google_cloud_platform_connection', - profile_args = { ... }, + conn_id="my_google_cloud_platform_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -41,25 +41,25 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``project`` - True - + - ``extra.project`` - - + + * - ``dataset`` - True - + - ``['extra.dataset', 'dataset']`` - - + + * - ``keyfile_dict`` - True - + - ``['extra.keyfile_dict', 'keyfile_dict', 'extra__google_cloud_platform__keyfile_dict']`` - - + + Some notes about the table above: @@ -73,4 +73,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. 
diff --git a/docs/profiles/GoogleCloudServiceAccountFile.rst b/docs/profiles/GoogleCloudServiceAccountFile.rst index c0d68044e..143ce8595 100644 --- a/docs/profiles/GoogleCloudServiceAccountFile.rst +++ b/docs/profiles/GoogleCloudServiceAccountFile.rst @@ -15,7 +15,7 @@ GoogleCloudServiceAccountFile https://airflow.apache.org/docs/apache-airflow-providers-google/stable/connections/gcp.html - + This profile mapping translates Airflow connections with the type ``google_cloud_platform`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -25,8 +25,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import GoogleCloudServiceAccountFileProfileMapping profile = GoogleCloudServiceAccountFileProfileMapping( - conn_id = 'my_google_cloud_platform_connection', - profile_args = { ... }, + conn_id="my_google_cloud_platform_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -41,25 +41,25 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``project`` - True - + - ``extra.project`` - - + + * - ``dataset`` - True - + - ``extra.dataset`` - - + + * - ``keyfile`` - True - + - ``extra.key_path`` - - + + Some notes about the table above: @@ -73,4 +73,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. 
diff --git a/docs/profiles/PostgresUserPassword.rst b/docs/profiles/PostgresUserPassword.rst index 44e55edaf..06c06f2bf 100644 --- a/docs/profiles/PostgresUserPassword.rst +++ b/docs/profiles/PostgresUserPassword.rst @@ -13,7 +13,7 @@ PostgresUserPassword https://airflow.apache.org/docs/apache-airflow-providers-postgres/stable/connections/postgres.html - + This profile mapping translates Airflow connections with the type ``postgres`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -23,8 +23,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import PostgresUserPasswordProfileMapping profile = PostgresUserPasswordProfileMapping( - conn_id = 'my_postgres_connection', - profile_args = { ... }, + conn_id="my_postgres_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -39,55 +39,55 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``host`` - True - + - ``host`` - - + + * - ``user`` - True - + - ``login`` - - + + * - ``password`` - True - + - ``password`` - - + + * - ``port`` - True - + - ``port`` - - + + * - ``dbname`` - True - + - ``schema`` - - + + * - ``keepalives_idle`` - False - + - ``extra.keepalives_idle`` - - + + * - ``sslmode`` - False - + - ``extra.sslmode`` - - + + * - ``schema`` - True - + - - - + + Some notes about the table above: @@ -101,4 +101,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. 
diff --git a/docs/profiles/RedshiftUserPassword.rst b/docs/profiles/RedshiftUserPassword.rst index 28e8ffe20..7a0f79471 100644 --- a/docs/profiles/RedshiftUserPassword.rst +++ b/docs/profiles/RedshiftUserPassword.rst @@ -13,7 +13,7 @@ RedshiftUserPassword https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/connections/redshift.html - + This profile mapping translates Airflow connections with the type ``redshift`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -23,8 +23,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import RedshiftUserPasswordProfileMapping profile = RedshiftUserPasswordProfileMapping( - conn_id = 'my_redshift_connection', - profile_args = { ... }, + conn_id="my_redshift_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -39,61 +39,61 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``host`` - True - + - ``host`` - - + + * - ``user`` - True - + - ``login`` - - + + * - ``password`` - True - + - ``password`` - - + + * - ``port`` - False - + - ``port`` - - + + * - ``dbname`` - True - + - ``schema`` - - + + * - ``timeout`` - False - + - ``extra.timeout`` - - + + * - ``sslmode`` - False - + - ``extra.sslmode`` - - + + * - ``region`` - False - + - ``extra.region`` - - + + * - ``schema`` - True - + - - - + + Some notes about the table above: @@ -107,4 +107,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. 
diff --git a/docs/profiles/SnowflakePrivateKeyPem.rst b/docs/profiles/SnowflakePrivateKeyPem.rst index 020220c07..d8b698422 100644 --- a/docs/profiles/SnowflakePrivateKeyPem.rst +++ b/docs/profiles/SnowflakePrivateKeyPem.rst @@ -13,7 +13,7 @@ SnowflakePrivateKeyPem https://airflow.apache.org/docs/apache-airflow-providers-snowflake/stable/connections/snowflake.html - + This profile mapping translates Airflow connections with the type ``snowflake`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -23,8 +23,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import SnowflakePrivateKeyPemProfileMapping profile = SnowflakePrivateKeyPemProfileMapping( - conn_id = 'my_snowflake_connection', - profile_args = { ... }, + conn_id="my_snowflake_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -39,49 +39,49 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``account`` - True - + - ``extra.account`` - - + + * - ``user`` - True - + - ``login`` - - + + * - ``database`` - True - + - ``extra.database`` - - + + * - ``warehouse`` - True - + - ``extra.warehouse`` - - + + * - ``schema`` - True - + - ``schema`` - - + + * - ``role`` - False - + - ``extra.role`` - - + + * - ``private_key_content`` - True - + - ``extra.private_key_content`` - - + + Some notes about the table above: @@ -95,4 +95,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. 
diff --git a/docs/profiles/SnowflakeUserPassword.rst b/docs/profiles/SnowflakeUserPassword.rst index 7624f29af..338185cc2 100644 --- a/docs/profiles/SnowflakeUserPassword.rst +++ b/docs/profiles/SnowflakeUserPassword.rst @@ -13,7 +13,7 @@ SnowflakeUserPassword https://airflow.apache.org/docs/apache-airflow-providers-snowflake/stable/connections/snowflake.html - + This profile mapping translates Airflow connections with the type ``snowflake`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -23,8 +23,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import SnowflakeUserPasswordProfileMapping profile = SnowflakeUserPasswordProfileMapping( - conn_id = 'my_snowflake_connection', - profile_args = { ... }, + conn_id="my_snowflake_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -39,49 +39,49 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``account`` - True - + - ``extra.account`` - - + + * - ``user`` - True - + - ``login`` - - + + * - ``password`` - True - + - ``password`` - - + + * - ``database`` - True - + - ``extra.database`` - - + + * - ``warehouse`` - True - + - ``extra.warehouse`` - - + + * - ``schema`` - True - + - ``schema`` - - + + * - ``role`` - False - + - ``extra.role`` - - + + Some notes about the table above: @@ -95,4 +95,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. 
diff --git a/docs/profiles/SparkThrift.rst b/docs/profiles/SparkThrift.rst index 9aad1276a..a8c33b672 100644 --- a/docs/profiles/SparkThrift.rst +++ b/docs/profiles/SparkThrift.rst @@ -13,7 +13,7 @@ SparkThrift https://airflow.apache.org/docs/apache-airflow-providers-apache-spark/stable/connections/spark.html - + This profile mapping translates Airflow connections with the type ``spark`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -23,8 +23,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import SparkThriftProfileMapping profile = SparkThriftProfileMapping( - conn_id = 'my_spark_connection', - profile_args = { ... }, + conn_id="my_spark_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -39,19 +39,19 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``host`` - True - + - ``host`` - - + + * - ``schema`` - True - + - - - + + Some notes about the table above: @@ -65,4 +65,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. diff --git a/docs/profiles/TrinoCertificate.rst b/docs/profiles/TrinoCertificate.rst index 96d94ff84..8b887414f 100644 --- a/docs/profiles/TrinoCertificate.rst +++ b/docs/profiles/TrinoCertificate.rst @@ -13,7 +13,7 @@ TrinoCertificate https://airflow.apache.org/docs/apache-airflow-providers-trino/stable/connections.html - + This profile mapping translates Airflow connections with the type ``trino`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -23,8 +23,8 @@ into dbt profiles. 
To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import TrinoCertificateProfileMapping profile = TrinoCertificateProfileMapping( - conn_id = 'my_trino_connection', - profile_args = { ... }, + conn_id="my_trino_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -39,49 +39,49 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``client_certificate`` - True - + - ``extra.certs__client_cert_path`` - - + + * - ``client_private_key`` - True - + - ``extra.certs__client_key_path`` - - + + * - ``host`` - True - + - ``host`` - - + + * - ``port`` - True - + - ``port`` - - + + * - ``session_properties`` - False - + - ``extra.session_properties`` - - + + * - ``database`` - True - + - - - + + * - ``schema`` - True - + - - - + + Some notes about the table above: @@ -95,4 +95,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. diff --git a/docs/profiles/TrinoJWT.rst b/docs/profiles/TrinoJWT.rst index e1c81298c..3115a0df5 100644 --- a/docs/profiles/TrinoJWT.rst +++ b/docs/profiles/TrinoJWT.rst @@ -15,7 +15,7 @@ TrinoJWT https://airflow.apache.org/docs/apache-airflow-providers-trino/stable/connections.html - + This profile mapping translates Airflow connections with the type ``trino`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -25,8 +25,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import TrinoJWTProfileMapping profile = TrinoJWTProfileMapping( - conn_id = 'my_trino_connection', - profile_args = { ... 
}, + conn_id="my_trino_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -41,43 +41,43 @@ to the ``profile_args`` dict. - Required - Airflow Field Name - + * - ``jwt_token`` - True - + - ``extra.jwt__token`` - - + + * - ``host`` - True - + - ``host`` - - + + * - ``port`` - True - + - ``port`` - - + + * - ``session_properties`` - False - + - ``extra.session_properties`` - - + + * - ``database`` - True - + - - - + + * - ``schema`` - True - + - - - + + Some notes about the table above: @@ -91,4 +91,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. diff --git a/docs/profiles/TrinoLDAP.rst b/docs/profiles/TrinoLDAP.rst index e57dda14d..0c04dbb86 100644 --- a/docs/profiles/TrinoLDAP.rst +++ b/docs/profiles/TrinoLDAP.rst @@ -15,7 +15,7 @@ TrinoLDAP https://airflow.apache.org/docs/apache-airflow-providers-trino/stable/connections.html - + This profile mapping translates Airflow connections with the type ``trino`` into dbt profiles. To use this profile, import it from ``cosmos.profiles``: @@ -25,8 +25,8 @@ into dbt profiles. To use this profile, import it from ``cosmos.profiles``: from cosmos.profiles import TrinoLDAPProfileMapping profile = TrinoLDAPProfileMapping( - conn_id = 'my_trino_connection', - profile_args = { ... }, + conn_id="my_trino_connection", + profile_args={...}, ) While the profile mapping pulls fields from Airflow connections, you may need to supplement it @@ -41,49 +41,49 @@ to the ``profile_args`` dict. 
- Required - Airflow Field Name - + * - ``user`` - True - + - ``login`` - - + + * - ``password`` - True - + - ``password`` - - + + * - ``host`` - True - + - ``host`` - - + + * - ``port`` - True - + - ``port`` - - + + * - ``session_properties`` - False - + - ``extra.session_properties`` - - + + * - ``database`` - True - + - - - + + * - ``schema`` - True - + - - - + + Some notes about the table above: @@ -97,4 +97,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. diff --git a/docs/profiles/index.rst b/docs/profiles/index.rst index bc7766ee2..cb4688f8e 100644 --- a/docs/profiles/index.rst +++ b/docs/profiles/index.rst @@ -17,7 +17,7 @@ TrinoLDAP TrinoCertificate TrinoJWT - + Profiles Overview ========================== @@ -45,7 +45,6 @@ is set in the ``cosmos.config.ProfileConfig`` object, like so: profile_config = ProfileConfig( profile_name="my_profile_name", target_name="my_target_name", - # choose one of the following profile_mapping=..., profiles_yml_path=..., @@ -62,7 +61,7 @@ Profile mappings are utilities provided by Cosmos that translate Airflow connect you can use the same connection objects you use in Airflow to authenticate with your database in dbt. To do so, there's a class in Cosmos for each Airflow connection to dbt profile mapping. -You can find the available profile mappings on the left-hand side of this page. Each profile mapping is imported from +You can find the available profile mappings on the left-hand side of this page. Each profile mapping is imported from ``cosmos.profiles`` and takes two arguments: * ``conn_id``: the Airflow connection ID to use. 
diff --git a/docs/scripts/templates/index.rst.jinja2 b/docs/scripts/templates/index.rst.jinja2 index 69407b08d..dcaabb028 100644 --- a/docs/scripts/templates/index.rst.jinja2 +++ b/docs/scripts/templates/index.rst.jinja2 @@ -51,7 +51,7 @@ Profile mappings are utilities provided by Cosmos that translate Airflow connect you can use the same connection objects you use in Airflow to authenticate with your database in dbt. To do so, there's a class in Cosmos for each Airflow connection to dbt profile mapping. -You can find the available profile mappings on the left-hand side of this page. Each profile mapping is imported from +You can find the available profile mappings on the left-hand side of this page. Each profile mapping is imported from ``cosmos.profiles`` and takes two arguments: * ``conn_id``: the Airflow connection ID to use. @@ -105,4 +105,3 @@ For example, the code snippet below points Cosmos at a ``profiles.yml`` file and ) dag = DbtDag(profile_config=profile_config, ...) - diff --git a/docs/scripts/templates/profile_mapping.rst.jinja2 b/docs/scripts/templates/profile_mapping.rst.jinja2 index f27404bbb..ff9ad8da1 100644 --- a/docs/scripts/templates/profile_mapping.rst.jinja2 +++ b/docs/scripts/templates/profile_mapping.rst.jinja2 @@ -53,4 +53,4 @@ Some notes about the table above: - If there are multiple Airflow field names, the profile mapping looks at those fields in order. For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. \ No newline at end of file + ``extra.token``. 
From 6b89b887dc5d68953fc2e022041ec7a68f6acc25 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 15:11:12 -0400 Subject: [PATCH 13/24] add __init__ --- docs/__init__.py | 0 docs/scripts/__init__.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 docs/__init__.py create mode 100644 docs/scripts/__init__.py diff --git a/docs/__init__.py b/docs/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/docs/scripts/__init__.py b/docs/scripts/__init__.py new file mode 100644 index 000000000..e69de29bb From 1bbb61c4401f6b0c3be6436f29edd0d9123c3856 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 15:15:48 -0400 Subject: [PATCH 14/24] ignore docs/profiles --- .gitignore | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.gitignore b/.gitignore index cf06af3d3..d5330e55d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ +# cosmos-specific ignores +# these files get autogenerated +docs/profiles/* + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] From 920e078d1deb2ed2b3790731b19e087d79d401e9 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 15:16:35 -0400 Subject: [PATCH 15/24] try to fix build issues --- docs/conf.py | 4 +- docs/{scripts => }/generate_mappings.py | 2 +- docs/profiles/DatabricksToken.rst | 82 ------------ docs/profiles/ExasolUserPassword.rst | 120 ------------------ .../GoogleCloudServiceAccountDict.rst | 76 ----------- .../GoogleCloudServiceAccountFile.rst | 76 ----------- docs/profiles/PostgresUserPassword.rst | 104 --------------- docs/profiles/RedshiftUserPassword.rst | 110 ---------------- docs/profiles/SnowflakePrivateKeyPem.rst | 98 -------------- docs/profiles/SnowflakeUserPassword.rst | 98 -------------- docs/profiles/SparkThrift.rst | 68 ---------- docs/profiles/TrinoCertificate.rst | 98 -------------- docs/profiles/TrinoJWT.rst | 94 -------------- docs/profiles/TrinoLDAP.rst | 100 --------------- docs/profiles/index.rst | 117 
----------------- docs/scripts/__init__.py | 0 docs/{scripts => }/templates/index.rst.jinja2 | 0 .../templates/profile_mapping.rst.jinja2 | 0 18 files changed, 3 insertions(+), 1244 deletions(-) rename docs/{scripts => }/generate_mappings.py (98%) delete mode 100644 docs/profiles/DatabricksToken.rst delete mode 100644 docs/profiles/ExasolUserPassword.rst delete mode 100644 docs/profiles/GoogleCloudServiceAccountDict.rst delete mode 100644 docs/profiles/GoogleCloudServiceAccountFile.rst delete mode 100644 docs/profiles/PostgresUserPassword.rst delete mode 100644 docs/profiles/RedshiftUserPassword.rst delete mode 100644 docs/profiles/SnowflakePrivateKeyPem.rst delete mode 100644 docs/profiles/SnowflakeUserPassword.rst delete mode 100644 docs/profiles/SparkThrift.rst delete mode 100644 docs/profiles/TrinoCertificate.rst delete mode 100644 docs/profiles/TrinoJWT.rst delete mode 100644 docs/profiles/TrinoLDAP.rst delete mode 100644 docs/profiles/index.rst delete mode 100644 docs/scripts/__init__.py rename docs/{scripts => }/templates/index.rst.jinja2 (100%) rename docs/{scripts => }/templates/profile_mapping.rst.jinja2 (100%) diff --git a/docs/conf.py b/docs/conf.py index e916de843..775950642 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,11 +1,11 @@ import os import sys -from docs.scripts.generate_mappings import generate_mapping_docs - # Add the project root to the path so we can import the package sys.path.insert(0, os.path.abspath("../")) +from docs.generate_mappings import generate_mapping_docs + # Configuration file for the Sphinx documentation builder. 
# # For the full list of built-in configuration values, see the documentation: diff --git a/docs/scripts/generate_mappings.py b/docs/generate_mappings.py similarity index 98% rename from docs/scripts/generate_mappings.py rename to docs/generate_mappings.py index b11eebdb5..11edd41a8 100644 --- a/docs/scripts/generate_mappings.py +++ b/docs/generate_mappings.py @@ -40,7 +40,7 @@ def get_fields_from_mapping(mapping: Type[BaseProfileMapping]) -> list[Field]: def generate_mapping_docs( - templates_dir: str = "scripts/templates", + templates_dir: str = "templates", output_dir: str = "profiles", ) -> None: """ diff --git a/docs/profiles/DatabricksToken.rst b/docs/profiles/DatabricksToken.rst deleted file mode 100644 index e50acd85e..000000000 --- a/docs/profiles/DatabricksToken.rst +++ /dev/null @@ -1,82 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -DatabricksToken -=============== - - - - Maps Airflow Databricks connections with a token to dbt profiles. - - - - https://docs.getdbt.com/reference/warehouse-setups/databricks-setup - - https://airflow.apache.org/docs/apache-airflow-providers-databricks/stable/connections/databricks.html - - - -This profile mapping translates Airflow connections with the type ``databricks`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import DatabricksTokenProfileMapping - - profile = DatabricksTokenProfileMapping( - conn_id="my_databricks_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. 
list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``host`` - - True - - - ``host`` - - - * - ``schema`` - - True - - - ``schema`` - - - * - ``token`` - - True - - - ``['password', 'extra.token']`` - - - * - ``http_path`` - - True - - - ``extra.http_path`` - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. -- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. - For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/ExasolUserPassword.rst b/docs/profiles/ExasolUserPassword.rst deleted file mode 100644 index 52e3b1059..000000000 --- a/docs/profiles/ExasolUserPassword.rst +++ /dev/null @@ -1,120 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -ExasolUserPassword -================== - - - - Maps Airflow Exasol connections with a username and password to dbt profiles. - - https://docs.getdbt.com/reference/warehouse-setups/exasol-setup - - - -This profile mapping translates Airflow connections with the type ``exasol`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. 
code-block:: python - - from cosmos.profiles import ExasolUserPasswordProfileMapping - - profile = ExasolUserPasswordProfileMapping( - conn_id="my_exasol_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``dsn`` - - True - - - ``host`` - - - * - ``user`` - - True - - - ``login`` - - - * - ``password`` - - True - - - ``password`` - - - * - ``dbname`` - - True - - - ``schema`` - - - * - ``encryption`` - - False - - - ``extra.encryption`` - - - * - ``compression`` - - False - - - ``extra.compression`` - - - * - ``connection_timeout`` - - False - - - ``extra.connection_timeout`` - - - * - ``socket_timeout`` - - False - - - ``extra.socket_timeout`` - - - * - ``protocol_version`` - - False - - - ``extra.protocol_version`` - - - * - ``threads`` - - True - - - - - - * - ``schema`` - - True - - - - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. -- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. 
- For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/GoogleCloudServiceAccountDict.rst b/docs/profiles/GoogleCloudServiceAccountDict.rst deleted file mode 100644 index b76073f2a..000000000 --- a/docs/profiles/GoogleCloudServiceAccountDict.rst +++ /dev/null @@ -1,76 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -GoogleCloudServiceAccountDict -============================= - - - - Maps Airflow GCP connections to dbt BigQuery profiles if they use a service account keyfile dict/json. - - - - https://docs.getdbt.com/reference/warehouse-setups/bigquery-setup#service-account-file - - https://airflow.apache.org/docs/apache-airflow-providers-google/stable/connections/gcp.html - - - -This profile mapping translates Airflow connections with the type ``google_cloud_platform`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import GoogleCloudServiceAccountDictProfileMapping - - profile = GoogleCloudServiceAccountDictProfileMapping( - conn_id="my_google_cloud_platform_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. 
list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``project`` - - True - - - ``extra.project`` - - - * - ``dataset`` - - True - - - ``['extra.dataset', 'dataset']`` - - - * - ``keyfile_dict`` - - True - - - ``['extra.keyfile_dict', 'keyfile_dict', 'extra__google_cloud_platform__keyfile_dict']`` - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. -- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. - For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/GoogleCloudServiceAccountFile.rst b/docs/profiles/GoogleCloudServiceAccountFile.rst deleted file mode 100644 index 143ce8595..000000000 --- a/docs/profiles/GoogleCloudServiceAccountFile.rst +++ /dev/null @@ -1,76 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -GoogleCloudServiceAccountFile -============================= - - - - Maps Airflow GCP connections to dbt BigQuery profiles if they use a service account file. 
- - - - https://docs.getdbt.com/reference/warehouse-setups/bigquery-setup#service-account-file - - https://airflow.apache.org/docs/apache-airflow-providers-google/stable/connections/gcp.html - - - -This profile mapping translates Airflow connections with the type ``google_cloud_platform`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import GoogleCloudServiceAccountFileProfileMapping - - profile = GoogleCloudServiceAccountFileProfileMapping( - conn_id="my_google_cloud_platform_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``project`` - - True - - - ``extra.project`` - - - * - ``dataset`` - - True - - - ``extra.dataset`` - - - * - ``keyfile`` - - True - - - ``extra.key_path`` - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. -- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. - For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. 
If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/PostgresUserPassword.rst b/docs/profiles/PostgresUserPassword.rst deleted file mode 100644 index 06c06f2bf..000000000 --- a/docs/profiles/PostgresUserPassword.rst +++ /dev/null @@ -1,104 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -PostgresUserPassword -==================== - - - - Maps Airflow Postgres connections using user + password authentication to dbt profiles. - - https://docs.getdbt.com/reference/warehouse-setups/postgres-setup - - https://airflow.apache.org/docs/apache-airflow-providers-postgres/stable/connections/postgres.html - - - -This profile mapping translates Airflow connections with the type ``postgres`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import PostgresUserPasswordProfileMapping - - profile = PostgresUserPasswordProfileMapping( - conn_id="my_postgres_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. 
list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``host`` - - True - - - ``host`` - - - * - ``user`` - - True - - - ``login`` - - - * - ``password`` - - True - - - ``password`` - - - * - ``port`` - - True - - - ``port`` - - - * - ``dbname`` - - True - - - ``schema`` - - - * - ``keepalives_idle`` - - False - - - ``extra.keepalives_idle`` - - - * - ``sslmode`` - - False - - - ``extra.sslmode`` - - - * - ``schema`` - - True - - - - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. -- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. - For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/RedshiftUserPassword.rst b/docs/profiles/RedshiftUserPassword.rst deleted file mode 100644 index 7a0f79471..000000000 --- a/docs/profiles/RedshiftUserPassword.rst +++ /dev/null @@ -1,110 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -RedshiftUserPassword -==================== - - - - Maps Airflow Redshift connections to dbt Redshift profiles if they use a username and password. 
- - https://docs.getdbt.com/reference/warehouse-setups/redshift-setup - - https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/connections/redshift.html - - - -This profile mapping translates Airflow connections with the type ``redshift`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import RedshiftUserPasswordProfileMapping - - profile = RedshiftUserPasswordProfileMapping( - conn_id="my_redshift_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``host`` - - True - - - ``host`` - - - * - ``user`` - - True - - - ``login`` - - - * - ``password`` - - True - - - ``password`` - - - * - ``port`` - - False - - - ``port`` - - - * - ``dbname`` - - True - - - ``schema`` - - - * - ``timeout`` - - False - - - ``extra.timeout`` - - - * - ``sslmode`` - - False - - - ``extra.sslmode`` - - - * - ``region`` - - False - - - ``extra.region`` - - - * - ``schema`` - - True - - - - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. -- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. 
-- If there are multiple Airflow field names, the profile mapping looks at those fields in order. - For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/SnowflakePrivateKeyPem.rst b/docs/profiles/SnowflakePrivateKeyPem.rst deleted file mode 100644 index d8b698422..000000000 --- a/docs/profiles/SnowflakePrivateKeyPem.rst +++ /dev/null @@ -1,98 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -SnowflakePrivateKeyPem -====================== - - - - Maps Airflow Snowflake connections to dbt profiles if they use a user/private key. - - https://docs.getdbt.com/docs/core/connect-data-platform/snowflake-setup#key-pair-authentication - - https://airflow.apache.org/docs/apache-airflow-providers-snowflake/stable/connections/snowflake.html - - - -This profile mapping translates Airflow connections with the type ``snowflake`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import SnowflakePrivateKeyPemProfileMapping - - profile = SnowflakePrivateKeyPemProfileMapping( - conn_id="my_snowflake_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. 
list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``account`` - - True - - - ``extra.account`` - - - * - ``user`` - - True - - - ``login`` - - - * - ``database`` - - True - - - ``extra.database`` - - - * - ``warehouse`` - - True - - - ``extra.warehouse`` - - - * - ``schema`` - - True - - - ``schema`` - - - * - ``role`` - - False - - - ``extra.role`` - - - * - ``private_key_content`` - - True - - - ``extra.private_key_content`` - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. -- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. - For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/SnowflakeUserPassword.rst b/docs/profiles/SnowflakeUserPassword.rst deleted file mode 100644 index 338185cc2..000000000 --- a/docs/profiles/SnowflakeUserPassword.rst +++ /dev/null @@ -1,98 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -SnowflakeUserPassword -===================== - - - - Maps Airflow Snowflake connections to dbt profiles if they use a user/password. 
- - https://docs.getdbt.com/reference/warehouse-setups/snowflake-setup - - https://airflow.apache.org/docs/apache-airflow-providers-snowflake/stable/connections/snowflake.html - - - -This profile mapping translates Airflow connections with the type ``snowflake`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import SnowflakeUserPasswordProfileMapping - - profile = SnowflakeUserPasswordProfileMapping( - conn_id="my_snowflake_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``account`` - - True - - - ``extra.account`` - - - * - ``user`` - - True - - - ``login`` - - - * - ``password`` - - True - - - ``password`` - - - * - ``database`` - - True - - - ``extra.database`` - - - * - ``warehouse`` - - True - - - ``extra.warehouse`` - - - * - ``schema`` - - True - - - ``schema`` - - - * - ``role`` - - False - - - ``extra.role`` - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. -- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. 
- For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/SparkThrift.rst b/docs/profiles/SparkThrift.rst deleted file mode 100644 index a8c33b672..000000000 --- a/docs/profiles/SparkThrift.rst +++ /dev/null @@ -1,68 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -SparkThrift -=========== - - - - Maps Airflow Spark connections to dbt profiles if they use a thrift connection. - - https://docs.getdbt.com/reference/warehouse-setups/spark-setup#thrift - - https://airflow.apache.org/docs/apache-airflow-providers-apache-spark/stable/connections/spark.html - - - -This profile mapping translates Airflow connections with the type ``spark`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import SparkThriftProfileMapping - - profile = SparkThriftProfileMapping( - conn_id="my_spark_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``host`` - - True - - - ``host`` - - - * - ``schema`` - - True - - - - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. 
-- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. - For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/TrinoCertificate.rst b/docs/profiles/TrinoCertificate.rst deleted file mode 100644 index 8b887414f..000000000 --- a/docs/profiles/TrinoCertificate.rst +++ /dev/null @@ -1,98 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -TrinoCertificate -================ - - - - Maps Airflow Trino connections to Certificate Trino dbt profiles. - - https://docs.getdbt.com/reference/warehouse-setups/trino-setup#certificate - - https://airflow.apache.org/docs/apache-airflow-providers-trino/stable/connections.html - - - -This profile mapping translates Airflow connections with the type ``trino`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import TrinoCertificateProfileMapping - - profile = TrinoCertificateProfileMapping( - conn_id="my_trino_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. 
list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``client_certificate`` - - True - - - ``extra.certs__client_cert_path`` - - - * - ``client_private_key`` - - True - - - ``extra.certs__client_key_path`` - - - * - ``host`` - - True - - - ``host`` - - - * - ``port`` - - True - - - ``port`` - - - * - ``session_properties`` - - False - - - ``extra.session_properties`` - - - * - ``database`` - - True - - - - - - * - ``schema`` - - True - - - - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. -- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. - For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/TrinoJWT.rst b/docs/profiles/TrinoJWT.rst deleted file mode 100644 index 3115a0df5..000000000 --- a/docs/profiles/TrinoJWT.rst +++ /dev/null @@ -1,94 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -TrinoJWT -======== - - - - Maps Airflow Trino connections to JWT Trino dbt profiles. - - - - https://docs.getdbt.com/reference/warehouse-setups/trino-setup#jwt - - https://airflow.apache.org/docs/apache-airflow-providers-trino/stable/connections.html - - - -This profile mapping translates Airflow connections with the type ``trino`` -into dbt profiles. 
To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import TrinoJWTProfileMapping - - profile = TrinoJWTProfileMapping( - conn_id="my_trino_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``jwt_token`` - - True - - - ``extra.jwt__token`` - - - * - ``host`` - - True - - - ``host`` - - - * - ``port`` - - True - - - ``port`` - - - * - ``session_properties`` - - False - - - ``extra.session_properties`` - - - * - ``database`` - - True - - - - - - * - ``schema`` - - True - - - - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. -- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. - For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. 
diff --git a/docs/profiles/TrinoLDAP.rst b/docs/profiles/TrinoLDAP.rst deleted file mode 100644 index 0c04dbb86..000000000 --- a/docs/profiles/TrinoLDAP.rst +++ /dev/null @@ -1,100 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - - -TrinoLDAP -========= - - - - Maps Airflow Trino connections to LDAP Trino dbt profiles. - - - - https://docs.getdbt.com/reference/warehouse-setups/trino-setup#ldap - - https://airflow.apache.org/docs/apache-airflow-providers-trino/stable/connections.html - - - -This profile mapping translates Airflow connections with the type ``trino`` -into dbt profiles. To use this profile, import it from ``cosmos.profiles``: - -.. code-block:: python - - from cosmos.profiles import TrinoLDAPProfileMapping - - profile = TrinoLDAPProfileMapping( - conn_id="my_trino_connection", - profile_args={...}, - ) - -While the profile mapping pulls fields from Airflow connections, you may need to supplement it -with additional ``profile_args``. The below table shows which fields are required, along with those -not required but pulled from the Airflow connection if present. You can also add additional fields -to the ``profile_args`` dict. - -.. list-table:: - :header-rows: 1 - - * - dbt Field Name - - Required - - Airflow Field Name - - - * - ``user`` - - True - - - ``login`` - - - * - ``password`` - - True - - - ``password`` - - - * - ``host`` - - True - - - ``host`` - - - * - ``port`` - - True - - - ``port`` - - - * - ``session_properties`` - - False - - - ``extra.session_properties`` - - - * - ``database`` - - True - - - - - - * - ``schema`` - - True - - - - - - - -Some notes about the table above: - -- This table doesn't necessarily show the full list of fields you *can* pass to the dbt profile. To - see the full list of fields, see the link to the dbt docs at the top of this page. 
-- If the Airflow field name starts with an ``extra.``, this means that the field is nested under - the ``extra`` field in the Airflow connection. For example, if the Airflow field name is - ``extra.token``, this means that the field is nested under ``extra`` in the Airflow connection, - and the field name is ``token``. -- If there are multiple Airflow field names, the profile mapping looks at those fields in order. - For example, if the Airflow field name is ``['password', 'extra.token']``, the profile mapping - will first look for a field named ``password``. If that field is not present, it will look for - ``extra.token``. diff --git a/docs/profiles/index.rst b/docs/profiles/index.rst deleted file mode 100644 index cb4688f8e..000000000 --- a/docs/profiles/index.rst +++ /dev/null @@ -1,117 +0,0 @@ -.. - This file is autogenerated by `docs/scripts/generate_mappings.py`. Do not edit by hand. - -.. toctree:: - :caption: Profiles - - self - GoogleCloudServiceAccountFile - GoogleCloudServiceAccountDict - DatabricksToken - PostgresUserPassword - RedshiftUserPassword - SnowflakeUserPassword - SnowflakePrivateKeyPem - SparkThrift - ExasolUserPassword - TrinoLDAP - TrinoCertificate - TrinoJWT - - -Profiles Overview -========================== - -Cosmos supports two methods of authenticating with your database: - -- using your own dbt profiles.yml file -- using Airflow connections via Cosmos' profile mappings - -If you're already interacting with your database from Airflow and have a connection set up, it's recommended -to use a profile mapping to translate that Airflow connection to a dbt profile. This is because it's easier to -maintain a single connection object in Airflow than it is to maintain a connection object in Airflow and a dbt profile -in your dbt project. - -If you don't already have an Airflow connection, or if there's no readily-available profile mapping for your database, -you can use your own dbt profiles.yml file. 
- -Regardless of which method you use, you'll need to tell Cosmos which profile and target name it should use. Profile config -is set in the ``cosmos.config.ProfileConfig`` object, like so: - -.. code-block:: python - - from cosmos.config import ProfileConfig - - profile_config = ProfileConfig( - profile_name="my_profile_name", - target_name="my_target_name", - # choose one of the following - profile_mapping=..., - profiles_yml_path=..., - ) - - dag = DbtDag(profile_config=profile_config, ...) - - - -Using a profile mapping -++++++++++++++++++++++++++++++++++++ - -Profile mappings are utilities provided by Cosmos that translate Airflow connections to dbt profiles. This means that -you can use the same connection objects you use in Airflow to authenticate with your database in dbt. To do so, there's -a class in Cosmos for each Airflow connection to dbt profile mapping. - -You can find the available profile mappings on the left-hand side of this page. Each profile mapping is imported from -``cosmos.profiles`` and takes two arguments: - -* ``conn_id``: the Airflow connection ID to use. -* ``profile_args``: a dictionary of additional arguments to pass to the dbt profile. This is useful for specifying - values that are not in the Airflow connection. This also acts as an override for any values that are in the Airflow - connection but should be overridden. - -Below is an example of using the Snowflake profile mapping, where we take most arguments from the Airflow connection -but override the ``database`` and ``schema`` values: - -.. code-block:: python - - from cosmos.profiles import SnowflakeUserPasswordProfileMapping - - profile_config = ProfileConfig( - profile_name="my_profile_name", - target_name="my_target_name", - profile_mapping=SnowflakeUserPasswordProfileMapping( - conn_id="my_snowflake_conn_id", - profile_args={ - "database": "my_snowflake_database", - "schema": "my_snowflake_schema", - }, - ), - ) - - dag = DbtDag(profile_config=profile_config, ...) 
- -Note that when using a profile mapping, the profiles.yml file gets generated with the profile name and target name -you specify in ``ProfileConfig``. - - -Using your own profiles.yml file -++++++++++++++++++++++++++++++++++++ - -If you don't want to use Airflow connections, or if there's no readily-available profile mapping for your database, -you can use your own dbt profiles.yml file. To do so, you'll need to pass the path to your profiles.yml file to the -``profiles_yml_path`` argument in ``ProfileConfig``. - -For example, the code snippet below points Cosmos at a ``profiles.yml`` file and instructs Cosmos to use the -``my_snowflake_profile`` profile and ``dev`` target: - -.. code-block:: python - - from cosmos.config import ProfileConfig - - profile_config = ProfileConfig( - profile_name="my_snowflake_profile", - target_name="dev", - profiles_yml_path="/path/to/profiles.yml", - ) - - dag = DbtDag(profile_config=profile_config, ...) diff --git a/docs/scripts/__init__.py b/docs/scripts/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/scripts/templates/index.rst.jinja2 b/docs/templates/index.rst.jinja2 similarity index 100% rename from docs/scripts/templates/index.rst.jinja2 rename to docs/templates/index.rst.jinja2 diff --git a/docs/scripts/templates/profile_mapping.rst.jinja2 b/docs/templates/profile_mapping.rst.jinja2 similarity index 100% rename from docs/scripts/templates/profile_mapping.rst.jinja2 rename to docs/templates/profile_mapping.rst.jinja2 From dd983867098a99dfcb553a737ecc205714700714 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 15:17:38 -0400 Subject: [PATCH 16/24] add airflow to docs requirements --- docs/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/requirements.txt b/docs/requirements.txt index bb86f233e..c20fdc8d0 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -2,3 +2,4 @@ sphinx pydata-sphinx-theme sphinx-autobuild sphinx-autoapi 
+apache-airflow From 2489f5ef7a7f6580444f8775da429b108e02a76d Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 15:22:25 -0400 Subject: [PATCH 17/24] use relative paths --- docs/generate_mappings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/generate_mappings.py b/docs/generate_mappings.py index 11edd41a8..4c38f07d4 100644 --- a/docs/generate_mappings.py +++ b/docs/generate_mappings.py @@ -40,8 +40,8 @@ def get_fields_from_mapping(mapping: Type[BaseProfileMapping]) -> list[Field]: def generate_mapping_docs( - templates_dir: str = "templates", - output_dir: str = "profiles", + templates_dir: str = "./templates", + output_dir: str = "./profiles", ) -> None: """ Generate a dedicated docs page per profile mapping. From f4cee7ff8098f30f2cc29e151f05d29c06dec8aa Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 15:36:31 -0400 Subject: [PATCH 18/24] make dir if it doesnt exist --- docs/generate_mappings.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/generate_mappings.py b/docs/generate_mappings.py index 4c38f07d4..f7f0696cf 100644 --- a/docs/generate_mappings.py +++ b/docs/generate_mappings.py @@ -51,6 +51,9 @@ def generate_mapping_docs( for file in os.listdir(output_dir): os.remove(f"{output_dir}/{file}") + # then, create the directory + os.makedirs(output_dir, exist_ok=True) + # get the index template env = Environment(loader=FileSystemLoader(templates_dir)) index_template = env.get_template("index.rst.jinja2") From c2079408c9c7dd47225c6544de528666cb866502 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 15:38:31 -0400 Subject: [PATCH 19/24] fix ruff error --- docs/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 775950642..d8945d109 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,11 +1,11 @@ import os import sys +from docs.generate_mappings import generate_mapping_docs + # Add the project root to the path 
so we can import the package sys.path.insert(0, os.path.abspath("../")) -from docs.generate_mappings import generate_mapping_docs - # Configuration file for the Sphinx documentation builder. # # For the full list of built-in configuration values, see the documentation: From 1900cfe02445c83feeb7a41a6bf4147fbda97d25 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 15:40:58 -0400 Subject: [PATCH 20/24] add noqa --- docs/conf.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d8945d109..e55ad1786 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,11 +1,12 @@ +# noqa import os import sys -from docs.generate_mappings import generate_mapping_docs - # Add the project root to the path so we can import the package sys.path.insert(0, os.path.abspath("../")) +from docs.generate_mappings import generate_mapping_docs + # Configuration file for the Sphinx documentation builder. # # For the full list of built-in configuration values, see the documentation: From 8ce8cd3a8cc7e2a5a310acbaf78fca4d9600a89a Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 17:09:54 -0400 Subject: [PATCH 21/24] update docs after config changes --- docs/configuration/compiled-sql.rst | 13 +++++- docs/configuration/execution-config.rst | 5 +++ docs/configuration/generating-docs.rst | 12 +++--- docs/configuration/index.rst | 5 +++ docs/configuration/lineage.rst | 12 +++--- docs/configuration/parsing-methods.rst | 47 +++++++++++++++++++--- docs/configuration/profile-config.rst | 4 ++ docs/configuration/project-config.rst | 31 ++++++++++++++ docs/configuration/render-config.rst | 13 ++++++ docs/configuration/scheduling.rst | 3 -- docs/configuration/selecting-excluding.rst | 34 ++++++++++------ docs/configuration/testing-behavior.rst | 16 ++++++-- docs/getting_started/astro.rst | 12 ++++-- docs/getting_started/execution-modes.rst | 16 +++++--- docs/getting_started/gcc.rst | 36 +++++++++++------ 
docs/getting_started/mwaa.rst | 30 +++++++++----- docs/getting_started/open-source.rst | 37 ++++++++--------- 17 files changed, 237 insertions(+), 89 deletions(-) create mode 100644 docs/configuration/execution-config.rst create mode 100644 docs/configuration/profile-config.rst create mode 100644 docs/configuration/project-config.rst create mode 100644 docs/configuration/render-config.rst diff --git a/docs/configuration/compiled-sql.rst b/docs/configuration/compiled-sql.rst index 5d42a234f..67643d575 100644 --- a/docs/configuration/compiled-sql.rst +++ b/docs/configuration/compiled-sql.rst @@ -5,4 +5,15 @@ Compiled SQL When using the local execution mode, Cosmos will store the compiled SQL for each model in the ``compiled_sql`` field of the task's ``template_fields``. This allows you to view the compiled SQL in the Airflow UI. -If you'd like to disable this feature, you can set ``should_store_compiled_sql=False`` on the local operator (or via the ``operator_args`` parameter on the DAG/Task Group). +If you'd like to disable this feature, you can set ``should_store_compiled_sql=False`` on the local operator (or via the ``operator_args`` parameter on the DAG/Task Group). For example: + +.. code-block:: python + + from cosmos import DbtDag + + DbtDag( + operator_args={ + "should_store_compiled_sql": False + }, + # ..., + ) diff --git a/docs/configuration/execution-config.rst b/docs/configuration/execution-config.rst new file mode 100644 index 000000000..d1094107c --- /dev/null +++ b/docs/configuration/execution-config.rst @@ -0,0 +1,5 @@ +Execution Config +================== + +Cosmos supports multiple ways of executing your dbt models. +For more information, see the `execution modes <../getting_started/execution-modes.html>`_ page. 
diff --git a/docs/configuration/generating-docs.rst b/docs/configuration/generating-docs.rst index c3ebf587d..925b60e04 100644 --- a/docs/configuration/generating-docs.rst +++ b/docs/configuration/generating-docs.rst @@ -34,8 +34,8 @@ You can use the :class:`~cosmos.operators.DbtDocsS3Operator` to generate and upl generate_dbt_docs_aws = DbtDocsS3Operator( task_id="generate_dbt_docs_aws", project_dir="path/to/jaffle_shop", - conn_id="airflow_db", - schema="public", + profile_config=profile_config, + # docs-specific arguments aws_conn_id="test_aws", bucket_name="test_bucket", ) @@ -55,8 +55,8 @@ You can use the :class:`~cosmos.operators.DbtDocsAzureStorageOperator` to genera generate_dbt_docs_azure = DbtDocsAzureStorageOperator( task_id="generate_dbt_docs_azure", project_dir="path/to/jaffle_shop", - conn_id="airflow_db", - schema="public", + profile_config=profile_config, + # docs-specific arguments azure_conn_id="test_azure", container_name="$web", ) @@ -101,7 +101,7 @@ If you want to run custom code after the docs are generated, you can use the :cl generate_dbt_docs = DbtDocsOperator( task_id="generate_dbt_docs", project_dir="path/to/jaffle_shop", - conn_id="airflow_db", - schema="public", + profile_config=profile_config, + # docs-specific arguments callback=upload_docs, ) diff --git a/docs/configuration/index.rst b/docs/configuration/index.rst index efd476058..9697b8b73 100644 --- a/docs/configuration/index.rst +++ b/docs/configuration/index.rst @@ -8,6 +8,11 @@ Cosmos offers a number of configuration options to customize its behavior. For m .. 
toctree:: :caption: Contents: + Project Config + Profile Config + Execution Config + Render Config + Parsing Methods Configuring Lineage Generating Docs diff --git a/docs/configuration/lineage.rst b/docs/configuration/lineage.rst index 49d96289d..ffee8fbe2 100644 --- a/docs/configuration/lineage.rst +++ b/docs/configuration/lineage.rst @@ -49,9 +49,9 @@ installed jaffle_shop = DbtTaskGroup( ..., - dbt_args={ - "dbt_executable_path": "/usr/local/airflow/dbt_venv/bin/dbt-ol", - }, + ExecutionConfig( + dbt_executable_path="/usr/local/airflow/dbt_venv/bin/dbt-ol", + ), ) @@ -75,7 +75,7 @@ installed jaffle_shop = DbtTaskGroup( ..., - dbt_args={ - "dbt_executable_path": "/usr/local/bin/dbt-ol", - }, + ExecutionConfig( + dbt_executable_path="/usr/local/airflow/dbt_venv/bin/dbt-ol", + ), ) diff --git a/docs/configuration/parsing-methods.rst b/docs/configuration/parsing-methods.rst index b5d6113af..8fece5260 100644 --- a/docs/configuration/parsing-methods.rst +++ b/docs/configuration/parsing-methods.rst @@ -10,9 +10,15 @@ Cosmos offers several options to parse your dbt project: - ``dbt_ls``. Parses a dbt project directory using the ``dbt ls`` command. - ``custom``. Uses Cosmos' custom dbt parser, which extracts dependencies from your dbt's model code. -The ``dbt_manifest`` and ``dbt_ls`` methods use dbt directly to parse your project. This means that they will respect your ``dbt_project.yml`` file and any other dbt configuration you have set up. +There are benefits and drawbacks to each method: -On the other hand, the ``custom`` method uses Cosmos' own dbt parser. This parser is not as robust as dbt's, so it's recommended that you use one of the other methods if possible. +- ``dbt_manifest``: You have to generate the manifest file on your own. When using the manifest, Cosmos gets a complete set of metadata about your models. However, Cosmos uses its own selecting & excluding logic to determine which models to run, which may not be as robust as dbt's. 
+- ``dbt_ls``: Cosmos will generate the manifest file for you. This method uses dbt's metadata AND dbt's selecting/excluding logic. This is the most robust method. However, this requires the dbt executable to be installed on your machine (either on the host directly or in a virtual environment). +- ``custom``: Cosmos will parse your project and model files for you. This means that Cosmos will not have access to dbt's metadata. However, this method does not require the dbt executable to be installed on your machine. + +If you're using the ``local`` mode, you should use the ``dbt_ls`` method. + +If you're using the ``docker`` or ``kubernetes`` modes, you should use either ``dbt_manifest`` or ``custom`` modes. ``automatic`` @@ -24,17 +30,26 @@ When you don't supply an argument to the ``load_mode`` parameter (or you supply 2. Try to generate a ``manifest.json`` file from your dbt project (``dbt_ls``) 3. Use Cosmos' dbt parser (``custom``) +To use this method, you don't need to supply any additional config. This is the default. + ``dbt_manifest`` ---------------- If you already have a ``manifest.json`` file created by dbt, Cosmos will parse the manifest to generate your DAG. -You can supply a ``manifest_path`` parameter on the DbtDag / DbtTaskGroup with a path to a ``manifest.json`` file. For example: +You can supply a ``manifest_path`` parameter on the DbtDag / DbtTaskGroup with a path to a ``manifest.json`` file. + +To use this: .. code-block:: python DbtDag( - manifest_path="/path/to/manifest.json" + project_config=ProjectConfig( + manifest_path="/path/to/manifest.json", + ), + render_config=RenderConfig( + load_mode=LoadMode.DBT_MANIFEST, + ) ..., ) @@ -43,12 +58,23 @@ You can supply a ``manifest_path`` parameter on the DbtDag / DbtTaskGroup with a .. note:: - This only works for the ``local`` and ``virtualenv`` execution modes. + This only works for the ``local`` execution mode. 
If you don't have a ``manifest.json`` file, Cosmos will attempt to generate one from your dbt project. It does this by running ``dbt ls`` and parsing the output. When Cosmos runs ``dbt ls``, it also passes your ``select`` and ``exclude`` arguments to the command. This means that Cosmos will only generate a manifest for the models you want to run. +To use this: + +.. code-block:: python + + DbtDag( + render_config=RenderConfig( + load_mode=LoadMode.DBT_LS, + ) + ..., + ) + ``custom`` ---------- @@ -59,3 +85,14 @@ The following are known limitations of the custom parser: - it does not read from the ``dbt_project.yml`` file - it does not parse Python files or models + +To use this: + +.. code-block:: python + + DbtDag( + render_config=RenderConfig( + load_mode=LoadMode.CUSTOM, + ) + ..., + ) \ No newline at end of file diff --git a/docs/configuration/profile-config.rst b/docs/configuration/profile-config.rst new file mode 100644 index 000000000..dcc5c784a --- /dev/null +++ b/docs/configuration/profile-config.rst @@ -0,0 +1,4 @@ +Profile Config +================ + +Cosmos has multiple methods for supplying profiles. For more information, click on the Profiles tab on the top navbar. diff --git a/docs/configuration/project-config.rst b/docs/configuration/project-config.rst new file mode 100644 index 000000000..fcb3bd565 --- /dev/null +++ b/docs/configuration/project-config.rst @@ -0,0 +1,31 @@ +Project Config +================ + +The ``cosmos.config.ProjectConfig`` allows you to specify information about where your dbt project is located. It +takes the following arguments: + +- ``dbt_project_path`` (required): The full path to your dbt project. This directory should have a ``dbt_project.yml`` file +- ``models_relative_path``: The path to your models directory, relative to the ``dbt_project_path``. This defaults to + ``models/`` +- ``seeds_relative_path``: The path to your seeds directory, relative to the ``dbt_project_path``. 
This defaults to + ``data/`` +- ``snapshots_relative_path``: The path to your snapshots directory, relative to the ``dbt_project_path``. This defaults + to ``snapshots/`` +- ``manifest_path``: The absolute path to your manifests directory. This is only required if you're using Cosmos' manifest + parsing mode + + +Project Config Example +---------------------- + +.. code-block:: python + + from cosmos.config import ProjectConfig + + config = ProjectConfig( + dbt_project_path='/path/to/dbt/project', + models_relative_path='models', + seeds_relative_path='data', + snapshots_relative_path='snapshots', + manifest_path='/path/to/manifests' + ) \ No newline at end of file diff --git a/docs/configuration/render-config.rst b/docs/configuration/render-config.rst new file mode 100644 index 000000000..b8f8ea274 --- /dev/null +++ b/docs/configuration/render-config.rst @@ -0,0 +1,13 @@ +Render Config +================ + + +Cosmos aims to give you control over how your dbt project is rendered as an Airflow DAG or Task Group. +It does this by exposing a ``cosmos.config.RenderConfig`` class that you can use to configure how your DAGs are rendered. + +The ``RenderConfig`` class takes the following arguments: + +- ``emit_datasets``: whether or not to emit Airflow datasets to be used for data-aware scheduling. Defaults to True +- ``test_behavior``: how to run tests. Defaults to running a model's tests immediately after the model is run. For more information, see the `Testing Behavior `_ section. +- ``load_method``: how to load your dbt project. See `Parsing Methods `_ for more information. +- ``select`` and ``exclude``: which models to include or exclude from your DAGs. See `Selecting & Excluding `_ for more information. 
diff --git a/docs/configuration/scheduling.rst b/docs/configuration/scheduling.rst index 625280309..738031848 100644 --- a/docs/configuration/scheduling.rst +++ b/docs/configuration/scheduling.rst @@ -44,15 +44,12 @@ Then, you can use Airflow's data-aware scheduling capabilities to schedule ``my_ project_one = DbtDag( # ... - conn_id="my_conn", start_date=datetime(2023, 1, 1), schedule_interval="@daily", - dbt_project_name="project_one", ) project_two = DbtDag( # ... - start_date=datetime(2023, 1, 1), schedule=[get_dbt_dataset("my_conn", "project_one", "my_model")], dbt_project_name="project_two", ) diff --git a/docs/configuration/selecting-excluding.rst b/docs/configuration/selecting-excluding.rst index 9e14a6272..fadea1485 100644 --- a/docs/configuration/selecting-excluding.rst +++ b/docs/configuration/selecting-excluding.rst @@ -3,25 +3,33 @@ Selecting & Excluding ======================= -Cosmos allows you to filter by configs (e.g. ``materialized``, ``tags``) using the ``select`` and ``exclude`` parameters. If a model contains any of the configs in the ``select``, it gets included as part of the DAG/Task Group. Similarly, if a model contains any of the configs in the ``exclude``, it gets excluded from the DAG/Task Group. +Cosmos allows you to filter to a subset of your dbt project in each ``DbtDag`` / ``DbtTaskGroup`` using the ``select`` and ``exclude`` parameters in the ``RenderConfig`` class. -The ``select`` and ``exclude`` parameters are dictionaries with the following keys: +The ``select`` and ``exclude`` parameters are lists, with values like the following: + +- ``tag:my_tag``: include/exclude models with the tag ``my_tag`` +- ``config.materialized:table``: include/exclude models with the config ``materialized: table`` +- ``path:analytics/tables``: include/exclude models in the ``analytics/tables`` directory -- ``configs``: a list of configs to filter by. The configs are in the format ``key:value``. For example, ``tags:daily`` or ``materialized:table``. 
-- ``paths``: a list of paths to filter by. The paths are in the format ``path/to/dir``. For example, ``analytics`` or ``analytics/tables``. .. note:: - Cosmos currently reads from (1) config calls in the model code and (2) .yml files in the models directory for tags. It does not read from the dbt_project.yml file. + + If you're using the ``dbt_ls`` parsing method, these arguments are passed directly to the dbt CLI command. + + If you're using the ``dbt_manifest`` parsing method, Cosmos will filter the models in the manifest before creating the DAG. This does not directly use dbt's CLI command, but should include all metadata that dbt would include. + + If you're using the ``custom`` parsing method, Cosmos does not currently read the ``dbt_project.yml`` file. You can still select/exclude models if you're selecting on metadata defined in the model code or ``.yml`` files in the models directory. Examples: .. code-block:: python - from cosmos import DbtDag + from cosmos import DbtDag, RenderConfig jaffle_shop = DbtDag( - # ... - select={"configs": ["tags:daily"]}, + render_config=RenderConfig( + select=["tag:my_tag"], + ) ) .. code-block:: python @@ -29,8 +37,9 @@ Examples: from cosmos import DbtDag jaffle_shop = DbtDag( - # ... - select={"configs": ["schema:prod"]}, + render_config=RenderConfig( + select=["config.schema:prod"], + ) ) .. code-block:: python @@ -38,6 +47,7 @@ Examples: from cosmos import DbtDag jaffle_shop = DbtDag( - # ... - select={"paths": ["analytics/tables"]}, + render_config=RenderConfig( + select=["path:analytics/tables"], + ) ) diff --git a/docs/configuration/testing-behavior.rst b/docs/configuration/testing-behavior.rst index 7a3698d82..951c90ca5 100644 --- a/docs/configuration/testing-behavior.rst +++ b/docs/configuration/testing-behavior.rst @@ -6,7 +6,13 @@ Testing Behavior Testing Configuration --------------------- -By default, Cosmos will add a test after each model. This can be overridden using the ``test_behavior`` field. 
The options are: +By default, Cosmos will add a test after each model. This can be overridden using the ``test_behavior`` field in the ``RenderConfig`` object. +Note that this behavior is different from dbt's default behavior, which runs all tests after all models have been run. +Cosmos defaults to running tests after each model to take a "fail-fast" approach to testing. This means that if a model +runs with failing tests, the rest of the project is stopped and the failure is reported. This is in contrast to dbt's +default behavior, which runs all models and tests, and then reports all failures at the end. + +Cosmos supports the following test behaviors: - ``after_each`` (default): turns each model into a task group with two steps: run the model, and run the tests - ``after_all``: each model becomes a single task, and the tests only run if all models are run successfully @@ -16,11 +22,13 @@ Example: .. code-block:: python - from cosmos import DbtTaskGroup + from cosmos import DbtTaskGroup, RenderConfig + from cosmos.constants import TestBehavior jaffle_shop = DbtTaskGroup( - # ... - test_behavior="snowflake_default", + render_config=RenderConfig( + test_behavior=TestBehavior.AFTER_ALL, + ) ) diff --git a/docs/getting_started/astro.rst b/docs/getting_started/astro.rst index 10b380cd9..a3fa14577 100644 --- a/docs/getting_started/astro.rst +++ b/docs/getting_started/astro.rst @@ -65,13 +65,13 @@ For example, if you wanted to put your dbt project in the ``/usr/local/airflow/d .. code-block:: python - from cosmos import DbtDag + from cosmos import DbtDag, ProjectConfig my_cosmos_dag = DbtDag( - roject_config=ProjectConfig( + project_config=ProjectConfig( dbt_project_path="/usr/local/airflow/dags/my_dbt_project", ), - ..., + # ..., ) Create a dagfile @@ -81,7 +81,8 @@ In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and c .. 
code-block:: python - from cosmos import DbtDag + from cosmos import DbtDag, ProjectConfig, ProfileConfig, ExecutionConfig + from cosmos.profiles import PostgresUserPasswordProfileMapping profile_config = ProfileConfig( profile_name="default", @@ -97,6 +98,9 @@ In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and c "/usr/local/airflow/dags/my_dbt_project", ), profile_config=profile_config, + execution_config=ExecutionConfig( + dbt_executable_path=f"{os.environ['AIRFLOW_HOME']}/dbt_venv/bin/dbt", + ), # normal dag parameters schedule_interval="@daily", start_date=datetime(2023, 1, 1), diff --git a/docs/getting_started/execution-modes.rst b/docs/getting_started/execution-modes.rst index 49cf93111..c155134ca 100644 --- a/docs/getting_started/execution-modes.rst +++ b/docs/getting_started/execution-modes.rst @@ -55,10 +55,10 @@ When using the ``local`` execution mode, Cosmos converts Airflow Connections int Example of how to use, for instance, when ``dbt`` was installed together with Cosmos: - .. literalinclude:: ../../dev/dags/basic_cosmos_dag.py - :language: python - :start-after: [START local_example] - :end-before: [END local_example] +.. literalinclude:: ../../dev/dags/basic_cosmos_dag.py + :language: python + :start-after: [START local_example] + :end-before: [END local_example] Virtualenv @@ -100,7 +100,9 @@ Example DAG: docker_cosmos_dag = DbtDag( # ... - execution_mode="docker", + execution_config=ExecutionConfig( + execution_mode=ExecutionMode.DOCKER, + ), operator_args={ "image": "dbt-jaffle-shop:1.0.0", "network_mode": "bridge", @@ -132,7 +134,9 @@ Example DAG: docker_cosmos_dag = DbtDag( # ... 
- execution_mode="kubernetes", + execution_config=ExecutionConfig( + execution_mode=ExecutionMode.KUBERNETES, + ), operator_args={ "image": "dbt-jaffle-shop:1.0.0", "get_logs": True, diff --git a/docs/getting_started/gcc.rst b/docs/getting_started/gcc.rst index 3242885f8..00fa503a0 100644 --- a/docs/getting_started/gcc.rst +++ b/docs/getting_started/gcc.rst @@ -26,11 +26,13 @@ For example, if you wanted to put your dbt project in the ``/usr/local/airflow/d .. code-block:: python - from cosmos import DbtDag + from cosmos import DbtDag, ProjectConfig my_cosmos_dag = DbtDag( - dbt_project_dir="/usr/local/airflow/dags/my_dbt_project", - ..., + project_config=ProjectConfig( + dbt_project_path="/usr/local/airflow/dags/my_dbt_project", + ), + # ..., ) @@ -43,17 +45,27 @@ Make sure to rename the ```` value below to your adapter's Python .. code-block:: python - from cosmos import DbtDag + from cosmos import DbtDag, ProjectConfig, ProfileConfig, ExecutionConfig + from cosmos.constants import ExecutionMode + from cosmos.profiles import PostgresUserPasswordProfileMapping + + profile_config = ProfileConfig( + profile_name="default", + target_name="dev", + profile_mapping=PostgresUserPasswordProfileMapping( + conn_id="airflow_db", + profile_args={"schema": "public"}, + ), + ) my_cosmos_dag = DbtDag( - # dbt/cosmos-specific parameters - dbt_project_name="", - conn_id="airflow_db", - profile_args={ - "schema": "public", - }, - # cosmos virtualenv parameters - execution_mode="virtualenv", + project_config=ProjectConfig( + "", + ), + profile_config=profile_config, + execution_config=ExecutionConfig( + execution_mode=ExecutionMode.VIRTUALENV, + ), operator_args={ "py_system_site_packages": False, "py_requirements": [""], diff --git a/docs/getting_started/mwaa.rst b/docs/getting_started/mwaa.rst index f5182e1ee..0a3fa400e 100644 --- a/docs/getting_started/mwaa.rst +++ b/docs/getting_started/mwaa.rst @@ -70,11 +70,13 @@ For example, if you wanted to put your dbt project in the 
``/usr/local/airflow/d .. code-block:: python - from cosmos import DbtDag + from cosmos import DbtDag, ProjectConfig my_cosmos_dag = DbtDag( - dbt_project_dir="/usr/local/airflow/dags/my_dbt_project", - ..., + project_config=ProjectConfig( + dbt_project_path="/usr/local/airflow/dags/my_dbt_project", + ), + # ..., ) @@ -85,15 +87,23 @@ In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and c .. code-block:: python - from cosmos import DbtDag + from cosmos import DbtDag, ProjectConfig, ProfileConfig + from cosmos.profiles import PostgresUserPasswordProfileMapping + + profile_config = ProfileConfig( + profile_name="default", + target_name="dev", + profile_mapping=PostgresUserPasswordProfileMapping( + conn_id="airflow_db", + profile_args={"schema": "public"}, + ), + ) my_cosmos_dag = DbtDag( - # dbt/cosmos-specific parameters - dbt_project_name="", - conn_id="airflow_db", - profile_args={ - "schema": "public", - }, + project_config=ProjectConfig( + "", + ), + profile_config=profile_config, # normal dag parameters schedule_interval="@daily", start_date=datetime(2023, 1, 1), diff --git a/docs/getting_started/open-source.rst b/docs/getting_started/open-source.rst index 61b42f64d..2c44be4a6 100644 --- a/docs/getting_started/open-source.rst +++ b/docs/getting_started/open-source.rst @@ -36,29 +36,26 @@ For example, if you wanted to put your dbt project in the ``/usr/local/airflow/d .. 
code-block:: python - from cosmos import DbtDag - - my_cosmos_dag = DbtDag( - dbt_project_dir="/usr/local/airflow/dags/my_dbt_project", - ..., + from cosmos import DbtDag, ProjectConfig, ProfileConfig, ExecutionConfig + from cosmos.profiles import PostgresUserPasswordProfileMapping + + profile_config = ProfileConfig( + profile_name="default", + target_name="dev", + profile_mapping=PostgresUserPasswordProfileMapping( + conn_id="airflow_db", + profile_args={"schema": "public"}, + ), ) -Create a dagfile -~~~~~~~~~~~~~~~~ - -In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and create a new DAG instance. Make sure to use the ``dbt_executable_path`` argument to point to the virtual environment you created in step 1. - -.. code-block:: python - - from cosmos import DbtDag - my_cosmos_dag = DbtDag( - # dbt/cosmos-specific parameters - dbt_project_name="", - conn_id="airflow_db", - profile_args={ - "schema": "public", - }, + project_config=ProjectConfig( + "/usr/local/airflow/dags/my_dbt_project", + ), + profile_config=profile_config, + execution_config=ExecutionConfig( + dbt_executable_path=f"{os.environ['AIRFLOW_HOME']}/dbt_venv/bin/dbt", + ), # normal dag parameters schedule_interval="@daily", start_date=datetime(2023, 1, 1), From 8d9fd3030475bbf4032e6685e4717dc8db2ff92d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 26 Jul 2023 21:10:58 +0000 Subject: [PATCH 22/24] =?UTF-8?q?=F0=9F=8E=A8=20[pre-commit.ci]=20Auto=20f?= =?UTF-8?q?ormat=20from=20pre-commit.com=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/configuration/compiled-sql.rst | 4 +--- docs/configuration/parsing-methods.rst | 2 +- docs/configuration/project-config.rst | 12 ++++++------ docs/getting_started/execution-modes.rst | 4 ++-- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/docs/configuration/compiled-sql.rst 
b/docs/configuration/compiled-sql.rst index 67643d575..0618853f5 100644 --- a/docs/configuration/compiled-sql.rst +++ b/docs/configuration/compiled-sql.rst @@ -12,8 +12,6 @@ If you'd like to disable this feature, you can set ``should_store_compiled_sql=F from cosmos import DbtDag DbtDag( - operator_args={ - "should_store_compiled_sql": False - }, + operator_args={"should_store_compiled_sql": False}, # ..., ) diff --git a/docs/configuration/parsing-methods.rst b/docs/configuration/parsing-methods.rst index 8fece5260..34eeb4577 100644 --- a/docs/configuration/parsing-methods.rst +++ b/docs/configuration/parsing-methods.rst @@ -95,4 +95,4 @@ To use this: load_mode=LoadMode.CUSTOM, ) ..., - ) \ No newline at end of file + ) diff --git a/docs/configuration/project-config.rst b/docs/configuration/project-config.rst index fcb3bd565..9f00930ba 100644 --- a/docs/configuration/project-config.rst +++ b/docs/configuration/project-config.rst @@ -23,9 +23,9 @@ Project Config Example from cosmos.config import ProjectConfig config = ProjectConfig( - dbt_project_path='/path/to/dbt/project', - models_relative_path='models', - seeds_relative_path='data', - snapshots_relative_path='snapshots', - manifest_path='/path/to/manifests' - ) \ No newline at end of file + dbt_project_path="/path/to/dbt/project", + models_relative_path="models", + seeds_relative_path="data", + snapshots_relative_path="snapshots", + manifest_path="/path/to/manifests", + ) diff --git a/docs/getting_started/execution-modes.rst b/docs/getting_started/execution-modes.rst index c155134ca..a9eb5bf65 100644 --- a/docs/getting_started/execution-modes.rst +++ b/docs/getting_started/execution-modes.rst @@ -101,7 +101,7 @@ Example DAG: docker_cosmos_dag = DbtDag( # ... execution_config=ExecutionConfig( - execution_mode=ExecutionMode.DOCKER, + execution_mode=ExecutionMode.DOCKER, ), operator_args={ "image": "dbt-jaffle-shop:1.0.0", @@ -135,7 +135,7 @@ Example DAG: docker_cosmos_dag = DbtDag( # ... 
execution_config=ExecutionConfig( - execution_mode=ExecutionMode.KUBERNETES, + execution_mode=ExecutionMode.KUBERNETES, ), operator_args={ "image": "dbt-jaffle-shop:1.0.0", From 8f2ee871adffb479e0b1272e82924e264870e7ac Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 17:12:17 -0400 Subject: [PATCH 23/24] update readmes --- README.rst | 18 ++++++++++++------ docs/index.rst | 15 ++++++++++----- 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/README.rst b/README.rst index 823367da9..66a5e05a6 100644 --- a/README.rst +++ b/README.rst @@ -39,14 +39,23 @@ ___________________ You can render an Airflow Task Group using the ``DbtTaskGroup`` class. Here's an example with the jaffle_shop project: + .. code-block:: python from pendulum import datetime from airflow import DAG from airflow.operators.empty import EmptyOperator - from cosmos import DbtTaskGroup + from cosmos.task_group import DbtTaskGroup + profile_config = ProfileConfig( + profile_name="default", + target_name="dev", + profile_mapping=PostgresUserPasswordProfileMapping( + conn_id="airflow_db", + profile_args={"schema": "public"}, + ), + ) with DAG( dag_id="extract_dag", @@ -56,11 +65,8 @@ You can render an Airflow Task Group using the ``DbtTaskGroup`` class. Here's an e1 = EmptyOperator(task_id="pre_dbt") dbt_tg = DbtTaskGroup( - dbt_project_name="jaffle_shop", - conn_id="airflow_db", - profile_args={ - "schema": "public", - }, + project_config=ProjectConfig("jaffle_shop"), + profile_config=profile_config, ) e2 = EmptyOperator(task_id="post_dbt") diff --git a/docs/index.rst b/docs/index.rst index 4220f7668..6e63f5698 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -50,6 +50,14 @@ You can render an Airflow Task Group using the ``DbtTaskGroup`` class. 
Here's an from airflow.operators.empty import EmptyOperator from cosmos.task_group import DbtTaskGroup + profile_config = ProfileConfig( + profile_name="default", + target_name="dev", + profile_mapping=PostgresUserPasswordProfileMapping( + conn_id="airflow_db", + profile_args={"schema": "public"}, + ), + ) with DAG( dag_id="extract_dag", @@ -59,11 +67,8 @@ You can render an Airflow Task Group using the ``DbtTaskGroup`` class. Here's an e1 = EmptyOperator(task_id="pre_dbt") dbt_tg = DbtTaskGroup( - dbt_project_name="jaffle_shop", - conn_id="airflow_db", - profile_args={ - "schema": "public", - }, + project_config=ProjectConfig("jaffle_shop"), + profile_config=profile_config, ) e2 = EmptyOperator(task_id="post_dbt") From ef59a61e96b157613350ffa21e1afe1f112bdca1 Mon Sep 17 00:00:00 2001 From: Julian LaNeve Date: Wed, 26 Jul 2023 17:14:24 -0400 Subject: [PATCH 24/24] make pre commit happy --- docs/conf.py | 3 +-- docs/configuration/parsing-methods.rst | 6 +++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index e55ad1786..f3faf6f3a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,11 +1,10 @@ -# noqa import os import sys # Add the project root to the path so we can import the package sys.path.insert(0, os.path.abspath("../")) -from docs.generate_mappings import generate_mapping_docs +from docs.generate_mappings import generate_mapping_docs # noqa: E402 # Configuration file for the Sphinx documentation builder. 
# diff --git a/docs/configuration/parsing-methods.rst b/docs/configuration/parsing-methods.rst index 34eeb4577..3153956a3 100644 --- a/docs/configuration/parsing-methods.rst +++ b/docs/configuration/parsing-methods.rst @@ -50,7 +50,7 @@ To use this: render_config=RenderConfig( load_mode=LoadMode.DBT_MANIFEST, ) - ..., + # ..., ) ``dbt_ls`` @@ -72,7 +72,7 @@ To use this: render_config=RenderConfig( load_mode=LoadMode.DBT_LS, ) - ..., + # ..., ) @@ -94,5 +94,5 @@ To use this: render_config=RenderConfig( load_mode=LoadMode.CUSTOM, ) - ..., + # ..., )