diff --git a/.fossa.yml b/.fossa.yml
deleted file mode 100755
index f7edc69558..0000000000
--- a/.fossa.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-# Generated by FOSSA CLI (https://github.com/fossas/fossa-cli)
-# Visit https://fossa.com to learn more
-
-version: 2
-cli:
-  server: https://app.fossa.com
-  fetcher: custom
-  project: github.com/theupdateframework/python-tuf
-analyze:
-  modules:
-  - name: tuf
-    type: pip
-    target: .
-    path: .
-    options:
-      strategy: requirements
-      requirements: requirements.txt
diff --git a/.gitattributes b/.gitattributes
index 66709ac428..c35f57e223 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,3 +1,3 @@
-# Files that will always have LF line endings on checkout.
-tests/repository_data/** text eol=lf
-
+# All JSON files will always have LF line endings on checkout.
+# This prevents git from replacing line endings with CRLF on Windows.
+*.json text eol=lf
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index f4952bab42..ec5a47efab 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -1,8 +1,16 @@
 version: 2
 updates:
-- package-ecosystem: pip
+
+- package-ecosystem: "pip"
   directory: "/"
   schedule:
-    interval: daily
+    interval: "daily"
+    time: "10:00"
+  open-pull-requests-limit: 10
+
+- package-ecosystem: "github-actions"
+  directory: "/"
+  schedule:
+    interval: "daily"
     time: "10:00"
   open-pull-requests-limit: 10
diff --git a/.github/workflows/_test.yml b/.github/workflows/_test.yml
new file mode 100644
index 0000000000..328037863a
--- /dev/null
+++ b/.github/workflows/_test.yml
@@ -0,0 +1,90 @@
+on:
+  workflow_call:
+  # Permissions inherited from caller workflow
+
+
+jobs:
+  tests:
+    name: Tests
+    strategy:
+      fail-fast: false
+      # Run regular TUF tests on each OS/Python combination, plus special tests
+      # (sslib master) and linters on Linux/Python3.x only.
+      matrix:
+        python-version: ["3.7", "3.8", "3.9", "3.10"]
+        os: [ubuntu-latest, macos-latest, windows-latest]
+        toxenv: [py]
+        include:
+          - python-version: 3.x
+            os: ubuntu-latest
+            toxenv: with-sslib-master
+            experimental: true
+          - python-version: 3.x
+            os: ubuntu-latest
+            toxenv: lint
+
+    env:
+      # Set TOXENV env var to tell tox which testenv (see tox.ini) to use.
+      # NOTE: The matrix sets 'py' for the regular OS/Python runs; the
+      # 'include' entries above override it with 'with-sslib-master' and
+      # 'lint' for the special Linux-only runs.
+      TOXENV: ${{ matrix.toxenv }}
+
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      - name: Checkout TUF
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+        with:
+          python-version: ${{ matrix.python-version }}
+          cache: 'pip'
+          cache-dependency-path: 'requirements*.txt'
+
+      - name: Install dependencies
+        run: |
+          python3 -m pip install --upgrade pip
+          python3 -m pip install --upgrade tox coveralls
+
+      - name: Run tox (${{ env.TOXENV }})
+        # See TOXENV environment variable for the testenv to be executed here
+        run: tox
+
+      - name: Publish on coveralls.io
+        # A failure to publish coverage results on coveralls should not
+        # be a reason for a job failure.
+        continue-on-error: true
+        # TODO: Maybe make 'lint' a separate job instead of case handling here
+        if: ${{ env.TOXENV != 'lint' }}
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          COVERALLS_FLAG_NAME: ${{ runner.os }} / Python ${{ matrix.python-version }} / ${{ env.TOXENV }}
+          COVERALLS_PARALLEL: true
+        # Use cp workaround to publish coverage reports with relative paths
+        # FIXME: Consider refactoring the tests to not require the test
+        # aggregation script being invoked from the `tests` directory, so
+        # that `.coverage` is written to and .coveragerc can also reside in
+        # the project root directory as is the convention.
+        run: |
+          cp tests/.coverage .
+          coveralls --service=github --rcfile=tests/.coveragerc
+
+  coveralls-fin:
+    # Always run when all 'tests' jobs have finished even if they failed
+    # TODO: Replace always() with an 'at least one job succeeded' expression
+    if: always()
+    needs: tests
+    runs-on: ubuntu-latest
+    container: python:3-slim
+    steps:
+      - name: Install dependencies
+        run: |
+          python3 -m pip install --upgrade pip
+          python3 -m pip install --upgrade coveralls
+      - name: Finalize publishing on coveralls.io
+        continue-on-error: true
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: coveralls --finish
diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml
new file mode 100644
index 0000000000..ccc67798ad
--- /dev/null
+++ b/.github/workflows/cd.yml
@@ -0,0 +1,87 @@
+name: CD
+concurrency: cd
+
+on:
+  push:
+    tags:
+      - v*
+
+permissions:
+  contents: write
+
+jobs:
+  test:
+    uses: ./.github/workflows/_test.yml
+
+  build:
+    name: Build
+    runs-on: ubuntu-latest
+    needs: test
+    outputs:
+      release_id: ${{ steps.gh-release.outputs.id }}
+    steps:
+      - name: Checkout release tag
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
+        with:
+          ref: ${{ github.event.workflow_run.head_branch }}
+
+      - name: Set up Python
+        uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20
+        with:
+          python-version: '3.x'
+
+      - name: Install build dependency
+        run: python3 -m pip install --upgrade pip build
+
+      - name: Build binary wheel and source tarball
+        run: python3 -m build --sdist --wheel --outdir dist/ .
+
+      - id: gh-release
+        name: Publish GitHub release candidate
+        uses: softprops/action-gh-release@1e07f4398721186383de40550babbdf2b84acfc5
+        with:
+          name: ${{ github.ref_name }}-rc
+          tag_name: ${{ github.ref }}
+          body: "Release waiting for review..."
+          files: dist/*
+
+      - name: Store build artifacts
+        uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
+        # NOTE: The GitHub release page contains the release artifacts too, but using
+        # GitHub upload/download actions seems more robust: there is no need to compute
+        # download URLs, and tampering with artifacts between jobs is more limited.
+ with: + name: build-artifacts + path: dist + + release: + name: Release + runs-on: ubuntu-latest + needs: build + environment: release + steps: + - name: Fetch build artifacts + uses: actions/download-artifact@fb598a63ae348fa914e94cd0ff38f362e927b741 + with: + name: build-artifacts + path: dist + + - name: Publish binary wheel and source tarball on PyPI + uses: pypa/gh-action-pypi-publish@717ba43cfbb0387f6ce311b169a825772f54d295 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} + + - name: Finalize GitHub release + uses: actions/github-script@9ac08808f993958e9de277fe43a64532a609130e + with: + script: | + await github.rest.repos.updateRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + release_id: '${{ needs.build.outputs.release_id }}', + name: '${{ github.ref_name }}', + body: 'See [CHANGELOG.md](https://github.com/' + + context.repo.owner + '/' + context.repo.repo + + '/blob/${{ github.ref_name }}/docs/CHANGELOG.md) for details.' + }) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1355ab29de..87c8ccdbe6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,104 +1,16 @@ -name: Run TUF tests and linter +name: CI on: push: branches: - develop + pull_request: workflow_dispatch: -jobs: - build: - strategy: - fail-fast: false - # Run regular TUF tests on each OS/Python combination, plus special tests - # (sslib master) and linters on Linux/Python3.x only. - matrix: - python-version: [3.6, 3.7, 3.8, 3.9] - os: [ubuntu-latest, macos-latest, windows-latest] - toxenv: [py] - include: - - python-version: 3.x - os: ubuntu-latest - toxenv: with-sslib-master - experimental: true - # TODO: Change to 3.x once pylint fully supports Python 3.9 - - python-version: 3.8 - os: ubuntu-latest - toxenv: lint - - env: - # Set TOXENV env var to tell tox which testenv (see tox.ini) to use - # NOTE: The Python 2.7 runner has two Python versions on the path (see - # setup-python below), so we tell tox explicitly to use the 'py27' - # testenv. For all other runners the toxenv configured above suffices. - TOXENV: ${{ matrix.toxenv }} - - runs-on: ${{ matrix.os }} - - steps: - - name: Checkout TUF - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} +permissions: + contents: read - - name: Find pip cache dir - id: pip-cache - run: echo "::set-output name=dir::$(pip cache dir)" - - - name: pip cache - uses: actions/cache@v2 - with: - # Use the os dependent pip cache directory found above - path: ${{ steps.pip-cache.outputs.dir }} - # A match with 'key' counts as cache hit - key: ${{ runner.os }}-pip-${{ hashFiles('requirements*.txt') }} - # A match with 'restore-keys' is used as fallback - restore-keys: ${{ runner.os }}-pip- - - - name: Install dependencies - run: | - python3 -m pip install --upgrade pip - python3 -m pip install --upgrade tox coveralls - - - name: Run tox (${{ env.TOXENV }}) - # See TOXENV environment variable for the testenv to be executed here - run: tox - - - name: Publish on coveralls.io - # A failure to publish coverage results on coveralls should not - # be a reason for a job failure. 
- continue-on-error: true - # TODO: Maybe make 'lint' a separate job instead of case handling here - if: ${{ env.TOXENV != 'lint' }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - COVERALLS_FLAG_NAME: ${{ runner.os }} / Python ${{ matrix.python-version }} / ${{ env.TOXENV }} - COVERALLS_PARALLEL: true - # Use cp workaround to publish coverage reports with relative paths - # FIXME: Consider refactoring the tests to not require the test - # aggregation script being invoked from the `tests` directory, so - # that `.coverage` is written to and .coveragrc can also reside in - # the project root directory as is the convention. - run: | - cp tests/.coverage . - coveralls --service=github --rcfile=tests/.coveragerc - - coveralls-fin: - # Always run when all 'build' jobs have finished even if they failed - # TODO: Replace always() with a 'at least one job succeeded' expression - if: always() - needs: build - runs-on: ubuntu-latest - container: python:3-slim - steps: - - name: Finalize publishing on coveralls.io - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - python3 -m pip install --upgrade pip - python3 -m pip install --upgrade coveralls - coveralls --finish +jobs: + test: + uses: ./.github/workflows/_test.yml diff --git a/.github/workflows/maintainer-permissions-reminder.yml b/.github/workflows/maintainer-permissions-reminder.yml new file mode 100644 index 0000000000..c6b02aa166 --- /dev/null +++ b/.github/workflows/maintainer-permissions-reminder.yml @@ -0,0 +1,55 @@ +name: Maintainer review reminder + +on: + schedule: + - cron: '10 10 10 2 *' + workflow_dispatch: + +permissions: + issues: write + +jobs: + file-reminder-issue: + name: File issue to review maintainer permissions + runs-on: ubuntu-latest + steps: + - uses: actions/github-script@9ac08808f993958e9de277fe43a64532a609130e + with: + script: | + await github.rest.issues.create({ + owner: context.repo.owner, + repo: context.repo.repo, + title: "Yearly maintainer permissions review", + body: ` + This is a checklist for evaluating python-tuf maintainer accounts and permissions. This issue is automatically opened once a year. + + ### Tasks + + 1. Update this list to include any new services + 2. Evaluate the accounts and permissions for each service on the list. Some rules of thumb: + * Critical services should have a minimum of 3 _active_ maintainers/admins to prevent project lockout + * Each additional maintainer/admin increases the risk of project compromise: for this reason permissions should be removed if they are no longer used + * For services that are not frequently used, each maintainer/admin should check that they really are still able to authenticate to the service and confirm this in the comments + 3. Update MAINTAINERS.txt to reflect current permissions + 4. 
(Bonus) Update significant contributors in README.md#acknowledgements + + ### Critical services + + * [ ] **PyPI**: maintainer list is visible to everyone at https://pypi.org/project/tuf/ + * Only enough maintainers and org admins to prevent locking the project out + * [ ] **GitHub**: release environment reviewers listed in https://github.com/theupdateframework/python-tuf/settings/environments + * Maintainers who can approve releases to PyPI + * [ ] **GitHub**: permissions visible to admins at https://github.com/theupdateframework/python-tuf/settings/access + * "admin" permission: Only for maintainers and org admins who do project administration + * "push/maintain" permission: Maintainers who actively approve and merge PRs (+admins) + * "triage" permission: All contributors trusted to manage issues + + ### Other + + * [ ] **ReadTheDocs**: admin list is visible to everyone at https://readthedocs.org/projects/theupdateframework/ + * [ ] **Coveralls**: everyone with github "admin" permissions is a Coveralls admin: https://coveralls.io/github/theupdateframework/python-tuf + ` + }) + console.log("New issue created.") + + diff --git a/.github/workflows/specification-version-check.yml b/.github/workflows/specification-version-check.yml new file mode 100644 index 0000000000..717a6a47c6 --- /dev/null +++ b/.github/workflows/specification-version-check.yml @@ -0,0 +1,31 @@ +on: + schedule: + - cron: "0 13 * * *" + workflow_dispatch: + push: +name: Specification version check +jobs: + # Get the version of the TUF specification the project states it supports + get-supported-tuf-version: + runs-on: ubuntu-latest + outputs: + version: ${{ steps.get-version.outputs.version }} + steps: + - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b + - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 + - id: get-version + run: | + python3 -m pip install -e . 
+ script="from tuf.api.metadata import SPECIFICATION_VERSION; \ + print(f\"v{'.'.join(SPECIFICATION_VERSION)}\")" + ver=$(python3 -c "$script") + echo "::set-output name=version::$ver" + # Get the latest TUF specification release and open an issue (if needed) + specification-bump-check: + permissions: + contents: read + issues: read + needs: get-supported-tuf-version + uses: rdimitrov/specification/.github/workflows/check-latest-spec-version.yml@dimitrovr/spec-bump-workflow + with: + tuf-version: ${{needs.get-supported-tuf-version.outputs.version}} diff --git a/.gitignore b/.gitignore index e988195f05..ff032a6f68 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,10 @@ # root level directories -dist/* -build/* -env/* +dist/ +build/ +env/ # docs build directory -docs/build/* +docs/build/ # global file patterns *.log @@ -14,19 +14,20 @@ docs/build/* *.swp *.egg-info .coverage -.tox/* -tests/htmlcov/* +.tox/ +tests/htmlcov/ .DS_Store -.pybuild/* +.pybuild/ .python-version *~ *.tmp .pre-commit-config.yaml +.vscode # Debian generated files -debian/.debhelper/* +debian/.debhelper/ debian/*-stamp debian/files debian/*.debhelper debian/*.substvars -debian/python*-tuf/* +debian/python*-tuf/ diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 58dec99503..11d82d2ab5 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -7,13 +7,16 @@ version: 2 # Build documentation with Sphinx sphinx: - builder: html - configuration: docs/conf.py + builder: html + configuration: docs/conf.py + fail_on_warning: true # Optionally build your docs in additional formats such as PDF formats: [] # Optionally set the version of Python and requirements required to build your docs python: - install: - - requirements: requirements-docs.txt + install: + - requirements: requirements-docs.txt + - method: pip + path: . 
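For reference, the version-extraction one-liner in `specification-version-check.yml` above is easier to read unrolled. A minimal sketch of the same lookup, runnable anywhere `tuf` is installed (the example version components are illustrative):

```python
# Mirror of the workflow's inline script: read the supported TUF
# specification version from the installed package and format it as a
# tag such as "v1.0.30".
from tuf.api.metadata import SPECIFICATION_VERSION

# SPECIFICATION_VERSION is a sequence of version components,
# e.g. ["1", "0", "30"].
print(f"v{'.'.join(SPECIFICATION_VERSION)}")
```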
diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 6e2a7cbb01..0000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,29 +0,0 @@ -include LICENSE* -include README.md -include tox.ini -include tests/repository_data/keystore/delegation_key -include tests/repository_data/keystore/root_key* -include tests/repository_data/keystore/snapshot_key -include tests/repository_data/keystore/targets_key -include tests/repository_data/keystore/timestamp_key -include tests/ssl_certs/*.crt -include tests/ssl_certs/*.key - -recursive-include docs *.txt -recursive-include docs *.md -recursive-include docs *.rst -recursive-include docs/images *.png -recursive-include tuf/scripts *.py -recursive-include examples * -recursive-include tests *.py -recursive-include tests *.pem -recursive-include tests *.json -recursive-include tests *.txt -recursive-include tests *.cfg -recursive-include tests *.coveragerc -recursive-include tests *.gz -recursive-include tests *.pub -recursive-include tuf *.gitignore -recursive-include tuf *.md -recursive-include tuf *.rst -recursive-include tuf *.yml diff --git a/README.md b/README.md index 4429de9be9..03caec8e7c 100644 --- a/README.md +++ b/README.md @@ -1,41 +1,29 @@ # TUF A Framework for Securing Software Update Systems -![Build](https://github.com/theupdateframework/python-tuf/workflows/Run%20TUF%20tests%20and%20linter/badge.svg) -[![Coveralls](https://coveralls.io/repos/theupdateframework/tuf/badge.svg?branch=develop)](https://coveralls.io/r/theupdateframework/tuf?branch=develop) +![Build](https://github.com/theupdateframework/python-tuf/actions/workflows/ci.yml/badge.svg) +[![Coveralls](https://coveralls.io/repos/theupdateframework/python-tuf/badge.svg?branch=develop)](https://coveralls.io/r/theupdateframework/python-tuf?branch=develop) +[![Docs](https://readthedocs.org/projects/theupdateframework/badge/)](https://theupdateframework.readthedocs.io/) [![CII](https://bestpractices.coreinfrastructure.org/projects/1351/badge)](https://bestpractices.coreinfrastructure.org/projects/1351) [![PyPI](https://img.shields.io/pypi/v/tuf)](https://pypi.org/project/tuf/) ---------------------------- -This repository is the **reference implementation** of -[The Update Framework (TUF)](https://theupdateframework.github.io/). -It is written in Python and intended to conform to version 1.0 of the -[TUF specification](https://theupdateframework.github.io/specification/latest/). - -The repository currently includes two implementations: -1) A *legacy implementation*, with - [`tuf/client/updater.py`](tuf/client/updater.py) implementing the detailed - client workflow and [`tuf/repository_tool.py`](tuf/repository_tool.py) - providing a high-level interface for repository operations. - The legacy implementation is in use in production systems, but is [no longer - being actively worked on](docs/adr/0002-pre-1-0-deprecation-strategy.md). -2) A *modern implementation*. We are in the process of rewriting the reference - implementation in [modern Python](docs/adr/0001-python-version-3-6-plus.md) - to both: a) address scalability and integration issues identified in - supporting integration into the Python Package Index (PyPI), and other - large-scale repositories, and b) to ensure maintainability of the project. - This implementation consists of: - * a "low-level" metadata API, designed to provide easy and safe access to - TUF metadata and handle (de)serialization from/to files, provided in the - [`tuf/api/metadata.py`](tuf/api/metadata.py) module. 
- * an implementation of the detailed client workflow built on top of the - metadata API, provided in the - [`tuf/ngclient/updater.py`](tuf/ngclient/updater.py) module. - The modern implementation is not considered production ready and does not yet - provide any high-level support for implementing - [repository operations](https://theupdateframework.github.io/specification/latest/#repository-operations), - though the addition of API to support them is planned. - - +[The Update Framework (TUF)](https://theupdateframework.io/) is a framework for +secure content delivery and updates. It protects against various types of +supply chain attacks and provides resilience to compromise. This repository is a +**reference implementation** written in Python. It is intended to conform to +version 1.0 of the [TUF +specification](https://theupdateframework.github.io/specification/latest/). + +Python-TUF provides two APIs: + * [`tuf.api.metadata`](https://theupdateframework.readthedocs.io/en/latest/api/tuf.api.html), + a "low-level" API, designed to provide easy and safe access to TUF + metadata and to handle (de)serialization from/to files. + * [`tuf.ngclient`](https://theupdateframework.readthedocs.io/en/latest/api/tuf.ngclient.html), + a client implementation built on top of the metadata API. + +High-level support for implementing +[repository operations](https://theupdateframework.github.io/specification/latest/#repository-operations) +is planned but not yet provided: see [ADR 10](https://github.com/theupdateframework/python-tuf/blob/develop/docs/adr/0010-repository-library-design.md). The reference implementation strives to be a readable guide and demonstration for those working on implementing TUF in their own languages, environments, or @@ -59,26 +47,27 @@ by various tech companies and open source organizations. A variant of TUF called [Uptane](https://uptane.github.io/) is used to secure over-the-air updates in automobiles. -Please see the [TUF Introduction](docs/OVERVIEW.rst) and -[TUF's website](https://theupdateframework.com/) for more information about TUF! +Please see [TUF's website](https://theupdateframework.com/) for more information about TUF! Documentation ------------- -* [Introduction to TUF's Design](docs/OVERVIEW.rst) +* [Introduction to TUF's Design](https://theupdateframework.io/overview/) * [The TUF Specification](https://theupdateframework.github.io/specification/latest/) -* [Getting Started with the TUF Reference Implementation](docs/GETTING_STARTED.rst) -* [Governance](docs/GOVERNANCE.md) and [Maintainers](docs/MAINTAINERS.txt) +* [API Reference](https://theupdateframework.readthedocs.io/) +* [Usage examples](https://github.com/theupdateframework/python-tuf/tree/develop/examples/) +* [Governance](https://github.com/theupdateframework/python-tuf/blob/develop/docs/GOVERNANCE.md) +and [Maintainers](https://github.com/theupdateframework/python-tuf/blob/develop/docs/MAINTAINERS.txt) for the reference implementation -* [Miscellaneous Docs](docs/) +* [Miscellaneous Docs](https://github.com/theupdateframework/python-tuf/tree/develop/docs) Contact ------- -Please contact us via our [mailing -list](https://groups.google.com/forum/?fromgroups#!forum/theupdateframework). -Questions, feedback, and suggestions are welcomed on this low volume mailing -list. 
+Questions, feedback, and suggestions are welcomed on our low volume [mailing +list](https://groups.google.com/forum/?fromgroups#!forum/theupdateframework) or +the [#tuf](https://cloud-native.slack.com/archives/C8NMD3QJ3) channel on [CNCF +Slack](https://slack.cncf.io/). We strive to make the specification easy to implement, so if you come across any inconsistencies or experience any difficulty, do let us know by sending an @@ -101,7 +90,9 @@ PGP key fingerprint **E9C0 59EC 0D32 64FA B35F 94AD 465B F9F6 F8EB 475A**. Please do not use the GitHub issue tracker to submit vulnerability reports. The issue tracker is intended for bug reports and to make feature requests. Major feature requests, such as design changes to the specification, should -be proposed via a [TUF Augmentation Proposal](docs/TAP.rst) (TAP). +be proposed via a +[TUF Augmentation Proposal](https://theupdateframework.github.io/specification/latest/#tuf-augmentation-proposal-tap-support) +(TAP). Limitations ----------- @@ -114,22 +105,23 @@ License This work is [dual-licensed](https://en.wikipedia.org/wiki/Multi-licensing) and distributed under the (1) MIT License and (2) Apache License, Version 2.0. -Please see [LICENSE-MIT](LICENSE-MIT) and [LICENSE](LICENSE). +Please see [LICENSE-MIT](https://github.com/theupdateframework/python-tuf/blob/develop/LICENSE-MIT) +and [LICENSE](https://github.com/theupdateframework/python-tuf/blob/develop/LICENSE). Acknowledgements ---------------- This project is hosted by the Linux Foundation under the Cloud Native Computing -Foundation. TUF's early development was managed by -members of the [Secure Systems Lab](https://ssl.engineering.nyu.edu/) at [New -York University](https://engineering.nyu.edu/). We appreciate the efforts of -Konstantin Andrianov, Geremy Condra, Vladimir Diaz, Yuyu Zheng, Sebastien Awwad, -Santiago Torres-Arias, Trishank Kuppusamy, Zane Fisher, Pankhuri Goyal, Tian Tian, -Konstantin Andrianov, and Justin Samuel who are among those who helped significantly -with TUF's reference implementation. [Contributors](https://github.com/theupdateframework/python-tuf/blob/develop/docs/AUTHORS.txt) -and -[maintainers](https://github.com/theupdateframework/python-tuf/blob/develop/docs/MAINTAINERS.txt) +Foundation. TUF's early development was managed by members of the [Secure +Systems Lab](https://ssl.engineering.nyu.edu/) at [New York +University](https://engineering.nyu.edu/). We appreciate the efforts of all +[maintainers and emeritus +maintainers](https://github.com/theupdateframework/python-tuf/blob/develop/docs/MAINTAINERS.txt), +as well as the contributors Konstantin Andrianov, Kairo de Araujo, Ivana +Atanasova, Geremy Condra, Zane Fisher, Pankhuri Goyal, Justin Samuel, Tian +Tian, Martin Vrachev and Yuyu Zheng who are among those who helped +significantly with TUF's reference implementation. Maintainers and Contributors are governed by the [CNCF Community Code of Conduct](https://github.com/cncf/foundation/blob/master/code-of-conduct.md). 
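The two APIs named in the updated README compose as follows. A minimal sketch, assuming a repository served under the hypothetical `https://example.com` URLs, a local `./metadata` directory that already contains a trusted `root.json`, and a target named `file.txt`; method and parameter names follow the published API reference, so treat this as a sketch rather than canonical usage:

```python
from tuf.api.metadata import Metadata, Root
from tuf.ngclient import Updater

# Low-level metadata API: safe access to TUF metadata plus
# (de)serialization from/to files.
root_md = Metadata[Root].from_file("metadata/root.json")
print(root_md.signed.version, root_md.signed.expires)

# ngclient: the detailed client workflow built on top of the metadata API.
updater = Updater(
    metadata_dir="./metadata",
    metadata_base_url="https://example.com/metadata/",
    target_base_url="https://example.com/targets/",
    target_dir="./targets",
)
updater.refresh()                          # update top-level metadata
info = updater.get_targetinfo("file.txt")  # None if the target does not exist
if info is not None:
    local_path = updater.download_target(info)  # verified download
```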
diff --git a/docs/1.0.0-ANNOUNCEMENT.md b/docs/1.0.0-ANNOUNCEMENT.md new file mode 100644 index 0000000000..0020632b22 --- /dev/null +++ b/docs/1.0.0-ANNOUNCEMENT.md @@ -0,0 +1,41 @@ +# Announcing TUF 1.0.0 + +Python-TUF v1.0.0 is a rewritten stable reference implementation of the TUF +specification, which *currently* includes: +- a modern low-level [*metadata + API*](https://theupdateframework.readthedocs.io/en/latest/api/tuf.api.html) +- a fully specification-compliant [*updater + client*](https://theupdateframework.readthedocs.io/en/latest/api/tuf.ngclient.html), + serving as a more robust and yet more flexible stand-in replacement + for the legacy client updater + +For the reasons outlined in [ADR 10](https://github.com/theupdateframework/python-tuf/blob/develop/docs/adr/0010-repository-library-design.md +), this release *does not yet* include *repository tool*-like functionality. +However, the new *metadata API* makes it easy to replicate the desired +functionality tailored to the specific needs of any given repository (see +*Migration* for details). + +As discussed in [ADR 2](https://github.com/theupdateframework/python-tuf/blob/develop/docs/adr/0002-pre-1-0-deprecation-strategy.md), this +release *does not* include any legacy code, as its maintenance has become +infeasible for the python-tuf team. The pre-1.0.0 deprecation strategy from ADR +2 applies as follows: + +> *Bugs reported with tuf versions prior to 1.0.0 will likely not be addressed +directly by tuf’s maintainers. Pull Requests to fix bugs in the last release +prior to 1.0.0 will be considered, and merged (subject to normal review +processes). Note that there may be delays due to the lack of developer resources +for reviewing such pull requests.* + + +## Migration + +Given the clean cut with the legacy reference implementation, we provide the +following migration support: + +- detailed code documentation on + [https://theupdateframework.readthedocs.io](https://theupdateframework.readthedocs.io/) +- verbose [code examples](https://github.com/theupdateframework/python-tuf/tree/develop/examples) for *client updater* usage, and + repository-side operations based on the low-level *metadata API* +- individual migration support upon + [request](https://github.com/theupdateframework/python-tuf#contact) +- targeted migration support initiative for known users diff --git a/docs/AUTHORS.txt b/docs/AUTHORS.txt deleted file mode 100644 index 376f201ee0..0000000000 --- a/docs/AUTHORS.txt +++ /dev/null @@ -1,62 +0,0 @@ -The TUF project is managed by Justin Cappos at NYU (jcappos@nyu.edu). See GOVERNANCE.md for more information. 
- -Contributors: - -Organizations -------------- - -Advanced Telematic Systems -Datadog -Docker -Flynn -LEAP -OCaml -Quay by CoreOS - -Individuals ------------ - -Alan Castonguay -Andrew Meyer -Arturo Filastò -Benno Fünfstück -David Halls -David Lawrence -Diogo Monica -Eric Hartsuyker -Evan Cordell -Felix Wang -Geremy Condra -goldenMetteyya -Hannes Mehnert -Jerry Trieu -Johannes Dorfner -John Ward -Jonathan Rudenberg -Julian Hille -Justin Cappos -Justin Samuel -Konstantin Andrianov -Linda Vigdor -Lois DeLong -Lukas Puehringer -María José Barrera -Martin Peck -Max Goodman -Monzur Muhammad -Nektarios Tsoutsos -Nick Mathewson -Pankhuri Goyal -Riyaz Faizullabhoy -Roger Dingledine -Ruben Pollan -Santiago Torres -Sebastian Hahn -Sebastien Awwad -Tian Tian -Trishank Karthik Kuppusamy -Vladimir Diaz -Wilson Ding -Ying Li -Yuyu Zheng -Zane Fisher diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 0ceb224767..3c712ba93e 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -1,5 +1,212 @@ # Changelog + +## v1.1.0 + +This release contains major build improvements as well as fixes and +backwards-compatible API improvements. + +### Added +* build: Release process was moved to CD platform (#1946, #1971, #1976) +* build: Build is now reproducible thanks to Hatchling (#1896, #1900) +* build: Build results are now verifiable (#1913, #1926, #1947, #1979) +* build: test dependencies are now pinned for reproducibility (#1867, #1918) +* Metadata API: Validation is now possible during serialization (#1775) +* Infrastructure: Setup development blog (#1886, #1887) + +### Changed +* Metadata API: Supported specification version updated (#1908, #1960) +* Metadata API: unrecognized_fields annotation fix (#1950) +* Metadata API: Constructors are now easier to use (#1922) +* Metadata API: Logging and error message improvements (#1876) +* build: Include examples in source distribution (#1970) +* build: Updated pinned dependency versions +* tests: Various improvements (#1707, #1758, #1808, #1860, #1915, #1936, + #1953, #1954, #1955) + + +## v1.0.0 + +This release makes ngclient and the Metadata API the supported python-tuf APIs. +It also removes the legacy implementation as documented in the +[1.0.0 announcement](1.0.0-ANNOUNCEMENT.md): all library code is now contained +in `tuf.api` or `tuf.ngclient`. 
+
+### Added
+* tests: Extend testing (#1689, #1703, #1711, #1728, #1735, #1738,
+  #1742, #1766, #1777, #1809, #1831)
+
+### Changed
+* Metadata API: Disallow microseconds in expiry (#1712)
+* Metadata API: Preserve role keyid order (#1754)
+* Metadata API: Make exceptions more consistent (#1725, #1734, #1787, #1840,
+  #1836)
+* Metadata API: Update supported spec version to "1.0.28" (#1825)
+* Metadata API: Accept legacy spec version "1.0" (#1796)
+* Metadata API: Accept custom fields in Metadata (#1861)
+* ngclient: Remove temporary file in failure cases (#1757)
+* ngclient: Explicitly encode rolename in URL (#1759)
+* ngclient: Allow HTTP payload compression (#1774)
+* ngclient: Make exceptions more consistent (#1799, #1810)
+* docs: Improve documentation (#1744, #1749, #1750, #1755, #1771, #1776, #1772,
+  #1780, #1781, #1800, #1815, #1820, #1829, #1838, #1850, #1853, #1855, #1856,
+  #1868, #1871)
+* build: Various build infrastructure improvements (#1718, #1724, #1760, #1762,
+  #1767, #1803, #1830, #1832, #1837, #1839)
+* build: Stop supporting EOL Python 3.6 (#1783)
+* build: Update dependencies (#1809, #1827, #1834, #1863, #1865, #1870)
+
+### Removed
+* Remove all legacy code including old client, repository_tool, repository_lib
+  and the scripts (#1790)
+* Metadata API: Remove modification helper methods that are no longer necessary
+  (#1736, #1740, #1743)
+* tests: Remove client tests that were replaced with better ones (#1741)
+* tests: Stop using unittest_toolbox (#1792)
+* docs: Remove deprecated documentation (#1768, #1769, #1773, #1848)
+
+
+## v0.20.0
+
+*__NOTE:__ This will be the final release of python-tuf that includes the
+legacy implementation code. Please see the [*1.0.0
+announcement*](1.0.0-ANNOUNCEMENT.md) page for more details about the next
+release and the deprecation of the legacy implementation, including migration
+instructions.*
+
+### Added
+* metadata API: misc input validation (#1630, #1688, #1668, #1672, #1690)
+* doc: repository library design document and ADR (#1693)
+* doc: 1.0.0 announcement (#1706)
+* doc: misc docstrings in metadata API (#1620)
+* doc: repository and client examples (#1675, #1685, #1700)
+* test: ngclient key rotation (#1635, #1649, #1691)
+* test: ngclient top-level role update (#1636)
+* test: ngclient non-consistent snapshot (#1666, #1705)
+* test: more lint/type checks and auto-formatting (#1658, #1664, #1659, #1674,
+  #1677, #1687, #1699, #1701, #1708, #1710, #1720, #1726)
+* build: Python 3.10 support (#1628)
+
+### Changed
+* ngclient: misc API changes (#1604, #1731)
+* ngclient: avoid re-loading verified targets metadata (#1593)
+* ngclient: implicitly call refresh() (#1654)
+* ngclient: return loaded metadata (#1680)
+* ngclient: skip visited nodes on delegation tree traversal (#1683)
+* ngclient: remove URL normalisation (#1686)
+* build: modernise packaging configuration (#1626)
+* build: bump dependencies (#1609, #1611, #1616, #1621)
+* build: limit GitHub Action token visibility and permissions (#1652, #1663)
+* test: misc test changes (#1715, #1670, #1671, #1631, #1695, #1702)
+
+### Removed
+* doc: obsolete roadmap (#1698)
+
+## v0.19.0
+
+For users of legacy client (tuf.client module) this is purely a security fix
+release with no API or functionality changes. For ngclient (tuf.ngclient) and
+Metadata API (tuf.api.metadata), some API changes are included.
+
+**All users are advised to upgrade**.
+
+Note that python-tuf has required python>=3.5 since release 0.18.0.
+ +### Fixed +* GHSA-wjw6-2cqr-j4qr: Fix client side issue in both legacy client (tuf.client) + and ngclient (tuf.ngclient) where a malicious repository could trick client + to overwrite files outside the client metadata store during a metadata + update. The fix includes percent-encoding the metadata rolename before using + it as part of a filename + https://github.com/theupdateframework/python-tuf/security/advisories/GHSA-wjw6-2cqr-j4qr +* ngclient: Do not use urljoin to form metadata URL (included in + GHSA-wjw6-2cqr-j4qr) +* ngclient: Persist metadata safely (#1574) +* ngclient: Handle timeout on session.get() (#1588) + +### Added +* build: Dependabot now monitors GitHub Actions (#1572) +* tests: ngclient test improvements (#1564, #1569, #1587) +* Metadata API: Add TargetFile.from_file() (#1521) + +### Changed +* build: Bump dependency charset-normalizer (#1581, #1586) +* build: Bump dependency urllib3 (#1589) +* build: Bump dependency cryptography (#1596) +* Metadata API: Documentation improvements (#1533, #1590) +* Metadata API: change Timestamp meta API (#1446) +* Metadata API: change Delegations roles API (#1537) +* ngclient: Remove unnecessary sleep() (#1608) +* ngclient: Fix consistent targets URL resolution (#1591) +* ngclient: Don't use target path as local path (#1592) + +## v0.18.1 + +### Changed +* Update setup.cfg to not build universal wheels (#1566) + +## v0.18.0 + +0.18 is a big release with 3 main themes: +* Support only Python 3 and modernize the infrastructure accordingly +* Metadata API (a low-level API for metadata de/serialization and + modification) is now feature-complete for the client use cases +* ngclient (a new high-level client API) was added. ngclient should be + considered an unstable API and is not yet recommended for production + use. + +Additionally the Github project name changed: project is now "python-tuf" +instead of "tuf". Redirects are in place for the old name but updating links is +advised. 
+ +### Added +* Add ADR6: Where to implement serialization (#1270) +* Add ADR8: Unrecognized fields (#1343) +* Add ADR9: Refine reference implementation purpose (#1554) +* Add client Network IO abstraction (#1250, #1302) +* Add many features to Metadata API to support de/serializing + specification-compliant metadata, and safer access through API: + * Metadata.from_bytes()/to_bytes() (#1354, #1490) + * Key, Role (#1360, #1386, #1423, #1480, #1481, #1520) + * DelegationRole, Delegations (#1370, #1512) + * MetaFile, TargetFile (#1329, #1437, #1454, #1514) + * verification of threshold of signatures (#1435, #1436) + * expiration check method (#1347) + * support unrecognized fields in metadata (#1345) + * use Generics to improve static typing (#1457) +* Extensive Metadata API testing and validation + (#1359, #1416, #1416, #1430, #1449, #1450, #1451, #1460, #1466, #1511) +* Add ngclient: a new client library implementation + (#1408, #1448, #1463 #1467, #1470, #1474, #1501, #1509, #1519, #1524) +* Infrastructure improvements: + * mypy, black and isort integration (#1314, #1363, #1395, #1455, #1489) + * API reference documentation build (#1517) + +### Removed +* Remove Python 2 support (#1293) +* Remove direct dependency on six +* Remove obsolete reference to Thandy in a LICENSE file (#1472) + +### Changed +* Bump dependencies: + * Certifi + * Cryptography + * Idna + * Requests + * Securesystemslib + * Six + * Urllib3 +* Replace indirect dependency chardet with charset-normalizer +* Move Metadata API serialization to sub-package (#1279) +* Use SecureSystemslib Signer interface in Metadata API (#1272) +* Make imports compatible with vendoring (#1261) + +### Fixed +* 'ecdsa' is a supported key type (#1453) +* Fix various build infrastructure issues (#1289, #1295, #1321, #1327, #1364, + #1369, #1542) +* Test fixes (#1337, #1346) + ## v0.17.0 **NOTE**: this will be the final release of tuf that supports Python 2.7. This is because Python 2.7 was marked [end-of-life]( diff --git a/docs/CLI.md b/docs/CLI.md deleted file mode 100644 index c07b73be7c..0000000000 --- a/docs/CLI.md +++ /dev/null @@ -1,447 +0,0 @@ -# Command-Line Interface # - -The TUF command-line interface (CLI) requires a full -[TUF installation](INSTALLATION.rst). Be sure to include the installation of -extra dependencies and C extensions ( -```python3 -m pip install securesystemslib[crypto,pynacl]```). - -The use of the CLI is documented with examples below. - ----- -# Basic Examples # - -## Create a repository ## - -Create a TUF repository in the current working directory. A cryptographic key -is created and set for each top-level role. The written Targets metadata does -not sign for any targets, nor does it delegate trust to any roles. The -`--init` call will also set up a client directory. By default, these -directories will be `./tufrepo` and `./tufclient`. - -```Bash -$ repo.py --init -``` - -Optionally, the repository can be written to a specified location. -```Bash -$ repo.py --init --path -``` - -The default top-level key files created with `--init` are saved to disk -encrypted, with a default password of 'pw'. Instead of using the default -password, the user can enter one on the command line for each top-level role. -These optional command-line options also work with other CLI actions (e.g., -repo.py --add). -```Bash -$ repo.py --init [--targets_pw, --root_pw, --snapshot_pw, --timestamp_pw] -``` - - - -Create a bare TUF repository in the current working directory. 
A cryptographic -key is *not* created nor set for each top-level role. -```Bash -$ repo.py --init --bare -``` - - - -Create a TUF repository with [consistent -snapshots](https://github.com/theupdateframework/specification/blob/master/tuf-spec.md#7-consistent-snapshots) -enabled, where target filenames have their hash prepended (e.g., -`.README.txt`), and metadata filenames have their version numbers -prepended (e.g., `.snapshot.json`). -```Bash -$ repo.py --init --consistent -``` - - - -## Add a target file ## - -Copy a target file to the repo and add it to the Targets metadata (or the -Targets role specified in --role). More than one target file, or directory, -may be specified in --add. The --recursive option may be toggled to also -include files in subdirectories of a specified directory. The Snapshot -and Timestamp metadata are also updated and signed automatically, but this -behavior can be toggled off with --no_release. -```Bash -$ repo.py --add -$ repo.py --add [--recursive] -``` - -Similar to the --init case, the repository location can be chosen. -```Bash -$ repo.py --add --path -``` - - - -## Remove a target file ## - -Remove a target file from the Targets metadata (or the Targets role specified -in --role). More than one target file or glob pattern may be specified in ---remove. The Snapshot and Timestamp metadata are also updated and signed -automatically, but this behavior can be toggled off with --no_release. - -```Bash -$ repo.py --remove ... -``` - -Examples: - -Remove all target files, that match `foo*.tgz,` from the Targets metadata. -```Bash -$ repo.py --remove "foo*.tgz" -``` - -Remove all target files from the `my_role` metadata. -```Bash -$ repo.py --remove "*" --role my_role --sign tufkeystore/my_role_key -``` - - -## Generate key ## -Generate a cryptographic key. The generated key can later be used to sign -specific metadata with `--sign`. The supported key types are: `ecdsa`, -`ed25519`, and `rsa`. If a keytype is not given, an Ed25519 key is generated. - -If adding a top-level key to a bare repo (i.e., repo.py --init --bare), -the filenames of the top-level keys must be "root_key," "targets_key," -"snapshot_key," "timestamp_key." The filename can vary for any additional -top-level key. -```Bash -$ repo.py --key -$ repo.py --key -$ repo.py --key [--path --pw [my_password], - --filename ] -``` - -Instead of using a default password, the user can enter one on the command -line or be prompted for it via password masking. -```Bash -$ repo.py --key ecdsa --pw my_password -``` - -```Bash -$ repo.py --key rsa --pw -Enter a password for the RSA key (...): -Confirm: -``` - - - -## Sign metadata ## -Sign, with the specified key(s), the metadata of the role indicated in --role. -The Snapshot and Timestamp role are also automatically signed, if possible, but -this behavior can be disabled with --no_release. -```Bash -$ repo.py --sign ... [--role , --path ] -``` - -For example, to sign the delegated `foo` metadata: -```Bash -$ repo.py --sign --role foo -``` - - - -## Trust keys ## - -The Root role specifies the trusted keys of the top-level roles, including -itself. The --trust command-line option, in conjunction with --pubkeys and ---role, can be used to indicate the trusted keys of a role. 
- -```Bash -$ repo.py --trust --pubkeys --role -``` - -For example: -```Bash -$ repo.py --init --bare -$ repo.py --trust --pubkeys tufkeystore/my_key.pub tufkeystore/my_key_too.pub - --role root -``` - - - -### Distrust keys ### - -Conversely, the Root role can discontinue trust of specified key(s). - -Example of how to discontinue trust of a key: -```Bash -$ repo.py --distrust --pubkeys tufkeystore/my_key_too.pub --role root -``` - - - -## Delegations ## - -Delegate trust of target files from the Targets role (or the one specified in ---role) to some other role (--delegatee). --delegatee is trusted to sign for -target files that match the delegated glob pattern(s). The --delegate option -does not create metadata for the delegated role, rather it updates the -delegator's metadata to list the delegation to --delegatee. The Snapshot and -Timestamp metadata are also updated and signed automatically, but this behavior -can be toggled off with --no_release. - -```Bash -$ repo.py --delegate ... --delegatee --pubkeys - ... [--role --terminating --threshold ---sign ] -``` - -For example, to delegate trust of `foo*.gz` packages to the `foo` role: - -``` -$ repo.py --delegate "foo*.tgz" --delegatee foo --pubkeys tufkeystore/foo.pub -``` - - - -## Revocations ## - -Revoke trust of target files from a delegated role (--delegatee). The -"targets" role performs the revocation if --role is not specified. The ---revoke option does not delete the metadata belonging to --delegatee, instead -it removes the delegation to it from the delegator's (or --role) metadata. The -Snapshot and Timestamp metadata are also updated and signed automatically, but -this behavior can be toggled off with --no_release. - - -```Bash -$ repo.py --revoke --delegatee [--role ---sign ] -``` - - - -## Verbosity ## - -Set the verbosity of the logger (2, by default). The lower the number, the -greater the verbosity. Logger messages are saved to `tuf.log` in the current -working directory. -```Bash -$ repo.py --verbose <0-5> -``` - - - -## Clean ## - -Delete the repo in the current working directory, or the one specified with -`--path`. Specifically, the `tufrepo`, `tufclient`, and `tufkeystore` -directories are deleted. - -```Bash -$ repo.py --clean -$ repo.py --clean --path -``` ----- - - - - - - - - -# Further Examples # - -## Basic Update Delivery ## - -Steps: - -(1) initialize a repo. - -(2) delegate trust of target files to another role. - -(3) add a trusted file to the delegated role. - -(4) fetch the trusted file from the delegated role. - -```Bash -Step (1) -$ repo.py --init - -Step (2) -$ repo.py --key ed25519 --filename mykey -$ repo.py --delegate "README.*" --delegatee myrole --pubkeys tufkeystore/mykey.pub -$ repo.py --sign tufkeystore/mykey --role myrole -Enter a password for the encrypted key (tufkeystore/mykey): -$ echo "my readme text" > README.txt - -Step (3) -$ repo.py --add README.txt --role myrole --sign tufkeystore/mykey -Enter a password for the encrypted key (tufkeystore/mykey): -``` - -Serve the repo -```Bash -$ python3 -m http.server 8001 -``` - -```Bash -Step (4) -$ client.py --repo http://localhost:8001 README.txt -$ tree . -. 
-├── tuf.log -├── tufrepo -│   └── metadata -│   ├── current -│   │   ├── 1.root.json -│   │   ├── myrole.json -│   │   ├── root.json -│   │   ├── snapshot.json -│   │   ├── targets.json -│   │   └── timestamp.json -│   └── previous -│   ├── 1.root.json -│   ├── root.json -│   ├── snapshot.json -│   ├── targets.json -│   └── timestamp.json -└── tuftargets - └── README.txt - - 5 directories, 13 files -``` - - -## Correcting a Key ## -The filename of the top-level keys must be "root_key," "targets_key," -"snapshot_key," and "root_key." The filename can vary for any additional -top-level key. - -Steps: - -(1) initialize a repo containing default keys for the top-level roles. -(2) distrust the default key for the root role. -(3) create a new key and trust its use with the root role. -(4) sign the root metadata file. - -```Bash -Step (1) -$ repo.py --init - -Step (2) -$ repo.py --distrust --pubkeys tufkeystore/root_key.pub --role root - -Step (3) -$ repo.py --key ed25519 --filename root_key -$ repo.py --trust --pubkeys tufkeystore/root_key.pub --role root - -Step (4) -$ repo.py --sign tufkeystore/root_key --role root -Enter a password for the encrypted key (tufkeystore/root_key): -``` - - -## More Update Delivery ## - -Steps: - -(1) create a bare repo. - -(2) add keys to the top-level roles. - -(3) delegate trust of particular target files to another role X, where role X -has a signature threshold 2 and is marked as a terminating delegation. The -keys for role X and Y should be created prior to performing the delegation. - -(4) Delegate from role X to role Y. - -(5) have role X sign for a file also signed by the Targets role, to demonstrate -the expected file that should be downloaded by the client. - -(6) perform an update. - -(7) halt the server, add README.txt to the Targets role, restart the server, -and fetch the Target's role README.txt. - -(8) Add LICENSE to 'role_y' and demonstrate that the client must not fetch it -because 'role_x' is a terminating delegation (and hasn't signed for it). 
- -```Bash -Steps (1) and (2) -$ repo.py --init --consistent --bare -$ repo.py --key ed25519 --filename root_key -$ repo.py --trust --pubkeys tufkeystore/root_key.pub --role root -$ repo.py --key ecdsa --filename targets_key -$ repo.py --trust --pubkeys tufkeystore/targets_key.pub --role targets -$ repo.py --key rsa --filename snapshot_key -$ repo.py --trust --pubkeys tufkeystore/snapshot_key.pub --role snapshot -$ repo.py --key ecdsa --filename timestamp_key -$ repo.py --trust --pubkeys tufkeystore/timestamp_key.pub --role timestamp -$ repo.py --sign tufkeystore/root_key --role root -Enter a password for the encrypted key (tufkeystore/root_key): -$ repo.py --sign tufkeystore/targets_key --role targets -Enter a password for the encrypted key (tufkeystore/targets_key): -``` - -```Bash -Steps (3) and (4) -$ repo.py --key ed25519 --filename key_x -$ repo.py --key ed25519 --filename key_x2 - -$ repo.py --delegate "README.*" "LICENSE" --delegatee role_x --pubkeys - tufkeystore/key_x.pub tufkeystore/key_x2.pub --threshold 2 --terminating -$ repo.py --sign tufkeystore/key_x tufkeystore/key_x2 --role role_x - -$ repo.py --key ed25519 --filename key_y - -$ repo.py --delegate "README.*" "LICENSE" --delegatee role_y --role role_x - --pubkeys tufkeystore/key_y.pub --sign tufkeystore/key_x tufkeystore/key_x2 - -$ repo.py --sign tufkeystore/key_y --role role_y -``` - -```Bash -Steps (5) and (6) -$ echo "role_x's readme" > README.txt -$ repo.py --add README.txt --role role_x --sign tufkeystore/key_x tufkeystore/key_x2 -``` - -Serve the repo -```Bash -$ python3 -m http.server 8001 -``` - -Fetch the role x's README.txt -```Bash -$ client.py --repo http://localhost:8001 README.txt -$ cat tuftargets/README.txt -role_x's readme -``` - - -```Bash -Step (7) -halt the server... - -$ echo "Target role's readme" > README.txt -$ repo.py --add README.txt - -restart the server... -``` - -```Bash -$ rm -rf tuftargets/ tuf.log -$ client.py --repo http://localhost:8001 README.txt -$ cat tuftargets/README.txt -Target role's readme -``` - -```Bash -Step (8) -$ echo "role_y's license" > LICENSE -$ repo.py --add LICENSE --role role_y --sign tufkeystore/key_y -``` - -```Bash -$ rm -rf tuftargets/ tuf.log -$ client.py --repo http://localhost:8001 LICENSE -Error: 'LICENSE' not found. -``` diff --git a/docs/CONTRIBUTING.rst b/docs/CONTRIBUTING.rst new file mode 100644 index 0000000000..94e10f6fb0 --- /dev/null +++ b/docs/CONTRIBUTING.rst @@ -0,0 +1,88 @@ +Instructions for contributors +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Contribute to python-tuf by submitting pull requests against the "develop" +branch of this repository. Detailed instructions are available in our +`development guidelines +`_. +All submitted code should follow our `style guidelines +`_ +and must be `unit tested <#unit-tests>`_. + +.. note:: + + Also see `development installation instructions `_. + +Testing +======= + +With `tox `_ the whole test suite can be executed in +a separate *virtual environment* for each supported Python version available on +the system. ``tuf`` and its dependencies are installed automatically for each +tox run. + +:: + + $ tox + +Below, you will see more details about each step managed by ``tox``, in case +you need debug/run outside ``tox``. + +Unit tests +---------- + +More specifically, the Update Framework's test suite can be executed by invoking +the test aggregation script inside the *tests* subdirectory. ``tuf`` and its +dependencies must already be installed. 
+:: + + $ cd tests/ + $ python3 aggregate_tests.py + + +Individual tests can also be executed. Optional ``-v`` flags can be added to +increase log level up to DEBUG (``-vvvv``). +:: + + $ cd tests/ + $ python3 test_updater_ng.py -v + + +Coverage +-------- + +To run the tests and measure their code coverage, the aggregation script can be +invoked with the ``coverage`` tool (requires installation of ``coverage``, e.g. +via PyPI). +:: + + $ cd tests/ + $ coverage run aggregate_tests.py && coverage report + + +Auto-formatting +--------------- + +CI/CD will check that new TUF code is formatted with `black +`__ and `isort `__. +Auto-formatting can be done on the command line: +:: + + $ black + $ isort + +or via source code editor plugin +[`black `__, +`isort `__] or +`pre-commit `__-powered git hooks +[`black `__, +`isort `__]. + + +DCO +=== + +Contributors must also indicate acceptance of the `Developer Certificate of +Origin `_ by appending a ``Signed-off-by: +Your Name `` to each git commit message (see `git commit +--signoff `_). diff --git a/docs/CONTRIBUTORS.rst b/docs/CONTRIBUTORS.rst deleted file mode 100644 index 302c8c205b..0000000000 --- a/docs/CONTRIBUTORS.rst +++ /dev/null @@ -1,202 +0,0 @@ -Instructions for Contributors -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Note: Development of TUF occurs on the "develop" branch of this repository. - -Contributions can be made by submitting GitHub pull requests. Submitted code -should follow our `code style guidelines -`_, which are -enforced with linters and auto-formatters (details below). - -Contributors must also indicate acceptance of the `Developer Certificate of -Origin `_ (DCO) when making a contribution -to the project. Acceptance of the DCO can be established by appending a -``Signed-off-by: Your Name `` to the Git commit message. -For example: - -:: - - Commit message - - Signed-off-by: Vladimir Diaz - -The required ``Signed-off-by`` text can be automatically appended to the commit -message via the ``-s`` command-line option to ``git commit``: - -:: - - $ git commit -s -m "Commit message" - -The full text of the DCO: - -:: - - Developer Certificate of Origin - Version 1.1 - - Copyright (C) 2004, 2006 The Linux Foundation and its contributors. - 1 Letterman Drive - Suite D4700 - San Francisco, CA, 94129 - - Everyone is permitted to copy and distribute verbatim copies of this - license document, but changing it is not allowed. - - Developer's Certificate of Origin 1.1 - - By making a contribution to this project, I certify that: - - (a) The contribution was created in whole or in part by me and I have the - right to submit it under the open source license indicated in the file; or - - (b) The contribution is based upon previous work that, to the best of my - knowledge, is covered under an appropriate open source license and I have - the right under that license to submit that work with modifications, - whether created in whole or in part by me, under the same open source - license (unless I am permitted to submit under a different license), as - indicated in the file; or - - (c) The contribution was provided directly to me by some other person who - certified (a), (b) or (c) and I have not modified it. - - (d) I understand and agree that this project and the contribution are - public and that a record of the contribution (including all personal - information I submit with it, including my sign-off) is maintained - indefinitely and may be redistributed consistent with this project or the - open source license(s) involved. 
- - -To facilitate development and installation of edited version of the code base, -developers are encouraged to install `Virtualenv `_, -which is a tool to create isolated Python environments. It includes -``pip`` and ``setuptools``, Python packages that can be used to -install TUF and its dependencies. All installation methods of -virtualenv are outlined in the `installation -section `_, -and instructions for installing locally from source are provided here: -:: - - $ curl -O https://pypi.python.org/packages/source/v/virtualenv/virtualenv-15.0.3.tar.gz - $ tar xvfz virtualenv-15.0.3.tar.gz - $ cd virtualenv-15.0.3 - $ python3 virtualenv.py myVE - - -Development Installation -======================== - -To work on the TUF project, it's best to perform a development install. - -1. First, `install non-Python dependencies `_. - -2. Then clone this repository: - -:: - - $ git clone https://github.com/theupdateframework/python-tuf - -3. Then perform a full, editable/development install. This will include all - optional cryptographic support, the testing/linting dependencies, etc. - With a development installation, modifications to the code in the current - directory will affect the installed version of TUF. - -:: - - $ python3 -m pip install -r requirements-dev.txt - - -Auto-formatting -=============== - -CI/CD will check that new TUF code is formatted with `black -`__ and `isort `__. -Auto-formatting can be done on the command line: -:: - - $ # TODO: configure black and isort args in pyproject.toml (see #1161) - $ black --line-length 80 tuf/api - $ isort --line-length 80 --profile black -p tuf tuf/api - -or via source code editor plugin -[`black `__, -`isort `__] or -`pre-commit `__-powered git hooks -[`black `__, -`isort `__]. - - -Testing -======= - -The Update Framework's unit test suite can be executed by invoking the test -aggregation script inside the *tests* subdirectory. ``tuf`` and its -dependencies must already be installed (see above). -:: - - $ cd tests - $ python3 aggregate_tests.py - -Individual tests can also be executed. Optional '-v' flags can be added to -increase log level up to DEBUG ('-vvvv'). -:: - - $ python3 test_updater.py # run a specific test file - $ python3 test_updater.py TestUpdater.test_4_refresh # run a specific test - $ python3 test_updater.py -vvvv TestUpdater.test_4_refresh # run test with DEBUG log level - - -All of the log levels and the corresponding options that could be used for testing are: - -.. list-table:: - :widths: 20 25 - :header-rows: 1 - - * - Option - - Log Level - * - default (no argument passed) - - ERROR (test names are not printed) - * - `-v` - - ERROR (test names are printed at this level and above) - * - `-vv` - - WARNING - * - `-vvv` - - INFO - * - `-vvvv` - - DEBUG - - -To run the tests and measure their code coverage, the aggregation script can be -invoked with the ``coverage`` tool (requires installation of ``coverage``, e.g. -via PyPI). -:: - - $ coverage run aggregate_tests.py && coverage report - - -To develop and test ``tuf`` with above commands alongside its in-house dependency -`securesystemslib `_, -it is recommended to first make an editable install of ``tuf`` (in -a *venv*), and then install ``securesystemslib`` in editable mode too (in the same *venv*). -:: - - $ cd path/to/tuf - $ python3 -m pip install -r requirements-dev.txt - $ cd path/to/securesystemslib - $ python3 -m pip install -r requirements-dev.txt - - -With `tox `_ the test suite can be executed in a -separate *venv* for each supported Python version. 
While the supported -Python versions must already be available, ``tox`` will install ``tuf`` and its -dependencies anew in each environment. -:: - - $ tox - - -An additional non-default ``tox`` environment is available and can be used to -test ``tuf`` against the tip of development of ``securesystemslib`` on GitHub, -to e.g. prepare the former for a new release of the latter. -:: - - $ tox -e with-sslib-master diff --git a/docs/GETTING_STARTED.rst b/docs/GETTING_STARTED.rst deleted file mode 100644 index fad0d847fb..0000000000 --- a/docs/GETTING_STARTED.rst +++ /dev/null @@ -1,10 +0,0 @@ -Getting Started ---------------- - -- `Overview of TUF `_ -- `Installation `_ -- Beginner Tutorials (using the basic command-line interface): - - `Quickstart `_ - - `CLI Documentation and Examples `_ -- `Advanced Tutorial `_ -- `Guidelines for Contributors `_ diff --git a/docs/GOVERNANCE.md b/docs/GOVERNANCE.md index 627c7c83f2..75d9a16e21 100644 --- a/docs/GOVERNANCE.md +++ b/docs/GOVERNANCE.md @@ -10,16 +10,16 @@ The project is maintained by the people indicated in review GitHub pull requests and (2) open issues or [submit vulnerability reports](https://github.com/theupdateframework/python-tuf#security-issues-and-bugs). A maintainer has the authority to approve or reject pull requests submitted by -contributors. +contributors. More significant changes in the project, such as those that require a TAP or -changes in governance, are guided by a maintainer called the Consensus -Builder (CB). The project's Consensus Builder (CB) is Justin Cappos +changes in governance, are guided by a maintainer called the Consensus +Builder (CB). The project's Consensus Builder (CB) is Justin Cappos , who has a lifetime appointment. ## Contributions [A contributor can submit GitHub pull -requests](CONTRIBUTORS.rst) +requests](CONTRIBUTING.rst) to the project's repositories. They must follow the project's [code of conduct](CODE-OF-CONDUCT.md), the [developer certificate of origin](https://developercertificate.org/), the [code style @@ -54,7 +54,7 @@ The CB has the authority to add or remove maintainers. ## Changes in governance -The CB supervises changes in governance, but a majority of maintainers must vote +1 on the PR. +The CB supervises changes in governance, but a majority of maintainers must vote +1 on the PR. ## Changes in the consensus builder diff --git a/docs/INSTALLATION.rst b/docs/INSTALLATION.rst index 6a85122e70..ae6d1d6f1f 100644 --- a/docs/INSTALLATION.rst +++ b/docs/INSTALLATION.rst @@ -1,93 +1,86 @@ Installation ============ -*pip* is the recommended installer for installing and managing Python packages. -The project can be installed either locally or from the Python Package Index. -All `TUF releases -`_ are cryptographically -signed, with GPG signatures available on both GitHub and `PyPI -`_. PGP key information for our maintainers -is available on our `website -`_, on major keyservers, -and on the `maintainers page -`_. +All versions of ``python-tuf`` can be installed from +`PyPI `_ with +`pip `_. +:: -Release Verification --------------------- + python3 -m pip install tuf -Assuming you trust `the maintainer's PGP key -`_, -the detached ASC signature can be downloaded and verified. For example:: +By default tuf is installed as pure python package with limited cryptographic +abilities. See `Install with full cryptographic abilities`_ for more options. 
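+
+As a quick sanity check of the default installation, the sketch below runs a
+complete client workflow against a hypothetical repository. This is a minimal
+sketch, not an official example: it assumes the ``tuf.ngclient`` API of recent
+releases (method names have changed between releases, e.g. older versions used
+``get_one_valid_targetinfo``), a trusted ``root.json`` already placed in the
+metadata directory, and placeholder ``example.com`` URLs that must be replaced
+with those of a real repository.
+::
+
+   # Minimal sketch of a basic client workflow (assumptions noted above).
+   import os
+   from tuf.ngclient import Updater
+
+   # Assumption: a trusted root.json was obtained out of band and stored here.
+   metadata_dir = os.path.expanduser("~/.local/share/tuf-demo")
+
+   updater = Updater(
+       metadata_dir=metadata_dir,
+       metadata_base_url="https://example.com/metadata/",  # placeholder URL
+       target_base_url="https://example.com/targets/",     # placeholder URL
+       target_dir=metadata_dir,
+   )
+   updater.refresh()                          # fetch and verify top-level metadata
+   info = updater.get_targetinfo("demo.txt")  # trusted length/hashes, or None
+   if info is not None:
+       path = updater.download_target(info)   # download and verify the target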
- $ gpg --verify securesystemslib-0.10.8.tar.gz.asc
- gpg: assuming signed data in 'securesystemslib-0.10.8.tar.gz'
- gpg: Signature made Wed Nov 8 15:21:47 2017 EST
- gpg: using RSA key 3E87BB339378BC7B3DD0E5B25DEE9B97B0E2289A
- gpg: Good signature from "Vladimir Diaz (Vlad) " [ultimate]

+Install with full cryptographic abilities
+-----------------------------------------
+Default installation supports signature verification only, using a pure Python
+*ed25519* implementation. While this allows you to operate a *basic client* on
+almost any computing device, you will need additional cryptographic abilities
+for *repository* code, i.e. key and signature generation, additional
+algorithms, and more performant backends. Opt-in is available via
+``securesystemslib``.

-Simple Installation
--------------------

+.. note::

-If you are only using ed25519-based cryptography, you can employ a pure-Python
-installation, done simply with one of the following commands:

+   Please consult the underlying crypto backend installation docs --
+   `cryptography `_ and
+   `pynacl `_ --
+   for possible system dependencies.

-Installing from Python Package Index (https://pypi.python.org/pypi).
-(Note: Please use "python3 -m pip install --no-use-wheel tuf" if your version
-of pip <= 1.5.6)::

+::

-   $ python3 -m pip install tuf

+   python3 -m pip install securesystemslib[crypto,pynacl] tuf

-**Alternatively**, if you wish to install from a GitHub release you've already
-downloaded, or a package you obtained in another way, you can instead:
-
-Install from a local source archive::
-
-   $ python3 -m pip install 
-
-Or install from the root directory of the unpacked archive::
-
-   $ python3 -m pip install .
-
-
-
-Install with More Cryptographic Flexibility
--------------------------------------------
+Install for development
+-----------------------

-By default, C extensions are not installed and only Ed25519 signatures can
-be verified, in pure Python. To fully support RSA, Ed25519, ECDSA, and
-other crypto, you must install the extra dependencies declared by
-securesystemslib. **Note**: that may require non-Python dependencies, so if
-you encounter an error attempting this pip command, see
-`more instructions below <#non-python-dependencies>`_). ::
+To install tuf in editable mode together with development dependencies,
+`clone `_ the
+`python-tuf repository `_
+from GitHub, change into the project root directory, and install with pip
+(using `venv `_ is recommended).

-   $ python3 -m pip install securesystemslib[crypto,pynacl] tuf

+.. note::

+   Development installation will `Install with full cryptographic abilities`_.
+   Please check above for possible system dependencies.

+::

-Non-Python Dependencies
------------------------

+   python3 -m pip install -r requirements-dev.txt

-If you encounter errors during installation, you may be missing
-certain system libraries.
-For example, PyNaCl and Cryptography -- two libraries used in the full
-installation to support certain cryptographic functions -- may require FFI
-(Foreign Function Interface) development header files.

+Verify release signatures
+-------------------------

-Debian-based distributions can install the necessary header libraries with apt::

+Releases on PyPI are signed with a maintainer key using
+`gpg `_ (see
+`MAINTAINERS.txt `_
+for key fingerprints). Signatures can be downloaded from the
+`GitHub release `_
+page (look for *\*.asc* files in the *Assets* section).
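+
+The manual verification steps shown below can also be scripted. The following
+is a rough sketch, not an official tool of this project: it assumes ``gpg``
+and ``pip`` are installed, that the maintainer key was already imported and
+its fingerprint compared against ``MAINTAINERS.txt``, and it reuses the
+version ``0.20.0`` from the manual example below.
+::
+
+   # Sketch: download a wheel and its detached signature, then verify.
+   import subprocess
+   import urllib.request
+
+   version = "0.20.0"  # assumption: mirrors the manual example below
+   wheel = f"tuf-{version}-py3-none-any.whl"
+   asc_url = ("https://github.com/theupdateframework/python-tuf/"
+              f"releases/download/v{version}/{wheel}.asc")
+
+   # Wheel from PyPI, detached signature from the GitHub release page
+   subprocess.run(["python3", "-m", "pip", "download", "--no-deps",
+                   f"tuf=={version}"], check=True)
+   urllib.request.urlretrieve(asc_url, f"{wheel}.asc")
+
+   # gpg exits non-zero (raising CalledProcessError) on a bad signature
+   subprocess.run(["gpg", "--verify", f"{wheel}.asc", wheel], check=True)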
- $ apt-get install build-essential libssl-dev libffi-dev python-dev

+The code below shows how to verify the signature of a
+`built `_ distribution,
+signed by the maintainer *Lukas Pühringer*. It works the
+same way for `source `_ distributions.

-Fedora-based distributions can instead install these libraries with dnf::

+::

- $ dnf install libffi-devel redhat-rpm-config openssl-devel

+   # Get wheel from PyPI and signature from GitHub
+   python3 -m pip download --no-deps tuf==0.20.0
+   wget https://github.com/theupdateframework/python-tuf/releases/download/v0.20.0/tuf-0.20.0-py3-none-any.whl.asc

-OS X users can install these header libraries with the `Homebrew `_
-package manager, among other options::

+   # Get public key, compare fingerprint in MAINTAINERS.txt, and verify with gpg
+   gpg --recv-keys 89A2AD3C07D962E8
+   gpg --verify tuf-0.20.0-py3-none-any.whl.asc

- $ brew install python3
- $ brew install libffi

+   # Output:
+   # gpg: assuming signed data in 'tuf-0.20.0-py3-none-any.whl'
+   # gpg: Signature made Thu Dec 16 09:21:38 2021 CET
+   # gpg: using RSA key 8BA69B87D43BE294F23E812089A2AD3C07D962E8
+   # gpg: Good signature from "Lukas Pühringer " [ultimate]
diff --git a/docs/MAINTAINERS.txt b/docs/MAINTAINERS.txt
index ba87ff01b3..b6515b9ea9 100644
--- a/docs/MAINTAINERS.txt
+++ b/docs/MAINTAINERS.txt
@@ -10,11 +10,6 @@ Consensus Builder:
 Maintainers:
 
-  Sebastien Awwad
-  Email: sebastien.awwad@nyu.edu
-  GitHub username: @awwad
-  PGP fingerprint: C2FB 9C91 0758 B682 7BC4 3233 BC0C 6DED D5E5 CC03
-
   Marina Moore
   Email: mm9693@nyu.edu
   GitHub username: @mnm678
@@ -24,17 +19,12 @@ Maintainers:
   GitHub username: @trishankatdatadog
   PGP fingerprint: 8C48 08B5 B684 53DE 06A3 08FD 5C09 0ED7 318B 6C1E
   Keybase username: trishankdatadog
-  
+
   Lukas Puehringer
   Email: lukas.puehringer@nyu.edu
   GitHub username: @lukpueh
   PGP fingerprint: 8BA6 9B87 D43B E294 F23E 8120 89A2 AD3C 07D9 62E8
 
-  Santiago Torres-Arias
-  Email: santiago@nyu.edu
-  GitHub username: @SantiagoTorres
-  PGP fingerprint: 903B AB73 640E B6D6 5533 EFF3 468F 122C E816 2295
-
   Joshua Lock
   Email: jlock@vmware.com
   GitHub username: @joshuagl
@@ -46,6 +36,9 @@ Maintainers:
   GitHub username: @jku
   PGP fingerprint: 1343 C98F AB84 859F E5EC 9E37 0527 D8A3 7F52 1A2F
 
+Emeritus Maintainers:
+
+  Sebastien Awwad
+  Vladimir Diaz
   Teodora Sechkova
-  Email: tsechkova@vmware.com
-  GitHub username: @sechkova
+  Santiago Torres-Arias
diff --git a/docs/METADATA.md b/docs/METADATA.md
deleted file mode 100644
index 9f140fe8fe..0000000000
--- a/docs/METADATA.md
+++ /dev/null
@@ -1,91 +0,0 @@
-# Metadata
-
-Metadata files provide information that clients can use to make update decisions. Different metadata files provide different information. The various metadata files are signed by different keys as are indicated by the root role. The concept of roles allows TUF to only trust information that a role is trusted to provide.
-
-The signed metadata files always include the time they were created and their expiration dates. This ensures that outdated metadata will be detected and that clients can refuse to accept metadata older than that which they've already seen.
-
-All TUF metadata uses a subset of the JSON object format. When calculating the digest of an object, we use the [Canonical JSON](http://wiki.laptop.org/go/Canonical_JSON) format. Implementation-level detail about the metadata can be found in the [spec](docs/tuf-spec.txt).
-
-There are four required top-level roles and one optional top-level role, each with their own metadata file.
- -Required: - -* Root -* Targets -* Snapshot -* Timestamp - -Optional: - -* Mirrors (unimplemented) - -There may also be any number of delegated target roles. - -## Root Metadata (root.json) - -Signed by: Root role. - -Specifies the other top-level roles. When specifying these roles, the trusted keys for each role are listed along with the minimum number of those keys which are required to sign the role's metadata. We call this number the signature threshold. - -See [example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/root.json) of Root metadata. - -## Targets Metadata (targets.json) - -Signed by: Targets role. - -The targets.json metadata file lists hashes and sizes of target files. Target files are the actual files that clients are intending to download (for example, the software updates they are trying to obtain). - -This file can optionally define other roles to which it delegates trust. Delegating trust means that the delegated role is trusted for some or all of the target files available from the repository. When delegated roles are specified, they are specified in a similar way to how the Root role specifies the top-level roles: the trusted keys and signature threshold for each role is given. Additionally, one or more patterns are specified which indicate the target file paths for which clients should trust each delegated role. - -See [example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/targets.json) of Targets metadata. - -## Delegated Targets Metadata (role1.json) - -Signed by: A delegated targets role. - -The metadata files provided by delegated targets roles follow exactly the same format as the metadata file provided by the top-level Targets role. - -When the targets role delegates trust to other roles, each delegated role provides one signed metadata file. As is the -case with the directory structure of top-level metadata, the delegated files are relative to the base URL of metadata available from a given repository mirror. - -A delegated role file is located at: - -/DELEGATED_ROLE.json - -where DELEGATED_ROLE is the name of the delegated role that has been specified in targets.json. If this role further delegates trust to a role named ANOTHER_ROLE, that role's signed metadata file is made available at: - -/ANOTHER_ROLE.json - -See -[example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/role1.json) -of delegated Targets metadata and [example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/role2.json) of a nested delegation. - -## snapshot Metadata (snapshot.json) - -Signed by: Snapshot role. - -The snapshot.json metadata file lists the version, and optionally the file hashes and sizes, of the top-level targets metadata and all delegated targets metadata. This file ensures that clients will see a consistent view of the files on the repository. That is, metadata files (and thus target file) that existed on the repository at different times cannot be combined and presented to clients by an attacker. - -​See [example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/snapshot.json) of Snapshot metadata. - -## Timestamp Metadata (timestamp.json) - -Signed by: Timestamp role. - -The timestamp.json metadata file lists the hashes and size of the snapshot.json file. 
This is the first and potentially only file that needs to be downloaded when clients poll for the existence of updates. This file is frequently resigned and has a short expiration date, thus allowing clients to quickly detect if they are being prevented from obtaining the most recent metadata. An online key is generally used to automatically resign this file at regular intervals. - -There are two primary reasons why the timestamp.json file doesn't contain all of the information that the snapshot.json file does. - -* The timestamp.json file is downloaded very frequently and so should be kept as small as possible, especially considering that the snapshot.json file grows in size in proportion to the number of delegated target roles. -* As the Timestamp role's key is an online key and thus at high risk, separate keys should be used for signing the snapshot.json metadata file so that the Snapshot role's keys can be kept offline and thus more secure. - -See [example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/timestamp.json) of Timestamp metadata. - -## Mirrors Metadata (mirrors.json) - -Optionally signed by: Mirrors role. - -The mirrors.json file provides an optional way to provide mirror list updates to TUF clients. Mirror lists can alternatively be provided directly by the software update system and obtained in any way the system sees fit, including being hard coded if that is what an applications wants to do. - -No example available. At the time of writing, this hasn't been implemented in -TUF. Currently mirrors are specified by the client code. diff --git a/docs/OVERVIEW.rst b/docs/OVERVIEW.rst deleted file mode 100644 index 44f942c652..0000000000 --- a/docs/OVERVIEW.rst +++ /dev/null @@ -1,120 +0,0 @@ -What Is a Software Update System? ---------------------------------- - -Generally, a software update system is an application (or part of an -application) running on a client system that obtains and installs -software. These systems typically update the applications installed -on client systems to introduce new features, enhancements, and security -fixes. - -Three major classes of software update systems are: - -- **Application updaters** which are used by applications to update - themselves. For example, Firefox updates itself through its own - application updater. - -- **Library package managers** such as those offered by many - programming languages for installing additional libraries. These are - systems such as Python's pip/easy_install + PyPI, Perl's CPAN, - Ruby's RubyGems, and PHP's Composer. - -- **System package managers** used by operating systems to update and - install all of the software on a client system. Debian's APT, Red - Hat's YUM, and openSUSE's YaST are examples of these. - -Our Approach ------------- - -There are literally thousands of different software update systems in -common use today. (In fact the average Windows user has about `two -dozen `_ -different software updaters on their machine!) - -We are building a library that can be universally (and in most cases -transparently) used to secure software update systems. - -Overview --------- - -On the surface, the update procedure followed by a software update system can be regarded -as straightforward. Obtaining and installing an update just means: - -- Knowing when an update exists. -- Downloading the update. -- Applying the changes introduced by the update. 
- -The problem with this view is that it is only straightforward when there -are no malicious parties involved throughout the update procedure. If an attacker -is trying to interfere with these seemingly simple steps, there is plenty -that they can do. - -TUF is designed to perform the first two steps of the above update procedure, -while guarding against the majority of attacks that malicious actors have at -their disposal; especially those attacks that are overlooked by security-conscious -developers. - - -Background ----------- - -Let's assume you take the approach that most systems do (at least, the -ones that even try to be secure). You download both the file you want -and a cryptographic signature of the file. You already know which key -you trust to make the signature. You check that the signature is correct -and was made by this trusted key. All seems well, right? Wrong. You are -still at risk in many ways, including: - -- An attacker keeps giving you the same file, so you never realize - there is an update. -- An attacker gives you an older, insecure version of a file that you - already have, so you download that one and blindly use it thinking - it's newer. -- An attacker gives you a newer version of a file you have but it's not - the newest one. It's newer to you, but it may be insecure and - exploitable by the attacker. -- An attacker compromises the key used to sign these files and now you - download a malicious file that is properly signed. - -These are just some of the attacks software update systems are -vulnerable to when only using signed files. See -`Security `_ for a full list of attacks and updater -weaknesses TUF is designed to prevent. - -The following papers provide detailed information on securing software -updater systems, TUF's design and implementation details, attacks on -package managers, and package management security: - -- `Mercury: Bandwidth-Effective Prevention of Rollback Attacks Against Community Repositories - ` - -- `Diplomat: Using Delegations to Protect Community Repositories - ` - -- `Survivable Key Compromise in Software Update - Systems ` - -- `A Look In the Mirror: Attacks on Package - Managers ` - -- `Package Management - Security ` - -What TUF Does -------------- - -In order to securely download and verify target files, TUF requires a -few extra files to exist on a repository. These are called metadata -files. TUF metadata files contain additional information, including -information about which keys are trusted, the cryptographic hashes of -files, signatures on the metadata, metadata version numbers, and the -date after which the metadata should be considered expired. - -When a software update system using TUF wants to check for updates, it -asks TUF to do the work. That is, your software update system never has -to deal with this additional metadata or understand what's going on -underneath. If TUF reports back that there are updates available, your -software update system can then ask TUF to download these files. TUF -downloads them and checks them against the TUF metadata that it also -downloads from the repository. If the downloaded target files are -trustworthy, TUF hands them over to your software update system. See -`Metadata `_ for more information and examples. 
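
In code, the client workflow that this overview describes looked roughly as
follows with the legacy ``tuf.client.updater`` module. This is a minimal
sketch based on the removed tutorials, not a drop-in example: it assumes a
repository served at ``http://localhost:8001`` and a trusted root already
present under ``client/tufrepo/metadata/current/``.
::

   # Sketch of the legacy (pre-1.0) client API; paths and repository
   # layout are assumptions matching the removed Quickstart tutorial.
   import tuf.settings
   import tuf.client.updater

   tuf.settings.repositories_directory = "client"  # holds tufrepo/metadata/current

   mirrors = {"mirror1": {"url_prefix": "http://localhost:8001",
                          "metadata_path": "metadata",
                          "targets_path": "targets",
                          "confined_target_dirs": [""]}}

   updater = tuf.client.updater.Updater("tufrepo", mirrors)
   updater.refresh()                                      # update metadata first
   target = updater.get_one_valid_targetinfo("testfile")  # trusted file info
   updater.download_target(target, "tuftargets")          # fetch and verify
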
diff --git a/docs/QUICKSTART.md b/docs/QUICKSTART.md deleted file mode 100644 index 6d35fb1d7d..0000000000 --- a/docs/QUICKSTART.md +++ /dev/null @@ -1,149 +0,0 @@ -# Quickstart # - -In this quickstart tutorial, we'll use the basic TUF command-line interface -(CLI), which includes the `repo.py` script and the `client.py` script, to set -up a repository with an update and metadata about that update, then download -and verify that update as a client. - -Unlike the underlying TUF modules that the CLI uses, the CLI itself is a bit -bare-bones. Using the CLI is the easiest way to familiarize yourself with -how TUF works, however. It will serve as a very basic update system. - ----- - -**Step (0)** - Make sure TUF is installed. - -Make sure that TUF is installed, along with some of the optional cryptographic -libraries and C extensions. Try this command to do that: -`python3 -m pip install securesystemslib[colors,crypto,pynacl] tuf` - -If you run into errors during that pip command, please consult the more -detailed [TUF Installation Instructions](INSTALLATION.rst). (There are some -system libraries that you may need to install first.) - - -**Step (1)** - Create a basic repository and client. - -The following command will set up a basic update repository and basic client -that knows about the repository. `tufrepo`, `tufkeystore`, and -`tufclient` directories will be created in the current directory. - -```Bash -$ repo.py --init -``` - -Four sets of keys are created in the `tufkeystore` directory. Initial metadata -about the repository is created in the `tufrepo` directory, and also provided -to the client in the `tufclient` directory. - - -**Step (2)** - Add an update to the repository. - -We'll create a target file that will later be delivered as an update to clients. -Metadata about that file will be created and signed, and added to the -repository's metadata. - -```Bash -$ echo 'Test file' > testfile -$ repo.py --add testfile -$ tree tufrepo/ -tufrepo/ -├── metadata -│   ├── 1.root.json -│   ├── root.json -│   ├── snapshot.json -│   ├── targets.json -│   └── timestamp.json -├── metadata.staged -│   ├── 1.root.json -│   ├── root.json -│   ├── snapshot.json -│   ├── targets.json -│   └── timestamp.json -└── targets - └── testfile - - 3 directories, 11 files -``` - -The new file `testfile` is added to the repository, and metadata is updated in -the `tufrepo` directory. The Targets metadata (`targets.json`) now includes -the file size and hashes of the `testfile` target file, and this metadata is -signed by the Targets role's key, so that clients can verify that metadata -about `testfile` and then verify `testfile` itself. - - -**Step (3)** - Serve the repo. - -We'll host a toy http server containing the `testfile` update and the -repository's metadata. - -```Bash -$ cd "tufrepo/" -$ python3 -m http.server 8001 -``` - -**Step (4)** - Obtain and verify the `testfile` update on a client. - -The client can request the package `testfile` from the repository. TUF will -download and verify metadata from the repository as necessary to determine -what the trustworthy hashes and length of `testfile` are, then download -the target `testfile` from the repository and keep it only if it matches that -trustworthy metadata. - -```Bash -$ cd "../tufclient/" -$ client.py --repo http://localhost:8001 testfile -$ tree -. 
-├── tufrepo -│   └── metadata -│   ├── current -│   │   ├── 1.root.json -│   │   ├── root.json -│   │   ├── snapshot.json -│   │   ├── targets.json -│   │   └── timestamp.json -│   └── previous -│   ├── 1.root.json -│   ├── root.json -│   ├── snapshot.json -│   ├── targets.json -│   └── timestamp.json -└── tuftargets - └── testfile - - 5 directories, 11 files -``` - -Now that a trustworthy update target has been obtained, an updater can proceed -however it normally would to install or use the update. - ----- - -### Next Steps - -TUF provides functionality for both ends of a software update system, the -**update provider** and the **update client**. - -`repo.py` made use of `tuf.repository_tool`'s functionality for an update -provider, helping you produce and sign metadata about your updates. - -`client.py` made use of `tuf.client.updater`'s client-side functionality, -performing download and the critical verification steps for metadata and the -update itself. - -You can look at [CLI.md](CLI.md) to toy with the TUF CLI a bit more. -After that, try out using the underlying modules for a great deal more control. -The more detailed [Advanced Tutorial](TUTORIAL.md) shows you how to use the -underlying modules, `repository_tool` and `updater`. - -Ultimately, a sophisticated update client will use or re-implement those -underlying modules. The TUF design is intended to play well with any update -workflow. - -Please provide feedback or questions for this or other tutorials, or -TUF in general, by checking out -[our contact info](https://github.com/theupdateframework/python-tuf#contact), or -creating [issues](https://github.com/theupdateframework/python-tuf/issues) in this -repository! diff --git a/docs/RELEASE.md b/docs/RELEASE.md index ec1ad2a1e0..a0a8862027 100644 --- a/docs/RELEASE.md +++ b/docs/RELEASE.md @@ -1,36 +1,54 @@ # Release process -* Ensure you have a backup of all working files and then remove files not tracked by git - `git clean -xdf`. **NOTE**: this will delete all files in the tuf tree that aren't - tracked by git -* Ensure `docs/CHANGELOG.md` contains a one-line summary of each [notable + +**Prerequisites (one-time setup)** + + +1. Go to [PyPI management page](https://pypi.org/manage/account/#api-tokens) and create + an [API token](https://pypi.org/help/#apitoken) with its scope limited to the tuf project. +1. Go to [GitHub + settings](https://github.com/theupdateframework/python-tuf/settings/environments), + create an + [environment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment#creating-an-environment) + called `release` and configure [review + protection](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment#required-reviewers). +1. In the environment create a + [secret](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment#environment-secrets) + called `PYPI_API_TOKEN` and paste the token created above. + +## Release + +1. 
Ensure `docs/CHANGELOG.md` contains a one-line summary of each [notable
   change](https://keepachangelog.com/) since the prior release
-* Update `setup.py` and `tuf/__init__.py` to the new version number vA.B.C
-* Test packaging, uploading to Test PyPI and installing from a virtual environment
-  (ensure commands invoking `python` below are using Python 3)
-  * Remove existing dist build dirs
-  * Create source dist `python3 setup.py sdist`
-  * Create wheel (with 2 and 3 support) `python3 setup.py bdist_wheel --universal`
-  * Sign the dists `gpg --detach-sign -a dist/tuf-vA.B.C.tar.gz`
-  * Upload to test PyPI `twine upload --repository testpypi dist/*`
-  * Verify the uploaded package https://testpypi.python.org/pypi/tuf/
-* Create a PR with updated `CHANGELOG.md` and version bumps
-* Once the PR is merged, pull the updated `develop` branch locally
-* Create a signed tag matching the updated version number on the merge commit
+2. Update `tuf/__init__.py` to the new version number `A.B.C`
+3. Create a PR with updated `CHANGELOG.md` and version bumps
+
+➔ Review PR on GitHub
+
+4. Once the PR is merged, pull the updated `develop` branch locally
+5. Create a signed tag for the version number on the merge commit
   `git tag --sign vA.B.C -m "vA.B.C"`
-  * Push the tag to GitHub `git push origin vA.B.C`
-* Create a new release on GitHub, copying the `CHANGELOG.md` entries for the
-  release
-* Create a package for the formal release
-  (ensure commands invoking `python` below are using Python 3)
-  * Remove existing dist build dirs
-  * Create source dist `python3 setup.py sdist`
-  * Create wheel (with 2 and 3 support) `python3 setup.py bdist_wheel --universal`
-  * Sign source dist `gpg --detach-sign -a dist/tuf-vA.B.C.tar.gz`
-  * Sign wheel `gpg --detach-sign -a dist/tuf-vA.B.C-py2.py3-none-any.whl`
-  * Upload to test PyPI `twine upload --repository testpypi dist/*`
-  * Verify the uploaded package https://testpypi.python.org/pypi/tuf/
-  * Upload to PyPI `twine upload dist/*`
-* Attach the signed dists to the release on GitHub
-* Announce the release on [#tuf on CNCF Slack](https://cloud-native.slack.com/archives/C8NMD3QJ3)
-* Ensure [POUF 1](https://github.com/theupdateframework/taps/blob/master/POUFs/reference-POUF/pouf1.md), for the reference implementation, is up-to-date
+6. Push the tag to GitHub `git push origin vA.B.C`
+
+   *A tag push triggers the [CD
+   workflow](https://github.com/theupdateframework/python-tuf/blob/develop/.github/workflows/cd.yml),
+   which runs the tests, builds source dist and wheel, creates a preliminary GitHub
+   release under `vA.B.C-rc`, and pauses for review.*
+
+7. Run `verify_release --skip-pypi` locally to make sure a build on your machine matches
+   the preliminary release artifacts published on GitHub.
+
+➔ [Review *deployment*](https://docs.github.com/en/actions/managing-workflow-runs/reviewing-deployments)
+on GitHub
+
+   *An approval resumes the CD workflow to publish the release on PyPI, and to finalize the
+   GitHub release (removes `-rc` suffix and updates release notes).*
+
+8. Run `verify_release` to make sure the PyPI release artifacts match the local build as
+   well. When called as `verify_release --sign []` the script additionally
+   creates gpg release signatures. When signed by maintainers with a corresponding GPG
+   fingerprint in the MAINTAINERS.txt file, these signature files should be made available on
+   the GitHub release page under Assets.
+9. Announce the release on [#tuf on CNCF Slack](https://cloud-native.slack.com/archives/C8NMD3QJ3)
+10. 
Ensure [POUF 1](https://github.com/theupdateframework/taps/blob/master/POUFs/reference-POUF/pouf1.md), + for the reference implementation, is up-to-date diff --git a/docs/ROADMAP.md b/docs/ROADMAP.md deleted file mode 100644 index cf6baf2f3d..0000000000 --- a/docs/ROADMAP.md +++ /dev/null @@ -1,80 +0,0 @@ -# ROADMAP - -This is the roadmap for the project. - -## Release schedule -A new release of the project is expected every 3 months. The release cycle, -upcoming tasks, and any stated goals are subject to change. - -Releases are available both on [GitHub](https://github.com/theupdateframework/python-tuf/releases) -and on [PyPI](https://pypi.org/project/tuf/#history). The GitHub listing -includes release notes. - - -## Latest release -Please consult the repository's -[releases page on GitHub](https://github.com/theupdateframework/python-tuf/releases) -for information about the latest releases. - -As of the last editing of this document, the latest release was: -Pre-release v0.11.2.dev3, January 10, 2019. -* [Release notes and Download](https://github.com/theupdateframework/python-tuf/releases/tag/v0.11.1) -* [PyPI release](https://pypi.org/project/tuf/) -* Packaged by Sebastien Awwad -* PGP fingerprint: C2FB 9C91 0758 B682 7BC4 3233 BC0C 6DED D5E5 CC03 - -A number of older releases were packaged by Vladimir V Diaz: -* Vladimir Diaz -* PGP fingerprint: 3E87 BB33 9378 BC7B 3DD0 E5B2 5DEE 9B97 B0E2 289A - - -## Tasks for upcoming releases - -In no particular order... - -- [ ] Provide protection against a class of slow retrieval attacks using long -inter-byte delays, without sacrificing the use of clean, modern, -production-quality HTTP libraries (requests currently). - -- [ ] Support ASN.1 metadata: loading, writing, signing, and verification. - -- [x] [CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/1351) badge. - - [x] silver badge - - [ ] gold badge (currently at 74%) - -- [ ] [Support graph of delegations](https://github.com/theupdateframework/python-tuf/issues/660) -(requires refactor of API and client code). - -- [ ] [TAP 3: Multi-role delegations](https://github.com/theupdateframework/taps/blob/master/tap3.md). - -- [x] [TAP 4: Multiple repository consensus on entrusted targets](https://github.com/theupdateframework/taps/blob/master/tap4.md). - -- [ ] [TAP 5: Setting URLs for roles in the Root metadata file](https://github.com/theupdateframework/taps/blob/master/tap5.md). - -- [ ] [TAP 8: Key rotation and explicit self-revocation](https://github.com/theupdateframework/taps/blob/master/tap8.md). - -- [x] CLI tool and quickstart guide. - -- [x] Improve update speed. - -- [x] Fully support Windows. - -- [ ] Generalize metadata format in specification. - -- [ ] Support post quantum resilient crypto. - -- [ ] Resolve TODOs in the code. - -- [ ] Support Python's multilingual internationalization and localization -services. - -- [ ] Improved regression and attack testing. - -- [ ] Automated tutorial and instructions testing to enforce doc maintenance. - -- [ ] Continue resolution of outstanding tickets on the issue tracker. - -- [ ] Generalize encrypted key files. Allow different forms of encryption, key derivation functions, etc. - -- [ ] Speed up loading and saving of metadata. Support option to save metadata to memory. 
- diff --git a/docs/SECURITY.md b/docs/SECURITY.md deleted file mode 100644 index 8535acd3ef..0000000000 --- a/docs/SECURITY.md +++ /dev/null @@ -1,76 +0,0 @@ -# Security - -Generally, a software update system is secure if it can be sure that it knows about the latest available updates in a timely manner, any files it downloads are the correct files, and no harm results from checking or downloading files. The details of making this happen are complicated by various attacks that can be carried out against software update systems. - -## Attacks and Weaknesses - -The following are some of the known attacks on software update systems, including weaknesses that make attacks possible. In order to design a secure software update framework, these need to be understood and protected against. Some of these issues are or can be related depending on the design and implementation of a software update system. - -* **Arbitrary software installation**. An attacker installs anything they want on the client system. That is, an attacker can provide arbitrary files in response to download requests and the files will not be detected as illegitimate. - -* **Rollback attacks**. An attacker presents a software update system with older files than those the client has already seen, causing the client to use files older than those the client knows about. - -* **Fast-forward attacks**. An attacker arbitrarily increases the version numbers of project metadata files in the snapshot -metadata well beyond the current value, thus tricking a software update system into thinking any subsequent updates are trying -to rollback the package to a previous, out-of-date version. In some situations, such as those where there is a maximum possible -version number, the perpetrator could use a number so high that the system would never be able to match it with the one in the -snapshot metadata, and thus new updates could never be downloaded. - -* **Indefinite freeze attacks**. An attacker continues to present a software update system with the same files the client has already seen. The result is that the client does not know that new files are available. - -* **Endless data attacks**. An attacker responds to a file download request with an endless stream of data, causing harm to clients (e.g. a disk partition filling up or memory exhaustion). - -* **~~Slow retrieval attacks~~**. An attacker responds to clients with a very slow stream of data that essentially results in the client never continuing the update process.\ -**_NOTE: Due to limitations in a 3rd-party HTTP library, the TUF reference implementation currently provides only limited protection against slow retrieval attacks (see [tuf#932](https://github.com/theupdateframework/python-tuf/issues/932)). We plan to fix this in a future release._** - -* **Extraneous dependencies attacks**. An attacker indicates to clients that in order to install the software they wanted, they also need to install unrelated software. This unrelated software can be from a trusted source but may have known vulnerabilities that are exploitable by the attacker. - -* **Mix-and-match attacks**. An attacker presents clients with a view of a repository that includes files that did not exist together on the repository at the same time. This can result in, for example, outdated versions of dependencies being installed. - -* **Wrong software installation**. An attacker provides a client with a trusted file that is not the one the client wanted. - -* **Malicious mirrors preventing updates**. 
An attacker in control of one repository mirror is able to prevent users from obtaining updates from other, good mirrors. - -* **Vulnerability to key compromises**. An attacker who is able to compromise a single key or less than a given threshold of keys can compromise clients. This includes relying on a single online key (such as only being protected by SSL) or a single offline key (such as most software update systems use to sign files). - -## Design Concepts - -The design and implementation of TUF aims to be secure against all of the above attacks. A few general ideas drive much of the security of TUF. - -For the details of how TUF conveys the information discussed below, see the [Metadata documentation](METADATA.md). - -## Trust - -Trusting downloaded files really means trusting that the files were provided by some trusted party. Two frequently overlooked aspects of trust in a secure software update system are: - -* Trust should not be granted forever. Trust should expire if it is not renewed. -* Compartmentalized trust. A trusted party should only be trusted for files that it is supposed to provide. - -## Mitigated Key Risk - -Cryptographic signatures are a necessary component in securing a software update system. The safety of the keys that are used to create these signatures affects the security of clients. Rather than incorrectly assume that private keys are always safe from compromise, a secure software update system must strive to keep clients as safe as possible even when compromises happen. - -Keeping clients safe despite dangers to keys involves: - -* Fast and secure key replacement and revocation. -* Minimally trusting keys that are at high risk. Keys that are kept online or used in an automated fashion shouldn't pose immediate risk to clients if compromised. -* Supporting the use of multiple keys with threshold/quorum signatures trust. - -## Integrity - -File integrity is important both with respect to single files as well as collections of files. It's fairly obvious that clients must verify that individual downloaded files are correct. Not as obvious but still very important is the need for clients to be certain that their entire view of a repository is correct. For example, if a trusted party is providing two files, a software update system should see the latest versions of both of those files, not just one of the files and not versions of the two files that were never provided together. - -## Freshness - -As software updates often fix security bugs, it is important for software update systems to be able to obtain the latest versions of files that are available. An attacker may want to trick a client into installing outdated versions of software or even just convince a client that no updates are available. - -Ensuring freshness means to: - -* Never accept files older than those that have been seen previously. -* Recognize when there may be a problem obtaining updates. - -Note that it won't always be possible for a client to successfully update if an attacker is responding to their requests. However, a client should be able to recognize that updates may exist that they haven't been able to obtain. - -## Implementation Safety - -In addition to a secure design, TUF also works to be secure against implementation vulnerabilities including those common to software update systems. In some cases this is assisted by the inclusion of additional information in metadata. 
For example, knowing the expected size of a target file that is to be downloaded allows TUF to limit the amount of data it will download when retrieving the file. As a result, TUF is secure against endless data attacks (discussed above). diff --git a/docs/TAP.rst b/docs/TAP.rst deleted file mode 100644 index 3c20fde4f0..0000000000 --- a/docs/TAP.rst +++ /dev/null @@ -1,12 +0,0 @@ -What is a TAP? --------------- - -A TAP (TUF Augmentation Proposal) is a design document providing information to -the TUF community, or describing a new feature for TUF or its processes or -environment. We intend TAPs to be the primary mechanisms for proposing major -new features, for collecting community input on an issue, and for documenting -the design decisions that have gone into TUF. - -Please visit the `TAPs GitHub repo `_ -to review design changes that have been proposed to date, or to submit -your own new feature. diff --git a/docs/TUTORIAL.md b/docs/TUTORIAL.md deleted file mode 100644 index d8659e7213..0000000000 --- a/docs/TUTORIAL.md +++ /dev/null @@ -1,696 +0,0 @@ -# Advanced Tutorial # - -## Table of Contents ## -- [How to Create and Modify a TUF Repository](#how-to-create-and-modify-a-tuf-repository) - - [Overview](#overview) - - [Keys](#keys) - - [Create RSA Keys](#create-rsa-keys) - - [Import RSA Keys](#import-rsa-keys) - - [Create and Import Ed25519 Keys](#create-and-import-ed25519-keys) - - [Create Top-level Metadata](#create-top-level-metadata) - - [Create Root](#create-root) - - [Create Timestamp, Snapshot, Targets](#create-timestamp-snapshot-targets) - - [Targets](#targets) - - [Add Target Files](#add-target-files) - - [Remove Target Files](#remove-target-files) - - [Delegations](#delegations) - - [Revoke Delegated Role](#revoke-delegated-role) - - [Wrap-up](#wrap-up) -- [Delegate to Hashed Bins](#delegate-to-hashed-bins) -- [Consistent Snapshots](#consistent-snapshots) -- [How to Perform an Update](#how-to-perform-an-update) - -## How to Create and Modify a TUF Repository ## - -### Overview ### -A software update system must follow two steps to integrate The Update -Framework (TUF). First, it must add the framework to the client side of the -update system. The [tuf.client.updater](../tuf/client/README.md) module assists in -integrating TUF on the client side. Second, the software repository on the -server side must be modified to include a minimum of four top-level metadata -(root.json, targets.json, snapshot.json, and timestamp.json). No additional -software is required to convert a software repository to a TUF one. The -low-level repository tool that generates the required TUF metadata for a -software repository is the focus of this tutorial. There is also separate -document that [demonstrates how TUF protects against malicious -updates](../tuf/ATTACKS.md). - -The [repository tool](../tuf/repository_tool.py) contains functions to generate -all of the files needed to populate and manage a TUF repository. The tool may -either be imported into a Python module, or used with the Python interpreter in -interactive mode. - -A repository object that encapsulates the metadata files of the repository can -be created or loaded by the repository tool. Repository maintainers can modify -the repository object to manipulate the metadata files stored on the -repository. TUF clients use the metadata files to validate files requested and -downloaded. 
In addition to the repository object, where the majority of -changes are made, the repository tool provides functions to generate and -persist cryptographic keys. The framework utilizes cryptographic keys to sign -and verify metadata files. - -To begin, cryptographic keys are generated with the repository tool. However, -before metadata files can be validated by clients and target files fetched in a -secure manner, public keys must be pinned to particular metadata roles and -metadata signed by role's private keys. After covering keys, the four required -top-level metadata are created next. Examples are given demonstrating the -expected work flow, where the metadata roles are created in a specific order, -keys imported and loaded, and metadata signed and written to disk. Lastly, -target files are added to the repository, and a custom delegation performed to -extend the default roles of the repository. By the end, a fully populated TUF -repository is generated that can be used by clients to securely download -updates. - -### Keys ### -The repository tool supports multiple public-key algorithms, such as -[RSA](https://en.wikipedia.org/wiki/RSA_%28cryptosystem%29) and -[Ed25519](https://ed25519.cr.yp.to/), and multiple cryptography libraries. - -Using [RSA-PSS](https://tools.ietf.org/html/rfc8017#section-8.1) or -[ECDSA](https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm) -signatures requires the [cryptography](https://cryptography.io/) library. If -generation of Ed25519 signatures is needed -[PyNaCl](https://github.com/pyca/pynacl) library should be installed. This -tutorial assumes both dependencies are installed: refer to -[Installation Instructions](INSTALLATION.rst#install-with-more-cryptographic-flexibility) -for details. - -The Ed25519 and ECDSA keys are stored in JSON format and RSA keys are stored in PEM -format. Private keys are encrypted and passphrase-protected (strengthened with -PBKDF2-HMAC-SHA256.) Generating, importing, and loading cryptographic key -files can be done with functions available in the repository tool. - -To start, a public and private RSA key pair is generated with the -`generate_and_write_rsa_keypair()` function. The keys generated next are -needed to sign the repository metadata files created in upcoming sub-sections. - -Note: In the instructions below, lines that start with `>>>` denote commands -that should be entered by the reader, `#` begins the start of a comment, and -text without prepended symbols is the output of a command. - -#### Create RSA Keys #### -```python ->>> from tuf.repository_tool import * - -# Generate and write the first of two root keys for the TUF repository. The -# following function creates an RSA key pair, where the private key is saved to -# "root_key" and the public key to "root_key.pub" (both saved to the current -# working directory). ->>> generate_and_write_rsa_keypair(password="password", filepath="root_key", bits=2048) - -# If the key length is unspecified, it defaults to 3072 bits. A length of less -# than 2048 bits raises an exception. A similar function is available to supply -# a password on the prompt. If an empty password is entered, the private key -# is saved unencrypted. ->>> generate_and_write_rsa_keypair_with_prompt(filepath="root_key2") -enter password to encrypt private key file '/path/to/root_key2' -(leave empty if key should not be encrypted): -Confirm: -``` -The following four key files should now exist: - -1. **root_key** -2. **root_key.pub** -3. **root_key2** -4. 
**root_key2.pub** - -If a filepath is not given, the KEYID of the generated key is used as the -filename. The key files are written to the current working directory. -```python -# Continuing from the previous section . . . ->>> generate_and_write_rsa_keypair_with_prompt() -enter password to encrypt private key file '/path/to/KEYID' -(leave empty if key should not be encrypted): -Confirm: -``` - -### Import RSA Keys ### -```python -# Continuing from the previous section . . . - -# Import an existing public key. ->>> public_root_key = import_rsa_publickey_from_file("root_key.pub") - -# Import an existing private key. Importing a private key requires a password, -# whereas importing a public key does not. ->>> private_root_key = import_rsa_privatekey_from_file("root_key") -enter password to decrypt private key file '/path/to/root_key' -(leave empty if key not encrypted): -``` - -### Create and Import Ed25519 Keys ### -```Python -# Continuing from the previous section . . . - -# The same generation and import functions as for rsa keys exist for ed25519 ->>> generate_and_write_ed25519_keypair_with_prompt(filepath='ed25519_key') -enter password to encrypt private key file '/path/to/ed25519_key' -(leave empty if key should not be encrypted): -Confirm: - -# Import the ed25519 public key just created . . . ->>> public_ed25519_key = import_ed25519_publickey_from_file('ed25519_key.pub') - -# and its corresponding private key. ->>> private_ed25519_key = import_ed25519_privatekey_from_file('ed25519_key') -enter password to decrypt private key file '/path/to/ed25519_key' -(leave empty if key should not be encrypted): -``` - -Note: Methods are also available to generate and write keys from memory. -* generate_ed25519_key() -* generate_ecdsa_key() -* generate_rsa_key() - -* import_ecdsakey_from_pem(pem) -* import_rsakey_from_pem(pem) - -### Create Top-level Metadata ### -The [metadata document](METADATA.md) outlines the JSON files that must exist -on a TUF repository. The following sub-sections demonstrate the -`repository_tool.py` calls repository maintainers may issue to generate the -required roles. The top-level roles to be created are `root`, `timestamp`, -`snapshot`, and `target`. - -We begin with `root`, the locus of trust that specifies the public keys of the -top-level roles, including itself. - - -#### Create Root #### -```python -# Continuing from the previous section . . . - -# Create a new Repository object that holds the file path to the TUF repository -# and the four top-level role objects (Root, Targets, Snapshot, Timestamp). -# Metadata files are created when repository.writeall() or repository.write() -# are called. The repository directory is created if it does not exist. You -# may see log messages indicating any directories created. ->>> repository = create_new_repository("repository") - -# The Repository instance, 'repository', initially contains top-level Metadata -# objects. Add one of the public keys, created in the previous section, to the -# root role. Metadata is considered valid if it is signed by the public key's -# corresponding private key. ->>> repository.root.add_verification_key(public_root_key) - -# A role's verification key(s) (to be more precise, the verification key's -# keyid) may be queried. Other attributes include: signing_keys, version, -# signatures, expiration, threshold, and delegations (attribute available only -# to a Targets role). 
->>> repository.root.keys -['b23514431a53676595922e955c2d547293da4a7917e3ca243a175e72bbf718df'] - -# Add a second public key to the root role. Although previously generated and -# saved to a file, the second public key must be imported before it can added -# to a role. ->>> public_root_key2 = import_rsa_publickey_from_file("root_key2.pub") ->>> repository.root.add_verification_key(public_root_key2) - -# The threshold of each role defaults to 1. Maintainers may change the -# threshold value, but repository_tool.py validates thresholds and warns users. -# Set the threshold of the root role to 2, which means the root metadata file -# is considered valid if it's signed by at least two valid keys. We also load -# the second private key, which hasn't been imported yet. ->>> repository.root.threshold = 2 ->>> private_root_key2 = import_rsa_privatekey_from_file("root_key2", password="password") - -# Load the root signing keys to the repository, which writeall() or write() -# (write multiple roles, or a single role, to disk) use to sign the root -# metadata. ->>> repository.root.load_signing_key(private_root_key) ->>> repository.root.load_signing_key(private_root_key2) - -# repository.status() shows missing verification and signing keys for the -# top-level roles, and whether signatures can be created (also see #955). -# This output shows that so far only the "root" role meets the key threshold and -# can successfully sign its metadata. ->>> repository.status() -'targets' role contains 0 / 1 public keys. -'snapshot' role contains 0 / 1 public keys. -'timestamp' role contains 0 / 1 public keys. -'root' role contains 2 / 2 signatures. -'targets' role contains 0 / 1 signatures. - -# In the next section we update the other top-level roles and create a repository -# with valid metadata. -``` - -#### Create Timestamp, Snapshot, Targets -Now that `root.json` has been set, the other top-level roles may be created. -The signing keys added to these roles must correspond to the public keys -specified by the Root role. - -On the client side, `root.json` must always exist. The other top-level roles, -created next, are requested by repository clients in (Root -> Timestamp -> -Snapshot -> Targets) order to ensure required metadata is downloaded in a -secure manner. - -```python -# Continuing from the previous section . . . - -# 'datetime' module needed to optionally set a role's expiration. ->>> import datetime - -# Generate keys for the remaining top-level roles. The root keys have been set above. ->>> generate_and_write_rsa_keypair(password='password', filepath='targets_key') ->>> generate_and_write_rsa_keypair(password='password', filepath='snapshot_key') ->>> generate_and_write_rsa_keypair(password='password', filepath='timestamp_key') - -# Add the verification keys of the remaining top-level roles. - ->>> repository.targets.add_verification_key(import_rsa_publickey_from_file('targets_key.pub')) ->>> repository.snapshot.add_verification_key(import_rsa_publickey_from_file('snapshot_key.pub')) ->>> repository.timestamp.add_verification_key(import_rsa_publickey_from_file('timestamp_key.pub')) - -# Import the signing keys of the remaining top-level roles. 
->>> private_targets_key = import_rsa_privatekey_from_file('targets_key', password='password') ->>> private_snapshot_key = import_rsa_privatekey_from_file('snapshot_key', password='password') ->>> private_timestamp_key = import_rsa_privatekey_from_file('timestamp_key', password='password') - -# Load the signing keys of the remaining roles so that valid signatures are -# generated when repository.writeall() is called. ->>> repository.targets.load_signing_key(private_targets_key) ->>> repository.snapshot.load_signing_key(private_snapshot_key) ->>> repository.timestamp.load_signing_key(private_timestamp_key) - -# Optionally set the expiration date of the timestamp role. By default, roles -# are set to expire as follows: root(1 year), targets(3 months), snapshot(1 -# week), timestamp(1 day). ->>> repository.timestamp.expiration = datetime.datetime(2080, 10, 28, 12, 8) - -# Mark roles for metadata update (see #964, #958) ->>> repository.mark_dirty(['root', 'snapshot', 'targets', 'timestamp']) - -# Write all metadata to "repository/metadata.staged/" ->>> repository.writeall() -``` - -### Targets ### -TUF makes it possible for clients to validate downloaded target files by -including a target file's length, hash(es), and filepath in metadata. The -filepaths are relative to a `targets/` directory on the software repository. A -TUF client can download a target file by first updating the latest copy of -metadata (and thus available targets), verifying that their length and hashes -are valid, and saving the target file(s) locally to complete the update -process. - -In this section, the target files intended for clients are added to a -repository and listed in `targets.json` metadata. - -#### Add Target Files #### - -The repository maintainer adds target files to roles (e.g., `targets` and -`unclaimed`) by specifying their filepaths. The target files must exist at the -specified filepaths before the repository tool can generate and add their -(hash(es), length, and filepath) to metadata. - -First, the actual target files are manually created and saved to the `targets/` -directory of the repository: - -```Bash -# Create and save target files to the targets directory of the software -# repository. -$ cd repository/targets/ -$ echo 'file1' > file1.txt -$ echo 'file2' > file2.txt -$ echo 'file3' > file3.txt -$ mkdir myproject; echo 'file4' > myproject/file4.txt -$ cd ../../ -``` - -With the target files available on the `targets/` directory of the software -repository, the `add_targets()` method of a Targets role can be called to add -the target filepaths to metadata. - -```python -# Continuing from the previous section . . . - -# NOTE: If you exited the Python interactive interpreter above you need to -# re-import the repository_tool-functions and re-load the repository and -# signing keys. ->>> from tuf.repository_tool import * - -# The 'os' module is needed to gather file attributes, which will be included -# in a custom field for some of the target files added to metadata. ->>> import os - -# Load the repository created in the previous section. This repository so far -# contains metadata for the top-level roles, but no target paths are yet listed -# in targets metadata. ->>> repository = load_repository('repository') - -# Create a list of all targets in the directory. ->>> list_of_targets = ['file1.txt', 'file2.txt', 'file3.txt'] - -# Add the list of target paths to the metadata of the top-level Targets role. 
-# Any target file paths that might already exist are NOT replaced, and
-# add_targets() does not create or move target files on the file system. Any
-# target paths added to a role must fall under the expected targets directory,
-# otherwise an exception is raised. The targets added to a role should actually
-# exist once writeall() or write() is called, so that the hash and size of
-# these targets can be included in Targets metadata.
->>> repository.targets.add_targets(list_of_targets)
-
-# Individual target files may also be added to roles, including custom data
-# about the target. In the example below, the file permissions of the target
-# (an octal number specifying file access for owner, group, and others, e.g.,
-# 0755) are added alongside the default fileinfo. All target objects in
-# metadata include the target's filepath, hash, and length.
-# Note: the target path passed to the add_target() method must be relative
-# to the targets directory, or an exception is raised.
->>> target4_filepath = 'myproject/file4.txt'
->>> target4_abspath = os.path.abspath(os.path.join('repository', 'targets', target4_filepath))
->>> octal_file_permissions = oct(os.stat(target4_abspath).st_mode)[4:]
->>> custom_file_permissions = {'file_permissions': octal_file_permissions}
->>> repository.targets.add_target(target4_filepath, custom_file_permissions)
-```
-
-The private keys of roles affected by the changes above must now be imported and
-loaded. `targets.json` must be signed because a target file was added to its
-metadata. `snapshot.json` keys must be loaded and its metadata signed because
-`targets.json` has changed. Similarly, since `snapshot.json` has changed, the
-`timestamp.json` role must also be signed.
-
-```Python
-# Continuing from the previous section . . .
-
-# The private key of the updated targets metadata must be re-loaded before the
-# metadata can be signed and written (note the load_repository() call above).
->>> private_targets_key = import_rsa_privatekey_from_file('targets_key')
-enter password to decrypt private key file '/path/to/targets_key'
-(leave empty if key not encrypted):
-
->>> repository.targets.load_signing_key(private_targets_key)
-
-# Due to the load_repository() and new versions of metadata, we must also load
-# the private keys of Snapshot and Timestamp to generate a valid set of metadata.
->>> private_snapshot_key = import_rsa_privatekey_from_file('snapshot_key')
-enter password to decrypt private key file '/path/to/snapshot_key'
-(leave empty if key not encrypted):
->>> repository.snapshot.load_signing_key(private_snapshot_key)
-
->>> private_timestamp_key = import_rsa_privatekey_from_file('timestamp_key')
-enter password to decrypt private key file '/path/to/timestamp_key'
-(leave empty if key not encrypted):
->>> repository.timestamp.load_signing_key(private_timestamp_key)
-
-# Mark roles for metadata update (see #964, #958)
->>> repository.mark_dirty(['snapshot', 'targets', 'timestamp'])
-
-# Generate new versions of the modified top-level metadata (targets, snapshot,
-# and timestamp).
->>> repository.writeall()
-```
-
-#### Remove Target Files ####
-
-Target files previously added to roles may also be removed. Removing a target
-file requires first removing the target from a role and then writing the
-new metadata to disk.
-```python
-# Continuing from the previous section . . .
-
-# Remove a target file listed in the "targets" metadata. The target file is
-# not actually deleted from the file system.
->>> repository.targets.remove_target('myproject/file4.txt')
-
-# Mark roles for metadata update (see #964, #958)
->>> repository.mark_dirty(['snapshot', 'targets', 'timestamp'])
-
->>> repository.writeall()
-```
-
-#### Excursion: Dump Metadata and Append Signature ####
-
-The following two functions are intended for those who wish to independently
-sign metadata. Repository maintainers can dump the portion of metadata that is
-normally signed, sign it with an external signing tool, and append the
-signature to the existing metadata.
-
-First, the signable portion of metadata can be generated as follows:
-
-```Python
->>> signable_content = dump_signable_metadata('repository/metadata.staged/timestamp.json')
-```
-
-Then, use a tool like securesystemslib to create a signature over the signable
-portion. *Note: to make the signing key count towards the role's signature
-threshold, it needs to be added to `root.json`, e.g. via
-`repository.timestamp.add_verification_key(key)` (not shown in the snippet
-below).*
-```python
->>> from securesystemslib.formats import encode_canonical
->>> from securesystemslib.keys import create_signature
->>> private_ed25519_key = import_ed25519_privatekey_from_file('ed25519_key')
-enter password to decrypt private key file '/path/to/ed25519_key'
->>> signature = create_signature(
-...     private_ed25519_key, encode_canonical(signable_content).encode())
-```
-
-Finally, append the signature to the metadata:
-```Python
->>> append_signature(signature, 'repository/metadata.staged/timestamp.json')
-```
-
-Note that the format of the signature is the format expected in metadata, which
-is a dictionary that contains a KEYID, the signature itself, etc. See the
-specification and [METADATA.md](METADATA.md) for a detailed example.
-
-### Delegations ###
-All of the target files available on the software repository created so far
-have been added to one role (the top-level Targets role). However, what if
-multiple developers are responsible for the files of a project? What if a
-separation of responsibility is desired? Performing a delegation, where one
-role delegates trust of some paths to another role, is an option for
-integrators who require additional roles on top of the top-level roles
-available by default.
-
-In the next sub-section, the `unclaimed` role is delegated from the top-level
-`targets` role. The `targets` role specifies the delegated role's public keys,
-the paths it is trusted to provide, and its role name.
-
-```python
-# Continuing from the previous section . . .
-
-# Generate a key for a new delegated role named "unclaimed".
->>> generate_and_write_rsa_keypair(password='password', filepath='unclaimed_key', bits=2048)
->>> public_unclaimed_key = import_rsa_publickey_from_file('unclaimed_key.pub')
-
-# Make a delegation (delegate trust of 'myproject/*.txt' files) from "targets"
-# to "unclaimed", where "unclaimed" initially contains zero targets.
->>> repository.targets.delegate('unclaimed', [public_unclaimed_key], ['myproject/*.txt'])
-
-# Thereafter, we can access the delegated role by its name to e.g. add target
-# files, just like we did with the top-level targets role.
->>> repository.targets("unclaimed").add_target("myproject/file4.txt")
-
-# Load the private key of "unclaimed" so that unclaimed's metadata can be
-# signed, and valid metadata created.
->>> private_unclaimed_key = import_rsa_privatekey_from_file('unclaimed_key', password='password')
-
->>> repository.targets("unclaimed").load_signing_key(private_unclaimed_key)
-
-# Mark roles for metadata update (see #964, #958)
->>> repository.mark_dirty(['snapshot', 'targets', 'timestamp', 'unclaimed'])
-
->>> repository.writeall()
-```
-
-#### Wrap-up ####
-
-In summary, the five steps a repository maintainer follows to create a TUF
-repository are:
-
-1. Create a directory for the software repository that holds the TUF metadata and the target files.
-2. Create top-level roles (`root.json`, `snapshot.json`, `targets.json`, and `timestamp.json`).
-3. Add target files to the `targets` role.
-4. Optionally, create delegated roles to distribute target files.
-5. Write the changes.
-
-The repository tool saves repository changes to a `metadata.staged` directory.
-Repository maintainers may push finalized changes to the "live" repository by
-copying the staged directory to its destination.
-```Bash
-# Copy the staged metadata directory changes to the live repository.
-$ cp -r "repository/metadata.staged/" "repository/metadata/"
-```
-
-## Consistent Snapshots ##
-The basic TUF repository we have generated above is adequate for repositories
-that have some way of guaranteeing consistency of repository data. A community
-software repository is one example where consistency of files and metadata can
-become an issue. Repositories of this kind are continually updated by multiple
-maintainers and software authors uploading their packages, increasing the
-likelihood that a client downloading version X of a release unexpectedly
-requests the target files of a version Y just released.
-
-To guarantee consistency of metadata and target files, a repository may
-optionally support multiple versions of `snapshot.json` simultaneously, where a
-client with version 1 of `snapshot.json` can download `target_file.zip` and
-another client with version 2 of `snapshot.json` can also download a different
-`target_file.zip` (same file name, but different file digest). If the
-`consistent_snapshot` parameter of writeall() or write() is `True`, metadata
-and target file names on the file system have their digests prepended (note:
-target file names specified in metadata do not contain digests in their names).
-
-The repository maintainer is responsible for deciding how long multiple
-versions of metadata and target files remain available on a repository.
-Generating consistent metadata and target files on the repository is enabled by
-setting the `consistent_snapshot` argument of `writeall()` or `write()`. Note
-that changing the consistent_snapshot setting involves writing a new version of
-root.
-
-## Delegate to Hashed Bins ##
-Why use hashed bin delegations?
-
-For software update systems with a large number of target files, delegating to
-hashed bins (a special type of delegated role) might be an easier alternative
-to manually performing the delegations. How many target files should each
-delegated role contain? How will these delegations affect the number of
-metadata files that clients must additionally download in a typical update?
-Hashed bin delegations are available to integrators that would rather not deal
-with the management of delegated roles and a great number of target files.
-
-A large number of target files may be distributed to multiple hashed bins with
-`delegate_hashed_bins()`.
-The metadata files of delegated roles will be nearly equal in size (i.e.,
-target file paths are uniformly distributed by calculating the target
-filepath's digest and determining which bin it should reside in).
-The updater client will use a "lazy bin walk" (visit and download the minimum
-metadata required to find a target) to find a target file's hashed bin
-destination. This method is intended for repositories with a large number of
-target files: it provides a way of easily distributing and managing the
-metadata that lists the targets, and of minimizing the number (and size) of
-metadata files downloaded by the client.
-
-The `delegate_hashed_bins()` method has the following form:
-```Python
-delegate_hashed_bins(list_of_targets, keys_of_hashed_bins, number_of_bins)
-```
-
-We next provide a complete example of retrieving target paths to add to hashed
-bins, performing the hashed bin delegations, signing them, and delegating paths
-to some role.
-
-```Python
-# Continuing from the previous section . . .
-
-# Remove 'myproject/file4.txt' from the unclaimed role and instead further
-# delegate all targets in myproject/ to hashed bins.
->>> repository.targets('unclaimed').remove_target("myproject/file4.txt")
-
-# Get a list of target paths for the hashed bins.
->>> targets = ['myproject/file4.txt']
-
-# Delegate trust to 32 hashed bin roles. Each role is responsible for the set
-# of target files determined by its path hash prefix. TUF evenly distributes
-# hexadecimal ranges over the chosen number of bins (see output).
-# To initialize the bins we use one key, which TUF warns us about (see output).
-# However, we can assign separate keys to each bin, with the method used in
-# previous sections, accessing a bin by its hash prefix range name, e.g.:
-# "repository.targets('00-07').add_verification_key('public_00-07_key')".
->>> repository.targets('unclaimed').delegate_hashed_bins(
-...     targets, [public_unclaimed_key], 32)
-Creating hashed bin delegations.
-1 total targets.
-32 hashed bins.
-256 total hash prefixes.
-Each bin ranges over 8 hash prefixes.
-Adding a verification key that has already been used. [repeated 32x]
-
-# The hashed bin roles can also be accessed by iterating the "delegations"
-# property of the delegating role, which we do here to load the signing key.
->>> for delegation in repository.targets('unclaimed').delegations:
-...     delegation.load_signing_key(private_unclaimed_key)
-
-# Mark roles for metadata update (see #964, #958)
->>> repository.mark_dirty(['00-07', '08-0f', '10-17', '18-1f', '20-27', '28-2f',
-... '30-37', '38-3f', '40-47', '48-4f', '50-57', '58-5f', '60-67', '68-6f',
-... '70-77', '78-7f', '80-87', '88-8f', '90-97', '98-9f', 'a0-a7', 'a8-af',
-... 'b0-b7', 'b8-bf', 'c0-c7', 'c8-cf', 'd0-d7', 'd8-df', 'e0-e7', 'e8-ef',
-... 'f0-f7', 'f8-ff', 'snapshot', 'timestamp', 'unclaimed'])
-
->>> repository.writeall()
-```
-
-## How to Perform an Update ##
-
-The following [repository tool](../tuf/repository_tool.py) function creates a
-directory structure that a client downloading new software using TUF (via
-[tuf/client/updater.py](../tuf/client/updater.py)) expects. The `root.json`
-metadata file must exist, as must the directories that hold the metadata files
-downloaded from a repository. Software updaters integrating TUF may use this
-directory to store TUF updates saved on the client side.
-
-```python
->>> from tuf.repository_tool import *
->>> create_tuf_client_directory("repository/", "client/tufrepo/")
-```
-
-`create_tuf_client_directory()` copies metadata from `repository/metadata` to
-`client/tufrepo/` in this example. The repository in `repository/` may be the
-repository example created earlier in this document.
-
-## Test TUF Locally ##
-Run the local TUF repository server.
-```Bash
-$ cd "repository/"; python3 -m http.server 8001
-```
-
-We next retrieve targets from the TUF repository and save them to `client/`.
-The `client.py` script is available to download metadata and files from a
-specified repository. In a different command-line prompt, where `tuf` is
-installed . . .
-```Bash
-$ cd "client/"
-$ ls
-tufrepo/
-
-$ client.py --repo http://localhost:8001 file1.txt
-$ ls . tuftargets/
-.:
-tufrepo tuftargets
-
-tuftargets/:
-file1.txt
-```
diff --git a/docs/_config.yml b/docs/_config.yml
new file mode 100644
index 0000000000..586bd60b9a
--- /dev/null
+++ b/docs/_config.yml
@@ -0,0 +1,16 @@
+title: Python-TUF
+author: Python-TUF community
+#email: your-email@domain.com
+description: > # for footer and for search engines
+  Development blog for Python-TUF, a supply chain security framework
+  for secure content delivery and updates.
+
+github_username: theupdateframework
+
+show_excerpts: true # set to false to remove excerpts on the homepage
+
+header_pages: # make sure ordinary docs are not linked from blog header
+  - index.md
+
+theme: minima
+
diff --git a/docs/_posts/2022-02-21-release-1-0-0.md b/docs/_posts/2022-02-21-release-1-0-0.md
new file mode 100644
index 0000000000..9370597cc9
--- /dev/null
+++ b/docs/_posts/2022-02-21-release-1-0-0.md
@@ -0,0 +1,47 @@
+---
+title: "Python-TUF reaches version 1.0.0"
+author: Jussi Kukkonen and Lukas Pühringer
+---
+
+The Python-TUF community is proud to announce the release of Python-TUF 1.0.0.
+The release, which is available on [PyPI](https://pypi.org/project/tuf/) and
+[GitHub](https://github.com/theupdateframework/python-tuf/), introduces new
+stable and more ergonomic APIs.
+
+
+
+Python-TUF is the reference implementation of [The Update
+Framework](https://theupdateframework.io/) specification, an open source
+framework for securing content delivery and updates. It protects against
+various types of supply chain attacks and provides resilience to compromise.
+
+For the past 7 releases the project has introduced new designs and
+implementations, which have gradually formed two new stable APIs:
+- [`ngclient`](https://theupdateframework.readthedocs.io/en/latest/api/tuf.ngclient.html):
+  A client API that offers a robust internal design providing implementation
+  safety and flexibility to application developers.
+- [`Metadata API`](https://theupdateframework.readthedocs.io/en/latest/api/tuf.api.html):
+  A low-level interface for both consuming and creating TUF metadata. Metadata
+  API is a flexible and easy-to-use building block for any higher-level tool or
+  library.
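+
+As a small, illustrative taste of the Metadata API (a sketch, not from the
+release notes), a metadata round trip now works with typed objects rather than
+dictionary blobs:
+
+```python
+from tuf.api.metadata import Metadata
+
+# Deserialize a metadata file into typed objects
+md = Metadata.from_file("root.json")
+print(md.signed.version, md.signed.expires)
+
+# Modify the metadata through attributes and serialize it back to a file
+md.signed.version += 1
+md.to_file("root.json")
+```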
+
+Python-TUF 1.0.0 is the result of a comprehensive rewrite of the project,
+removing several hard-to-maintain modules and replacing them with safer and
+easier-to-use APIs:
+- The project was reduced from 4700 lines of hard-to-maintain code to 1400
+  lines of modern, maintainable code
+- The implementation details are now easier to reason about, which should
+  accelerate future improvements to the project
+- Metadata API provides a solid base to build other tools on top of – as proven
+  by the ngclient implementation and the [repository code
+  examples](https://github.com/theupdateframework/python-tuf/tree/develop/examples/repo_example)
+- Both new APIs are highly extensible and allow application developers to
+  include custom network stacks, file storage systems or public-key
+  cryptography algorithms, while providing easy-to-use default implementations
+
+With this foundation laid, Python-TUF developers are currently planning next
+steps. At the very least, you can expect improved repository-side tooling, but
+we're also open to new ideas. Pop in to
+[#tuf](https://cloud-native.slack.com/archives/C8NMD3QJ3) on CNCF Slack or
+[Github issues](https://github.com/theupdateframework/python-tuf/issues/new)
+and let’s talk.
diff --git a/docs/_posts/2022-05-04-ngclient-design.md b/docs/_posts/2022-05-04-ngclient-design.md
new file mode 100644
index 0000000000..3c5623f662
--- /dev/null
+++ b/docs/_posts/2022-05-04-ngclient-design.md
@@ -0,0 +1,46 @@
+---
+title: "What's new in Python-TUF ngclient?"
+author: Jussi Kukkonen
+---
+
+We recently released a new TUF client implementation, `ngclient`, in Python-TUF. This post explains why we ended up doing that when a client already existed.
+
+# Simpler implementation, "correct" abstractions
+
+The legacy code had a few problems that could be summarized as non-optimal abstractions: Significant effort had been put into code re-use, but not enough attention had been paid to ensure the expectations and promises of that shared code were the same in all cases of re-use. This, combined with Python's type ambiguity, use of dictionaries as "blob"-like data structures and extensive use of global state, meant touching the shared functions was a gamble: there was no way to be sure something wouldn't break.
+
+During the redesign, we really concentrated on finding abstractions that fit the processes we wanted to implement. It may be worth mentioning that in some cases this meant abstractions that have no equivalent in the TUF specification: some of the issues in the legacy implementation look like the result of mapping the TUF specification's [_Detailed client workflow_](https://theupdateframework.github.io/specification/latest/#detailed-client-workflow) directly into code.
+
+Here are the core abstractions we ended up with (number of lines of code in parentheses to provide a bit of context, alongside links to sources and docs):
+* `Metadata` (900 SLOC, [docs](https://theupdateframework.readthedocs.io/en/latest/api/tuf.api.html)) handles everything related to individual pieces of TUF metadata: deserialization, signing, and verifying
+* `TrustedMetadataSet` (170 SLOC) is a collection of local, trusted metadata.
It defines rules for how new metadata can be added into the set and ensures that metadata in it is always consistent and valid: As an example, if `TrustedMetadataSet` contains targets metadata, the set guarantees that the targets metadata is signed by trusted keys and is part of a currently valid TUF snapshot
+* `Updater` (250 SLOC, [docs](https://theupdateframework.readthedocs.io/en/latest/api/tuf.ngclient.updater.html)) makes decisions on what metadata should be loaded into `TrustedMetadataSet`, both from the local cache and from a remote repository. While `TrustedMetadataSet` always raises an exception if metadata is not valid, `Updater` considers the context and handles some failures as a part of the process and some as actual errors. `Updater` also handles persisting validated metadata and targets onto local storage and provides the user-facing API
+* `FetcherInterface` (100 SLOC, [docs](https://theupdateframework.readthedocs.io/en/latest/api/tuf.ngclient.fetcher.html)) is the abstract file downloader. By default, a Requests-based implementation is used but clients can use custom fetchers to tweak how downloads are done
+
+No design is perfect but so far we're quite happy with the above split. It has dramatically simplified the implementation: The code is subjectively easier to understand but also has significantly lower code branching counts for the same operations.
+
+# PyPI client requirements
+
+A year ago we added TUF support into pip as a prototype: this revealed some design issues that made the integration more difficult than it needed to be. As the potential pip integration is a goal for Python-TUF we wanted to smooth those rough edges.
+
+The main addition here was the `FetcherInterface`: it allows pip to keep doing all of the HTTP tweaks they have collected over the years.
+
+There were a bunch of smaller API tweaks as well: as an example, legacy Python-TUF had not anticipated downloading target files from a different host than it downloads metadata from. This is the design that PyPI uses with pypi.org and files.pythonhosted.org.
+
+# Better API
+
+Since we knew we had to break API with the legacy implementation anyway, we also fixed multiple paper cuts in the API:
+ * Actual data structures are now exposed instead of dictionary "blobs"
+ * Configuration was removed or made non-global
+ * Exceptions are defined in a way that is useful to client applications
+
+# Plain old software engineering
+
+In addition to the big-ticket items, the rewrite allowed loads of improvements in project engineering practices. Some highlights:
+* Type annotations are now used extensively
+* Coding style is now consistent (and is now a common Python style)
+* There is a healthy culture of review in the project: the bar for accepting changes is where it should be for a security project
+* Testing has seen so many improvements that it probably needs a blog post of its own
+
+These are not `ngclient` features as such but we expect they will show in the quality of products built with it.
+
diff --git a/docs/adr/0008-accept-unrecognised-fields.md b/docs/adr/0008-accept-unrecognised-fields.md
index 424ca0ff1f..7d4b4a8a0e 100644
--- a/docs/adr/0008-accept-unrecognised-fields.md
+++ b/docs/adr/0008-accept-unrecognised-fields.md
@@ -33,15 +33,18 @@
intermediate operations: then, the checksum (the content) of the file must not
be changed.
- Flexibility to add new fields in the spec without adding breaking changes.
+- Don't store unrecognized fields when it is not allowed by the specification.
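+
+As an illustrative sketch (not part of the ADR itself): with the behaviour the
+drivers above call for, reading and writing a metadata file through the
+Metadata API preserves fields the implementation does not recognize:
+
+```python
+from tuf.api.metadata import Metadata
+
+# "root.json" may contain fields this implementation does not know about
+md = Metadata.from_file("root.json")
+
+# Unknown fields are kept in 'unrecognized_fields' rather than dropped...
+print(md.signed.unrecognized_fields)
+
+# ...and are written back out on serialization, leaving the content intact.
+md.to_file("root.json")
+```
+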
## Considered Options

- Ignore and drop unrecognized fields.
- Ignore, but store unrecognized fields as an additional attribute.
+- Ignore, but store unrecognized fields as an additional attribute
+except for a couple of places where it's not allowed by the specification.

## Decision Outcome

-Chosen option: "Ignore, but store unrecognized fields as an additional
-attribute."
+Chosen option: "Ignore, but store unrecognized fields as an additional attribute
+except for a couple of places where it's not allowed by the specification."

The motivation for this decision is that the TUF specification already implies
that we should accept unrecognized fields for backward compatibility and easier
future extensibility.
@@ -49,3 +52,7 @@
Additionally, it seems unacceptable to change a metadata file content just by
reading and writing it back.

+There are exceptions, however, for places in the metadata format where this is
+not allowed by the specification: keys, roles, meta, hashes, and targets are
+actual dictionaries (as opposed to the JSON objects that most structures in the
+format are) where `unrecognized field` is not a meaningful concept.
diff --git a/docs/adr/0010-repository-library-design.md b/docs/adr/0010-repository-library-design.md
new file mode 100644
index 0000000000..0673063e89
--- /dev/null
+++ b/docs/adr/0010-repository-library-design.md
@@ -0,0 +1,136 @@
+# Repository library design built on top of Metadata API
+
+
+## Context and Problem Statement
+
+The Metadata API provides a modern Python API for accessing individual pieces
+of metadata. It does not provide any wider context to help someone looking to
+implement a TUF repository.
+
+The legacy python-tuf implementation offers tools for this but suffers from
+some issues (as do many other implementations):
+* There is a _very_ large amount of code to maintain: repo.py,
+  repository_tool.py and repository_lib.py alone are almost 7000 lines of code.
+* The "library like" parts of the implementation do not form a good coherent
+  API: methods routinely have a large number of arguments, code still depends
+  on globals in a major way and the application (repo.py) still implements a
+  lot of "repository code" itself
+* The "library like" parts of the implementation make decisions that look like
+  application decisions. As an example, repository_tool loads _every_ metadata
+  file in the repository: this is fine for a CLI that operates on a small
+  repository but is unlikely to be a good choice for a large-scale server.
+
+
+## Decision Drivers
+
+* There is a consensus on removing the legacy code from python-tuf due to
+  maintainability issues
+* Metadata API makes modifying metadata far easier than the legacy code base:
+  this makes significantly different designs possible
+* Not providing a "repository library" (and leaving implementers on their own)
+  may be a short-term solution because of the previous point, but to make
+  adoption easier and to help adopters create safe implementations the project
+  would benefit from some shared repository code and a shared repository design
+* Maintainability of new library code must be a top concern
+* Allowing a wide range of repository implementations (from CLI tools to
+  minimal in-memory implementations to large-scale application servers)
+  would be good: unfortunately these can have wildly differing requirements
+
+
+## Considered Options
+
+1. No repository packages
+2. repository_tool -like API
+3. Minimal repository abstraction
+
+
+## Decision Outcome
+
+Option 3: Minimal repository abstraction
+
+While option 1 might be used temporarily, the goal should be to implement a
+minimal repository abstraction as soon as possible: this should give the
+project a path forward where the maintenance burden is reasonable and results
+should be usable very soon. The python-tuf repository functionality can be
+later extended as ideas are experimented with in upstream projects and in
+python-tuf example code.
+
+The concept is still unproven but validating the design should be
+straightforward: the decision could be re-evaluated in a few months if not in
+weeks.
+
+
+## Pros and Cons of the Options
+
+### No repository packages
+
+Metadata API makes editing the repository content vastly simpler. There are
+already repository implementations built with it[^1] so clearly a repository
+library is not an absolute requirement.
+
+Not providing repository packages in python-tuf does mean that external
+projects could experiment and create implementations without adding to the
+maintenance burden of python-tuf. This would be the easiest way to iterate many
+different designs and hopefully find good ones in the end.
+
+That said, there are some tricky parts of repository maintenance (e.g.
+initialization, snapshot update, hashed bin management) that would benefit from
+having a canonical implementation, both for easier adoption of python-tuf and
+as a reference for other implementations. Likewise, a well-designed library
+could make some repeated actions (e.g. version bumps, expiry updates, signing)
+much easier to manage.
+
+### repository_tool -like API
+
+It won't be possible to support the repository_tool API as it is, but a similar
+one would certainly be an option.
+
+This would likely be the easiest upgrade path for any repository_tool users out
+there. The implementation would not be a huge amount of work as Metadata API
+makes many things easier.
+
+However, repository_tool (and parts of repo.py) are not a great API. It is
+likely that a similar API would suffer from some of the same issues: it might
+end up being a substantial amount of code that is only a good fit for one
+application.
+
+### Minimal repository abstraction
+
+python-tuf could define a tiny repository API that
+* provides carefully selected core functionality (like core snapshot update)
+* does not implement all repository actions itself; instead it makes it easy
+  for the application code to do them
+* leaves application details to specific implementations (examples of decisions
+  a library should not always decide: "are targets stored with the repo?",
+  "which versions of metadata are stored?", "when to load metadata?", "when to
+  unload metadata?", "when to bump metadata version?", "what is the new expiry
+  date?", "which targets versions should be part of new snapshot?")
+
+python-tuf could also provide one or more implementations of this abstraction
+as examples -- this could include a _repo.py_- or _repository_tool_-like
+implementation.
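+
+For illustration only, the abstract API could be as small as the following
+sketch (the names are hypothetical, not a committed design):
+
+```python
+from abc import ABC, abstractmethod
+
+from tuf.api.metadata import Metadata
+
+
+class Repository(ABC):
+    """Hypothetical minimal repository abstraction."""
+
+    @abstractmethod
+    def open(self, role: str) -> Metadata:
+        """Load the current metadata for 'role' from application storage."""
+
+    @abstractmethod
+    def close(self, role: str, md: Metadata) -> None:
+        """Sign and persist new 'role' metadata: the application decides
+        details like version bumps, expiry dates and signing keys."""
+```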
+
+Such a minimal abstraction could be a compromise that allows:
+* low maintenance burden on python-tuf: initial library could be tiny
+* sharing the important, canonical parts of a TUF repository implementation
+* ergonomic repository modification, meaning most actions do not have to be in
+  the core code
+* very different repository implementations using the same core code and the
+  same abstract API
+
+The approach does have some downsides:
+* it is not a drop-in replacement for repository_tool or repo.py
+* A prototype has been implemented (see Links below) but the concept is still
+  unproven
+
+More details in the [design document](../repository-library-design.md).
+
+## Links
+* [Design document for minimal repository abstraction](../repository-library-design.md)
+* [Prototype implementation of minimal repository abstraction](https://github.com/vmware-labs/repository-editor-for-tuf/)
+
+
+[^1]:
+    [RepositorySimulator](https://github.com/theupdateframework/python-tuf/blob/develop/tests/repository_simulator.py)
+    in python-tuf tests is an in-memory implementation, while
+    [repository-editor-for-tuf](https://github.com/vmware-labs/repository-editor-for-tuf)
+    is an external command-line repository maintenance tool.
+
diff --git a/docs/adr/index.md b/docs/adr/index.md
index 54a9be0861..46d9d84b5d 100644
--- a/docs/adr/index.md
+++ b/docs/adr/index.md
@@ -14,6 +14,7 @@ This log lists the architectural decisions for tuf.
- [ADR-0008](0008-accept-unrecognised-fields.md) - Accept metadata that includes unrecognized fields
- [ADR-0009](0009-what-is-a-reference-implementation.md) - Primary purpose of the reference implementation
+- [ADR-0010](0010-repository-library-design.md) - Repository library design built on top of Metadata API
diff --git a/docs/api/api-reference.rst b/docs/api/api-reference.rst
index 0c4b17bc05..d0805d8512 100644
--- a/docs/api/api-reference.rst
+++ b/docs/api/api-reference.rst
@@ -18,12 +18,10 @@ TUF provides multiple APIs:
  is implemented on top of the Metadata API and can be used to implement
  various TUF clients with relatively little effort.
-.. note:: Major API changes are unlikely but these APIs are not yet
-   considered stable, and a higher-level repository operations API is not yet
-   included.
+Code `examples `_
+are available for a client implementation using ngclient and a
+basic repository using Metadata API.
-   There is a legacy implementation in the source code (not covered by this
-   documentation): it is in maintenance mode and receives no feature work.
.. toctree::
  :maxdepth: 2
diff --git a/docs/api/tuf.api.metadata.metadata.rst b/docs/api/tuf.api.metadata.metadata.rst
new file mode 100644
index 0000000000..bac11a3133
--- /dev/null
+++ b/docs/api/tuf.api.metadata.metadata.rst
@@ -0,0 +1,4 @@
+Metadata class
+---------------------------------
+
+.. autoclass:: tuf.api.metadata.Metadata
diff --git a/docs/api/tuf.api.metadata.root.rst b/docs/api/tuf.api.metadata.root.rst
new file mode 100644
index 0000000000..ab6194bcc0
--- /dev/null
+++ b/docs/api/tuf.api.metadata.root.rst
@@ -0,0 +1,4 @@
+Root class
+---------------------------------
+
+.. autoclass:: tuf.api.metadata.Root
diff --git a/docs/api/tuf.api.metadata.rst b/docs/api/tuf.api.metadata.rst
deleted file mode 100644
index c4a58bb4e2..0000000000
--- a/docs/api/tuf.api.metadata.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Metadata
----------------------------------
-
-..
automodule:: tuf.api.metadata - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/api/tuf.api.metadata.snapshot.rst b/docs/api/tuf.api.metadata.snapshot.rst new file mode 100644 index 0000000000..1d1c202565 --- /dev/null +++ b/docs/api/tuf.api.metadata.snapshot.rst @@ -0,0 +1,4 @@ +Snapshot class +--------------------------------- + +.. autoclass:: tuf.api.metadata.Snapshot diff --git a/docs/api/tuf.api.metadata.supporting.rst b/docs/api/tuf.api.metadata.supporting.rst new file mode 100644 index 0000000000..906e70e95c --- /dev/null +++ b/docs/api/tuf.api.metadata.supporting.rst @@ -0,0 +1,27 @@ +Supporting classes +--------------------------------- + +The Metadata API includes multiple classes that are used by the top-level +ones (Root, Timestamp, Snapshot, Targets): + +.. autosummary:: + :nosignatures: + + tuf.api.metadata.DelegatedRole + tuf.api.metadata.Delegations + tuf.api.metadata.Key + tuf.api.metadata.MetaFile + tuf.api.metadata.Role + tuf.api.metadata.TargetFile + +.. autoclass:: tuf.api.metadata.DelegatedRole + +.. autoclass:: tuf.api.metadata.Delegations + +.. autoclass:: tuf.api.metadata.Key + +.. autoclass:: tuf.api.metadata.MetaFile + +.. autoclass:: tuf.api.metadata.Role + +.. autoclass:: tuf.api.metadata.TargetFile diff --git a/docs/api/tuf.api.metadata.targets.rst b/docs/api/tuf.api.metadata.targets.rst new file mode 100644 index 0000000000..a8af3ab326 --- /dev/null +++ b/docs/api/tuf.api.metadata.targets.rst @@ -0,0 +1,4 @@ +Targets class +--------------------------------- + +.. autoclass:: tuf.api.metadata.Targets diff --git a/docs/api/tuf.api.metadata.timestamp.rst b/docs/api/tuf.api.metadata.timestamp.rst new file mode 100644 index 0000000000..2d29d37dc4 --- /dev/null +++ b/docs/api/tuf.api.metadata.timestamp.rst @@ -0,0 +1,4 @@ +Timestamp class +--------------------------------- + +.. autoclass:: tuf.api.metadata.Timestamp diff --git a/docs/api/tuf.api.rst b/docs/api/tuf.api.rst index b93902c6bb..7d3126d23d 100644 --- a/docs/api/tuf.api.rst +++ b/docs/api/tuf.api.rst @@ -1,18 +1,20 @@ Metadata API =============== -The low-level Metadata API contains two modules: +.. toctree:: -* :doc:`tuf.api.metadata` contains the actual Metadata abstraction - that higher level libraries and application code should use to interact - with TUF metadata. This abstraction provides safe reading and writing to - supported file formats and helper functions for accessing and modifying - the metadata contents. -* :doc:`tuf.api.serialization` covers serializing the metadata into - specific wire formats (like json). + tuf.api.metadata.metadata + tuf.api.metadata.root + tuf.api.metadata.timestamp + tuf.api.metadata.snapshot + tuf.api.metadata.targets .. toctree:: :hidden: - tuf.api.metadata + tuf.api.metadata.supporting tuf.api.serialization + +.. automodule:: tuf.api.metadata + :no-members: + :no-inherited-members: diff --git a/docs/api/tuf.api.serialization.rst b/docs/api/tuf.api.serialization.rst index 1603148dc6..610ab910d1 100644 --- a/docs/api/tuf.api.serialization.rst +++ b/docs/api/tuf.api.serialization.rst @@ -1,10 +1,10 @@ Serialization ============================= +.. automodule:: tuf.api.serialization + JSON serialization ----------------------------- .. 
automodule:: tuf.api.serialization.json - :members: - :undoc-members: :show-inheritance: diff --git a/docs/api/tuf.ngclient.config.rst b/docs/api/tuf.ngclient.config.rst index 150df08273..b69d7cf484 100644 --- a/docs/api/tuf.ngclient.config.rst +++ b/docs/api/tuf.ngclient.config.rst @@ -2,6 +2,4 @@ Configuration ============= .. automodule:: tuf.ngclient.config - :members: :undoc-members: - :show-inheritance: diff --git a/docs/api/tuf.ngclient.fetcher.rst b/docs/api/tuf.ngclient.fetcher.rst index 1f689d4fd9..f37ea14f6f 100644 --- a/docs/api/tuf.ngclient.fetcher.rst +++ b/docs/api/tuf.ngclient.fetcher.rst @@ -2,6 +2,4 @@ Fetcher ============ .. automodule:: tuf.ngclient.fetcher - :members: :undoc-members: - :show-inheritance: diff --git a/docs/api/tuf.ngclient.updater.rst b/docs/api/tuf.ngclient.updater.rst index 0fc46757c9..3f032c6b3b 100644 --- a/docs/api/tuf.ngclient.updater.rst +++ b/docs/api/tuf.ngclient.updater.rst @@ -2,5 +2,3 @@ Updater ========= .. automodule:: tuf.ngclient.updater - :members: - :special-members: __init__ diff --git a/docs/conf.py b/docs/conf.py index 2b0876200b..a80b6af618 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -30,27 +30,48 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ['sphinx.ext.napoleon'] +extensions = [ + 'sphinx.ext.napoleon', 'sphinx.ext.autosummary', + 'sphinx.ext.autosectionlabel' +] + +autosectionlabel_prefix_document = True # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = ['GETTING_STARTED.rst', 'OVERVIEW.rst', 'TAP.rst'] - # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'sphinx_rtd_theme' +html_theme_options = { + 'logo_only': True +} +html_logo = 'tuf-horizontal-white.png' +html_favicon = 'tuf-icon-32.png' + # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". #html_static_path = ['_static'] +# -- Autodoc configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html + autodoc_mock_imports = ['securesystemslib'] +# Tone down the "tuf.api.metadata." repetition +add_module_names = False +python_use_unqualified_type_names = True + +# Show typehints in argument doc lines, but not in signatures +autodoc_typehints = "description" + +autodoc_default_options = { + 'members': True, + 'inherited-members': 'Exception', # excl. 
members inherited from 'Exception'
+    'exclude-members': 'to_dict, from_dict'
+}
diff --git a/docs/images/all_logos.ai b/docs/images/all_logos.ai
deleted file mode 100644
index b2b7173be6..0000000000
Binary files a/docs/images/all_logos.ai and /dev/null differ
-SHnEZEEBIź.L:-VSY:4* sSrC:HAZ5F~B*1%HJpe +l 3:sx9$@=>[$֘[cM@1E`Om pn1V cbw"#[GFO!t3,+G\ --Cj#'bZB…5VJ؊U;k T*jhK;:FtxapUÑX43X~\y0i"Cn,5R=QAI++[X&)H\?)-Dcv Ջ -:"n/ȗ d+( 3ˬ' u/6cA-P3^$B@ԚU\邓߹3 -YdO- *ANBkQ k2ETi C*"u|er1gkHpMa; -$9}"Y|qIȓ=@pcrG2bUj( ?p$򆁅9L?a zY6o~qbdJ|yEGmo4\cF&CG0R,V1d/oH0YbԙVbHJ4${;6"GuW^:nt{M!`t0B>M<LL'SHB5b$)UaEPYb&{ >G1J27wJC!)r)Zl&E16Opx9 W z#4 xeP*!gD&_ȨP: diyvï$m\ x4:.Ɏ9 Ez|lDK4B3%@g}~eqj;VP oHfs0FOcv+ fC<aEh&!x8s5~.qvS**c?hCۨ ü)ژ[bZH량z$qg -a.^=vov8M" jr.7%[8>j$/[!yh88@y#~.898+sv?+QGCe!c s|n<1[Se7t5Ӑ`e= +iU3r'<RqwK udh[Tqe_AZUb(-xgXòxyᝅI|5O[WOѬ]d[4oP|>; _(Ws|n%s h*_2ڞ4C}a_As!\-RK -dl,#^w<1q߆O~:0%.;RDm鐊70LIDO^WݼžLahw S#pn`)s> endobj 86 0 obj <> endobj 100 0 obj [/View/Design] endobj 101 0 obj <>>> endobj 98 0 obj [/View/Design] endobj 99 0 obj <>>> endobj 93 0 obj <> endobj 102 0 obj <> endobj 103 0 obj <>stream -H|VgTUfH !z͋-J[\K,<"ETDl-j4*bKH1b,E!"*v{Gdxo=s{}f 0 N8Y=W"bUZFXCumxШA5;E퇀9;(|l_!@#@Ua!wk aXXrj_XD̘w2(*<2(!y@2j ְ =ZQ#bV6? tVQ!Q7NS&=D iz^\F!pH Vo1 -|֦CGp6s&.Z{IcEI'z}'q4|ZFD{ -mzDjjLӤC戹jpI.\ 7nx&򷼔Wq"4>ǥP)e,V6K쐽-H:8 {Vy\VmjeEYc:kbtt9k;:8?wsNtj -q^+vDwK?IAق - 7H(b@h+>ʢt]~9/({G1\*E*8jȩ2PH\Y#%I٧ ɑ;GYG9G#qq*Y,լ}5Z쓭bQ]{+/ž){({eߨS۶srMP[3FH&抝k_n,ծU0MK3T}ǾmߴvWh/yb\_v^g5EvI;2y>u:^9e993r}KܽrgnOZ^K6:Հpgɺu krX֦,גּTtvO}v=z|Ѱ'E5 g^@K+Y[E(.2SIEsVE}} Hbq2I$22_k1/}<\ UTSI&@E2䢪T,TޠT -ȗ* -` 0 bfX*` c ykVab0$N45'ԚP[jGQ9*CBh°qCq8aHG"q@&9|o01gT.i_Vݻp)tj!Sfi 7 pGM{K -|4?O[jE~XL3uTM) _:c9u -R7XC=z`FU4[([i (( -Rv!q-wxTY[9q2sc|/%9y|wc-M Л$A^DI(fũ\+sGSL*J$Tz{ h2MbS\RSM܁;R=̻^|Ohǽ@~:9!p(Z`Cx(sFQœyO -Z3}7$g)4&㤩4˞K'0MN<)Czjo^[H_o*C?K)DB$f"l0fċgl1[Mfv]9fev4fk9;yړ019f>k2 sҜs|ޜ6gYsΜ7h. -?˳pޒ2Ej O%?e&LZ-T$jpx?E/-Q1xhB=I# -7&/LP0Hҍ7PMn 6%NUFva<bcc 6 0钰ڻ!!4[(V}ZR UJ}X[RZUUTTUMsE7}Xs|ss X= mRA倾YS7? t [Y-<(Pd, u TSKWv%}{?ZK\F6;f٪VzvrvZng w] wxEj:BomvkYړrBU`)>>ٓhV6`&*ɶjO{qӊYW3jrl)l˶Ve[ nk{#|zy;w~_+¾/>XHsN|!6xBWNn"Y?CZ} 1$w_@;P3r@2=$-dXqSh$[%QSB:+;/~.<ЭyK[*^(շ+J>xg1?"_JB 2=>"Erh PW{H$o*&)2xJo -[q`! -3!!9F2EPj)p^ue@P,dDU\?&>UI|u! -s,1A+=Mlbi^6(H2Yh|<eQF$ 9?&~L -~ u٩q'I9AFxQ_<]T bTTMI(קz' -5hG}ٰ-U53w%\LTϮ9Y/iRgo7E9g[`r-L8uҙ|)%&u -5<=^्C2Y掠"Q,YD yss:Ar'(9Zxc0XBVQ.)|M\uk W=La -X[@CLa)G2(q(r(zLbPue'`<. T`ı㝊b*wLO~0աTU=tmRz6Χ+/,)LZ/Xhld.֖5:'GH> -nPPT=V2B=QmUC'ͫ?#[['鯔Dm#6P>gs.Lߜt76jt"f3[7߻L - -jD6v,Ρ{:*o.lRI*B9m(h,9].k/zX:歬$)ڛJPlȻodl6j6Wa`1EBH . e*FUem/8b98{٭qnn#K- % һw%ؖ|B‰#׸=Lr Se -)'%6'n5 !'@;u,Iʈ&<@4hJ$|&O=- >Wy-~ .eE zN=SG\`}S3w-Ҩこ}";)ܪTq]#Vd\`Ȳ'-Ůfks"p_ D fsw0$}n/""ߜ(:I`@"LiR" -gĭaTaT0ևQ ¨a>[b}EF-3 G -jG49ԗ쏬M^yt #HW_hXWGf =~s:WHKtml<[~\ MC~HlΣpj4 ۵ /[p??~?^3F+C!h>Vy d ĄgM?eze2^B ^B ^N`/Nm6{YB,V(Y9eyas\j<}_/齥rZ^(Lm(t\rV.E5 &1#۲K,Y9C2nk4cIN tsFMoyi_[r<{<&}V-5?h?cEQOJu>h.[vnp\vBޢ9bZ F"psntX-xoH,o;:ej>eQu LlU#wS欸3;mªHszv8I^{ɖ;gmZRǽz-4mחW4\vϥt(նW k{xӢ\?e@v@J(S%7pb9D-SUe6y9MRҖbG=渓f|*\?8z8"BZlǴ-BU/a -غϡ1 --h"( -" (h"( - -Yc+)8tI%V]\٥E9e9EUEuseUNZOя -'xAD8iޕX*]cX5\Yyцo +<լvUZ6#UlWRH@W74mV[|P -*שס^+BǸnLȎiMf@g3sh>.U̷Je:k2PrPrdݩ Ś〵jly [eU[ti?I_pؘrI?ԴnUk,;e7h|qUʭ2l#VYhd%+;e9*jw~TYeU0噦]hGźta2|l&?gKk|pgN>cdHݏD:=YrBYС֭=q;Qܣ~5M2hŸ]YS1Cܩ[|ss*0 4RP=NE 3<?fa3W،*c&# -jpz8uަuv]g4.(BΥ:1=cJ|8!G˘s9gHW0€p6Sw9 -c@.!nUQ-sEYs<.*U50G->ۃ= m Y@Z`{&ϲ - 5&>$m/ [2؏葵\/ -W#@>JE۰Rv?s^wj!u+昇vߵ 7z{7]O`lё IACǷOyp!gigMw#TRP>yBi(~例%pF;/EE|NC\dl{s֜'lV54P_L? -9x -t>G?\_ /牝xOI٦RjJ*ɚ0qq'B/5‹DiEpOE,1GC޷BqGWs5k?82wJs ks<8PNQu> -Qs{b0p|]uHcy}=4[ɼ/\\+u56{\0gl.Mp⚻HSQ]okbdʾRc&>&Fal$9ű 91iD,I<Y98FeWٷث'n?-&V2L$4'/Q䑑\k&҇q/GҗHƤܸiye$ǶD~Ku{d4,/quT4sbK c.1%yIWѯ@LٸFV3Pm]{nsl *H$'$ -s -< }0hr.@9CU]/Uw78{?$qf^?{cw8RphZF옅~}W{;ԇVԱ@u6n1o+?:G} ֿ>ޕgí,o?,Cq>X߄}sSûXr̃*ԹW'uuӘO(ޯ{:gEov~r*\LJ̃^UbfuP[Y87s,gḿٻW֑x>H^pJx9\|R>@^f|^nHxg-3:wxe|MD2t=j=O;=o$W! 
Xl"_|~,Q[uׁGϒűҒ3{pl7 _~g'࠺׍>-Pk*=.@]@= 9SZDk|/U;<8BR PƜ`dmJPt"sō(t@~YOđ_#eDE RH!RH!RH!RH![(2[MT[$leD-)k "TBqC&d 3 -⮈(""W]TTܞ>ߙP<|lYhZ|9h s -|9r#DPZse:h |mn a~n7 wM7w>s;eYC}&}A`\ &}| -9|ύEMpMs3 9y!?.6GPg&a -J< {He&|̈́ۆx'44dz=nEhm<=2JS8qp3Ɠ ݽfDoڴu7aK~aBT Kn?,k8w3^.LEfq*si)Ҫ{cӴgWʌ\W)Uڽe*Zc! -PI &2GI6(- g佉z 3  -l\qyWvEk`X_`H4Pѐ*9&42+TذV6篕C|wru H~F4%͑hVh6UCڣ::0t8 -HtQ莣q z'zX0Jv_C @ 0aP C9NpHhI(I@p2a<11 `2Ni8gLLY8@i6`=F sOHt=/鈢l,.&yEWLڂg ʖllM؈o~g۳;;c.<1،-xq*#1bwc){e!vmXٛak9Se&'90X/"8&+Ldqlϡr8#8Uxq)fc n\gK,9ǹ+WI>|k Z>_K\Ǘp_k7q37;|q;?'s~/_k~{?gW߹s]qmA}oAZޫ{ -G!PZ --R%Y`KHhޞK{݅_3fHmvp$" $#HC{t@::: zzN@ @ ` P pHhA2lLD&a2DL4\LGE -1EY(ls0e(G*1 K ˱ -*QZhX:P Bh*(фf:l&ll6l.>!843p&8< p!.Ÿ2\+p%ո:\p#n͸6܎;p'ݸ>܏ x1<'$x9<"^x57&x=!>3|/%;|#~7?'pmؖ )lGL`"a2S4g#;3+;{'{7/ 0(f0Yq Dp's -OTNIt1^3Xębf K9sYrV8 K˸+>5ƕc 2bꌲMljZznFnfnVnvNn^~AaQSx*OG(|O)>g,|/%W*_|o-w.#~O)?_+~o-?'_+/xǥE- (IdITIAҥtEJ7.=GJ?A_@$e a2\FH%edHdX2^&Hv"dL10Seɕ')R$3el)R#sLʥB*e̗PbY"Ke,H/TKԊ&+NR/A IDD4J4jY#ke Q6f"[elSvn#{erPa9"GѠέW|PЕ[VUwnxny|ZIyOݕSb|㊢CxIVkG+tD+lu'z:xf12J ǿ_uYEvq\,GWꪫ$\pIUbK,.+(5+H)kp>DuWu̺WLYn<JNu™+\XiXi4 J' w]x.¸ZdRIZobU*vcddUk}WFƣvGHk#u(Wp(ՁЛBFqiVs4{4+s JJGmX X8]A\A'۠b|ƧTg!f3nj_ZY ӭh=Q֨7j-B\3ɱ Z5@Lh+jM51'QF/uZL4>v>!ѼR!MIء)=%5"y9IV $w3.̱x>6g%ss,9=/& &$% - %%^`lb($m(ɭ̌TCwNJ1dMESZXC#Xbh_*ҍ@Q-gp8N7*LC5%ת-Tb"QϺS >~)aůNm9YT-FgmoZf44Ch "ic qDZr{|W7M=`#)6WڰU5U`BiP ?`BEST* eK,Eö5ڸr•CZW-`zVHḱfNȳe-,HD[qjխԪ[EZE8`+5ꠦZM-ۦMkCш(W.]mi#KʵKb|$g =JHll2~ْlD!c~H yaizz6-eRymhкwy߽sɡVҧ%e@KB˘\*¨+"e%ޕ.zW+يN|F@5DgZ^_ZOTDeÁ즊IrLy`NH4ybP;IVo* Z/dLhu,,ioK49~ ?݇Dk@}F)<*Ho=乲gtOnOhݞ=g^(& 8T'03j&0ǜ\kȀ_GY,r g*jO*F*294uebzasy8Y~^gZ^YEZrQ -45ౙ2tI6(.?gƟڞH΍䕖IcXv-󬡚WDŽg>N - I381kZks!o 4]HݰH-Yn??S L%eP~M4PG\{Fڞ\be;]1<Į 8WqkC5ϛq.oֆ1u/}ro=J_|+e+UW_^3xv}H0ߴZ筑|]mc;}Fug1dd*;W&~*ȨEoAZ^|^Brm hi -'py%}Lx8(4VD?䄐>wB{%^wx%55Tc9F&p# zdcMo%ތM.9QYɼ%pa6>toKgy&1"}p\%-3Zni)?7V@;W݌}aUk6|_hm]OGn`gĵ{0|^9[N4}3nC*V\m*'/Q CP/V68\/e,hTe`Wk~ akehg-S[dS;]xbW-G밊TҍH7b:単Nu#1t j0~)\B\! GBtzf)AS'jDdv;=L4<,?%^L{FЎzsȜ[uyqEmKo!VěQJG +O쁩yldp -hnZO oC: hWA6VECk C=mUMlU|jL=jة8׃*W= }Ebio†x*N " - Wtp+5_16%ڄ;= [F(1lsrsXĹp'A.\UF-V`0.ytuL磗ncT;65)p0+&\Gt5݆4&m.Fǵ^ǭÓ0tnnr[`^Q\^\9o̕\~(=HEtR(!] {Fڻub MJn6aۂMaç@]{=%?CT.2G\{m (ƆrC6x&&8Џ$RA.S;(,څ o2[-k0f|"؃A ԁ`9 -. 
U~C)C}ԃ>s{濺9E!QIձ|x|pp 1O]p|bw!1!A=Xm&8dCh7ahJ>3<6haq0F 0xf1 ݃8(԰`HLQrʋsg\(`ӂa8jՎM::e oiū4`x".G5>GwLhMC8KWhm n GL -!ȁ5x<T@C!ف`Ǧl -+f@ X$(ċ-|X r_(^?xfT ZZυGE(Vkx ]j ڢ"EE-bY;mT׬XPyɵVP@^V5<7Mi︀,=UX*)IK& -ˣa'?rv0ܮ,@H<՝miT%guldzk^kk>ENjSQQ: m]'>;K4I;&mҋ?B$I6!m'B\_f `(Wi !k o74e Γ1f߅J2U)AMb/$PP7,8-PȊoA/>g!YUɯaa7q~F#W@7Ϊz ^ ϚgoW 4=)r1zH]X֒4Ȯʧ-:Ϧ|7mzv=g g1c+'a - Ym1^hQ#{~TN@_c}`r= M׃ tfDяKc-%[`]=&| ;?c$"Ex?a -"^C[gw1F>lU+(d*=lK^ gHYZ:R'X<"F7R/J|jpHxH薖% 5!nb D endstream endobj 92 0 obj <> endobj 90 0 obj [/ICCBased 104 0 R] endobj 91 0 obj [/Separation/PANTONE#20Process#20Blue#20C 105 0 R<>] endobj 105 0 obj [/ICCBased 106 0 R] endobj 106 0 obj <>stream -Hb``$P\쓘 11 Ke߮10: 0䂢 }RR N*/)3F"FN@v#]Z_PYQ`hii\Y\[_T_XT 8@~u15 ɄADdܴC"YX98yxED%$edUT54ut ML-,ml]\=<}|CBnOHLJNIMK/(,*.)-+ohljnimk0q)SM1s9s_p%K-_r5k׭߰q-[m߱s={?p#G?q3gϝp+W]q;w'O=7o߽/_}??BY!x˨G ,< endstream endobj 107 0 obj [/Lab<>] endobj 104 0 obj <>stream -HwPS -Ћ!H $!.HZHǂ DAJQDқ4A =3μ;͛y;7g5u HL쌌]\t`-8`nn :\ ñQG܉n@% r KQd yy(D7/2Af)ԛ2 dQNr:@27T.YG3Dd,@/tWG`ϟ -pݑfz9eS+q*8 /?& -1r8jF=(HpE$=n2pjb ^]oMq G-?~:A엂D?0^0*MNQyPp?22W* Pjs5Z4m133u1)ccMcepiL\€*r]p337t4l6|-G,,ZS=MTWߛȔN6O MKkHؾq w"3FjVYvg\.4O0_䭀۷>İԵ,zÇ3!UOIE}ϳ MfրK/s+_ut~yMFp;^>l{A!vT#8jBh1?=1?;12_wt ̱2Kڅu⛰ŭov"v])@s0R%BBM4)t~0)FE.R#,86.'\a܎3.\(O)/?'"%hJ A*YG$FEFO:/ux1Ᏺw '.[$%&'>MN8{Mefޜ\<|[ GPߕg_tR21Uդ'IOKj:j꥟=~8 oh%|վ!ة:B;|m_xCQHcqiSgftgX>_] Z2[^L6~nUbqf6݁xk  LAQ(л;1ɆYyH35VO6U8+|#Ӊ ~Sg/̿)q4$C -Qߎ'JHKJKde+19X\#P^IA\G^iWyIeLOs' -]tub#NcfZ+ 3V֟N ~85`o08zzi/.[{P"ܓKK ȁ)M!}a,(h^bgT_x{q)DaoBT4'WFAKfZވ̺\p]{닧JKe**?sPD-˟ugmFME-}mKtݫ؎΁n7o= -{ /@? J ؏z~$Os9=}s&w6w|B䥸s+~_\l׍on"ߖvwǿM_ U D -2-&6Øa YNL(%#,Vk 3*G g >70^_><@QkbJ$, w\uD65eI(H2HKJee;J\ / УX줢 -W]TkS/?nyB@K׬] =I1!$?P9+F'М$埈s aN_bg8SaKbmk`/lvp:&ғIFGh HCH'8+y ;89E,{P8[R^4V< -Kxl qQW2:HG -r;A;bjJ -߹/3?z(HjU4r6\ttL(ӧ!è^+!d Eeo_87}/tg}KLsurc.p eӎ`qt 3,#|Zh80I5]^K= ǜ;yyC1TxA6G6zW[h -><1]^~|h hO#PT ->$P@%hیoK/{W4yw8 f8e D*RBy;`nQtVy:_@ϨgaJ.?c}z5M7D>XP j2{89 -O -#/a^#.y?FPgq?:K1b4wߘUuF]WqP&:=9ݵ`ez}1mޒpd7lY1-uCrjz19"z/ -2 caZnpHu| -zP1iDhTL.YeP]G&G,,` ]` Aѡ-/s">BUuF.{T}ؗ) N!"GB;dl߅[LpـnF9 ZXcL8xէeW[o@ē1VL-#C&k*K&vV,*p$i;Rxټ_PG?+\ʽMbL6))r>=@x5*fv]^Zduz]X?P41p 9Q~*^ *,0#%8b;Q'p*$LEf"޵wǕSC戽Hah :-"dk>uy7C/)d%x`'qyo SY "&]#6؂5;4uWy -tk/ G &0,#Bv"R9 59oQm@;/D5RIP+-GM#º'uhAt0";=kմ?{-We_]ډ[jA )H>Au%W#Ժ>D.ٜJA1~ S̕V asUB -䪪/j)+68˼=wpmbnv\ߝtGOx;`uKJ ̄wAߕx~Ϟl.ɂQ!h0RȥjEGlsܦ5# J58/G V//"N1J;mh`$?,Wq$L$0lJVzq+ĬЈAaix!dpTP/H_ zZYs-dqN -U)$Yǭ, H -Ϻg,7xeIHh6 <{*M@+{ Pw/fID. 
O-Me̙mu+M­y!@*D> Ox0LA % v[Flrt@q+ /4r -X8Dm*Q/ZB\@a8~r;E=,7!oM2D[..Reړ7tA};GbsWh q2A];%mi"+g=Uf*l3xݲ?I a~ /&.u -궥 ->'ggUi AN?oO%),z~U޶ 3^Gj)ȀZMۂFiPnQY#TVtgSVD^O˿:JOg+SMnveKlou"̋'gøSzّ[ oIpkк½+/eM=Pm4?EAO =Ф>˭=@tVw {[u)jW>'jcJ|e57R1a7/zu c\V˞ Bb-C~}5ְGU*Z$SmƩૌL "CƼ+g~>dwE͢B`Ѥ*]vQ)TTڒWЬ~E0!U~Jqb `,<7q |?ķz\w5)r-C#n[-V 1/ceSˤpm^/hOC0eծXd#GQ(HJ3`7޼yofq-Eߦ~_ڎOZKW>(C8(8>=%šЦx>ǝ/@'3EHeʹb#ӓ"=ݯQMcOu䅂A~d+ʡ^yk2͈pRtzdswB8u9]7}CꫲQ] )>^6&$Gk{«C\L`ȣnggw -;.ˊr_/C:x)P]aΥ:媀B3-⊗cpcـ=#ӗ=Z"#j%"b7KJːйcle/_>‡*E7@+V6ŸnvQGM]wTnG<2 !_,>P蓧VwN5[,*tgGvR20B'džɫ+ tGj x1jMTn"AKFoUkoȱ*h$e1%#*54N nRΝ7ⶂĎ -V"8%{4G(ȫ*d+~?ɟt#gG9w[wߺ4ݑĈvOrb4鴶ר=yR:p(bHY-Ƞ$D~)5a:GXmr ]c^vzKS8=UĊ3c7o1UNϹu!8EGpҮ69 Esu|?Wx5ɻ3FI a ~۸ߕ -TCY bY#thG:tW5-BG^pw+.xZ t#4r uI_o$郅t/*-]V:SI#< -BqEd_07Ɉp0 e,%(IkLgu=Xin## Ί9W"o;!t%-Izx`to[!֦Z?Q,YjwgW tCOTmW}9Ӓ B$pqL?5LRDH|]gpOhz셠kJ$ۨ -sOk|8C "rզqc (:/k~%Dj[ihH5nߛ-787_A%V"RjxwڶᎣͿ3"汬6EVÄA'X*{G~GZVAZv}&(lݽ6$>Yԟn-ZT3(( D;峰&pۜ :J2gkSIJ}']V -WC;KX_="ŸXʰ #ލ=Omp(e{K  nK 溞b@mfl_#va^Xw4"f0aAҰUҷ`,."4*kĔQ~$pwf k33vRIܞ =|߬oS-KmaRf@8(M_!^3<.ẂXש?{غΏ{h*0I 8G.6hy MBYnW"v vhZn?lL,6XS(i)IfړH|E8Mۀ])>ZɿݳzK=IǴAK)jT|~/|x7L |{R.YXv /hiqM:=cMYCgHZrqqeNfFz.R&)φ,+.Obp.5^W۸&P4^?7H~(+Pjřv$JӇi|zK:mS`Rץ~q1Zo8#π)Ɂg.<}CL(pd,KWJKJo>v܂1%~#6ÜGyuQ`|ƿopCćKc˴WbJ>5&0"J⛰)nÈij||bocv勒WJ=h0Ρ=("hhψБ{oj{;cFVPDJht"=ލ:0"ݜ6׉20}{bnbVEJҒN=~80="ZӋ2z&njub2~U/^IEz=F0fJ"5ƗƄˇ¨zqťm -aUIrÚ=! 0KN"𚙓0qq(ynmi]a PU>?IG<բ038|"5|;"yh!lYf`TڧHϦݤ<30ŝ)#ae^ڼxภl`ST_'HF<{ڤ0iN#({-~~r}f}Z}OGD~ C~7H+)Ѥ݁j~n -rI;fS炜ZKO^?Cm]7{‚+EܣՄ'd+т~@( rf"Zv&NܶCG67^4+(>0N?~/m r7if+ZegN͵ҊC:Y;7R5*g^W΃}̪qwen"Z/NPCF7>n*H2HSՃU}qhʙfe{/Yڹ=NZWBђF7#I*Ɛ3D.r݃},kq;e-,3YVNB'E7 *Xxi<|pķ]dྠZYHMmB}6S+ffOBT;|rp8dX0M@Bp#6񧴜+™@ |r|`p;˯dNSXMpEBPg~6-+- <{$*6aWm: ;^h(zUfb 3`7)GH$I܊賏[:.mjrupTFgʛQ_:= e4V}C?1]@8l^|Eb?,Υkz, -00X m[<?bPAߩ,C] IGe_ ߠ .1^IxO/'`c R.1B8- 梛x*g;ԗ~KtCP^W.T -Dw2EkH|0`W*q"]|P&XbĀC\1=.kCլĊIUl(ha^ȊKJmXpq}P9Vaâo^Q#ŜdD@@φPފ YAɈ~joJ V@ZK.9@Ya΃d˷glĝsg5Ņh7\ |[X̑BAa#7qb̔+z *&3(Vcҗ.' -&1O%D'M DzFHq K0;J5 7GdQX"/! C\8"wv%>+>r'ф1N[Mƿɢuo✔b/HRl+W|[{ṕ)LvŝiC؋pׯ—g߬ +1+KL!Ԙ@|m#>`V(S<{;PXVK~JxY'VV}b{7anWT4[ ҿ\݈lbR%oNCl:^sZEѯZQ-33*HqB6Jwhgڛ:gդ73s=?Wѐ*vn):AНy=liC4X Qע.273ghgq4WZܜtm|>x xRr!P s{P"_PŠyW;f3x^CSW) ((\< 25;} y$RrF|_*@uN$-'?hp -08^`lw`Yll`ײ/lp q28u{lbۇ̝9M;]w[c]|B[v4hL̫%B"HPgp"?es1c\cekE -B7)*ѧ7{>ĭGulk(67aRܶcS|}<()2L/ΤX.`sk$Z<%S0aF~l7'y i^2N@HS&xpyRF}Nք5Qd-!c%٘$cL# - Zx..m}1K橤ybI֜%sSBqrMї8mr0)YͳRܹB wWX8Je3)}U[ )WO{eTS pJJyςG5ꪲ5\E2zP$\ -YFtQwd^Nk+J*6VDy[JB~3{?=cƌifhEJcpn|\VtP rfW,ծB;?Cq9eXc8ß+͋ǚSO붅R.s+ƀ}tຎͰ՝W7۔Av+dol6\9)<:`Z2^M{ר8vWA1it}+4~iD?Q)pC63:>lm~8Q=ӌ*`{3~|wT7Q@AX-Re1CYTL/&Na%づ!o<.ΆD@hLX!V[0|rrɳ$ `M[q>/C -1wEtHt8Eu%UTTl^*!%U,kF܂0!wi %"I(s\׿knu6F rH( &U1?OP|vINI"(* b خ0'kMԸRO3 -KyشvYV:^tFƔ.{od%;V:6C&1nG(y%^eOacD=4ONi6썂; ˭L`f7Qk{2f7PI"ăW(^\ $b1S.%.F6;oAAj[ T>f+(n_I߉^ʻǫbȌ[nq 13ߏBb7C Ӂ|S綵<*/У%ڋDrܱo|RהulsUZ_fת@^uP6T)#:ςlama$Hl^z'1`XPҺ}*%]ibd9&e@՗}@v^HdnKn( -̖{]1;R ;e#ZT"?hpiK Bzޒf1zlRIP<΅R1bm],2;%E7THM>+aOέbw?R HGQK+6f[ev&ŧj|xR4X%kZ% Nhr  HzOޢ[WJk2V\^Jpsκ[GFH'3Yb}O. =דgM!.grWsG @d+Mw8|oA8a -6'GCg5ɇE-h-;^)(pdϔPY u_Te.#!S*0zo#,,~vï:9wՔsU CKWWP)PbE8ʊ w_{zr_ YY{X~ */? 9e!+s<>&.F6һL1jX:U*$ION{y/Yf?ֳb fRowgJ[9T ܹ,$~Z͠I3!MZ)%rKi&yNl!쟢#" ы? p{1(n?%#T3Bym#la ߼oÌW$e-meҡrlwũ ߏ`n9q#l#O#&&ELI -*&/x }vuL7 Z -ڡ>k$G-)1="Y_w: j˱LiYm! 
;ʀ}bZFr'^;hͰYնg0hE6AL%eO%wAݞa?UBW24a:Qc᪜~yʦ/ dWm_Z!ÿlq* MlX[͔+l;):|UKeo3W # -)ln>fĆ3 _yL3̀yVii߇k4[p+Z~ɮNW -WB~l^Q%¸k=LzbBw -lƯ* -BD+R7[T*Pu2Gn.(+QO^K#6`.m@6?e÷ Yk7|RO;1KKCXo }Ic?bVx*r7k23{7}M ;=3˚P1YldHv^Jg1͘܇L>Lۡ=mz!b= ,;1) ,%=3|~XEq1o[9^iUSlwzXRkTNP'JeR@݌1p4@ٕK\Ƽ42i|S`D7n.\|"Pm4QX!:# 9Qqb.'$g/jj)?cNF=R%ȟ5TC"/A%Ԡ̟uVmvMI@V#Gxt%CG:T5ɻ5?I'Lwn|ͱ |!\d-CWnd l[ ujoT^6ݖDJ(?X`yt+`˃c8Be^bUvRs-ga>ru{?VK3(r[ܓ|%_#|F|=Iާi m߈0l108 1"+9L(h_UK4wFw#ˏqyZ)vԨ -"kSͲE~fy,pN<<6 eݽXYۻm-; ujp]ɥcRd94M8HjhpMq:)ޞ=3U޶ 5Oq}Ri'!.gEWOL::Le˾FMq.3J\%|G/)^yS -7%Tw#UUCs0}* ڇ(jWA,ӘTR r`]Crz5_VYA9?uayK$mZiin%44Q5Zq%!'Jj$;tр -a,ߛp,=Dշ{nk>Rg,*CcL57iRXZ#EoP ?ҶC_=~vǹ{*N6uW2'{R_irUR(5kx;$d#D^vS:2r[i<WΩ텇wƬw.n4]hlt9/ъx‘;vjjG)Df{׵'8yGWm~MSE"70]˝4Oڑ5|j/@@t'("2h8@$$j$pX]md%WO, 1{O?mShʌ#e3# ($NxX{OZlߢ@Ñ"G\t!nTӺ1~Z%I9zBu-r]f@&\Y˥%RMN:g@W2ݚ $cMSBԶ(~glqIeY8 -LԠ?2o$?9a s}DZ4qlse -X"Lk-?pr2i@$~Ó2Mʉii}ppʬdsƝX3K?P20l,$i|d%|_^pc˧jWSKS>񛎛P1$Nݎ/M,{oi{c6WK>112$6+ X;݉ӅQ8c{ anBzbݮVVJ>W1F$"ӘnD qo3nw9wQwkax_[y)SfVyGqz:{-|}Kŀ`}vػ}j}^~S{~kG%~:k-Ǡ%𑙂GmvEjs`^< RFߨ7:-i Zk/J恌\u9i$^;qRr凋Fp:V>-Y'q4ᐹu;iwrO]ɯR1#FQP:-kBQɘj``t¹Qhⓧ]H$QEp9ݡَr-L I'ar+tLhe\ٮQ=E9D@-2Fg X}F֌r^s˸hf\f$P٩EZ9pO- *7+~s>g[魱APnE97<,- %Yr[~e°Frcg2[{OPD9О,曹 ,ٕ{o$}߼lXrHf["tOĨWsDƦ8ܟx,ԛ\ 2r0ۊwDxlGŬy`*yYUyJz?h{3׫|p't}t~ S%? 8w>~lĭ~`/~~U~Jb~?KQ3Ū'iĢ Ђ?w;8kð:`r:UZLJJ.?873u't⡤ 읠8 ڂv>ks°`4>U>*J<χ?1H3 'C ,aIvsk'ޏp_hU%J ?R3'ģ%U x͏vk͖j6_TmI۬E3R'ˢ׎7Y [Hv B|j/_b$TaΖI>r43u8'ТTꎾ9f#-u¢CjU _T7IC>ԗZ3c%'ڡm#% niu`Gj/^к kS̴Ih>\>3Vc'ǖmH̗ЊXՃuҮi*^S$HѮ碟>:C3Kޝ -':Ғ1)Xtjwi|Ȱ^T5=SV,H?>V3Bs(Ț1V6IȋybtPyuz_v֯zwA{}6xBxvPŬ gcn -7N\VYgM`TfM~o1*:hgKxD:ppqR /Gx-5-Ӡ`_ol ts&+s0K2'AkO* -7UdAjD-Q&!Q B \ 3h(;l 0weX{!i)5$uۙ.mD&WdXQ6l44+\e8\SaOƞ*˞!}S|ILJ\ -h8񜴸x$<~PZ?wJQԔ5 FvS:r]P"6(F^]Se >:1*ٛ>LUh\W-[VLIkIHl t0,x Ԝwe\q$**&6(w$׮|~<NDYo@cuh"hA(w3$ϛvY?qz;*܁ˋ ͔o\/׸, %)gaNh{N*/P0v(= -0?ER.ocqE-輏'l$m'X -<۝|!b_7sD]&1b+r'"ؔ\ާ7-rܕbMxswq >"5OE"ϞGG2/#w5m_QwRrRq<$W`K(ABc>%%D 0oD`D :P˝nYE5e۹+F2Wͪy"9u/pQF$'ߕ>r3&=70N@9 -mUQ)!..U[_ -?KFJ4GĹÄY$ܐ''\]fG63IAv_mfax"C%k4BbT4TՂ"k<\{dq s7/d4V -{lhxZ]b[~=ifig 5Fb(ק9;O٧ViGGX_}n]wӸik6f:,p,@бkbmuFR!s37:!?`CW֧&`':gUDtx4DLؠ²CKh#NLoosR -mRҦUa9B6c.oo18g JVnv[[mFGvzB+J02Nhļɍr/ 䜲Cy5(raDlGx~H?I.ȩ GH;A!A0sARxJ8*ۛD[Ѯ~b -W.Q{!n(1_9W'IlCw'Cp^~u% ^pV& > Mфy6_E%.xA01݇"QC͈ZK+gD2sϑKtOt z nf9 ?Md$n|c-G\zx 1~F;ĚBtB":E&'>"3$vAPrfm#t&pJN866n^yOOMZ,z~ Py$;l*|\7󱇭 ؒvߪUDl?ᐺ,"uy )mܞhqS:wdREZk4cS>nW=kvKPmGYpfo&]*YV.#^LCxTrz )s=\||V+w]mԇZc&r~2fAcS_ he)>t*qQt`1x w֏+I۾:hYK *9I|8Yk^nT| uڿkVܥCA$r*S$s,ౝ+z`;mP걢dbP% @]6 -UUu -nIR:^ar]J& :þҝm)%қFҚp} 2e d';(MdFڶk ]*HДPPCjߙ m> wE2j샽/;Έ?* ,oqS6wniZMfOdI&~Ej*tcHV=֗7<Lk2XF]E=m,6rU蹲P/WSEFlPyzS8Q:/~`~kLS 0RߪnFS+d@wQlf6Vkd`j⻪5)0T5 8+F9yᲹl( ܺ),,&L@!A~VSHIe"3o%4*%yV "_;Sa8iqk¦&m&*F^XbBϓ.$%_d nC^36^$LI|yIB 3Uu ê+z;EQ^}NW%'%3mD35+۞AFI#7'_1m 1˚wEkG eZaV7Uͅf5‚"m/)euw"FYwI׵Xq [8f2n88EOfQk:sXpcuʒ@}5u ?TUf݅]m[-S]Лc<"_.×z;,^Aɝwp36 -i‡WRj|pI%(E:"L{+:eȾgdOژ9ؔgF:fՓ ԙdb:JJ[pdR"QfQPgeGE}bOSZ p0Jd9 ܽIrpGm]$h$0Åzm㍝|Si aYvq%ҞY[iS= z D>x̢L# dsV6S'ɷb]T%/y%7[]X%iLjj#]W=f&0]h=0yx\P ؋ǯ]uG_i?M^.ZRbG*I;Ҙh.}h!BQ0bPEYBtZi+m]xR)^FПf:s.C?!%8^k gbztٱh]3YQǟbFc:y'.!ܖYi3s{Wrp?sBe t`Y|u]NZLvB@w6}x*@z1y{l <1}8${ -uxo﷑yӚRPg΂MyiniƊc]*,X;߉aMAI6:)phgz<όۂx葿mݴbʯeW9.L2A\58)ݛ?htz2uzv{'1wR{Qx|Mlx|X4y}PCzZ}-ez~ z~-qԃ s-t;£9u(iv1}wjwW5xByvl,y6ycpm9+qr󊦡s2t|uivVKw߅B!x,[yiyweoSRp3q𒩠sōt{wu,lhv2Unw&TAzw+xXxZԳntQ gXSТ(LRp*৥U -!!$$wߛ{C I0B{M ED[T~{QfiŌ=~\C¤P/P%`vָ)Y /Y4T?5hMsqjq+:m'E)̐/kby@}{DR39놑>cvvSDžƖM t?65)`o&Z]d?qF4۳}lA(h|}]f;+ʹ @갋.+'n&oA\!R\;u"Q<7!ia2Sp>&ΠڴA2NPKGU;PomBp: oBv:khC8|Zp@_+nPy_@iiyi1&o)y -wSuP$-_gK?fBX/zQ է+v [+Џ3(-]tY!(y e辬9h ro/r%shxu/cP}O:kՏ@9`Ys3Crz 'Xk޸?i1GF)eDK;hز_6AЧ;rl%ۙ:$QFVwp04sf8) w|1-R͓ykx 
ELуCSL]*R)p̜:Rیh_<.9]QqAJKAmao:Bv/20%!;9)L͓ka{&(}{9lI:RV*҅VG܁`@*a/&]-ad{K͔5-O oۄs kgNO^˟J5YQhl:/=y%qNXD2D^Z@:3sz0ow=+,b{/. R0q 3NyG.:d*K%4U=أ9Wu"gb{3Umg5 g}xFO]RqޗŲfv5gW]eWC;SpFbHy flO0: W띀q\~#?RҤwPw@TzښY0ldӮ㣖e: s/:!g1!w<<>`Eսºshsj-AgAZB#J>ݍmȨtwXTa))A? -wGPXkm -_BIѨ+ ڣ㞠t7tj}7Deߣo]3%j%Hn- E/GXr#3,+b=?^'NC%}Uss6p)n2|0%wiC^0mo#703:|ܞ+¤ sW -TmJocz}a Ҽ؈ WǖSKl$t_@*CTV z7vdr]&4DiOQi lp;#K}&B׉/4"a$+08Mh~TRiu+a2&dg}y j`D/%@3J`<[rO@ojƛiCA F-y\H4tj Ys~T:eK3 -xJкDg&vhh\ -6+5DľGѺHs͉}dU3 ͫV>2  - ]-jxP$#LmNܫyZ44CDcXj66DgmZHSd}Wlɾ[oy@nę&)Ι;-~(.xrZG`znr YoB)`cТzٶmixC,WSeH)5ڗ>B>dS#YZ퐉FyQ R!,-j"~jo**e; -I`h]<nVD¤Hb,q(Ȕ4ZcGP=sQpv/R0!pxe_l&!?F%GNw33䊘""=XC7Βq/߉)˗M9h;F奢.qY8$I-BD/ĵ#L(>f+oZˎL;T -O'{XzE|H>PcU)'ItJ&}>wͧ3dQ%{z ;-'L5CXj>^!#ͪIWhMsfZC&9 -3WʏeL)!acE2'(h*A)?+f7GHtcfի-(U+bІ]Qx[Beb; \h3J0 DMM;Q[9Ԙ8L*fd@kR9rLDg5ٍBuJCa&NOlٯAM :m!vİ̞;('y *1\_M -UH1("D H٠hdC fܛABf"S"Y EQ˧ҴFTɍ -Ϧmߔq;K/G(:'ܷb^# jQtkA}R*SY<~522]t%/pN+t֬ Nh*{]YqxJ<+KKG/4x1%vJ1aKtׄ\&I0GIW)O1)}+% rD"Pc@?CNM_^(6ڔLAʯȿo>-}r7"w:()2I1_vZ4^u)j'6kV6V, Vb# ?/;̱;o5g:nĬ7},4mG6_n) yҰ_2l(󊞎hs`L]x7z.թQһޜ첱nEͣc:6ԥ4ݨ`j(a֍4kɪRM]Լe+-[Zb_Hq6Zن:2+}Ec]ZH&:LQОkdèi_V.36$o^ϰQ-E4<7*,+}3a$}r RHjgKV\zRffmÉl@$]{%qf,h`=Zo:5snX5~ ą(J||5v hGa};WO{p S*8"7xD"U+7euIR -wejwtpO1Yt37z h3HjYh+0`ҕ#ݶb?&ZQ~ )TKty ɘ1r)G^T>Q0ON\PP+8\6zQ-*Ze^Pxe -dP&ub~JY!Y/!]1h>@jVuF'lqV螩m1:UH≢8QEvWb\#ˡyNſ'\u/H$wuψ -MxoI&v}!&yh嫎\0txcL0UA6,*ilȔntmTiy+n^֯pLTqIG#s>jt2١2v&&Xw+Ix -̙+{Sޏ;~sRsi -t^yuSvHw>-xx2!y&.z_| 7~irzhz^{S[{H|=u|2}|&~NE hw GrW?gÀc]{RπHR=l2h=&j BP!qƴ%rgk݆*]ŅRztG77=_285&w3%ח :.q=Z\f߯ \wQ=7G= ׉2z&ff P!9pKfU>\r"QG,<ş:1՛׌t&W-  Œ yhYp,Leέ[_Q' FϢSu<{L1C`&A/; 5Èo}beHq[\P}FkǗQ<*#1g& ; un4dʬZnPM@FOQ;❰12D&Y <Ո6nk̫0d\Z;OE̠b;R1䛨%ꖞr TS_Ahnn^xo{TYcpJWq@WsP6OZt+v[ >?wy9|@%hz=tq^\uTI6uJHv@Niw6N;x+Yy j{$\|h=1~LW4h9zd^%szTzJ*{Z@S׀.IxB@%y6;+Ĥ7  -Z5x+!KVӘЍ -+B eq~[8RtH^ 6>ͧȚ5R*+p!$ -ɑK<ł0e9P[}Q-H.>R254$+a_!8F -feh`Nrj3.s%l5Vt#n#bu oJuqzvs[gzwtTaxv@vyww*yxfzyoѾotK*puH|r/v@sPw0thx"xuzyfPvyS_wz?xd{* x|fy|`m~o#~"p~8q~_Is~wt6~ewfI)wNgxE|k݇%mêo -gpqlqńvPsjd:tRQuu>Dv}))vgẃ~j -OlJrmo^pƋQu!r$c9svPt=u(v'w8i^kdminsx(tOgu@jgyiikȒl 8ԉp"GV_lRȑ!R1>f13fc3B}]ښX߯6ax4Ḫձ쵺9U[sKmyS GP mWlDk#XLꎇ }qK/cH+Mg53Y;+' 2rcz(NwV@87S߾kRn4M5> 6d\t"gE7scLìyCQte;Cop1\mFM9ky+@$z FJ(lSʐߑ44l1/W1cVģz&yOqu]&ЉO(aKv\qX!0͡#,WٖM - r]S~³Km#jc=Bf3>"d',EodbU})XO%yڪ!J@U~e ѼvӋ@qnM!{ n3۲c iwuCcS:dZ7;Kw%7@ks5N'A̝ra{;N93&`qhJۭSיά׺#420$j9CJ9gVK0+H`&~=s* Da%ZCC B l#n#Wϑ+Kd0y WĠFy怙V\0FjLMa o ".3y;XJēj3?$&LmB[Bzth1Cӑ|MƸkXm4LP3Gq!pbM-ܾtw$!2fFz$(b_n-zz1脝[Xdaz=YnQyKU?؎Z'c+U܋]ʅ3QC~"tN~R184֭5Bک+HR.MJ0Q ,; -=PmhbaOZG|mXs̩^4 >?@Uzڽ'a8,Y oۃ<)P [i H60?[^&=NBG 'i]a# -@LA5&i6u3QsN=g6P`f+]i@'RG@=`eU~Qa!:ǶWK_5PK)P>-hΎ7Tհ\W9'i6["%ӡvȘY) -IN0aI !5t9#$1$g]0d 8.{t7MO "nmaa1%T*CJPyj^^ln+T,@ :b̴S6,Ik0JA\,H!̭\R(`H:.pqIx5ͯkR|=m;WRq|)Yf+1~M"%gz6[*eF_gN>OEm{Qy%yI)/O"mr#yBAԐJ#ӖQz9Gz!dA[~2di"nw^XO(Y /"%gbJ'GXmlz@EIDz^9sޯ9̫166j?ǫ֢e,]wrUX>>%{|K@); 4jw@QOZO'8ey`2:<@}]bQ_fy֓hе>"!R Ycݚn0R}˶5\?fU$ 0FuR(` y>| \c]m4ӫ {RiB:okd"*> ~QuIP.|u7 d%T_bB=ֻI<~E=cn~ѣ#ߪ'l͞5u S[ 7\pBEbQL^:l +RV&yrQ՟2G4&sNwԶ3+Ji}/jZx,"d{~yGRX;w)G="q{,xں?U>)!崸tU3Dz.#>8DÁεǰ`G  5T<8 5tvnk0'#O<2PwndO(aY2~ ,⫈o0"fwqrMnHDKC BP.U%XdBfЁ0):}`8M{ğE[KHC沱K_Õ0A#^Z}G>lK0zAK؁7{"LA[ -CA# -nrf,lp%ǜ(l0yL@-yDS~-~e -S4c^z -]5Y&p^G6 -tk -΋@ qѶN6I`a?bꢗ `Pl{i0>#xz)FXہvgڮPxKkn Y^㫳3\~qUv*mI{RXj;,&9N\FD Dwx#YvZ򚹠NۙW)+(YCv:+4CYM,ٸrNF"I~y?)LZiI|`N]{ɿ;fHw2IİeL{Kx`*Q }7c}*72`Sٔ# (%,FS>?cu0 !.VvjnEe6f_^uUM~j~@∋j&}TX/jkB_yzn/a2=ȁ#v u({X/CߘGs ;9!猬/~u읪O8YeIqKLoRQӴfKK.ADcgT4X}nzQ+i=˸BXK}6T {e) rtv}I o.-p`-\񪰸YxM78WmM' :{*p^ʆ8_׾@u+dIWЧ-CRy:((f{3/ X']5ex%] -~%^ At "X]-ig48|GҴ 3" _pd=DSFEK T)[d/JsI.].K.,T *VWgŭu<9>GIy1iAH"M Pq n_}^1E:c:hڶ~*}kK2g%d_o: 
~_/l9PKo:~Ty(mmaWHЫͳ/stMDxI,LQ5 SMzt)$"`rSh -K qr#D O[5}yH4&{zEk&Rkd)Y%V2@q3(9w1>h?h FFf &R#`T>e 7kv1ЊȬZ5mMӣЙC^(Vϗ~Z՞Iͼi:XjXG|(=EY6{T 5aȭ@<_P56{>HZV)r}{)owYWc\i#m5Zݑ4ӉUau -XQU<]-}!e \NfQ,u,.|O|f~\Z? έA }sy%*_I)ο`ߞ6_ҷ;I\la Ch)Ha{~ T' 3zIgKw{5czֺUw: bĀvX~fծrCi ˳?qLr(|# -/[cW}BwsZk>KcYpڸ?fr+"4'90Q&OVt\q]s_x>[2Tލ (Z_kN4 -LEF)"bv׸ۊ<.|ԲBT?X  hLcԀ ci.W$-lBB5 +_ƭ$SYo^,^rhT (ʥ )4gɒ?s"z`Gyو=\ϣ_RHO Oq|<  -6J rU>{E10Ipe6V DOy! - -ldRp*<s-qwTGJ*_Y$,';b,^vO'χjtZJԡ' c;l?o7t=-~Ԯui87uԕX.W涐h'y` Syn?wdT [{/\߻یל,yĐBs S-\r77dho߁Ua[ancOIw]}Yf#Z  ZBLmDCᖧnnGrν79 -Ep/dtu]9 _]l .}oz"{D| {~׋lajͯ{a{W%|%M,|C$j}9-}.VM~S"  ڒfJj*H`l`VqLŁvB x8ǝ8.+V"΂ƃ6 -/1(i2_VJLE0Bc8~u-"ٔ' -{ jh_`-RUKJB=8>ˉ-ؘ܉p"ۖ1vވ - <kсhcH^ۨU>K~Ab7--:K"ɕ  -.d!g뫮^Y}TO"KA]“t7]-q" ^ g|g[]٧Z9TLěJlA 8S7g-> g"W Ŏ .fê&]cSTJ]@Ŝȝ57/R-"q蔻H D=F7a_iGW¸RjN56l?D9m;EQo1qz'@sEwtbxvјz~a-oGWpANqUDr;3 -s1KuY'vxLϞyX |`ܹu)WQuMױvpDuwG;x<1 yG'zj柤{/;}ϕ![`oN{VM{QMY{D9x|":쨖|1}`'U~Z~9nK3`MVBMCICm:1c'<72lؚ3~_efVLZLcCJ:/1`Ӆ4'Sb[0(qtU!ɀ_gVL@Cyɉ:Y;1D'zQlqJ x_uU/LrڏC=#:(0l1#]'ό&ˆA0^UgxL&B@9s0/'uÎ˖H[[^YTUXKjBç9£Ζ0Ӡ}'y -5,~^BTѮŠKߞBl9I0'i6f *!^ȁ ne og -pirl(s7n;sGt\pBa|ur0O-vt<wu&xv ywxƹk^nmpx2nq;psQ+qxtrru`ctw+NHgŠ;i؉Ck\/m^no ]~pKr<9vsI%Jsuւtd6wfxhҐCjP}l{mn8W\oފK%q\8rN%rUWu'3cA.e#g蘟 iՖL|klm}[o.Jp8q$r1tb}d壉glif{jklҗ[nIp^85qJ -$q@t2ua߯媧d@fx\hozjX)jl2ZjmSI|o'7pN$qDsfaDceWgziͧj)kYmkIn7p;$}psN/`Jc0EeyO:gnoyii[ik8Y^l@Hn7fo՜u$ep&+sAt1dK#uf2ui`vk}wAmm wo[xqJ <ԉ]*DmW-9:E"%f}` rXJVwmzm,j[Q^KٍHG(َE4kXׁgJ./ w$턔@OxP -y9VlxZk|][$HC?nW3^#4#`'-q%;Q/خf̛`ƭrw O'erϩutKeJ4@x!@/I_!; y@8,Z=lKzQ{T/!G=Jl-x A 3F6rpJǚ;jhi-+ -4OyUVX=oRNwPFV<m1*GP<,yyfC%T/QyM&_Y88$ 8Rt8 -l/E@Cૺx+6]0Bl)q -kd "Ym?7hk$"P!(0 ,V ohJH}nJޣDY"?ޔU=a-\Zq`8g9f\ -Y|7j) -[ݤ^UtsNo׬!(rZ7C5y'gϫA;?JݎBņȐzc*K?]3'7(;\ޮ4*|KNsB!+| pF`OWp"+&ނhĤ=- {/5&2=(yA+=?9SJ} t*DlBU(ѵv"9s>y1zȭV!eQ6_T~7FD]Cw[k>D3 |LEH-'P8YOz2j = ^ u)>QJ$$~=Q Dp7!aa΄E?Z6n;u95~VT^>) -x\q})Q*flfwD,*'}=?BuppQF~)GOFWW/h4^֪͡kUj\uEʌmd_? -ʞouQ) E) Rp!%~^݀mlD+sbW4M -I"Tq`h孁;2-"1̄G ?XxAs~=pQN5F -T-Qjau!MPNi.? B+SO a_BL~nhw%G~y]wJB`E} oe7c[_j$'(߇ |6lTB򴽉6AX+aO.-P5醲=DMHPo5r1O+qkխfIRzjY).A[6FW|{./]Lm lcŻې/13p9紸9.Ï4F>Kع[Gi9—5XkhvY_݃ͩfs?% ixy@v '}̓7ŴG>yC_A^9zM4̠ug(.'z Oӡ }LՔFFO3^3iIfW -ENֆ\$,K*䙊,rBV.ܠeXyIiw[8Z'Uqhc%(>5$ܑj2\gvi,8ɂԂ;^ q8 -N(7O`AlYʿ3WGvRF wy&]=B)+D_7:zrzC%?*!SfS[x?j(S:PKѭt*tV{>9}ޣMJh 5ڙ" -)dfe$)eǵ5ߜJ \iC &)֬ Gb^QQCBe@|%xD2,Z*첄ɳMI}{Hϳ^k -:%(ʩ$?'펠dIW;Fs/K->#n\J5d["tY`L-UӮx["!.vψoKh++쭼 'VxhU]'V'7 |IJl!ԍ. ~;pcn -kT!8+G6|| {H+ƌ+h(4f[  Gg`W': h?|&b(~Rݘ!|_nCRעGL$ EJqtʽL:#sZD,QREIpY͊ŴX.ϰ),vUO_Q ǁ, &˵ +0kěY -0/J}+^,"8xd@1;t$6c-IC S~1L -%d57SE;g65N|v#e!!YH/(~*!)h#j |xT"JJӆ)0nGya OY9^`%*$'LTgw@!!a=>>j>mPbػnX!mSY%#C!ﵼ='4Ut|KaO+aWc%^^[^=3lXqv[^dVo{P fܮ=2spӏ7Bl{B -:ط3&N_=@ 9ˑlW>` =Fx8*>zL[`㛻ͱ%(U;hn頑̯ϛ'@5[\#/N^>-a#J-P -mW -ͲXY}^%=<\7yE0jw\XzZU͈޻g7{BmTtnj U5'_%_hI*OWVωsCU%oT_`-Id$,$5~u>UVjKDj2*u}? 
-MTC.aB.3j:@c~h$:䧒.u^H^*GAf5\џ] U-Lpt]TцPcY@V%/S%1z$f!h)ȥ,g$:b,6Ь5ex]>9R 'I*krG]ȴP$菜)LZ͈|s-hxVQWtTӧ'1uܭ}[3,n YYшXfU| qȌ5sg;\_Q=oqEI:GW-qU& _` "x{8pfj["PE֗9Ӗ-۔7ރڐ肇nMvz`'pCQe -D[;PCKE.x9Uu,ڄx|njozycp,%eregZiOel&DnX7ɑpY*r1fs u{Dy|io=ikdzC -9\ݔ.ޓ#܌iB  fhS_dU+LnȜB͒9qn.r9#ŏ` '2 -)eS}c\eRhHj&?0lB5Wnf*bphrC\s{w&}#di[XkR'm[H$n>ߠ;p5lr\*t6uw/z~Ud &oZqPQrHs>u 4ݜ"vX*uwFxKz:x} dcjuZIvQ swGx>"y4zl*Op{XOc|M=}]9<W;b| Yh|PV}G0z}=Ŝ}4O~*,]VCxGCbYpPf)Fʞ-=rH4v* v[H, aqXkO#FqÆ=*3ܘ0])𕮆X_oّ!؋pG`ٍgX)ȌO@ь2F|<ޚ93zn)͔#X f (`n!>WaNɟvEBt<3a͎)GOD4ۊr 5W:P_z.W1lN`tEYsרNx 6 x-<|y#z| G}3t&XQ{xP{GX|1>"|5ᣳ}A-&O}#ʝJ~r}<Xi`#5Z-W#ԜE(뙑eWق@4+X(4pOFܨ>.Fn5]-m#ݛcA8hdWծIO: F_=l5l,蝴R#ܚZ2 產9܆WpGN4?FSS=5<;,Ɯ퍐#Йƍ8DՋ <)WŖNvpF$>=˓@5v,A#Ù~P8LKVANDךEޥ=^.4,{#YTO -osP>j _kbӝmSewnhJ} p#jl{qm[|roZItAq~7u\si#ut Ixufihkjmlo]{n&qkForZsqStKIru6sw"t_w ewmxd's@ftEhuNEjv\zFl[wXj'n#xNYoy4HGqmz 6Arz"?s{ vk{a|adj|vf|h}1y -j}~il}Xn~ Gp4~D5q~i!q~U u~P`~Dbe,sg^wiyhkTWmnFo&59pz+!p tγ^}aR ֬:ڕPrXU6_Q8Z}^XpjCNd1dD @Hx*55{he5~,tUs;GyQF.9[YTʼn$fW`60|Di]FJ폌t;g\Ot*hx]/G$m#iZS;ڊ{s7K.>rv:܋¿zv ց h/1!y[D2j䛋 ~P4W?_Ia%lM wO ]mk~絬*Q5K_9]]V0c3KxVHK-/DP>H\/$iv",w ڬ(v ܳ76Ye4T" c.Ɂ\ 3[5]|ޢA -fL2`=t"C_P(2NW>K$ -Iy(¼=鏰5#Eli+qe#3^d]$ h7F2:c/۸# KBOq vJb;\VI; :O̤Ӥ`_ -Atp5X ]}=E-c&%k ~(N[঱) 4⧆x W@F6ȩٰuf#O~ W+ y_#Tw#B>(fbK%,rИ5yS02xbymbE##@ 4>-4zڅhx{]8)9lۜ{dX+$PV?V¸eexoz2u4y/o;I;,QjUO#?w1>5>S{)4\s(nfY' \[gv*!Ag6f˾NsN|r0q4Jby@Y}LN2M2G2+K7W <$?ъ0Gy+)~Ia/G<(z@K8;HҽYLIM )aQD*W3[Q'g4fp;&]It:\/&S1NbXsɣBߓ#A_3qnKnBp4E¦WعK%8\8{BOy$X1K!Cܦb b9!]?z_h8O.ȣmzL{8i H jE^ %g{2ekUsjOW W,X\5h[83] &ӧ~j"GmmnWi{^U_`LfF_~!q%>ߘu  "A6pGK1?noAV7nb֭J2wlk11`O'(S7JiO.v07U筂L,biHb ?D307!iX 4{/#R& soUAaW ^M;ēȋhmSNH>5VzgK'6J~.O]PCKCDkU-e' -TM3U$o;* -(oAnUqpsz?n 46ڝD%.C kXP$,P~ I)ldIN?Apc\\1k?6JCѩҩ9i)Ih_M͒'ږ6syuV*s_WaOUcJlt*! GcP,Iyi&Rae@(v\ RxBڕW/!GU,OdرAaV\ǨT0n|z E:G-X23nEOBQ`EH2ߊ8bc:t-= 0X(x6Ctca$CvnqOIl lO) DZ]%nbJx\>^O ~Z lV+Bܡ|߫8z~ܵ\_jՎxVGR?>4ցa?xN }g wFue>E'qʮvl"h(6xDQJ8PUd -/T$; -E1TIYA:H9>`'rd'G #0k6H,AE7e %a1% J"u|L8>b>_a~is[|3J e_*$6Rv{ݎ -|.'8= Ѹ)Î̥x{*\9RkD.25t{ʍoum xX%+0_NjEG7Ům}*V`JÖ2,fMsқE_g'ڷnejiRP GZ)E[Lёh_i"jeڿgm-&rG?j-< CNIUq-*lβEmS P""ϛMήӈhG#Gx\{"IaԤg@ry  #]3Ý HSGrDDD7]85/bEF"]PuI*2,[VAGBx͖;h/TZ><a(XdLg,hC~DW[޽j}2埶holpQITVE)\`MBU ^r"9vux: ɋ4W.Q26EpOhb۝{UȢ68dC]X=_v5oXk)aYAvKU`/IS9}Z=U4=h(݁{s07dZkNhm"}&\I&˭ !} v0V|bO&8>A 9Ѵ71iK]"]_@3W"CV}(s@pJvJc>a54.QS2ǜ(1#ZoZe]dWP_ֿIe^R-L!VgFz'j+~f]@fgy$#hw̦<{=IնS 9޷Ȯr>.~i-14%Q,{9&mP5&SS-\t94! l HˎrBSfӋV#aSL@Wb.{Qi\B;ыudqw0qecje* #3LMV* D4UA@e#CH `2_򒼑e(T-XQz=Qi-ZkUruz[-Bv )-|-=]󨌖d?PM;+{(h -ؓw8+x*ր!4Eǣ(ܨgè (1[ZBU/Bwߥ/ -h[t]<\,S -;H%JS ' aRWIzBr2ɑSads|b%P&Rdރ |NX9,DaB㬒;܆rKaH(_/DˆlYDzEFs<?;%-:m+ Kk)~xsnRB<Ÿry`^ye!d$_.\ -B>ق)p]4\RYNYenvV>2H5;qз=S$@ Ju"0@Gs j/fXfV!옫*մ /€,+ -]?TsdQ8i9CЦ1xWvWuZLOoeb0;\ hrC.LJM_OՌ5 NY@8oQ9Y7g:m]Řc pcMcu\Oi -h)%L۩Z<ËBMg˚pͣ1ZFx~|I=[BOIOPԧUAq6ڼEyio"Ebg7h04FnQ1b<IT?8=DCUr7? -sv2BԬ%F`w s0/Bw\CM0A& HԆ[p70NEӜ[" [2,2xKȈv@]uCE޷TzȓtST8{C2O%.x{)w95@j 6Pc@ N7a.% ֍.+:>XRP:UH(?˕7O.ȑo%[3ύ^D`S*7_&JapBc:%f1hu)j\m+;o: "Lr 㒐wPE}ReJfBeh9j1l( -npr Uu*Zy~R!jJK1l9BCm9UoB1[[p(trȜt7ěu 䗑xDÏ| QopIکqAr9ss1+u1(؝vxϛuwty 3{U<~WQnvIv:vAh]w99{x1ys(ɜzd{f2|} ~=} Q8{I:{A-.|g9 -W|0q}(›~4~s 3ڋxЀPH@'8X<0ϝy( s( -)PJH@*8a0( Ӕ, jl! UP1H^@{/8~fB0 (ъ  _FȀ/OťϐH &@=Us8N0cʎ|(~ L S`ËK Ï"|ЈvUPOdTGĢ[@ -8'ړX0E!(jk , pd:jkeZڣg^iCaLjdculg3eneiUplDqn3rq 5s0rw wtbdxdBfǒf]ihhk`tjemydlUo~Tn4qiCos52qDtqru qv.*rx -YrxwzRbu]Sdv:fwyhwkbjx]lyMnz\>Fp%{ -q]{-qR{w |ա3`~ b~1e~]xUg6~jJiJ~\kQ~M9m>=n4-fp3Dp)%u-cgfIYhQK WS`e)ꩈ X, (C-(8B {so !܈ WxĞj}V=u=p|?~JuՄ9ҊMnUMI)8eռ`y~~hcԏ.6vI6#e6W|]Agp-U&SMh[LӮ vuUnE![sK]iR:tP+gp6U~U/~(Cg=yhжsP"%/V%5}◄*q0Rœw0g3ƆoLD>>C|F^ 6oO WW.!$Ր OԯjSD-o!Z\))2ege.˟ 8PB$Ũh$v! 
- -fQojvlK|7%܃(.CDZaM\)>-́˧!ct (Z1n{sBsj*!-QU2!w\h?||s:#^.#LE)!56Fy.[%O:"m].^(-#9UdiҰxiGMF*]As nks!Yv-XsN8MU-ɉF-@k*uOէdB٬3VxeuP<4/۸X'ߘ&@\ٵm'FUpW?<]U4fˏ"VYղAkJ)n -K{vʯl1krȴDnʚS}n8)re]Նl*-QOdW*b@8~(M;H?e>amаcHlBy>ӡ/ʰ%iK:B/ߒI -};p"[՟z.Mfہ1vX˅\^0S}oe4FVZ~gZxe FaL ˮWܯMZ?Z[Xj~ـß++V!+*8X3 -syxhYSYhO>yi*6D `R}䪪p3=]rM>\ ^9#&ăOennfj)Ĩ4_Gg._'HBhwj7zЉ5T>)̵;*lSTmVlؚw -Ԟ64%O5:N '7Mu^.V~yG޵wBuh`4K%?#I>/%O -f c~՟%xoGP*/91KOGcjGc噉X0oNE ->X Jb'(%7^Rg#=X T@NKdAב!i0j-;2nLtO/o]eJl~?'{W x<}G:Eo-<e;Ѽ$Ϳ2H% W!'E~R'~cK2Lf nZlB''x j3i%y15Dȷ"@ 'c8~<F DWxwŇ0C\Tkq ̸ lbLst'FL<$&ec爗I՘=r_c>Cl)1^шnjK/׶PqB?]#-XkFϫ[ЃjϨ9Ruf";!8(ae E;˲Н]BSȨє(C HP -iw1#BmÌxhp;_dRJ I߰!_cΒ#Ղ*MWw[q#`~]\zii&Nw-;e-=aufKP;j WXuXs/zjՉ‹\57&Sj̵4 h3VRf(.wJgܺ5-\S ‚zt.1m͝_n|WX0R:rV4&DYovA^iTm &I5&`0dd(7FؔaXv[sG&?N}kb*7\}O愫V% þ>еK%J>=.w56]-8 -bsl u _7L^c"^hϥQ&RP=FeYs B1*Hյ!,^Xֈ6gv>hEs׷m}NWp:,g~a -gRQi"hū@>"iVV!dKAd2)Y酼8ʟ"/CgGdrǝ--D8cK(>5!LqU -r*d$("eu1Lj9YlhW ߣRUΗ4K !6=%5MQ&3 -s&$&H(QEf-1n%/ش‡i BnKL@h&>W&Ap[Z72(MIIҩl7l'i)3KhAwt-p]t"r"k;)%-[[_px޳:iqDG_LÎ̭q)?5u:gm/H "ϊU + -[Rb'3 )m{7Lm(l,!;k-X5;o)0w#+fk[ԑ<c6[^ h\@6Lzy: VZZQ0!V7߯-i7yOܓ|]lrcmY@x:I{sT3Uy9bWͱ\ȃ :|@Q*x%MnU͜ D ԕ{S˜8QU^{v[>o!za^5<zsr -/i DfhsSNF|QD1_-&AʘppL/lh2t3fQ{ 6 - RxW9)?§Xk%'_nXHG)_3p<Z i1TU<-t9K<TWRA2E  =[JW#ʼnPZw̗O{T[N: )8r.F!>2pĢS2-kBؖZ79=B)ZuRzt+Ey/Gب\rLiqZ-0  o^njCiFy=>2ҹ.Cv '[jxKuJ zTEz\qP]ټv|}%vOiG{l2FEYs7\"eJmA:}K:Al8ڲ65#feBZZޙ DNC@@ăLY+k'kwڬƮ)۲tl6Y{YSuY9[;ܤN iAEOX}]8E2ua<&"v'ĹIpL) -LWҞ3]LUiyZ _|pi#5M>AV@V! - s)lx -"hqblyR%I@SsbO(PIe+{HĊ&ӽ _[OBTF]|}ہEҷ-W 7O ^b:ޝ\TR]SJtæ|9د"cl 1V45.^z^t5{Z\6]͂pEZ[<-V;s{4%Ikp^?]ir{b pS/j8(@+Ԁ` Cr-L>Uduea;-(KOsaU3HEې#ce7gc?ݛ@fXO+ExF3ׯD+jT%nOgkGW%[XWw$g8g=I2N''$JxXI²;co򚑄2r%C[(f18dav1O f!}y︆d`@Xnd.# 9h-e12IaX\a :D&n= ~!mo/p¬鏆=^*KA׋oLdI?$%KncTM?UK<]VT=Nˆ^Ki+y*Q.` @,C7zuBz U8@îRmF$+~P=Vhʍ*zP/UQ/mx/N2Iyj>7K.g:R 0*g\rMY~il_MxĪSrPSwz6c9b Q6@['g(WZ8DmkIz8^":y@N> \ڛPay~+(a]3DŽ 9ݞ -nqwSwVfj92DT1DvPF k8b~;d]~>n.!NyN7NXəC+T8NIQS`Y0?4 x<'Z7̃g!mDmm6ߑ Q쬣?h:il2vNR? wϢEQYN,Vo"~rH-ķzte>P7ϘioΊdXYq[p0VkWUtڰ &K_$k8d0O#VneYH36`e. )2ZNf2LS~]@Ld*Jq7wE1-S &0)xnZ{DeYJ], A-bt?H4/s+5iCD9ڴ$"!Ղ@UDm#r5VY5?bkwp\|i_91:eM4m~2@WꞁlZ틐Vr+S#Z-^[s1;QثemYeֆ$9cքZ˺eM= ""Jy\@ -.lsMY#q3'dy)ZmŴ-IF^hFgmwp97ĤՌ^U,_{ܳ[ǯyTM'Ӗá5<\abcP%:9;ӉK}*WP­3`Ŝ(jJ:vc3%ޫ$(DJi}f~s4G1x"n<rz<"^Ֆux371|;qˁZ -8W8SX.yކ>MXƠjD!E8];SZ]O!ﲶ+'+{_"6@ cpZDN.᧩X3Agis$ 7 U -6ޢ*$EtQpj\ bnIH_xBPe4K˜\FTQ -FעkRrhiq< -CӰ:5տ5<)J)79[ |#{N9X2O=xp?AوQc_;|0,QPm Yůh{CM6gk uZk9>gjc:cY[.[|Uۊ9'a%9IOeUN97%|\-$_[׏Չx"e1/\gUjneji˜n`U[ %b%?dw}Zs,pvH[xr|S%PE9'#.xjy[/jm=jݿ!i<4]lmuS"l% :^hh^ԥj9ض3t"w݉e dzv~6CD],x36H!&V>;[hdtٻH{q d|Y,> c9xD$[B\%xLGԂwYlAMk2 `jP_4 TG2¡p[st?pÑ,4<qqr1g.56W[yJ忢)[HE|ou|4.X|$^s6lE2C#v/^X I/ӧ yD"Am |y̨,̬MHkU]{i%鉪Uo*c`YO.ӊ$&1P/* _l4 -qڞx 03&<@öAoްRӉ p-bѿ*pq&s[n)kŻ뚒ӊ50NC q,1˧ʯEϴ3T֒=Eʳ -_s] ʞDCheG)I oT?NbEܜf+'}#hX<'{NfHs$*'Ofb:JCi+ά֯cXgd2_ -y/\"çs T=u^an`. 
ґk# ,hs,׭c/K_jsH;s]d72߰Eΰήm{KL41#_myZHTm v3n8r4aᗰ3HeG[v;#6_֔>!IFQG '|6,܍7Xo`+.8aycM}M?E$D%Ւ۞M_M=.ZT@Q(KC[Kk%%ɪos<˲Khhp+PlJn i;YӎLucuj8R%lWf&EUC%X[9ᚲ~$~uۖ0Tq,D]S䀡m{W?Dn:V,QW -N*p}5y HQD lElY  ɳWd2DPpQڏB=ֽjښ‰@(={zW7?rv﬷@c />QhxּĽqqѻ?V*w}Pm0ЀJ:# J|)|)ǿ*RKDžQYk(PԱuA=- sIfXX@x?X<[Kz 'ᖔr"^lQVFJtJĹHqyB+h PE+(@tjp['y `-J9:ޥv]DWY94AK+EC>;z\5G?Ko 9 n-,@շ5^"0u;^=I'aEI挪(Wζ@_s͟a8+ A e5mu݋%a3mN&Rjoس?ϑޭg5hRRXL@,L u`~p),?CSI[O܃|_HKES<5JE)q@Gŀ!/S oz.A9"*Za[)h_1pl@`ڞlHR7x!l~敟 `sYC`OItny)ՎQԦd -AF~+Cc9/NtӯܲavUџ$MWM }ō1hmohF^*; q'-oܤcE[BZ[5)8!j RkIT}):Sה̨]\B`~] -4if$"׵aR?\]i1`qCC6g~zsə³2_D2ܪl~\ܟڀVCf?/f2]INTx9"Wsu.BuN"*Ţ}U4LN_ψU>Ѱ$%&]*_u7q28}Gjr442/%dt2m qS٦#;WC37Y((Z_qN՛oKc~V9iP0" -D729X.i' P# rCpRML޸;a7ʠ+-7c|\LY05|-lŭE0ERJ8:t;)adsNr j,܋KM3 g3l+\do9 c6 ]]7 rPݰ7sƺtl˛NYxnIFaxKX9|ޤpL.6"YƯ\fbl~#W.v ݠly%yrބöuW. SskhnsY'{[̅d6=Q20Ha `Y2}z-j:\Qw(LkK܎u3Q;OX-_Jȵs,ӢHy_al+I1\vX8Ɵ?3E xn>P0p_c4ET~OSؽxz!^߮nYn,9p^ƸiiEyS#Ui1FvHo6T_?SvSthfցS&+O(1;cQ.a";=JDPDcL&aH'lZ}GuyC cfBׯ:-IPS}*}!G{nu`Ev쀟i4i- 蛨I 2@""'t-æ՘Gj1#@ffkB -p͢Lp 8GVms\iiIIWBO2zig?UYng`N,DLs9Vv(ؐN\+f6D)->Èj7zYO3 M3v]@Ĭh~LcptGk4^ZbEJh}]K'eM=ƧYI/ #b Id}H/0,1hg*%X*$v="}U牮ℤu8QE S!ڑʉ!J%x-荺j8FD -q`5zkE!sLFI{ML&ًEt:pbi)8Ϥi/%N`s:Gu Mhp_y4!i2+ -l4V?lT4-w Jj?ƕN @hQϦ`RP1Ȧc+_UߗTB7Ia -P` &h -ަ^YvUYTWv-sVT+Ť`()z:ܚE<Cp BT_ "U/<,V#J PS0uF5E@ Fr(:Z'NWW8\[ P%BI m,3K2i+a) h^VS(9sP.D}ƴ%DfB+҃ --6 -ڪ K(ڛt[ַ=sWƲə0M9fmS-E5i~[H3NKt-dI:;D&:I4PEJ͛$n3%lO -si9պ11BR:`.nX?/ceDh#-/~xV/FS,jLH|h,FB-AfC"49<jY2hďlBV햎PY}`4щD/P9[_񚏚Ή!3VTSsgO߯!5҃N,MC,RXQjT_Tښfbe_OJ{|\]EäۘɊm+>@3%io}Ԑ{@\ƭW}YM]LLyn(0rQ>bFH<)(K((pHOK P%:HBOTg=/v\e )/HŌމTyjj𔎡bj"!N}ܷ! vG*&)H!J~NN3wA|Rj&}z54E=sWG>Rm#,RQm:{Qưp>mhVɞ/ Gqr -Źd*G>ꈚ!wM/U?mvtKZȴ*;FQ3lI(Qe'NUܳmym˴+[o)>3SF&׺n‹9@-Υ/C>Sc%R_k!xԩBn2d^0Fy㫀h|oڒL6ǍB> 4 -6۳+ wկۓݏ?ݝR,R6dvCSO\e=~|?2}^k[Ez+Q f۶ xޤBB%],]`4M/%Cw)XMiz -J猑R/ܴ łb掲Qp5/_^%reD]}&g{ !e 5"b(,CssnօRї+cE)Jzo^.Mu?% ZN{J1FWś5\.9(*7Ǖ#h5WUL:[%ΑHۼ|R2I@wƷ*X[J:\ר -㉙"r^vۘ[z1:._%DUkv 1kD{$cf4a5_V}ӌCSNp0@jʭ4'Ua([b7$ p& -sn^m7&9都،_cDUPlZ[ʳ>${;^AǑnΔW{CfG Vj<0AP\2{#g? `XH-)7ٲTvTo4n/iL >f=4k=Èkԇ>B -R:Wj??8T5ՁzҸ9Iه8RRhLa<g-Osy:@Ɩ^gqɽӵI%*/9b{V 1]-6v= 1 r}ʌOex-HS\+MBɉWPGIx2^sMGO-V]UD` bOǞ ݓ㜢bZA<((&)<I/Hpq++Mt}B?5XHbcLb\k>Ex@X(eJSq!dUG3OMRiq |-!at.B.ɯR|])P!dHTOk$'QkV:~FnⰖmU W*qCZ\k>E -#ؤ'k#t]6M -^qwQXoM9CPAQ),go! X.zZ,. -ˈe1e*S%^[W5xQav53< 'xJ8:b$W[Tǘ%-HI;Ѭa4²6ܤ8z0҈p̩K{oU/w -K>R,VR;c=eNfMHNI?=)P7hkʔ]i"r:3S8=3n뼝X6&ՖO6O]DRǩGvT[UCay;쩢}.g\ 8^O(+omd)Ke/Ψ -W @4)q&*KJVo''7F)$b4^RuHUO0b -*vc& _'ddEP[+~!nZ}A`a e52$mGhTuxoHAR4F -u.ؽo {A6 -2:JӘ;rZ9C7h]]yz.olcG`z&y^F/~Ƞ_KAƧ fZ:ol#fCd$wg)xp{WJR;%=NH9ﵽFI+dvy ֻ^K0Ϭoo;X2 -G4KAĝuGZ5uQڛAmY@FO+/2vgrZ)*8߬)hR89\` $u,0(Ԗyn5bcr*bc('X=-%@gdM|eODs+' VÓ˶z4oПXS-տl1,ddnc ڌkwDu![M2$sV}@P9x4xKD1QKξ;t0;fN Htݰ 1}-_Yv,}{] inxuQ~LzC;5j_:O᳠Gʗ - *hPV5 ohiT$ǯU{O`[|qmWհ=$|w2gVkiVy4AeLRakpWC4tzEEgvؤ2yA3T8#)RbGaomUCuHa!n@ȍV0OlVZFkSh/9L6A,Q>ceZ(kWaTw)."1ߢnag=c 1 -R&!Ӎd%WS.Y]-㐧qbE($L\J%+49-MQ蜧H}1HRڂQɀ+CJencJcrZ٣}^$?)CL~5cMBɋVJSk`!rjY>E]Ϳ hC5d-~sޚ+2f;%l<)P,l\(N5زM1bǬ揙Lq8!1,ws["mqO~[tY^]Y|_~kMlkd&g~(5M'Vڔsm|2[Z]-j㪭<[鐟5 j|s$N;xGݮ^Zs_Z5PMYԤ_<]q(MO>\_HNW/Z7Buvff{R9VܰK [fISB3E;F<BBF>F97g9 -.V)mvpx"#nɿwqN-U5UqNs\PBQB[9zvfA4!k'VcdBuUWC~&?ܭ8ktJlf!Çx%< -U5D@a-{Q"1uE⁹&)?Gϳ7bɓ2`b<[X*v(4Y%bAi-ߠ54_L1ӑ~Lz e $Vٞ5=[TĺnqA+ -,} Z/xD@'ʻIMPO;k( XnfȇE zX,oau -)T_[# Hc (\8 MҫH7ZkM(K|)G^*QŤbV1Tڂ<(nAHPX)e";F~ 9 ztybTނ4]W@̆y!R[sdz|T&{}0G= -dgn=UTcn`"Ahn~k{/} -}Z[i1Xsɍ PSeWcsoD>/Dk:=*lagzN`CCU+kSiտ< Y6QU/r,gF<'H -IoGi'UQ2PؚEqmђO"ٕ-`#)t˴|7W2c4RruP>v -R*\a|k(㍇Wdngt~_hרZahGHU+]5T:9HcG~MLߩ]B5r4~ΉH օ/Pf -O(,Ϳ~*w&{,;>UϔRؿyXTԊG=d -V^0"@2 o=VL2'\0} Z.D"O! 
B!悰U )P)sy w" #|8/Fs6 |l9WQ}VөD]3n$3 -O fu[}M:L"C8fXI]jczI<8yS);GwwtƱx3e᫳2fשs6yZwx ԦҩIC_MB?H)]iRKx9e= {|e i,nAspTY꬯jzb1|#~6[(4 [`! - ."2.@HHɗޓ$$0PԖ{VQfHXlSD3l=;92v^µŁ`l'ԱX׊w`5qꨣڌ:~!vbht8x\JpFG[zP9P=V!:[1^Lע6cTDgt)egS:HpK>\S4փ>jo8D|(-@7*i cAx6Rw Dv_+h̔y յKCF'6^8I6k|.,jU -x4>40wPs)x|i5t{.ڰ{KJ :u[p \A Jz!`Q,Fiٌ8D?GHR; -[vq]'UNkB%?2/N,w.i$e~ -֌+3: e.BC1'N39OP;6Mz%s5[2`eSO)YZ6֩2nPDYY^gLGVɞcdIvuxn~ z똝KvPG*KsPydI=X&(I6SC7"g#]V3 NyjFo7e R 2!]$u#?*^M38_O/hPyn6* nen"°D shF0hÚc^'ѽBB-BP ;܄)WNܩߒ(-k!7o<&X7n*,4z%_=^ I6"56`F_T9ܴGsXoM -@FM1 - %Pw9'{l=nZ -ZĪ8njŢx\g}/N0NXe0Bhp]*lbNezW,Q򏐇ҍ*2er<+b"g!L#@rH -H4TGDR*>]&_ e Kk -PyP*Vo致ADZAZC .Bh3U0oPWT}wwI^8QP@_M` jݸb{?0Qe{DYP Ah ::*o=,:IsE3d=XX4?tS땏TIoCC#f \&IdR - C&X({%彈>S©!JT*Լ»gfDWCJPǚ5(ZLϒd(PwȪ5hTwTSo3 E|e8bvmrSe{9#Z?Bu&:f9I2 ,,H(2I0싟"[ȏX>6 bsx杴AWV$|mٛƚE<֋)zf&1HD7c #Xn -؍k"(oh3.w榈gF&r߆t]炻S8:c~_X{}[i}xLJxy_FopAhg,d}'UZ8 ͂Z1v4Ա6I@SؒӁ0R;`m4} -lQic5O򡖶c0g-űZ;wN!\ҝ?l !uK7#ө&n (m~RʝexꟵf>&|Igg 6Ͽc~&BNŜmȓ7`s2tIaunv:c* -zSҩ3 4ɛZmhe7]I&Bߜi$/P:7ji<zՂ O3d{4ycB>"FMA zK@~ ![Ea[mkn>9 |^ *Ř#tÑ}<H:S`9̌lv>h\4pEqyIu}^\2agYs&9~DV =W?8K{Za׷&[]P$`)9GݒFS6ИHzԱJl_Zp7',n)[O:kL8r 5l6rrK ȡ@}Md&pJ]zѺRU*RD_UŃ/y>y:?Z_>;6kfD>o]BnE!6oDYb-*c,{VU DkeQstl5ǕuRly"1 7YZ -RR V=jeپp@ݶ]c 1ij@#-+g5n@&efF}܇)2W - -~U^rViĠؖ?#SNf4sNJjKtrBs1t\ Vn-WT߆|pUa%jq.>!=NϢu HٯڥJ{e^ 0)A !@+b>"I$uAQOe {CrIYlPfh.<*4M_hAtr&Z^W,1薔#cƤ!F1G&yʌEHbh-¿$V屬y|,} XG]ߵeJu{py5w@]lq kAWy+ۇOy>muIMG<[QgD[0nIr+gKs`o)1op؈{JS6G'2rc%ddu*I[=*ƍ -u#jM|G+Tk*+g7Ls0U.lf!9q]gAiӂb1Yrރ^YvqM( Y^;嶧^GFxIZ*)zE b2Kύ R2J7q/8H;2 ((I ˥y]qj)[/V }P3oppsq~ǜP<*432%)m2sx}y971~X|),4)sk>TQf}-37$ö˲xIb4OxPtmNXUiY6^ٗ`K΋Z]I8XԩWsߡt5[ ;|?ݐh1S Q 5#A?0M:Bg/t]ڐfbNB(sdkBEu8$FxM;-Cfr4v0ࣰ@gVW]lB+6~ƈ=Kchۄkz&{nj&92ڌ.ՠq]!T!_2ؚ1SQp,a>~~)nZd|,&-AB "PwWl( ,Uz 5K%!DYE]EDVPOt mQqAXx̛7;s9|1*FJ=#ESkh aJڼ -AojJq%{Zpr?^lTcOVIG9z9{1!RSޏyk\7(I&dca0)>wM V{V+y?JEVH3\~;~FC]I<$m %+CU8S\ -pjƷ -i:T 7u8/ -O_zil,+'u^|,%Sbqz1LFt!&8t -KVo~N4Ғ B'~@ u3j,'Y9 -)qs~n -ͳs䖖')r|fql6s#f!xyu4_^O֐#&rw)6AZ o, [dm -rS VY< SNy>wp -zm9 1\09Maoԝ>jjXd|nY!^1GXYUaV(bpm߱q<'u KŭabwLf#fA -1 @(.$0cdJaFQBZU{c$+h&1Ľ8*cA4S"ӻɺ&nǠ8[E7׸m2{¡ 0\½ȏT7> &Qߥ|> R#` -cjmZҒ՟U**ByN,GQh<*.{KTmI1i!(GAO!A,9pyqX)0qP5T Of@`ͯ8h+a8 @;48:/:|@hx^lʻ$6Xqc<y/# `}ߝxC!`08b1 tT3J||5=sEI~*(8$K6#Z<+ųmָX;{'WI6FjR;#sCyvʺ&+6(7J7-[s~k?P]r]v7ir7Ѿa¿D`"c͟[87S?_MiX\FZ#P#jJt QSKրvB7K/Hoǫ5L -LffSV[uVC-6k!7)!NjZ҉LqrF&Yk{<@\+$;rH?<@0A%BCD -EEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abdef%g0hdۣ/Zߪ ;iǯ0Nlɸ -,Os>lśHq̙4WxԘշ 5I[lzy`F+qO, uO)lF!gD!l%m8c , - V # g;^0{N"qE  !"#p$G%%&'()j*L+/,,-./012p3\4J586(7889:;<=>?@AB~CoDaETFGGx?R@,AABCDqEMF)GGHIJ|K[L;MMNOPQRuS\TEU0VWWXYZ[\]^_`valbacXdNeEfd(؀+ˀ:JXkeVtA,|〟~׉~ˈҦ~ч~󆧨 [56j_M.Ub@p, w$}ߓ}LВ}=~e~#z~H}~tAi8~T~͆@'~2+|םB|"Υ|/}s}F2}||}h,~S~F?~p+~| |0|INK|gϣ||ݕ{m}+g=}S}Ҍ?!~+l~aW{{P{ӧl{|'w|k*zj|_fo}R}q>}Œ+{~{J޲{Y{j{he{n|y|ae|lR}>}r"+}D{ďa{ȯ{δN{0ˠK{lv{_x| e!|kHQ|ȕ>Y}%1+}mzhz8tz)!zU9{.ŋ{q6w{d|EQP|m>-|ڒ4+}3zB z[gzȉzNzv{;w[{d&{ɣQ |> |+}z*z|ōzNz ԉCpu>#%4cV5)jjJ:6.;s~jj2ըVCEJV+ǫ,o]i 6(S{.}_gH]W-Pđڬ%AiM>N{+q&CUro*pHh m7:BO"xQOi -a;|q7bA'):8%Xܯul4 -}D0[~->IpcOYd1XF h3SB4rE:"<;$;:ׯk"2wP\%U:%#y+hAZqgwu" {^!xH_m0ξJʹ(%@h3ҭyx -1@]!],\~HJ_? L@ M)LD"%9)ey% :h~!> n`NN<ٵ/?Ss{=ᰬjxkz^[uIN;3є=&^<仚 o҂WxM<iOC/2ECm*wdJ'ni}gOٲx1X{2M+HIP4#x06fV r]2&n2F߯dV n;ʣxfI;Q5Xm`\sc2EAV ]+< =.ಈKqytHK?dy]D{irn-< %c4VlsNg06Oߎ.'p dwIsmSzWzWc (e4VT8k^ԨxD͉I "ǍLŋ]ؐc?fljj5[c/ -ʯ]1GjÖNgGW,T$=]EN:uİwv E{䱙6Chf()VލLu&oޛ,fR@7W}(\N:__zQ> -}3;/e3ކTgMN2_6Ç=^9ADUѵ,S  ׂT]DA`ATT%L3Lf2i$!  D -b{}z_![JR?&Z;AHQan@({.YOwL/ W qxTd`Tk  -18mL~H nx`,F}/ék?٢ɥ<ܧ8d~G% I=='o\*_!9&(=zN4脹QQy,\e;B*LM//eG9?~?&ʯ -8QWeH. 
hU61f`@T'@@qˀp =­ ځWTz 6_B6P=)O-e7Po-&%x[A/ɷ.'c叺O( !#9KM?!׵MځZj6wu1g+m:z G ˷ˏY<"ExQtꁷ JJQwCy}yy]Y[B𬋆MGpm=>K;9qw 0\u^>>P5Pޗ4d},..^Z)QQiZokH - 0Q@s4rStvse<:lPYՒ&JȜ^4lM^1>Q#f/d!rE@*vNCkVT4*HyXG0L&8c;2: &w<-xe +3lX9cŬ* -d1zFzii0k - sʮS&0g'ǮSzO8-1vԪ{Xϲ5'&'P% Zݵah~Ffe-:K"D1o{|s6ƌkї2V}Jn-k_Gbʼ 6O+*Υq݋Ot7hgݧLK`z33tA7ʘ")>]UכY~ECh{,}4~O I61~W2U.Wg z wr4)6./s>LwW ^:9ɗ:- $ s\tPN1~C_Eb#^a>녻\6y5U׈Tw<)5>kN_*0`Gk;e[eZuNdO7"&ݵA|wpL: -V^!DL:IT- -˯^O8n,t -p6mreYME%wHҮS&e6Nx&Mƀ 0s`R#4l'2n^M\34\HW҆c 8a8m% #(5&^}?q% |^_S&5c8KS"C#%%{$:w"Y @'\,U֭I+Wfy/J깜3Y#q$tkH #ap%Xҭq_˾Llq~zv3^꥘C>e~)~ -?b#2i7 -T~-u\ŮPC3%yZhWr 0 Ulg8YLGBZ9@sʤCYxmԁw ݺrUO\ATDN8I,~,bFAWPeW3rReZ]< WݕiD`6pG,:l a'["[):ZɞmTu]RrdZ!?QipvJDSyB$#уQke7ֿ-^]N]mcQ{vWQLBc^6IpgMiK?=9`Un뢊g$ܕKVp jҙk<|?wbfĩFi ȫi vX˲&e#Z3Mt "hX xHur@j{)NGoAe4deщYZڣ e"J̴7x;buYMI :A4M -ce7F -XZ1 -Jpa;-6{\JdDӎ -aM'UL7,NnfȺU0*B7K8- -LGA_֍ɏCmɊעsO 3~6lMs8X?נ׉X2c B]UtGݡUJ9ܙJ o>.)Cd%ޠ:V uMt лohܮ=ߢܘ^HZHT5&{ʯ -|D=๝9_B.R?vup=oiHӑVuGU΋270=[Ǟ$2øR>6ǦV\l;Z܊V +Kv6Iz D[BGZO7e֢&[)\|Jd('Քȏrv!^!qKy=#ksbh'9rsrY[klʂ4jY8I{P,YM⑁ PƜEB@J=GLu,Z[1?+4֩}֮ZFOQ-81Tzw UsZf;9-rI7x8.ԜY~R8Ѳ}" %Ռ,GAW"VDD_d5৪x CaB=H~m?xH =EKEWOTv>}-jNH0sQIFa7;&1{'"kz,hV~}Hr[gǓR[[=P>F*M;Ec1߉'ъ~ ؝,r7MfGx[הPf]E;w1X( - }vWqĴufKқ[0\{PptE͞w;sD1݌(,x Y53Yh}qFeӮbZ̨P$nE ;`Α1l(I |άpس.ejZsOEss;Pjh1 m(b U>|+^p~neqZ,1O(EQa;C1(ui9 ڈsZ~v0}jr}` -}U~BKg~ATdi76.%Bdfu -jl_޹&U`K5A0.7ueg.&*%;׍ɂ΁9˂u;jx#_TQU:&K=Aτ7i.,q_%[7)̏#uEmjg؋_U-t|K -BA%7d.1͆ -%w*Ere u`È|j1{_r厱U"J@pO7bS.C%VŅ$Wu ܗQiݼs_%2TǰJéЎ@飹7c._m6%4pڀ2͟ tN i^ⵗ^TgJ/w@֣$7d.xՊ&iAwPGtsˠiMVD^T]җJ}9@Ȣk7kv.J&GN֩Mt2MSiѡq^YuT&?MJ]@7zp.ˍ>&t#qok)shź_i^2SKJA@~7{ .Օ_c&A@sh ]곗USѭ\J+#@7f. Y&o |zo|z̑}3{i\}| )}|~ }/{~K}gn~~*S,~~>~*(~{,x{o{ta|k`|ǂg}zX}lJfB}ł R:~>L~dp*~5yOAzrYzC{b{͈ڌ|4y|e/}Qa}x=}܃*\~;;yTyg}z zsz{f=w{xd4|gP|=4}a*&}׃lxE؋xΞAyJ"y!z5j>zv{M*cQ{܍O|i<|\*}rhwTx(3xy(NRyƉz@HuzՔb{nyOl|<|*(}VwՅwx'ex|,y<yНtzm}a{ +_yȤ!Mz_;{'*`{ދ眃tx؂yvz+9zқ{ˁ|?ur|bAI}O~?;ހ~)lẃ< -D#;큊ZtLsa1dN/xN;SY (5eڋȀ!Fvh$lXP+cu}*C+S,A@OB!@#&2*!Ȟ*RXQkN=%Ȏ &,1j= 6>Ǐ@3X-䬛RK>v'A5#g@x+7w~dCZȟB|s% 1 9ōA䩝SUYY=X3 dgcL3a ͺCnԔyr -lf;~7ޓ~~5f$|ZfDu`2@$\]G\@]zTe 9 4[y#'LY{@m!c>zn:> =.+E)pmp'^2%"qJl?o[:x``:X%>]RC K_ 8mI :u$iG@3zs( PZOr_xs.M"`/WS$nw",8X40m5<ńo~h n}DnaϠ+lQlW^Sl,]{ v`(`R顷Uږ6H['׳W6!EI>_pz)xxcvL0bquf 7?jjh>?^\Y#ʯG)RBQX ~F8F_D<=v~մgԦ4bDFzGv?VBa_݅Jh3Zd+XlJ˵a?wJ3AjVjIgu>LeX<R0EcrCBqf`cǒ02`j;1;/FPcqh4$p/)_uy{Q4hݓj|ۗ2<.LU$i42"L?`RԺR磇@*C!`(`@,2Dv_/{ @ 2]ZEWARSx*dR*WSq!Ѫ`$r"?lߡۨ,+@Ymyon-rI|Ԛ:Oe)Cd>49l'C{ K` HzZiLZsz.fT";ib-2(!-htB  _a' am/ruiP[FUYtc?&%'#'19Nu#7od%o ]#k7ak3RLhUkobJ2m% }01<:E4q2Cl^^)iZJ|jlk;> OP|"Oƪ/kQI -Q<_CUXyF|sM{v*4UWp`H -0 8Jm?qWߞS|lLwy ޙȫ -NL헡, -l~øjb7*&"4+\rT'Vn\5.q`&s S.R kICwv&[r{k\(1b0@5l@|=:s|ESҮ(/c[e4Mty^@8uX!L*<$TJx4k?.3wjnItv& Mҩc ,RHrܥJ]2`8ZDЎziX{!ZM)"pWXtly@@^3cW 3Z+mNƊW O16 HOl> sC -z!(A p!UjIRU:=ح.?\jDtE"5c9!ه=0 /F3h8{24Ulx @ڦ>_L֩l-@]Nk GY$"N (64{/ׯl"eGeu1Y/ìD6J*%F@M#x|hUGZ-*|>'叹5EJw Ty[uG)P*Gb!#a-v~0w[gcZ9w( eU2CKLM+2sZ*»as̉ͦEǟ3pG(]3o2ڴjyoM.}]?ڨ \4 n;b~g?3FG{(fi6-Q y)d2'ߢfX!:=\dLׁz|WcGcIJY)Ų܂FC"]AErrz{0j2[%by$(=]1l _dŶ yBsWܩ*pǤ -,g,{?//\~7&(O7Q -r# -jk|h?|s.jb6P^$@gH4<%"/+0i0 =yA ~'Y -NxR#po=F͵ڞO-t&73<á'$RM»"0XNG^1D>jhQߨM}&ǨpjO|dh2:'x iT+kmc{d4.W}iK?^-H,Q.So2P"rcWz$ajUNsZ2i󺩢qe͑ - hIGO^)uԿf}C&=44>-=m0 ji˹|\QXt:!ac7P{TN[\{$8nh/ymp] ve5"1 ԇ[SN[Z=E1U( -A%RV*Ȇ ^^{B2ણ𫟠pZ'Qw?ⷛK7Մ;wb] -Q.7 f#D(xabaO"Fޒn:`VuɖTUUNK}!oOrJoaxA؇u/PȆ2-6]*[U;/reU^ј -/.TklO"7{77T4^uo ?yi|Wɒ4}Z?K5ukqrt-D tP)j}6N_߳Tڸ4Iq2VQFޒH@&5(AJ)]?E,{zuq yɴQti_nE>JEWT$mT- -bH|apԠ#_RBfxS)yka -ex()S0I'b9F`" {1ϫ>ʑ[oc W[YޙwiP19b$,&ĀGht~In+d7*' W<̓'Hfb k| %c*p } -d>_RW2nqQُu5f%ďܖ5b9YŔg=0^n&kk;7O,5|Wz/é/O앗1/˹x%:Y0 -{XG(U.vabYG錝=2 D^9'rGL^ ;Zc3z7+g EHu`V5 
IDI|31vba0fsq>5vGYyf?B,ѽe91x"wԲ`&`h5 f0ظAۣ]SMC4aZQIPD" B`Ic͛DU֒v#(snQqƨhM^{M 7nJ&*^(,Y(}ctS[WU>= ЖRl'[N]@X{χRzm0WIGe!Tht7h=¥T/sp% k2ADotm 4Py3#+: Ͻ؇eHqNC%{YIp؞ M+(z|= PW0aKFT }r=,Es(ŘVE:Ȼ?pT,CV(fiEGArCj$Qa_}$՚&Zu2kX(]Ϗ /&ōt}*vŵSk̯ޟaIVLB8疨/&ጥ\vHk[`H6VM9LV*Bs6+8֖D~/B&ItЇ^mܺ<'uݴk:=`QV 矛L#כrBR?8Ǖ/' "{ry&p6‚ykey[QzeQ_{5GǪ|#>@'}4J~,d#ˀzq~p ~ey~[0~QCGl>1G4i~,-#Ȓuzooe3Z[ނQ.GB>)s4,?#-1z7yGoo#dZFQ";Gi>&|4ޅ,P$3myıo(ǎ9d쌼Z0iPpAG}->%5"&,iP+$R8ׄcy n" -dsAZp|EPGP!>+5n+,$ˆW$ymÃ=nd8AZ-P}G*E=򠍌5 ҋ,"$ď1UfyAndEc"_YLPGG=65C,”vy$󎪈= -Kxˆ nc}YBPDF$]=ٟpu5,%2ވx|mռ*cr#dYl>Ow=Fɤ=ў5)I),%Bω F$x2mJc?ĤY<٠OFJ=ʞH52쑂-1%_c.syzuyv -òz|wkzwٝ#{bx؉{yvx|[zc|{O}i|nz}u6{ja| N|;}<) }v(ҿw w͇xw8y'yڄt zY`{GM{;4|S(}Qt咈uؐvӫ"wp(hx8yNryЈ` zMM{]:|(|t tSBuؖv9wvxTqy-_6zaLz։':i{(|rtsdydtTu6vFv8awƕ4px7^wy7L-zg-:'{?2(| ޝrήsctnuyQv^pfwIpx6]y Kz9zM({)4rdtʝsU\t:Su gwu򤴁pvoRwϛY]#~a&uhbIQ;5)t'VAZẀ|%搾0S#Vuv?aboUsmDցzA6 hׇ -ƻ%LP()HPu'^hvPءlDntYryW+Ŵ6x'&ғw-Sq4¯Th00"7 -R.'|aO&>0HS#ma7CG|3=7fx[:I_ƛ[SK'fAр/ńm8,po R-.\ [&g(ǂ\/{,` vbLT53E7\`NE^'[ϮVvӌ˱:'SQ1;Y!%f&]r}0z[5{rKG-Eؽ%݅!t2!mjPwE5;=~&HRߣ[9t1]8tslF0KꋾѤ[Pl݁P[ \Z\!֫MbtݜGh.3 ݤe t e^VAW7G<{;!5JD-mވdta~g4Mhw Vkٍ<%C  HtHH3@6! peP2箖墦!Hy]'N!zAdUE{ daOAy~W2+yzx5޿S?t7 #mE%Wir=#^+#>^Uu{ޓ&o>Y~<|Hr7%#SRI5(Eΰ_hwPI/6:%[?!.`D\bBnHjc(x8]+6J eAF4b+i8&VxqS:}ۍ<ɾI-w;@&@[Y%sd4(aBTiC0IvRNt>Xp^4$림r25BhBT -xG(=?j<@-h5-Ά1J8)5^6JnyE UD`)r.6 %'IBXEuǏoF_Y2 ;+CitSttKn:Inq;Ԯ ->M144b3fP/b$׌Zp 6NAy$Օ2>XчdmpG|j3'l5X)ی7=a/)JToKOӯGI!S~5{X!FM"U晚!(_'* RKQb%W!6Jӽȶ5T nQ - -*TArNr&f{.qM LqL&0N;580]aBe cJNZP6f[(V2T Z[⢸Fj #ldb RŅZMzjq0* UQ,%b 3!=HB2ʼn -n-ugzTnRkVA^BEp꡺a5+`_iiSDOG@r|12LEjPm,X >^{%(㐇j)~4L@}x!YDLPu&|:41L!oΉނym/4t}x2a\D@n]˽hV$s3UmIX E^a'LeW NmmioQGP}ׄR6GrgIoFUٸ -|Mvb [|(ߖLwud|іx\"qmw(Ta]g@5:^A D{l%Gly/@bjDؔ݀:lK1I%P&Er"=1@cJ:G$x|@෺z'ƛ;NP);09%*P 0[F]^k~m(WmяMQe - g*Vsfdj4!N̩nOGjmA8!#)9ʼֻJ'SaZS eod+ekٓ9CHL(B,6X uznzMA*\_OZw%A}a209Tw"G(>bʷp0D7JלX8e~T.¬0~{=#b[1m0#̇_g=dpbܦv/vӽsa>c|̝6 rKm"V7p}1]"%prږC{+uT @=-)fwPU :@b+[Y_璄 Ϳ8q5-,rXd]]a34sU9T/[y9 )KsPת -Vw5/mUZ /VHTM51R`Ji2Tud\ DxC|եߨ:msރYԵ#ݵ}\jb17U@ZGf&O -UxftpΗ~H7qN(s~;}!^p}x5 GwΡO=@7 -DEDTK)(.UR ɔd&ɤ 7a;;;*SAdmw)k{ Y<]%t@,kM@̑nc}ORUoM%I2;5kۮO22֠0}(d͇7-ӒOg`Av=u2"\7!s799x)gChUI!_V TZ8\ x545fl)w񎤲bNJa}(П\XI\t>b7XYR2~8<T5TpUUK -q}#{+yk|NB[I i B]e$5~kh2"uT'A)bƐ|Ϗ-:+<b,tDyI;Lzۏ77ֳnFt2+J)O *Sis%zagQ{p&%pV#/fCҊ/[AD][@ ]eofj̠O>+i:%4弭'DUeQ"h_-$cFۻ +sֲX;cC%gwb3$s-[r`OϐV i DBƃz+)@*sG,#:jg֗ߒ$|tIC尞yP5uTsYmg˶;K7?5'-qhuw~Iܲ^ƺHiw*}݆<f՝lV3_TUУWB- :v_ oSGgn8}=2oNw3# )jM -. 
8c -q2 'vذݮhQ v^uw7ok;mG  ;#Cz/G`&0`CnqMP_@]j/;`'?ciwJ-?/L"=[DQFࡀO$FւKXܲ^kl(72"/4L~[mYeV /(Mo 4+1?("$Ca,HX"E>848o۴NzwuW%Mwz mKKwWl/*?/(B/ag}ADԁ(/W,kvkD嚔xUI5ᨳ-5i-LN?{GjkrtoS]sIMHqWRͥXeF XauOs~q.~,.'.v̼b #%vZDUEiF.mmXJM YKO-Yht3Mp;֛\fT4w;SE.!J~ٜ+ 3Tg8~*`-8Ov4 &+ODչTTBI~ɠ 1\"61zlxbAf9tű\'G7r0:b|zE~fS,:Lm~Z.OJ،%ISַZׇ 1,"n-(RD]uZ4I?f6ϓI2ktpW&ҦIZ$";k -TL "]Q69fܕjCf˴qC,4J}O\E+ ]x?<0»kǗxظ*O2ezuS&2V􈤫+hd& W粗uH/l16Nl7 ]p}|lU$巨djr;_֣s}@}x5} V:QOD4 -aO#%z -(*D[0LBof= B 0Bd6FTR<=\`i8ZjveI3yqƨfnw޼%{'J _X3N-ލ}wElg)i=3pb -&|ײP^"#}hKB8m/򻄙Ebov%"-{Dȡ@!5acNm֓;bEW=EEwQ1_牋}+{OF$m~HXҴX+NMDKzW-I -_[JL%k?zwҾ|B=3R{aHDLk½KȞTK}x*͕}Y>ރVՉ'sPF arʉe-3{yh+gԝ:9nb6$l`-6LK9 2G(k7%u}bLjQM4yWPD- -_CPny~3 -9[1הeʞԦdʤqM1jٛP6ds 3p:2_[(dQzI^M,)Ǫ`1;,Iڅf߰+ BkXnXZV0%+r34ߕ[IK265Eֶa:ҭN"ذ BgBS_5Ӫ8CT=ƕ^Y].@Ǻq0y7\U&64@r9ʥlG>·CP7W7Iֳ!|L} -a Q$<4t -&14$P,4]*@qtZYr_D #nǍc&k&p- --;\T.Ls^Ӭ=XRcap|n;MȹsÛWR􄾜֢6{:~#x,aGp_1OE -:WhRϙ57e2guc# Ǚ*՟Yí+!{m]& ᜬ>6-/%{c#neې[9L@Cd4S6͚W ZFsϡ|/~ !g'>C'G:dTna#| *J|X8R;H4гg +2~WH0G]&&%bcQO\B6v׀QV#EҨ]4~|3.ZgEv7HT{DJ[; /2E:*?2]u4<"?ȥ:)q=DGaW◃da V5KUB*I%T]A%hXxU؝yd}gDP+.~pLRU< dPhz珪!΋+i{[U^<:jӡO x@{$h"WơD%,tt,ͻcdf`KI" .sϩWڞh{,-c(kyH^EL1 y{d79_9+Yx?|\F%-3Ϭ̟XH)["cf]k\dqcxV?6>Oqn"5f+ -Y䯇w͵[i1BHê'a"_?::_?{p9 ],P/e/Y8*݊o,B & 4_^}$!>ra9ByGOpx Ng5xX\ X InW ă-0WXsd$i4ScbX>uq9i6I"N`ASypCwhxY4F4]6mU(cggxc5:ͅoF R,.aŔQM,i8En?ҝ?!ÚGF$NE76[◘xx1),5G̋)El `#MIȑSAy9=h*2 @0Cay-?uF5הǼkEc"x0%zӽ !yr$z±ݠLlߩEkykxESbQl_/`4H 5%Sn)@$XٮC-guc砑 -zܰaZÅJ(J9WQXDhI+@E LhA|ؽ|*<|D7 ϝa4(DÞ 6ua ?rQ}NdƣT?;)IYArCOB_"O9*ԣۇުeE%V(0Fޝ^ F]*a=RÚ#,&AGlG+`GX̱5ƨV̥c7R!`)rt .5vxAL_bYY'Lb&Jq K : ٙd'{א(yq2a7Kj36)x QfE4WwSԙ.T3r<{W  F= 6}G6ݡgh||9Eu;(ݕɶ{?y/ёkup'᧎}oQocI~ki0ʪhJ[I;n$e]%tHzgtnv&6A%f82N5u754@KgdO~Csi4/9Snا]9y7~GƺѦ$$.lwS"'eQV%TOym+'cE$ʕ$ ^QQB^]-6Lj:l5}YMM. '\̔b=daBeHU -眛 %b5?Б9_m卯Iu.Q)^ƿ+zksm5fGN:JRIi:c}wΦLe}I^6,*-z>%X<=&=f&b$zarw;f@chvp u{v+Х6cF ~9lҙh5H#Rh9}.3/N΃@a.gP;)ADwAnlgr?Q#/NglxeFC-0O[5kV\Tک/X{IϚuxܩ)s.\ ##G3a{q P$[Ġ-xN)OS -!іk-h1@Ou9=R'hLЩfJb($i+ ( bW6Mrd'\r}ȀhZţr*BH߮lHZOqĭ~!*%AC {k!Lnt:򥃕=טOKl$M+E9S R -􁐿VC>uM(4Jv?{AEBHt&@Msp)( %ӜP3Xkez֗"§6s͟Fc{RH."#O7Z1upDp@K>am'fGCAe}cX:|Jx+~WLjm®H,wH@5[UHRPgMǴP 4cMnܙ~ZC> -lE8x?'} :遲;V.Ncj\DthU&ptW -T?Ѥm(G@ENux.ls 6*팵aai85Ts)S-?gOĢt78#@ox7~8D\bQ{U0E;C(~fRH)mс@b~uϫ @ߚVEGSojul6Si?*Bø;UJnv⬭6]g܋l0zWy7=O,8C= 4Na\I[-%T)m($[l(+0dj8J,:wa_5c3"*lcqq^yEc3حm b4"}I@zW* qK z]Vc¤>EB)/A{Y昖)yn__[NQStĮSsts( -Wj;й۩T7AZzd!p^;VmJË%zvM'f^cT{oȲXׇL V7!^tjT/h\'M+9T%$oip8oPW^ OpCϠQE#7V e94|uJ3DReb -^BH(~kάe՟+&IUطƕٗt=)Y ޡwD:%b!$gF<{|5bW;F!QmBd]LDdSwb [b3(gYo:@ƒÀZ4CYF>a剺v[_6%SiiGc^]ap;KF 'd܄1UOg^sN -[iMY_~$? 
c!EUaExc4ZW9Ab.h,0X.Y>=4<5Q:ݏeoB]5;Kq*V7 -/PW%=ƈһ"aY욑?pIWFn^|r g\9ˉQ䈫F;U1'لׇ L`|1@q[5f-a[GQe>Fd;njډ1'V>)暠{fјxpJeޗ[)ёPF -;kz1jY'TlsVzx9oܚ_e-NZh&P-vEƐ;:1%ч0wMn'mocqPY訉rPJ=tmFv%=vw4 ~y+0B{d" }|~wtAm.OuWcavcYw_P@xFy=Zz4|=+OM}"ߎ"~ v~zRl1zb{oYA{O¢K|F}>=<}4~+kt#RMIWu벼;l.-\bqX᥋uOuWFQ=4؁2+q#QubІ kKbQXwO~FL/<04+J#tk(a4XNŸ׈'E˛_<̗4s+T #7Xk2m񆝬wr`ysɄ+hu7WvFx~6y&z>jflnio(qpxrgtr8VuFPwy5x.&&zrʌjƹkߗ]m ro-݇pwwrKfsڌVHukEv5xk&CziIոLklݜ8npvqfsUUt@Exv^5ex&[yeǁhضjy|l<mZouq3earؕ{U't|Ev&5=w{&pyWGh-i-Tkm_+o -tp_dr`Tt yDu5wj`&yvsuokcqISsZDTu4v菊&x0$xcixkרy=myo݇zIqw zsf^{quSU|wE|x4g}zh$~L|2v3s0vtVwuvjx2vxwuyxeTzWyT{&zDk{{4||$}}tU|du(| u|v}6w}txu}dbyZ~QTzH~C{63|?$}rۅC/s˄^t9nu҃{vuswcx|SaygCkz3{$|q!pr͡stoBuXrvBbw/Rx؆By3fz$|qopدq̔KrstqvHaw&}RxLByo3:z%{ٽosvq r'珊s;QtUpuq6avQwэB3x3z%{8oO+plqqrsÚ|ot+`mv Qw^Ax)2y‰%7z;Qnװooq\^rT~!sANotj<_u&PvAx02yl%Jz89nh oyMprq}MrҦnNs._.u@GPvLAKwؒ@2y#2%[zaFn -o p1qN|rxms^tOv>IAwj2x莔%gz2l~i.b~k^'~mt~o`~qCp~s(`~tPvAI?x1zT#|;~|r -|sTw|t@Sg`ղTcC)ʐ - -ALV aPA9zwXS[Zz**z̃VS ]r7O;lW ~xL/'HhwGH\ŮP<7CUXy96`Ȅ7̨݆}j_, 9II[O@M .hv,-B;,Q9]U a4tNNv3Ƿ0/2&l}R|,sd6*Qe%LYp!=%*:|]|ӿA=b7 -L7+ziD-fU܀#Ogʤe =<}* `<bo4H[ Z%}$t!~WY*ѹ$׺]CsKT`[fr;]|K qm\1!7mFCzaHO8!ۢBgh tʼnPՐr`>$N3WujKB-[$9TdbZgsQ3){甎9iB {_okK]-~wzy.Nė+ڋ2nHZ\;a'fër,}9yqa/~`וֽƹ%1xaIiS>ģ՗y᪼}x2y ϗV؟mn&}1|R^Ǫ܋K:kk?Le(P"[Hs?x6@uӈ;)oogm#0 2׬nu͸ nz+2cœ]zXs^)<(l#Ng2qඉV#.|ouiVէ5Y) Vn9K;!V%NDWji?<kLX}~0U{w+z\JMBGδ;ZA.2؝|ւ;s{r`lj uAV_Q ˄Kh?mezlPg/uxQny|"_?=Ay"IJbSh`@U|QURYp[WЅ.Q^Ut4,N :-feWݏ1 _2-֕t |2CloTlD^)eG>:TXOdet=~ $t4^e}'RG3|w ׹䂜^iHG2Ʉq2ݙ"YA5BV,\{\_Rr><*;*>:θ#{^+f:PCs6d5e8,,XNӏ2EGUHBIn5m/=Л)oN+Gel$Ұ nfBP}ʚ;JRRUŇhvuEvo%{^;0aLr|0^%7ULXP M_e/BLm\D;6dY46otlFQ:ufEO771M)չ5ɂt?j1S]a]LXJDLKi3cOU! !_ -"g>xG=iu#J26|&Ue6'L_Fss*vp}04u v*CIi!Ɗ" .JA̗3^{ օbEzr\Uj{߿Jo=-u ++q>Ou&]=TsV\G;J -F0iYS0yHw -ob<ƑjJB5>;T -3D˞ 3wKHhYiE\6+>UdG0Fym^#:kyQZ`J)3AÊ@H9}F{:RאSOADvZZŵBg pg,VFY:{Kv,奨]{x}wG~ozwo߰^$JhoѠCɇHgItoV,&7D'@sBO8AL2CsʡݤwH1k M,uDK6~\K"r_+?8nB@O^3=&lƨ,_\MaG1,C)ÿA(8 \B~UVHTSb+{$=BL.1Px&1ЋɿwXsz։i;ڤ3ѱ~omȀ-U ׏RCuwqWpbu.o+PX: -2ҦpVv|[2 Y&;] -P 8EvgJhռYjrk}D 'rTs)$JЇI.t -#۵0К(w>n`A -NaX-STNHzN7 -:{t  꽭Yw J] ֦g]_W[Lh s0 A)iSc@ qn)N|bj% }ļE'$8ۂ}g M-4{VO -qt\xNa%+c -f6ҁL uDxI YTn32f8rܣ'L% Tܼ,Th -a| -;S|E\dD\)vmyxgyK's,%y-`#ڈ-Z&ԤtPUu<3T>^^Ӌo] bkҠH-$\wahu4J5`JkJ 9-YٗfVaj}aƃ $=.xy%WW Gj5Keo,J!Җ)ˤM)%k "١xO>W-'fm1`*ڮZ(-ꗉ,ŭlXw:eJUC^="]NFHWpևRܪɕV: B2lPRq(2IDw B 98r;w\5I?vB7jª=l Uit .= ƴҘWOBte.񭹈]hA#0_!sKT@cs|< ip3< hl"җ9t%0=5i6@L({|"l|=ZT--,Y}זTAn/UoL.BF̎@8/#N&%eDže|Ԛ.priK_>lUiKǍAn8=7.,& kӂt| -ħorF0M~5\R,h#׉|)wm!dTсZQ@H}>5e,$˂%vףmAXWcZ4;P͖텺GօB>b5FJ,$&#j@Ovlc/oY^PkSGWE>+W5*r,$D{uU|8kbېYYDʎPōEG=֊?5,@$X=tkSbSX>O?FA =^4Ռ~,o$b؃ԅ.tuYjauߙXDǗ6O.̔FY֒&=j4,X4D$iK}sTj(aWfNݔoF=74ʎ,Fz$n8p)igCk]mT֤okK٠qnCs:;u1zWw)"y!"{*~ pof˫nq][rTsKuzBw:%x1Kz^)N| !t}>;oIufM(v]-,wT%hxuKLy{Bz: {1V|)v~!0wpMnԭ%{de -{\|{S\|J}|B~~9~1sU)!*҇Dn<,2e-D\4&NSQsKJ\B@y9ΕB1) -"3n&/mLdCL[^RퟰfJRB59H1)̍["eJqP -m d5([]IRWJ TxAЗ91c)ą3"#pl2cZR>e=IšA 9koG1wه)<"}2nl"P`cDOZpQܝő*Iq#[AX~9@ꋾ1`W)닿&"Lj'sĄk{bΤÙ^Z-Q:I*A Y9x1K鋭)Ul"هŇG;jrk1pbbKYȠyQ?ɘIH6@󕦒81:0)"uX"fj]=l1TmLYLoDOq<|}4'~-W&9 2Ѐ^dϴ \4SΫ3K̀CܢHL-Ӂ&h w`9d})[鮾PSVKr|C{5m{WX]eGOsCX:}6R|t+jOPHFh}A&|1>SK3m3<]@Μ,_@z.=B1OF?{ۅ@o8gݱ]@Z'j@tڥvAvˍ' -OG7\_ږ}Ny Yx**n=n$^p?NiyҎ?hZCRr-N%MoE0צ,棞nuST_h<#޻!bq^71tq)+l55TXF/?5r0jӏw~5`>2uc)p*K֚fS*W}R#(8s*I SQ ;^KE<|qg79b/a;rcc>ymv -;ks=R\T W=$DE!A$K?弓|K ҥ6At&& ~@" ]ꕆKΏ2VDQ2DqyG+=}r;v;M'4w_un#o/c/pCpfΖQ=kCA3 Lپɘ^M)n{.*#1wبkKʝ28g¸׿J 8ߍ͏F1*Ҫ깵N ebam.P:ZD}`J.#9 AvvC@5ޓǴӼÅBfiMb]/FԏiKQPk#8P7,#.F -×Z{^5JgLHy(n_Mê28Ɋ D#ӆubQ[GD ԠUh4hsTP0RLޤ.>㿰#&Sd fPv>%WfϳhH10I#+&tn{.]+<`29oS7,TQ[Ī֔ZHv -L[(1;*NvA并KMUuk󫿌;.Iw_ݧ Ww?/>&:x҄+I˕e*nN5դNvEiU^\;= g v}*A.n=Exd}"_Q2V8 UnHٵf1)2 
:UOq.N:GXmgj4ي//iwgGCn b8Z -th3?xV%mkWzV\b6o1@Z5v\B):G̅rbvskv<bnm$6kPr(wm˝1@$ .~fT?Cb9_,sP.3G4![b:tfc .ahO'#S.XzgXCs^LIu;r6{}O}r:wae6VZgQ-_?7odx+3w`&Ynzc:(i:sD ; e_@NXS#67(6'qU|]؞ዧyL:xh= w0sMjWS Lޣ!hʙ?<WMM5I:o y^ec7| F@a[\}o -=v]@'%u߫eK</4p''8?׿DC;m'mϯۙQIJP4%Yz\ '%.#me1˽]i6> ^%x-q/^p˼:x =0tu=#q>9,Gq$Gһ Si*НhA1_ˏMd=Ow2Wj)ݶ<,.pe=*Vue{%ghIŭ1|XGv٭dwhf)_-׳m Z޸Ndn%{)ˬ}xw.Nz2Md(r'ĵ8Dkq*wՕrd?=.>׍.3,&[nIߚ6.m vxy $t2B!]EvUC 9Xgshpflb Du^i_+'  Bȇ+#*uq9RWNn1vٍ8|Y{qw_ IֺzFU2TM8(&X%OϢ -z,nqd K!sU| Rܒ@ʟ Lgl/|=xY\Jan3y-ƵgE;ϵhJl&Q3wYj;Y6س:uB@LrC;ťU/ew3},uePxԨjF%u|%N8zr&,ӻ%+\BEk sGUk2r\b|ZW"15Uzh*WESd}6EoAѶ*b~my3}/\686epD%Z|quExLUUM1K3*HW+2 -ţPnٻq>Ū\X`Զymzg5>79C*gDTᛀoRLIuLi9FQ򘐳u+DK*6'x]8Ν8uGU_a$dwIvͽwwsV·5a!O;٘goWo$h2Kh_CFQLqeIYRhiTLk%L{ŚqT,b(T-k#~J ˊ -|dJ5]k-9JTVZ%ɵvXl+dmmѶW2G%rR Y$5sZ  `0wg45 B -KAI^8Rg|Cɳ{%^!6GXM_2Hc:+ a 9 9luEp1P S(%wInQ؜buISYr19Òl6Iu!)NE'ñw y !<:sوa8zśc m>2HVYlpK%+)J%?`9Uv7 AN$*e8t%?}jXJJe<+Tg3f5G"V ` 6><]K7蠰o릌?>r15 Ľ(8]W7ܤKά,0 3tU;`y! @]䖬ƮAz=PGxx憳s;K~L5TиhbavZ*C83ʇu4uģ#,M,5,*5a0:T2Aȿ ]nd- -Ssp3 p0S"4:vhPXQNse׬g2gQad$!#|އ]m^ >tq"xf1"65 - jiPiɵdXyс/:uptpghaQlj~QMИAE~3թaA8={;Ϭצ$yASNpĤ> C2g2)tuĢ9:U5.nZ<(5"l]ߏ豄e,З/#C%1SE8+E>l5x>Hܡr&0<# -=Nƈ4aQ~bލ (=b)(ʯ6z8k"gތn;gpbÜ6PE1Њ'ODs3A݄/6bⱎ>vl3ܻ:A>Ow6U3F}۪NxMLv؝$cx<2JNl%/|&95|\fyWƹASmp7@|r8;@lG8POB>AG<&?vr+F-YSC{6-CVJ- /JPO($qWg%Xtm:|25fI x]U]~KEjrOcUx!G~'+ϲ[ !6)8OǓ O~&:? .i*h`;aݣU ik M%d e=2ܣńlE‡5ȍ95RFx i#p QZj|m<9ܠ xKrޖ6%[kW;($ɼjda\( rWRdD=^#E݌+8O{ڋ"Bk|HL߬+ɩѩ L蟎L8r(@~ ː 3+!" o&K:h}Iz"3XOh_ۯ6:6S+h&&q$.<8ʑUG=5+pT!*+HO+%Ul|0;9:6c.݉y%&Qr#JߨVj/p 7]3Zu^2C}(TB+tT( v6REBq?4, -gduatX\qD$ \1#MѪScVciS5.uZS[MҴq[5uS퓏yf?sy{Ϲi >kpЃFVxى bf`FvQ)vm4ߪ j:pxxeOJ[ڃ3hZ4(3i7DJ2!^"Rk] .$ӧQm'i>[qpXX\ݰ`Y2@HM,gNƈ8Ab5?6h(cDntݘ.6[ ]$D'V@ 2CsĤ{p xd*KY@@F`|A -h_bA4ϣ#~n9DjX$Ȝ),3ɯ@|ޏQd`n _{dc?a0d@cgnK(!b 2KPP-~D1YRג!>Nb_JY8ug(w|M&O~6v  R@">6r - 55GC|?$A#[mNۓDgdz٩(;&N:6< -#9+1/"~>1` Xt;%$(6)W~bâ#4TAA;(jL@᳝![2ߓX4qPC$ HK:q< -:⨾Xn9Dsc;Αѝ$?İd$KXN,)Z'X[_8z񒘭sM5Duȉn{_JYob=>48yM><+93C<9hb̘M"IP:_Lg?c:?sВL8$rH&yng$ qpyP3#C'Tshnrpݜ 77{H+ a'/"|;isyX>a 3޸\$ -:EgKg3ijH4/e1[eq&8&]!M$Lfɼd4 C\;ςxfjJv#R+g#[5z -=Cy%\0ۓ3 ]{κ@Yhky8V0ehg{2]ݵKQV@>џ=H#&@pOL6z1"{R9&bqcUڴ>3WvLD6yS) ødJٓR>󣡸bnB곀!Ap="N&G馐OlkٯqkfniFH3|T3c!Nl'hQƢe7yxh\Hj ݐ$fsNT{WT1K4USp ]ž#<6vZBqi`kyΙpNap{(2Vf15|Mʀ(CuH` "m@b_RKٓň^>GF'ۜOX髸y(ps+;(,m@c}|=V#:Dd8ڞNд8*]@RC5(Aqy'f9DO?N=JDl]$RS%v]i=I;CrbփR?,J^h=XVa9 NZ\'\|[\A=]~j<3}~A֐0{xD#:&и+9%iZz2Z۴mסc]>x=sd~ ISK 6tӣnj7aeS=;_sΫ|i _YoK._Uj 7}_fع={Cg<'?/_Sq^~ۛsk#+ƾr- ֑v$ż|{> $(@"%`dt()~3?SjG+=^S,FeJw&kWJȲYU 1|٭ُ8WS}P (.( 5-%ӽ#˕O{0?rD #455B"Qh(,0J4<"W镯JJ*T2d؈ƌ8Ͻ,\ڛ,[r5B$Kh0*\Ϝx Ɨ\W]oo޼u6ܽ{1Oug]+SƳW}ɰʫ;F-s;&|U;eG}T3~EC 94АCC 94АúGrؐܯW|gWQ=C*+]~Slb߸}v?vʗ[=Wis&䰉o}eWT^:2lũφ?qQ{w:趺'^د4㨝=gztN;A;HDa $,aMT6u\lk @ le kH$@ bلgpIny+k_V+?m0 VM0-o`L)E`Է:7T4vPk=Ӫcd (@2\Xc\jl;׃Z?0v:ZKkۭY(Ϙmh[_f.tH澣訾QlnU/C!85h rԺgz -e [Aɯ3tufy7ޘ MG$ eѹE]Wmv|Sk$ۥ;5 1\aꜛo -EF}o"' -1<,-ǡ)Wxѩ=ޭ[ր -B@WCYI7Jcx͛7j[0 [&Pm0y.{FXCm` VM֠1!"ej7ifƗ9=ѧ,b3G_% ْcjD8 p`y9۹A?ZbhrBׯx/Ι1{rY!P şkK"*kEvٽ^Ah@D״zÊE9k)&qqtRej*[`P<},fjϗ*T3~Yzh.7~0$IT,y:m2وKrKS4)ktA0A-j/q{͚x)m[γxڧnYU7†RJR=x>)mO yZP`Z̈́-.%C-FߎnuE(;'lyzDm1E,~22Yn@,`8qPSiW+ɳΗ[ L&$LG?6Đzh¾ab2e<Ƌݽv:Fkf_:G[/~PPNOMC6bB dPǏz :͝%|76\!6>ñܒvIvD02V&J'kRhk4>b48a.# -:UΜZb]0,X 1 lĐ-h~Gޥb1aXb;eS?I+Ssquq͎-݁,7?+ -Id1Đg5]U^34.iFGWڦPv{K8[ M6zbOnR9!1*~*Ǘfگ^g;X -ǬICg<=_[q 6<[ܛH"<ݐU^81֧{/Cu7/̙FŖOӢa\;=2N*GfsAѝ@ }˕Mim`DKc*/_K'c"A[|;S܋+!{}(GwMs(:>!_HDS*]Lfs&SAIG,S=DV$gmp9XS]vxeǽo://da<:svf,((JoRB I&CE&H ЂD MRB(BJ" |3gū=hQMiy &hǯ躇/#iח L8N|AHŏ05vԧ=2S}9r9%2P>gfI쿢21i,:8c{}M|mŭlA];m]ӌ -;pH*TePPPAݚNN:WmWI&>Vh_% ?|-}<%+Sr`s>2X8v|PY%`-{KsĀ9;d dP Gwni8ؿᨊ״P65 Ա&3m h&$@qw`}!IIR ]U !H#eA YNƿ aMGDOO.W>VRE"Wƙ_'m_ _ _ _ φmp10 
-@Y56IM bOEGǔ=J ` -7Y1~~02đQ*ɹO p[_'6 #&b#YyVc}a8.'pYi!EIՃ}ݡ:BJfo{6in8{ڧE=[1l>|쀒4?13*Gr#u2=fgxgwsڥ X% B -b {ZHJ,$J}B[kG8ϷYnVx[󝨞;%C_Vɐ Bb(1ԅG.l…>- s_zrʄ9Pȫ_Ra %%njE'F ` J۟ J;!1i*Vh3-4aɳbTMhͭQ^W.t1soENMX[CJbVKt'>l|RJeù, =Yܰ0sԏJg JgC EY(PݷVy|Y1'1\Ɔ ='m%ڼ&r Σ#ZO+= d r>Y} 6b((Poл0+Lh8.Y4H--ka495lj\i5^~>/Afg1[zD @/Wݳ\Y䠑+c׾tHӤUR3BV~7l~lRo[^MFt]ctPDviLA ~ϫމ4U75ri}geofx RcǔJKCĹ1^xWꀳc=dmbBEv!SzfECgq 9e& rov  WȱŭALً#.cC}h3:'Ty\r;OKFԜHER<`uJm: l=! r{e'gʉg.Y2);}9Z:6Xap\۾K>{#Km[¦c?HUU^L7McUJܒ_[pY yn,h:}ԽFE9ͣHӖ1)|իK-_9ܶxu"u VsD'/|+\8e)%:(kD) ԉHCv`etJ\4 -bA ;`;`}1 %۷Ȇ{AA! -Ղᔞ7w+C8z֪QQZnh(PDM D CF !@ B F$`,D=ǧ΅^svt?=&b2|:d䊺- -2ɧ -wm]Rt ! \,~M; q0KMLlXgCu1{nT=ܬ \fS ;Sb4WҲ_u-@ԍ26As t1zo cqQ]!wux'L@Dv"YAdW"_d-^K.fᨥ8f,˙ ʧ-+J4}4|4|41d#tĐqHȏC5-H_z@Y $EdgHPztj*qp}FAgÝ6 y>; Ce? Tl,~bh‹xSBjddIxpnnߚF/Asw]= !1d"|u%(XFRK\λ,3d*1?A8~6x 꽸=վJA3&߷up[e - ke=VJ0"sd;F=Q4I$GҊH1'} M@` ",@U c +'_ fg=4נ5(+B!b(A \PeY{r _&?+NFpG e񝢤Ik\9..1%2jGLDhZ7CbU EjTϯ՘c -_ -\jSfC+2}؎DImR@uuFs{Dr{1MG(> UVk1fo)BQ>+-vżv -QHaȉ<#T~#FӺj?@ mUbel{DOPrq%z{uA4'q0#sY<~>H` &!1wS{j%wM&/ۭܞw:৻]S: 4rv.#7Š 6C9oF-zg fؐq$Ɉ3E0[Ȫ4yD A05pݏ&کo{vJRYޓǦ8MBǸ#vĵ-4u-W7EZSdti@ ~@9G&(@ E+`Ugvjv%f+j*)";M@|=7 P9{yzja mn[Pϗ&˘h%MEX8[OBpbx,lJN{[ӻ76>$8ϱ3NX;U[*"#@M=$@@! !%aHHpDQGiժHeQos{\[ZAkO5:z٠kg`x) L"5;;څ(5hUw܅U/?a5gIiBxUW~ƃC[kn -=Z'plX/\c0v+_G;pж?$ttKEE>Qe`*mmw;@@ֈp -@ ۃ-:YE],ܐ<>q[T:fS ؒv$}X {]g0pƶszp -Y X<>)(m!Ukd\Sm(5|7{p  bR3F^GЊVkv2FP-b$e!RQL\#P? / $Đ?CaC j~^ťd+XRëKYp~bx7fyHluƴX儏lPm#b o4Đ !P\ C1OXKN^!dQqEDV05//ºPqtQ;$W6 #Đ5dhAY;Be(Ƽ'EWri R =)Zu0;.ĒRPB?'ڨ*S E1"~6,u߷Y庺/lJ|ք%VTOӈ-T8-[ܝT$JfJ;q\i+ cDʆ uA`[gg6v%q |W5Ɉo$wud򯷤8鍲dƵ :z (1"b͝ Fc&8o?qBDUq艝r +;{."ɪWdd"[O (GUhFCZ-N5[vry70P2%M1͢ڵQz.i4%u#;5HjR dPOmگE6 ^p4M`S9f Qra֍<USXը )e*b}ni jOPv+}6&/NO]]N 2D,oGe$^*IF!LVN]-lPY\15ӻ{U␵[Wf<2qP9 91W̤ #G,W+&qlWlP_CVM:;?kreyq#(shz8VՓKx99\6̖19aee[7  -b`j| :;: ;lJYۏe]f^>1F05⁅-|6g]q< iR*j2 c2&3rS,@-j=!=X󏎯4.L/t0sT ē8ԜoZ,26$w63=O#lv5t%wծY6`K1`!A`.h?/@b!,<6NQ GUB{2bhx9BehW6lD'&p, ;X:=8ق-xۀrV⦪:ԋa -lژ[4@kW؏CϦpL,mล[YX8[w%/}Jh͐vpDkCU)2O@Zb$M8Z`YSI\'-a.bEs)z:K} UwmHW6dևb !l|f$C"WDZ-,b؛yoA0~{{!T+7Іܣ{nuΦ@ a3$&KB k.p6x)s|otuOѷc#%wBGJ 4͵l244"ou`DG-&ffљٜҙ ӉӉ]oOcxUO*9Ro ?t4hLtR8GmQ<,n6@.pl*+~ռ.MՍE+oG(.D D o eYgx[RGmT7-JHaH97xF \qB4ID4-{Q ؛UlXeU+kb?;v;w!"r:UJyM<˦2ư+ՒߒDєΖ -])W|%n^p] -`~ylPfxG_>#ޗޔP& -qrNhM|WzW.N^OkmDy(EX4Xiv( Vu^ӵo1IǜY5|/+tt!MK)b>_ -`) 4#mJPcSTCHQT E]94i;᜼Pْ%7g2O9?\-nR))'tבZymti 8YPl`jP.iLgQhg0jT2&!ߩM(*W,b[-'xd^_$+8+c::.*j`(zH.i/4yThDvOMjUk9AnbJ~L) -hW+NJ\o ,eCFiȏPos^l{`OgObﴣIq^NW k.f -X4#xQLr`&'ѡ -(&Bs˝L^ x8<ᏺNiaNN8mxmVlʊj;DŽ=cqߌ+5dC_TRʆMϣo]mTʸkEI729=ބB^_|Q}oT;Yљm !h!7fmạ-3VƂ.C}A;[/*՜SnK%uGeF )mm knDC̗ xߠFyUmm{8N.z^ fy ܆Q1=!էzk.todV4N -DhJጢ.*o\l`} !qQV}K^9z|JڙZ/'CCR;XJYL~ F]k+I(\BDV -p'6o_K 21n){AҜ>ƥ[,>=ё,jjnNkɧ]h%EQ$#2Y X+IV|1qx`o֍S E 2\9#g<똃!׼|pehB}c\Q{uo-+^[s_٦M9N8" ~kN}+Yg풾VgcEYIa 㙳3nosqyoSsؿ;{n>[}-_7~Kz/N;Ώ$VUdj#g&F0c-sIvs$۹$Dx0p+bi ҽbοkZUŏ2<(J:4C< 0QW;`dLY ^a_ #VV R)FN" ~AzԴc}-I_a6BfĀ96N0pVP-i7c&y!j)Nxc2!B%5-Ri1vH`TA8Ba7(:@݄msK[)(&acc G#)`B1B"uHu_Q,%9;[@^5{ GLih ]#CLևYd]P:'kX B!< IQ|q !RX:a<CN -*F8vJzPԁ6t4C-u` -K-PDIqIK!|V0܃P5yDQB1mDڛ fP* в@+ 2K!i -OG(w@ [y7M>QʔC:31 h!Rq'Uwi&H!ŝx?*VBXىSOK_gg͚a eO xޣ -24p1Ι f2>0 -30b+ІJR)INYZB4$m݀X!^qTv᤮<(:`]M,VW(']& [|c'2_3~ept%[ ÍPo#A6{6?C#Z"Ӕ rZJU%/w_#\Ziy=$S.&g FՁ+_!p7Ď\=oNݭ|>~1u[[Pәln &z(jI9-)-Kz%DUO󵁨j e.m:NzhRfTq'bhbKic0j2[/JHDIaaXĐxҜif^oo6A.:@zڷ[ͷ, - % OzM†p<72ZkĊRF U^ҟZv]]L.Ek(2\5Fuh ݧv|mgGoC}]s2f)ST.\UA?[$KvK)8 :_lY dõ@7ZCk? 
-m#vL9sw %t]awGЄv,МܑҘq=üfuD0#rQduj(T5Huu]7wj6Lۚ(+CË)(jAWLc ==EՖh17D üUuVi\5X k-z#=b|/'&flL?nf ̬ -Rn^]j$v`$zaRfGL*j  M@J(V5P VA4?vk |з,F^d˚FERsƃ(кHe8"%D|9N='hL8d 6HWJU %zN_xV.H4Xqwyy֜.^?13p(BcҨGhjFhtF=I1˵]}0IVʵ5 :#/ȽySmقiNվv(sq-6gz,*9 -lUT+br9V\g)jgwe7i@F[db; {ӵs2*o{)G/||y y$-5k9ܢ;^Z ]&| -ߣ)Z6YY>O_>~lq#1!ˮ%$ԧs.c:Hv:#H8 H[I=-2m)gw fp -N1[gwk{wgd N$:YA%`, U+s:)%ŒJ$BBQZS>13f1f u2#X"#"-*=I=.O7]y?>s>?|!(` 02 -UXeVú-?F׽y.pپ=|6vo7ps)2  $C:5f(0Yq ua`6{8:lgv.N.py.E@fk"F{@R4.BCpRgA"BPzaMw!Paw)w[C·Hk :XAV; pۀxt7 0^c LFp࿠Ө71xȗ"' c<[nDi P\l!{rVn{H ZLTPEр@i&bH;~4f2~M RLCߑ&4D! 1W^g+0@H"1L"3,&~D+cD}:/a;E1^1CGoO4bu%0s==2hL&)3|tD1Ww_\w -> -sn]}edtsc Ym -nY ea,8Cɉqb!^)x/Kצ_ģ!l/Ԁ Rp;ixHC@^kL; 8Y$fR'>pɄ&m,#Äb+|UA6kz$w1:OmK+>h4ˌvpA= NԱ=&Eq.>L -Hbs,)AP&Y+DlF.P~PJxuTKԀ1j0E,a!ĭY -Iv@d@/yGwOeHÏn75S{Ǫ<?ѠzTQ9"K9 -Jxr ',WUUW~R:*嬎|Vwi.Gú_5-A v0b5ޖ۳~D{u@{|snu$F!ۜGO c˫%ɢ3I$EMF. k29!:5RsP AhilA`j%Vپp~p~mXPG>ب184aE^|VrV\Hj8VKE@Ҽkf)Ɛml2l5}_e>ְ{`Gvݮ lnDJbS+ <*/_!jSSeԪzAگ\QNr{ Wة -k[A(7Tb2Yl䢯k^>tCl&25d]dqE2.ѱ2#C_kOk;5 ?iX.|PhU6m=҉͍(B]>:&"TN)PrɹH-Ց.4r[cNՒ2:*r}@h6F?^r_G]vnz?{8\JW'(DFȠqaoa%Bӑ6-ZNRZHqԱsJ:ՅtuU.TVy?p>_v%\Tj煮 ;N&)K+M?Yxdo#+:zZU|FuT؂*l#}6C݉ -+ݾZ[ݮzGVV;u - 9U/,CrݵO'ϪJ.I5d*]U_+5ѕ 'oG9]\ 6Q 9TC.|~J;mmjw3LK-I^vbSŞM):<62S ǥgKUwH+Ziz:bBc3ivVW y14i?9;|+ۙ%9‚zmaFMɦԦݪe>HRXܰHx?  -/^x rߡx$T"wHsVcԋ5v/ R D B?xI! CDn\yrA=6B -?񇄃>;?ʣp}* A*P8Bx6kMB!8,I&Ւosq5tT&}oeυ#g `*CD5D[́ I˭xbˁ4M8!Z/EDW(FgÒ!ɍ/1$ToDU/ѠxX  C5͍4A<-@R˱8g\,*Q1,U}UJ Қ>^YC'ݕdϓoRzR?%ޗ5ˆ j"F1җAB!+k"GWE U_ƤO> -Sˎ*W]Fv5[v;S֑T6یه[ޕM`mYH1Y&v5rc~ʒI?&{b֥uGC7}lfCȑsq1cs`f`f3r5UMzPv?=Km{|}{%G3g#6Tde6AxI\셽D-c:W(  vn[ux+hgi=o9!+_0(<1 dOyX6o41# nyW.c(s8$l]6 -,] ݎ/r^aB6GLD>GcH"Ueb89` 'Yi -1Gm_jlZfkǶvUSoAYx1&ez; |NWӑ'%Fs& 9Ic ʹ\=vk=pO&a%RjCέ,b˘Yjz:d9a)D@!s)gLAƁlg -L`Ǫ#f']lnw|}>ȣK& U|t "znJa()5Y9U @#T4̧%@Zm),73lL}aw.ךpdQ!BQ%6!0I2<-6=LIRZT^":,ESF>o06 o1H4|9g -(_.:/:1_^<{'g8b;!|7-`27 '0JaBy{ps73ү.y-4=߅:Jzt[Յ:G +:wN6w'o"|]>\r -[r OR`,k99 -)[V;1sx啥mdt@CPT×&`;$8]?} -r.r3h킿G׺_zh1a>RB/On~ -rs:5+k>|YTW?sD@D@ }C' l,A NDQqߩ˸TCt,V¸pZZwQDE@73xcF{ ny(uqBwB%AT<1tXt8IpHK*NV㰑}8ez :Lw n& ޏ_m #kl -^Y&"@Gb| RO<>y([ca VZ&J.ę];A4 1\;8:dUwO6)#0(D/F\x(Rax 2QM/P&:38{ Z~%}a! f3J7\̲asppMܼ"j0|H?*b/|_wȨnPdїw.>^X Ɇb3Al63lSyps(a?\ܩXo|)QNq3< c>N%F|;${CB3 -:#ɢ$E#U 4$! h0H$u/AjH#Q`OFM5A/ ,? _W)`-"5%tAH^XwƂq Sul'uĀ  짤4pFhӜI$qҧ/lH'R<^;ң|vz$Y1`%QfC"R%՜QRk"8!]L&n'L'Ly c<$ >#dX[8 O'!w$@s#͌KvM -^H;I:M}:6}28 c?O2` F$Sim-y9AŞȈ,<9Wu* l6pəN?șJ?ΙLq| -_s? - '/E`=i -X@H6!G3=pq1!N 'k[cf_YѰhH%?Iފ$/c AtP -)&C6TTT'CB#)E6s\VHFīC⦌AC%-wɅ~ɍ7ҞWWʧAe쓢GY~W>%)ȱ dv_h2hC\&%hG+d ?dTeV(^K6U==?^Q?+镾(푿/1ߕ9sC1y5kJf -dlrU vB$ܨ}(_:WQ\R.Q>ʭ*}hXv[Ew+k;+UsΏʛGNe?ն}(=7j7y T -u^֨6F:}ZnMjpA6JsG)ίYv35zμ3  tGnln6zOVGGƧtQLfm^_9P_ɩȼS_J]-(go1?gE1Nzfˊ`!!Sܦ%sYq'\H:PZCⒺK[utZUeRZyS1Am.k.Ⱦ՜O[ng=g>+ͣnq R++QӡI|ThsH⠳L%#}52l:14Zmcֲr:YaV&+#!+#w^A<'.Ib/#U05WCZc$.vwbϵ4Sk *YM m *),vr ;? E -NoD0UB eX+W|/qKfG8#~}]Qg˸/ԥS+3KsJJ:bK:be{m%M̵`<7C0K `>—.ywóIYx4)p)1#g:|#zbRsy٭%)iuu٭̶ƶְɖІк2^0QT|/[d`W -6iaXQ#4L[ _u68qk߻v8$d02ψLojig4;z:'j:+Z6n2y$n98W;83^+)\?Y}M~ fLgV_!M>~WW$5Mce+'\=|o{GD&|M;=5OU9MX~{1}e)ӿ%|_jֻܾ7`{"\ =Bdm4SfnXQRלbV\{F{/{,G$=NNr['V%uŻ0缾xɋXv"ybVG_\"U"\|#z)\,|k|8)Y+>/\_x s~$Mkۡ@w߯۽g_^~X{`v -s?G"nW降2udrJ|41`nb飻vשO?T|pt3˺#/<=e\н0T%'P(.K 55>kfK`CA[K =QjctVY! 
>i3}Wn\ i|G%_<P` jF-ӳC#C -[ S79n&܂v Oaqb#26ǀ#,ppzP@tG pG*6yuMk$C4'Z$]$p˚Fd i#SK kr*[=}!( vE$]QBX>S6?G{X!ܖCZml&yB@h[kAD;N'.'?EC|A C%6 ;M Zi.PzDy5B0 xb 3\\Ҍ9'njcFCoaFBcC1_1ac#H4.!WB$OM$&YcE::ܷ}VPxku 6q 4l7_Slex!|<9@|`8@r``FDOPFHa$}&m ' Dj!~$% '_!o"ސ?G AS*=AC` -d?(v4g 큏^ -5Poq1~4vHd%;7uSꢻ(PZc^Q~}I@'.Y+Gѣ1q'oF,f@ -֐$b_aA*/!+zM_D%<,a>a>ֳKma=c=>;&n3[5K :Y(KS Z{S&]fsAf?8 ǯF384惘,ν_1۱;xbx7譼qqxWx= Ӊ '$ AHC%WZc1!7pUW&㹌{(Hʼοz9^+Ux>al~ģS3̗ۢΤcIm!xA]Ym9pM@TKAts}O1v Qvr-xr:)SK#=*M֑W>$;>.;|U֖D(i -Cw^SFfbm -9^2W n0+G{Sۓ44 "!.9͑q\ w_kWWW6˨O3e ?JbM DVs n6.p@]akBafG;&&A~V4{`>A;jg)Ujj}OiA`0EE 2@0srO5k jQ&O|mV]:#tbTѧ4_͍mf閳 v.qF_꙯] ogmӥjʦAǴLA ݐ*SJ5gj |#Cȴ & \4q~r#~$Th[xa6WRLeYBYW.W(e(fF*^K2%,}45%&jl~vʃ7n2?x#ѥm7կAUbTWr%Bqn:]+QUU:JӤKroTsU/ Y]̷9@g[Nh:iᔻG]is7=u(`Q{K}={am"_*dZIPYhUUI :ValWXY;(EJ!$T#U)ǎ aUlbY,Xб8Vڊeul˨ 2θ*` s>_=oAjfqf٭ AX:Fѳf܌VHG\,9u*LPrX}2.8S֗>`Q2rm-ӊ -6'M[1~r/}e?2 Ըw)5̘1ڒR=rE+埏O\{*q)sM[R:MBɒ%rJcKfŖRv}US~x"&XolG!CDN-~W:uoZzC>j5 6D2wSOOK=P>{YE1Yy+/:{fi乺őEgD0?aXF&[^՗fݯuwwͨȝwU6[h]oz>9hqI gTJZQfAx'מmҙeȨ '] \Ct1ԛgЭOe9OԎ`ͬ #:[S!uӂ_&?r/.~=gwp(эsDDgo=0}Oh+kNCr^yg3}4i01**qzL91Mˣ7F5l1\Rmu=,/4 dwPiuDlDf$;|Em"mR -q{2NԦibS;2t5Z͇m~h>T\GKeng`;Owv<#mb=tYgcT-Sףizu -u[-ۻ/[їտF՟'K2x\$CAwX*.U CvDV15ZrD2ļc}\ᢆL T <<. ; t Pw'W\5+WN OT(ha 1r"c \Oxܠvrdq\r1Uvb-Rye:,~!vD(L@fQ0!` --e'x!'і;.8lXVm`X>nVT2P2e`/Fbd64L )(O 0t,܊+k;Xۀ3 -*\>Pv; L[ CfP9>r.PD(|;V -QEAyPu |9C}yeL.$E:τ3LbB+8̅sp _9}n ҆,~_偈w8SIYvOR5BR#\}6,9b>`w2')9ږ -'S!@| }5+f}Y;m7aعI햊HR{9b9vfj7+#ɶ!vTJ3LD8V-34sgR/4] y>P9yc󾠻y1ew f$쑤iDuiyb*aQx:kz7a=RR\ -Ev-\o^ <>{.ZdCf!#,Fdwf -,=E$A£g^rlSN*ݮU*%RY*ɴH -fMS iKGk kYP/NwA.2x@CsQ xc T/#unkC9Wan1'E;Lj}:T+Y2|Fcreymv%G'?_(4W*|J){'tRSe6TAA1dd/4RkD9%qgy:`Z;`|;–tqZI匨Ē,B-ϒi5F&$iLS&v 5":D3dN]I>>Pޘix\=&Op[߲}V}صMkkhᶊdh,FY $:!ØsYiK67 ϳXLVFKxX-n`wnh|pUCa{Z˯fӲRGxAH)TyfK\k`-}94H6R8Z0<`&3Geܳp8c!sgЀȕ5-ͤc4l#sjDV!P&:zYvv͒`Q4>5b$Z1NIַ"W x,XxP0=/ݯ>]:gvu-ub7ZNBmHm #Nv LOBk))Y*rEEr*H6'?:"!:&$4T3%;vNp/p?S߷ͷgQkMgcv;)1V;Mk㦓Z%V0,s tᝓ|G6'+0`pA#\_^*lZiʬ1޸ 8ڹ]QMiѩCBBH I,@a ""KtXc[[;j設N]c((:V*(ygI?]b.&S l Zϩ , dqxWC^Mdg Zm !*q2>N1<gtsqCKC+_5ûeRHD=FzZB@ȆS|x Yx~{+A7 $\3#bE:"40Kb*Q"G*r,ɰB(Z_I/A#Gf4' Q̹IHc2/ˁo:6CQCS!&L0ZDH ᅡ<f>L&2QC3fB>>GۭToREHϤ$37@`%BB0$0EPZf( 3ꇲ_TOx+BoӳC_~ %>Q2J=ăLԀLV0_ L!0Mg78੐OBtxER AjA6П`To  9Qrb4 - q##BD9} + <ȶp!UkؐeCq6&o1ǀ1i3:y`A%@# .,&1Li0+_Q0.UbZ-6}99r0qIe~iCe}KPcteN2Db\J. H`͉~j3l}bY3`YȺJWFm{;}}^!ݔfeOJNm-COSy2oHa"`xՅm83)/KYu;u+uvfi;tҪ7r6;[ Ο tm#Kn/s>Jo5#baJ!(ְG -Eez=L7_OZx͵*ӵ&JFsG6s{ns>nyܒyr&פS# ͹04BאGS zpo0EfΎ ퟕ-Ko䨯$tfWX۳[\Zhq6{L&ǩ펓9UZGcnsj}ywGm|0NG 14._0/2&I'd2^y&Wyft{tr\ggQFgYFgDcGmq]7mx恂Kw28b$-5$~SP7/L+^O|br¸p~!m=M4sDqEƑ9uKr.ϫ)\W]]ї}E_{WyFMDg":vFcd[Tgdk7QÑMя - :3 Q@< 3 R3^Lb*I^LU#)>*"\cot[J72MFVYzvkiv_t-{(=]yŨ-Q1TC C?΁V -SheR5[ swi>e05='撚"ڙӼv Wͩs\=w[sOqu ~(⣰B>"--f5<_F#Wi=7^7-ܩ= -UB)\3% A$)7#|H7_f~:ev+%nk{ msCs"OԤG%s(EV.dK I,M/OTUFy\!'H%c%DD6$)MsYK>~*1rS -z{㫻Nmo) -9XGqT3uܔ,H * qDy\BW;"uP>'W&Dq CP2$O*#V\pFMpvYo핾֍}d|OI2Rb*LT(b*PSTs*JKy/|#&sc At -%tR)X%/ą*daf d@Z2*$L JqI,R$Gycçn6e-.;[Bg)^z0IưR>ZZTCjZxĚKQvp X G#V~d@9LT> p{X#V}2z|w)v?N"rh!-etPSlmPZG#NP)X ncG?@=ހ׭@7hk{"ڱIW=.폹tI F -N&JC}G}^,\@~t7V a@77] Vpw7A=IÎd߹lNm_{P[$4ߛ<_[Lo/FOϡG<^pt}+JC߆twGA5a _KM8aw2|mwИXa1Q148sD׉.  v \`=bf yG;ވf9uj* 'lzz|۔^)ݱg4cv{Nk>:4Ӿȋ_~gl0p!b^7WX^1!kYCs;afc?/~rہw[8ef6-.l065ap W:j"Ȗ}#!E($! 
["HDq_jUZR9*ڊZ;(U;j]ں`U(Àsf~{: %HT1a`pDHa#OB@|9Vr#-,)Hq='?pɯ%WO -)*r01k dC -H -r桘 f*LT3PKg4F2I:#_J'ge\@kAyF;^~` `H e -drd"AC<&Odab%PzϾdRz_~c2wPTK%r&5AyAW?.i,0H?\/4OhLIrHd^r bA<iB|<1Yp z[ [& Cr4HA/RHO}uU*a|HcLAbAlBWR_B?7#BİJGb=qQbaRITMr+Ao;׍WLP]6Aq)) -z~d"G?lL+ݨ0e>6z4~FȻ呷M_D4m=6zk5WR[5^v;BZGԹ?mFhS2C Aɷx#b -߷%a]1g6aE}ǜ=9z+Kc/gTj/fTi6hϛiϙwkZtg,ͺ6KԄkǵf:8% QDzy4Xdy'eo>Q[k956' ?ZW'K895.=C٭˾xJ0o\h#)B -H;fp-vh9Ny<3Uzq0dnxc9s>3~t8gPӁܝMܣ);7FʞƆEcwC7|}5\nQw2j) v8ҭ ֯@PNZ,3#)Wz05KZ"ʐ֊ӥ-b8M:$J'ĩOSENe.D#Cy9|S;WR;Bl^uVw~k>ƪu]D%SH%99Jibv4![s!=6'I o<<xXF3IaR#]ѽf U5($(-) C'IeeQ#NiDioPd>̀zM򏜠ʄN9~9vgmmcMH51ʄ)yzjDIxI$A&僩1ahL2t>>LV^L=DYa4^v~rrsNv}qEUqKiK(Ԇи䨈I":/%ZRӐ\ӑT3T;X;T3/Y\a4_h?VLnw vS: ~F٤pǪ:/?黡3>$jL8 hN -7b1M QM}3aM!A0p9o2~. _={tl謃kK.Ct׺.AG9 ;[X<ݐ S oqͷwwػL횬5yb;6:߹it籥kKޭ%+޷(# ?MTA06m8m;1ekt:;~NYǛ]˛)5um4>{h+O|?࡯TCp@5x~D>SxYJ{z7nqo0h% "Ly~ :,q/s{E5X[ˀ~j !8tPȯbk_J͞jIOHzo%=ewC>GH|FHWIw|'nnvo4*2?..@B!BO b!Rk" 4{~|_3-XNTX bu+pտX]9hz< WoZ_ ~js1˳r/ꇣk \2$ql5_ǹ=c= -]ͳg;Ĩr ktx9ln}‡΄8F\s!Ɉi_2{& G2@Ԅ%UUV" E3B+Ntl?cXcQ|KtyZ3ROEmq?qKŞ+<5.GELx$>us)?i꿏#=SNo:Y \^ -u2׽D26^l]((.;"cΎuܟ]-M%J H kDlG "[-͂f~f`(j\T`"^2^9^A"G&@Ye(s.n*`WLrgc:Fu&xvf&C$&QIL:AW'j ˪ E#J vJJ 0*-1"0Y1.\7ѿv!/s1V B*C>[ð({lÆ7oR 0 V\PNQ*Ked'gfdg53 ӳziYCBiE$AJjĴEbB(c4?zF iBY l%fZmXYSWV"+(Hȗ效ya^$E%%ɲsblRϟ ~ [ -L6=,ưWQsͨ(sb}[,}[(J>:[CM6mgUr*Cde4('TP%M,,p - gDEw E˛;y]Ng-ںx3?ɘpv殌Ĭ4cV|cv[jbV_rb,4&eb»:a y4veP\)($+ |?nsQنU{RBNȘ<9)TcNVjBήr̋=67.iN܎>c\7,R{(WK;h!Pp؅_P"`YW$'K6o˜e  -g2*,-)l)l)苊cORG6"}A]U_VQ-5vƇY=Y*|ue#qI)_LI*O+[:3l]TLYVTtٮȨQ3χGO, <0Y!c; MTaG*5OiS'(8RxlaVMjPT5!37̨ ^S4ؤj ӎ«+񿸑E~%e@ ]z28M~ןe`m-l~h/8w&BglriHOGgׯZwjCbІjІs! Sw>=t -=r52CȽ9g6."rhgzH[5QӬ#ÚB;7/snԲqr>I-UvZj;M}pq6ԃ Pu{e=wȼvŴ\$8-;Ə0CMApDSmE@"5Zl_GڷJ/ iUx^<F5yiiLj6Rl?R `lͅ{*Vh wRп[6ȼ=s=dnJl>,<Gf,DsHR }%e 06@}"Ṛx:2nbǗA%PBׅp_3 w#O8C_A=hH/f" r,0+NءK`4(1D((!s`(0H4&ʈ(Kȿ)U0ep} `?,kAU Ą補! -Fnb*T-#67+QD -.*xmP"+rt)*Rm!lcv |D5YuD̐m"~ xFoQ?YZ{%S*POg W"?A4rVHQ"4c[(D^ȯ8s\dZ˿F^3y͌rwAheCO[:z5*C(UPYbPA~!˅8"@1Ugy/F?qշC3-ɯTm*y*H3h12P52~&EmQ]7{E#0VVGO/l ~?]?utG7!P5!FrtVnjt Ԣc퍚FV(^m"t!}/p*?u*?q=vfz71\/1]ۄݮ]»Ln&=o -tW[.E_ldDB͇=b_hܗ{SylV<.[~߻H}@S!S-uIiQzG*EoߓvLodW}Qv<tUQ/Ɖ1QA^W~-z_ بcآmءXxP>?diPgīj99}'nb@8DH8D"7"hPK]A-ꈺVP+:uwN;ڭgvߤ/>|3tg??>g/)-LR$Cg~;7$M A7$|,YU+2!pD<pp4pt:p<x<x|'pdͥ B(luzow4_žkމyCkkFcst"5{]>hV|V|||r>2p|ʘ ̘ ĸ:X|:|rβI1= )`-C+Tnr;No2׮'m\yjuvއsҰ˜Y5OGLFL#F Cw9'\F^h6gj?8s*w-|Y$ (=/4Dq܄ wZԉGND;DS'E1# 1ӿJ{F""0 Uϵ.+;x< vYx]Ӑ|gUi8|Jĝƴ>ށO8&`Xk֓8 %`؅>l>Ɇ{'vK.)BuҁLjo!s+XwvE8KaX,zDIRҔ~YUj>&:GevBA萝#ˮ[eI-ffLD5)`3A\2&K?Ki % -c: 7Q%sRU.ܡ4)(JҤZFeEyfV>UHLڐ $g {A14yB+́Hs֟*sC -{ml UּfUWu iVw2YS:Evzmm#ʹ?d@p% @pyo5/:aLX8)LPo(Hߢ$X9sv`V.[3eY榠\DXm~Gȁ(5<-`yZ!.nwyH۫XۏVZ˰F2PRcUy -m\{$T-.юsBuq~JFAܟEy9E6rNԃGȹ7!ALZ&̡1xVl7w943>U^P (/X":cݤ4K7'IO.StJjOP M5z1Np܊[c?z&}O}4RE*7hXEy¼b,[_)TL}\?-W/d2ywU1fC3P ->CwͿ҄2hFFZ@fwjajF뱸A_VUbJA./UH˛C32 RaI"XM* -ȕT@3?C]Ps?vp1l ^Mm-m5G+Ÿ,YwPEeq5cDl4IӍG$"S/&2tiQ$0= M9Bd9h'GF9L0\mn¦.nhY裂IB%Q٨a`<\*M"*[l|e˳pE5q͐mgVѻgEtK|r~:ׁcA2^?]el)c( r3U]rCMm%Lni286:}jIc/m$fk"R z>C]Dwp";5ky ']@c+uV,?A\8LqC2"XQ&g"n$2Vޛ°$})ַO}O8j"|'G;0c(ԟA7 QI9g>Smq1q$gTcf8::CQFGFgG>"9V#I'DzЊ2@_Dώ.2jL{s7l!!@Bʎ([ViGNksU;u -UC@\PY QW*ZAyO99Ϲ'y~~r?/XFgyNO1Q'L03S+byT)e6c3;`=sses%9;dsysMPgaչ?`J*mK_iux^eu@*Jr0j)1ɘDը**wRU1zwB^q5[uCj*<Wqjw930*r|5Wd/2ZiKS<{ F5 1H!$5b34D4@^@οLr S 1Y}'iyFiH,2(',gzy_&5Vw|zD!X89 9 rGI ;X@CtxE@/>f>z!wApQP)Sd %D drS 6Df( Q =jF(ƒj '# ,${b(#*)NH\_.^$OD/ fbHQ!Pr2ٔcXB9VQD>e($~$)q!i>h'G'CgyԒݛ0()GhajdTL.S_Pycr8l"}\TgzLtON%>! 
C== "&R765*)Q3]P>G֧X(}\*T{U|G,䞉{nmT{<ў<ҶJv0wz%nvG'roiOF?w5h>CD4ikn y,.|Y'Ba{{+߸X3};>MlMwݯbB Kŷ@?Th+1LT`AAjD}A5Oǫ:|^\Kem-͐epR+4[K.>6B4qE`Za2F.a&0uwd(|x7]M[UK둳W*oZ~)zbw 1b)ڭyPqřz({OձX_*E՛`?u$VT9#xL5JbTmC4k&6&$MP;)R8gO7 5v|H3~p|ٱ |#9Gu\8qTiTgoX( , -B aIB[ D’Bl ;"QhѺDbX8n9gTڙ)N[[7~yyw[:]I'TD4Xsiطdعr .I},Rm L>-s(5k=)E>Au@!G#w ȝaJE'8&l< jI%9_s -)18vNdǾm9+lIyQ+$ρtbrwZ~`g2C -nօ [BZf>jp4Ax&T+>+.4"T~Mx . -|)TiLA)ߟ^7 ΢eIBZ2 B22µ 4MF6c:c$*TDeƵHUHUM$PS\ s ;;kT\L ȎRbsOcL%/ ΡjeQqD"RTFWv3v0JER 8>X:Z&”D؅bʰk0=epr["̦ة亶%4ȅZYu$^+.JU1 -q#H[(b.YQs :*TD+b[VfPuլU:EeaFyNbY(-͗2'/9Q~?Kr%#̖Q92n&s>{Gj jz5K;j"wjqʓ}Kӂ -=(%)(DLyk|?>M~8^(:.E>O7FX@0 -HS]9T AG״7:(Vtz ˩VQ裬L+2vY%9iJe\bg@'*OJ& %/bKޱIJ"ToNzx3/.cat5̦Jq)O,%5TQUvR*&ԼxU#/NˉS'bx,9#*f%M.\C:ps`ų{)uu]!*wEٗk6J[d7 “xMQxF&WhFqt9c-^c x'6zl0P7E*룭(tp]r6w%wd¸텴vn ,D0Kw;<6Oa.-Df>3fp "'0={U5daw(fQL6ee[dt?N.9SQ} F/7rKds>>={`w -~'y CQg"I-EmI#5 lЃ:P{F=z&~=\?~k;nA} p.b![@vcXOm k}'WpN0S!y-͐> [j([jЬ:ׄ Vn;26H>ȘŘ٨':[ی'Vo^xٖs+|.~}5uFi;{p -MU^FU7I6S xӌ3k:˒ >Z MuyG=V\u d'`O ,aC{p\y\QHG=>I'yOrzZ `p=߃ݓ 3SG)~ }w -@5SA6;fּZNm38HK}K%ټ!-,R:c# =x7':N̻з}![l!l -9`-8,֒=L @=?*@ʖ𨝬@'/bGM'H*`ٞRK!)AB! x#Ne#|B(ddn"nRK'C#|yOSU<nT7*F#DGH:ҩTlaYBed5YOgL(y~ ܃#흼.h%p%E0ՠ^|IJ5R/TG>PD%GX| -ʭ픹yKpmT3zJ~!ONІhk/A%X!7J~ bՐS )`e0L;/-𜩆62'O --7pLRtZJ2[1:Vh.F8'ɼ7t+D Lv$TZj2OG -桴i k1e4=[FGd$ | H%&W^!)F`{蕳VdFs9<}k'VK1ru F|;|vvipod껵z/{"'rϓs_C(FX|)a=w> -c/gkwIFߙ9qnpWrWpTq^ -<x{>L=ɠcA p -v1xhR^2N['%8Xp70F}aтkYa‹a φ/((X/:UtTOT8*>">-ޯxjO^GH";; pʏƳwjaZ]YMQQ}{ mNζ>j]c}H@bژ256d7fnAj9۝Fv[ToThWBM@<)ce;/HA\VJ%PoR?.{y;;Ŏ+d[5͚*>& Nh8Ӵ;ՠl?+`E=Kh3РNyu/$$9Lu{B˖LתI 3\7&̕.-Ӗj7VkW%\q_^}LQ^E-P li'Wg3pjGYJR;lR9`zH -ϵuY^+uVy{/Uxy/MYxgq⏾Eϼ>xЃt#PE=^z\&iLv#-b6.k IL+,etR}RTbE  -U O[ֿZyzuxJ=hN;hk{K>H=m)aT6kWx1,0GabÌE=V66T2 a6lQ4ûc '~>>E;'e_WL13hHÎtL]UteFV2AaA9w<9Y3ӍƊ\IQף?Gf߄eŞGg-4f@;߿)ڷh贈tW6ga}VN^VjO!Asɡ3DL3eDN1l{i^Sq,V8erL21#H( 6i4fMq[66t9"YvcpO5;82,tȑcz7g)3͹>>c+c2WŎ6ZFhJ -@:4P.wcղ.W+sͼ }d&zLHf 9vt2-%Oeb|ez@=R6[6[YΪZnd=܂HTC=\{Xǿ{ڵ*2)R)$]uHiJdKmM)#HrefJ3gʐNf$saϳ]ou:k!]l^]h.C%Yʂy*̨͉?gVD3RRB>IJL^6!8dMMau䫣S:ƥ "R[p}5,}(6Xg|GyfZf-=R3&xM3?qiqiť},,6m㨱{CcOD_ I?&M<~_4y}(axsY}PT.+a8 ,#?6-7oJv$fMI7-CujyљQWQoR9z^C@+v.LCsm@g5Ky/4|.-p ,(g*KTV)QI %DQ̭D0<,K>L5TP ΍T5бGUze]m]Uf9jWUٰCwc[bw*e x 啴fj L} DbT-B\0SW# VxՎ6ZoY;ģv,unu ]kw =n0R8F+?Ǿn-,S}H&j:(0k?lG#$d.(=g]nI7[˓:K?mKz%v'#!Fz~-p;8vH*z<8 ̤wq 8 8 A -6DfGX7ê-a0o$(/΅B(ZVðe7- ϠN@ucݷsܕtZ7;x4.̀% -`|C -[FݲV?n<@.7»۸}GS-zֻGJ?sg |kLz^ xdEbw6靷Tӿc_^НqJ@y/Jum^N/3sbg7ϑK_p O ߲{1f͓9#p瘝v?f>$u'w&;Ă8/ L)F+ּED_HP`@@L`T`D@E"fi+ G:t J?R{1;Л98*`UOw0~ dhw%NٟI'*aL005R #!g7sP180Y3%ϑIoEt-GؕZo~IC3pwf~{&\a$ -aT3;CaP 1 =NL

  • UBO9+-ng\5I#%=Vm\@%γX&c!9ka02{\[;ngf~&q&I{TQqS"\Ur% l;Zkp-dلַ8m%9i'i7{tF5ueq\H!򰠼IDH <HH $ \LH0;1;"*TԂJe`0ejkGl5:vf:egvZl?ʇ:~sw7BPID= ͍ןGk差WnYC=zi@yѧޭOvۧ{[n@V)Ku:B}-^X.^ 6APHYl %{AxBem_bi nyr-t#t=EZf$-1HטEIU,22yJ|-Y?Z !g,zzVЫ(qwYL*v7$m&Z WY/EZ/FZ_^losmso39ϙN۞^.Nsn=ܗvǸo즸$; ,HV4oR?rѝh/cy16$~1bgcg='cDž' w.:;Mv4*iX@qhYG0 t?"ZKpCK1vN$v8'Ju}H|BT|Llp97p\Fœ;Fgw w Ww}}"-. "2Z(/Aw)hA`q>%Tp29y*I$m\s{WR>"t%.CC%cf)^ɊWsN3zK$!6 BV<'We8t4IVG\FSS݇RpYwLcɪ|{d2_ϯC6.;F1f)mkV]Z)IoF x7J @{}wJ S\q.d٢:fF2ғ͊=>=iJJW!/Mڀ6yK@3Y>(? W['j*9Ae@>7xs\aN@9A8}DTv<֖hIћE:E{p\cV*N3 yV㐒= -)Q *Qa(@ `AdBt}PM廢|`^Y'pڗӤy{uAոYr]BDX>~1\G?2sq"('j,Eu>e] l9xh/bo.ok<ɩnbrBUEʈ|ecdShSlyF{n!l`iTsrȅ}9ޫ}K59as;C --!(ZN}A{u^_NA/*YtzVS1F5&JSj25<\K|;'Cs8!@ϠA~Rqݫ:X*;W:-*96cMnWCԧ0-@3ypu~FNk"ߣx)ϣSt/8"9D_(@hFн܄-ේo -u~#kVQ%v)HԗiҌb/rGj&C'ndb%T_es3_C ށ<PPA0jvek?>uE7Qq^J׳vP"e꧃1uZ.+NCx}s/BU620}o8Yߚ%r;%i jI(61DrFg5 M4i"0܏ڏJP[~3x55%3FCꆵ~:# e,UVmC ۓ*<*Xs\cn᛻OGy=|ύܕ=p"<6 yw -yvL#tx|P {=E{'swsowK Z8S$^Y~=V*˪u̳1'f}N~W_NWQ>d߅B`BڀEbO *(q\ǥwE6=6B>6Qo}n-reFۊt] k] -j:|ʿ'\# WK%kībJ^U,^^eE\e'twW{T]I(NTOTyr=M`/P٫R*oB+Xo9e -`.}SJUj~stD;G4B&8,R# A9;X_G.[h2~Kjd7<\@tAbw{I 7;t hpqB sd(( 9|$`j%U\rzk -$;k@zOo~ `8ShwaD!B9l /_w {tcwFA? F}K: <<8opɏMq8N.}_StO{,k$5whwnt. J%AseOQ -s׽ d>#ȚYsfZ]T\wH~"%w} x CE8ڈ/-4@0QHP! 3T2, )$!kQG؊px3L*g&{  9C[Ѝ9aH>'1Aițc!)tѱ? +Tp{krv0+t7*OPү߃}d̡RItCx0 :XLr\JzYGG!c*\ڛp8'1b_yg!tZ\ZjJHCHw15Q>g,nϹ-p{':QʸvzD41#aळx'TW- /Yx(tR$;1tG4gqZ1 qRtCrr4kШއ͗ا=nwQpxҫd "D觸&]sE]_4i81G>aLd7 Cbag@5vA] };RU*b xCZ#%n+h ǩ8зmg>fKuHJ5iҶ5RuxTQ"UFTJTqHQyA)$\)d[P"^"X$^$$4xh4J_D%( Uu6VY*z\3]3GY(oIˌ+ԸAUlܬ*Q66~Ώ%(49$;%_Yؗc 4cT]=:+jLq\{֔z_S?QS(~0~vivi6ߔ]gӮ5rL[u٦5S뮫[[3Vi>+,|R- |YX2}X-e~,sEy;G|I+{pu7xxyg7uؘ¤7uYCy<'mpCu2븀 . \`MoMg c-m3ۺ߬3C2j ;K@X H%l {XKTAEE -PG2괂ӪxslSO3N0<' {kWq&2N56?cuWYLSaPfEzNq}OjSwԥ#9mKrfQ{ΣM*lNٔMc׋8YNWGvqn:1CUYsmie\΢aa;-&9%nmi9-bIҊ5^bwXFܳJ<Ոp+g|JSRV*a/"g1 y(D3'c-X->E鶼V[vBfi63S!q5r^ַB[&SIa~䊿B2|WH y<<@ ܁s3'ڌIp!%hW'wEX6%8˓ݪs9ܲ|_T/jҭA -i_P`\z"8Gz)8[ypy`-?[E Go!Uf`ⱻ̒zݨHWfޢn(.HrUyR"*zAL'ȒmHe Mv< N H1|=j[uȃyh$:oخ1NmZSmEUuY]J,I,,s - -RAF~MXz~[XZ~OXb04E10CO g9z S-5Ўu7 tZM 5U16*MCae{nywviD(V*)%U$e`r_P9JP -W~__/;ϗm}3D`=#ځ|K&kjh&*mԲuI5b*'jMP9(AoLjn6|SÍ)śU=At 硓kykovhT&[k"t44fp6iem/A٠ݻ*F{;Z{Ɏ>\WYǰ#k"seE- =D-D.#RuSq)YKӷ/O"rnNwxlcth]gcn:QH9k;Eyu+¸.rg63AcG==;#c~RQV/+/D3"iG҄屽ѽYz "{*hv8Dۯ9"]Xa -_مt2+;E>Ely$cD[@,sЈC+(eCICp(}V#2mmlBl[C, 8bwjI,wl,cyfPោΣ}0`ī1>ǍZP̨#EzSh +btqh`DhJgiP~GQނ琟ޟxpWmd0bFJeNO <0+cE'A wU$R"JÈDSD~07&g-9CZaXO dkȠe2Ta3Y'd2}͑d2L&s bp 1 }mvuNy D9c"ә%Ěq \ijgQ($6s:׸&=w?pj,5\$}Hu9h  2Egfh>mu% ͠K%zCQŗ>^-p -1?}%w3›m"?Y}7'zhfĝ#)gg(g(gܳGi{=O><89K 9,IY4 -hKj:zAzCoz}HN#5Wk 47=[xgB/:9(F5oi-kV'zx,(cv-d9ʇ/0 9ƒ57&KXм&ނX>qYK5a^3w$D.Blj%!&#Gt -:k2F'}\L0GmS1?B9/_`5qXgb^` ;SKzv(G7(7Ri,.kE}lBg,7GVtjE'qq:j;IGlШWt^02o`];1/p=qっǞGw- f#IE4ᢦt@MtңtLPOG=::IzCAwz ϟie9;փq=țAtW-!v>]x҄ :?NF1aJaF3~QWT͌b>Y 3B̠xrsz,o57!A\}t`::=w}3=3g-휕vHyvjVI OѬWkE2dy٧uIN(gAļM׃uSW/#kJ$zŸR;:4ן͝l(,ʼ?Lk-L 3{r=aEaM:.iFziE߲M:bN:f*j޵Z#}lں!?q~; -W0ۋ11Ϩ̂.򦝋s皿/[Ȣ6;e)2Md&+Y'im5ۮmm5ʆVˎحMn;ʾvh_wڟb;2}G;i{E1ۼ+rՆVyCJ$ojNsTF8kn1{1됃Y>=xA>6SK5ɴ*<(WSZ7h ˵&a,(E[׶'j{D ]xQQvR ϧX-~S|uS9r yi5rJF!SmxW<̢(5Δ钩W &$uiAlNW,Ոcukt1!ZwX$V>Qt0: S:\3F'1sF w k>ۑs<{(=sSB,CSzR`mrX/U+BɝKwD&_YWђo_ #l`d:\Bh?~}D둏UGU {SQ˛meo>%;5!S1TCRCXaX!44,1lEѠ "pÿ_/gj4b?RZE-a?ui) -J)42KgK)E N*ug)1 27]k -rLNp49 G *ej0.5CD0  b* y,SAvt5P#wUk/ϙ~=|3)9{b+.sBJGliULu-}FDU9Yvn~#BPswz:[x0?s}zv41+,aD)S3_˘r\2etǨ쑑Ͷ'mƦ_S ].dh:՜FW[\ @g ۣ%|F H^Ř[&XGkg)"oڄkڌjcY/i[j+B-7m&To-Zg/ pspo=@i .!B$3+E\䄉E>P,+Eƙ-bVhR8O[l[e4W붚~&߂22m?xo7E4'tf@s!a'PP4~N.~%S.ˣ%哤~hlm(r~S/qח -Υy=_P*E̱{Pͳq$YM!l͢j5۳Q48 7C\ Qp1La:iX: lr:XG!sòJC73z(#w/k@Rdzk6>̀s+0hFaj8ӝKڏG yZ0S $ş:V|ID_+~}NjYnNza=aw[^Oo 
-]@tzc!gOe@Rðㅘ[m/wY8/CY7}>&ǢDzoc^:'Ή7Ћ7| @.76#S ExaNE"Eag&†] @;*AǑidYDTI6|R@ xJ` V1X12$9я)lZ>:c=wq?4+_,#L[/@/c0c 182o>CD< ӵ>2rɣcn]chTq] 5:aF c a CTf3t%ӵutlcnrNnpN92߰_W 9nStm:k`l~8Fϼ忘-XCzYܖqsr :\tY/Єqg85N24~J:I0̟ro\= f~tNcNJg).`%KYӳ\zSDG)ǯl@;rU`$_<~)ɹ̹$`5#XxH%T B{BY@P! $| KvTH -{%6v6U]Y_M󐯺[_a3~G1h rE]ZTKs5}q$3$$*p$H p$r 3J(Z -U[vnw1n;q:|Cb朴yPZ˜60{SN椬咍eXdYc3Uaُ<eg> ^{b 8B? DKLz-Y@ a9+Og k\^On7zۼASWz)}z|w}O:_/I~ɳ^˲=6rr<-33qt{Z2wXY;,PV+w*mϿS9Ʊ+g8{9ʥV孀f_MFۘh1GEh9nz[1t\Y[]ٙYZ^ߢ16kLAMKp9^[3 h͙j[a&̓JVW@N z]sDŽE9 70z -X|w>cix9,Vk Va'ܤ iOJW%E߁e@*G o<{^ٍy 32YH=(lh+Nj.L/YA|ADX7q&1"EHDdrdH7KNXs@*E7Z3(bB\C"j]\.+M5fp*tbCPY`h7tD ha*:pJ>hw9x{#`NՉ6X`׌ElGSݵѥUvj9)d{/^K۸2{wԾ#a?o m|ĶV9\G$]m33b2&ez026R`#u0LLrjy>RgO'u`;psm֦b6`ov{w}Vy׎y/&?O_!}|'\L4!ĉM$w)mw[]XWkj`ntuzl]5~v][ 1c`F_xFa7k0pF͢a>L4 :1D% (96m 9 mL5" \C&Zl$IN |x^8?`E 1x)5X{#gבsDB..3c9F"q@MN ,_nxK(%L|L'X|5'hAc4h\"e%6Av(-k0י9 -m !YA7q`a 70Æk\B(&/b-@i3u2xsB$EĻCp -6oАߗ4F{a-&6~q^ tZhCSdh%/e7 Yx4?H !% ", -qAPUq츔:h:Q8uZe7 =?w}D/ -޿c8+FNκGN.]Ht%.' l@7&בk%pwuwm%Mf ,Ò}nҧtQW -2F/s,83!B1;:/3 K O>}6a.FVbx 9ڎ |W08F -wOsΎȫp2p -VGs0xi+Oh'V :oH -g0 5rF?9 -s Bጆ/ ɀ'bT[9<̃xڑp³X-+ V;[^s!Ruj8LpYϔO><%̄ -W/oD'\=p-ُom}<c0k8C1 Ml`W=ozI4Ci]PJ[fj&amz.Z縆j=Z|z\P+rcX>KlŖ4cJl /rUрGmigmJOsiw!Π5 -Z-^Y#Rl)u{KwR!T:|Q%)YVh6hQ3>5Y Htds9m%Yjns`wa`+wArnCP>ho~>^m׼炪@VP9̜k8VN?Cµ# w;e Le&pBxTQVߢT6UQUWTZu`I0W)R:T7;TfOgo -OE3Tr$щXA0`ס5ɊŴD3is[4/Ԅ;4hu6* U犪DsEjYFrMcǩTѩD۹XsԹPsC\y,~X;vCPnOFn^cn%Q[&4DL׆G:VjNYTrm TqvD[RRȥ@嚯]alU,W07\u4~8u`C? 9@]Mϣzo˩*u -gE;EK1Z -|=)ejrm,Z`K qLvMFKqޱ<ƒ᣷LU,3%ciQNNHW'.K>#( Mz!Ib!,BœZVl|^T*Zv%/ǝrrFRF`QBfؔs7zmIHM1ٖ*jEXz<4\{Teǿ r%njP IZQAaDKɠDR*ʬLUw;Y[ڳіi(?g y=m N4~ H% uLG-=z}AK]˰ahd 6i5YDMr477Dԃq.sə9㱻|gM1?){nls`zvAx64sTA)^d}/&>zcP`䟐?A  =)l[ >_H]2 z @s -#toaafASO1ݙ?gBqy~y~)yK|+ޑ'1wB~W|iS\eǯ{u ]q'-UR\u2b8TӊGjjqaT,X3%YJ<`3[yY*{r.z8qԟ{,txF8LNJaźV XÆ*fVJijLSJ54@K)4F)wJ]S⬳cXk1ƺ8Z6Z6Z:ºe]"V2DhVc2SF/TӏR&f%]]sM76lcTmmehmnc3Dr CmE0jVjHe*(`/\k)aXpqz?XfXM#kfKa4C|v/ۃjT=Vd')>C5Ⱦ@򱯐iz m{M6.P{zws;̶&Eje6ߌ{$(Iޭ&yɣ%R2&˵Nbs-4Z3D2M e#p! o w-mR !3@P*0f[Gro7"bp\NoiWhlmlv>we&6bhU]vofm^i9ZvHЄ}R3Rd%"]~6!.Ciѷ/`=\/Ӱ_>'"'};9d `s x `Q/\tIpéӠ.}B-CqGtg?v7lJ"p0SXc$2p6КvcOh- -t9ctW^Go_1|'#\rA%œ5|2h,E90(":IoQFXzMͬK/1"8D<;;@ފ|i09Gp9͆ C.9CQU -\AGݦgաNӬqS~R;=hw]>SMjqZ/8dyN!gUk -Xh_l翷SZf: S[N;@c_=oEMCFM9#Xڧd&EP -a-zt;=i-En1tpX]N3ٍWU~S5m+'}oc sQ3ҭuū#]{LNj3= m.VU}֪n`T;pj|;UwXNj_SeChS}(Fm~+#ח9rWmj 38~T]m -QMH6.QuhևVU: kڰ,VҡWr -S? c9-l WCdꆧv4mq6De*jlQ"*Wk -T>rh&3~# V$I$ ApI!\& )jhmԶ*X^:Sgv<;;uu[na9_|=}=/wN.Q5hH3\eE4X/Y}?irz5 W8B[;̆@ h;I+i4ڛړ^O/aݬZkT[ֶAm;k v.՝.su/yBZ< q݂2"&Sڗvg)qv8z7r~nMw~^~ש>o>f_fdL؜Ga_F=#lZė|Ǎ@ *<>4.ܛϦqv步tvĘ7ZxF?l$2T -j&a=Vhq\l< j4&ϋ덷%u%'E2/ʑ*{5T5,(tLAn<$`J/Hedq,yM4ZnqZl4H΄z@f<)KY%Vo&U?M5`i ѻ۵9XͰ1 1g_Naվ,Z6JlJk55$=ڶMViU=#r &[ -c[ieF"} q:B(tTQk:Mz^CI&9U -{kWmrGXYR{/'Nv_O.r|\x,t]YhgEE~0̶_zc=l -ŽݩLn&"R%+sV(g٥".U%u*u5z_irFir `E|ޅvxl:-hRzUY*20ʼROQy6O٪x̞d2;0xߓz-Uf9GnA_F0n{1g!hQM[GuZ=U',J%V_)L2׮4L?(O'e$&ff$eT12@?7Qc~%h=6h7sۤ w**kJe6ep-P MRcCURnCCRNC();ПĤY$#pE1c.H^Da3#Mcj{[ž/lCo܎z(LCK/ .U %Pqh9C)d iY`6'?hڊ9m.RV/h 7DP< <)HkOm{{W<͖ofFĈxyz8y^\w@=F1xO氒ëNp&K6r2Vp)O7iV^j|xt^}뗬՝x[W- -FЛ^@=":E`г6AG(#tu@ilJX-gk-50kUd22A+"Ҳ t=R }CK=ez6_pOC86o݃ zJƉ11ڱz,VŴ\,V -hY*I Prl+)bQHұ$9G;$9 Dă ]<8fycؑP!01yfh2M.'d -%LjIWAf14΂ $ d@+g o+΁Hye  h|P H00I"D7`yB~.a(0@gOh`7[ -n>fw=T$Oqtaw ]n@ = ݓ=@>FG#DxN g (2@UϴαcD}!|~ǻM7niHX ]M.7 =EB:(O0*/ i 9F `,c J߉ Ŗlq]F-.]@~ tmһ_y @0czTF5`4FYIҋ{8KC8.Gt +EFt /yNB~د/,30'@1V,G|:8N!8zQHi+=FWOX}:jߴw<1ݟh[c-!a,xz,r_AErpz:WkbFvPvTz;NhD aѣ }|y[^*X#x&"څ={*s+͈eK蚍vcڋy\$u**f4:z9x@-cql`u -༂!^t ZSQuCp5i:<\œ|PC %? 
-fxz2h3'6q i#<4z7Sz+GG[Y.v)KmL\߸N;f?Ɓ]0%3i46hc~&9wV?3V=SJݓiFZ>y-޽ὑڼwP[xIjr}>#_^ĕ~;xrs_4fYxQM2x֏L biմ$f.LZKK}]WLm~eį5SvZ8}%->@MӷQ5/PM]x© -`9U," اP=u-Ǝ8QEC3iHZ@m)A%(fSSURCP#/.NMp?*dS&<(4'$1o^˛Gu5}q3z {`f|duϚLF-ᱜ:p!ʭ sk s܊ZnYBnQ+Q+V)(?仕_ \ʟV8pVx 9 170XQAkP.ı$JF-ӨIL(NJíRy4~**QT%bUH$.tǬb^| rU VlקxRπyĎ}Gh+zRıP-jfP&[yZA(,Ύ D"qL\[%n]^y+K˦>'oJ-RK<;84AF}3v,8V6m1O:o*O'O,\]4oV#piE&q^U'riFmYu]2n,]]nEnҝ7DvL <=wf" .ıqb_JSg"cǕVOdӥ6}̢/g苙4}%c6,`R  ۘd[L 3|h etz58qmK C<qT"t/rOIS3Ŕ,I7fMjcRLszS"4_3w(>EyBcǨͧxuy>gbq"*bLE{O)rc>+Upg )?s -92);sǖK MV8Ś*[,$Z܊kBcmPL cmC24vMeAe`t9 bxN؀ݰE[я\X n|.eN"[/e9W5:bzGHgO$3j{,_e*G4+r('Ꭻ0_Y @;Џ}50V#ĒpIJF һ8:N(֙&V9%N$(]uqG, ur>23""Q\q սU*8ʀ #2( .ȦR%BdQc -Ԙi$5165*&j=`=was}}L3n9%{vw}VSK[` x/s~*.f 0E# C##_rcqm6;28M=[B{wAsn:~kww8FW=[sϠ&eeIIR,%r4sz⦁_EX[}, , aFe12b3Y ,CgPzuK|[ w &<2$My8w*KP>KsG\Y4[$S -T򷎔u[5V>օf5_V#o5:\ۢ'd7!52 r-X 'z4"K_?|l]eWIz6TrM-F.yrі+cv  mXZ,eܑ˰Įج -Rꑟ؊"znd!Ct,(/ d(` - (|8#Kyʕ\t -@-Q-+H9~xz K(^\`+e(e(.oidno|,N?Am67™ b⊃燙GE'<o?,=@!39K; q*}r!gҌW0t3½%(Md9`8Grh?dҟ+z::sz&}KgQzm[(H@K*t` &WZ1;-C@+Qh5uuut25 ./gpMo>C+k}3X~t=y$\c'Xbc)%;:"~\PPHlx^vr9M6R>R~Ï`r#] WeT8fr_"S)t/g)IW]p=ҀFs2z<n+r8shO4L$c6=Yr*JT*ߠվo2+ORZ(%` - \AEhnPOiv%5 -gcX&]χqs>9]&F(P-9JI J4Ei)ZM*4WDܰ4C\0;|aV&*ÌCƘ8D쎭x8=jŊӺ2n;#HekY6]Є&4M($mJ i)4Ȗ*[,@Uѩɢ"2:0(BuX93z 2B0y{S,Nnb:$ь2-#Cɰ*"fb٣55栲\oVˌ(+3Uc3TŪժҬ%Y;6Q_vfFڔRkDV[cU)rdKL lEbS WUZ -K:d)UY415ܹ@QUbko}AWd=^ {s~?/blx_74Ll#:6>i  c?i*ieՄi&ԁr!i=cڛ]x6lWM 63Ð] XTnwijwxXZSA%~8쌝H"FBM`8l:MC!!: !67I "kcB!j'8-ξ _9vS?w!npޣNNb ļ|@Cyoseq81{8չuvG;up7MYaϧK3g+_<-(@2~暄q "pJ r-NW8Kx/qB)) $So}|'a交bP|$pL/0n 4n]bqs}61Mq(0UZ/+a).4=}?[*zX -\C,mA|FX ĩ>^N98MN=yd^fF ew?hdH>&qԟD\G- [21xj5qL:G!bkQ$vz;Ǚ&czwX,b@~O6cy_`})-KQ֬y\♉UNKSOzUX-06k1-NcS,QaBZt1~k 봇1c}6de ,-H{8R20ǶlYLGzLEu"ՇuQX5g1dK  (c4-N)m6{3>g3>3lO-}[b16Sq&L.a2'k1# NcMB07#IK:N~]ɿBW?љrɚAy#Na_ܴ'[,m>NSNDJFSJ`(ӗc ҽdt`eF/2ѓ9iteDF*|EЬL^&z35.~79\n{ù#V5mV2WHV4R0` 9˩DON 2N_h -m#/wb.xsBh"6덿댟Jo\M5h9K?p =M9ʑcNL:?g ].T :ItM+!cM[:Ӭ.*,//zC2} ;MSg8 4uE۸ku09n(L3c<-*ciNCـIKf1zG3nK\cT[&jiݣTZ)KzK-\Y,,̬ޡ_#59~g)"!\D--d4ٲ u6XSZ&Jj[RekV+m],v&fZ-QKUulZlD-.JWl"Krz#j/kׁ)}?WXG VFG]ejBHt:J%!;JcjЕT묕#:sܭ9JJ}%%+]CUho;=K۫ WrL*줽jT\vҕ -WP沊rr)G-vE5+USͰZޠw*y}N6_r?KY/嚚Uޠop~c!~c3W -baQɃc=eS-x仉D]$ ͟ ^MoҤWiĿ0d}*cs\Fȥv*5w0v-+ ]z[@)&$@-!ֿ1X_wDwK5aI&ܤpb#r23 _Ϝ /?VyVK!X@iH >`² *<#*@dp [ B R r?~G X/lfNld2CZ4@._K.6r56#"J@d(BFBf -!7 ]ӔcT.- %>:=Üż>o$@fS:aa60o/臙a$;I~Xr8'1N#_P -?ϵ{s}Yoz/1yہ]QQW}ஸ@Fdf%숀QDA\qjDtbpE+1ĚQQS۴6Mml&i1'<|}>~)P,.pqֹ..Z4U-\KkHpMMd bn1l!l5yvA ߮K\mt0].7sx-F[ _@ Kv mC`4phX˫wrޣl+f#?$=p|^@փEmM$Ky72?6m~8u0d4f f& F6pm'~iU|C$$oϱLexWoflŏVgq @rFR[,=Tڱ25ʽ(y Ĵ݋.M >d>"z>=Bu- x%ɏc;89%p8[E)A:v uC8 -"68Nzp -,B\/Czr?bAHM.q/\x(-𭃫Jx8,X>.g9p\C-Nw-Vsvoܦ?=].w.*3N|Qx\1D -R>p#(5-EHҫGx-ЗR.z[~D HChݧR*Od&p|) Q҅;o((HB~N e kup |oF"U_u2X&uD=D9}ǏOP?P^~⼏μ%o.H$}No?Qo |Abp&a{|v?nnn0~!MplL)We;E u~A 9i%7΃/ 01ȥĤ_^I<)" /$RyWfUṗc1XDތ^Zǥ(Y165 ycy &# -qS0R1|%Y*i2G'UW+_*eF2ޣJz,=Ȏ).tOܐdR')}/3ϰ׍jw e,->en@꥗12E%e}ejl)7AJHRAπ?ItA4e[E6֓:곀uҷiCtH ⡉R44U -2e_L(vb_.y$:yidZddސL2VH2tπSUb?Ϻ |n6r -ʚ:ud0) ÆKA@>,$ox<,A$+hd/I"cBkFդ\$|I~$; rX">ֿԁutd^Wô2)dCt*Qrha)9b&#,GI ^I __J](#?Wjw02ki Kmg%ߔ^]+|(7ҳy刡2."P2#%=(c"4I4eTx%9ʮ$^U 3UKBJMޤGQcOk1ShdpjMQN/ךuCvVO־Hh2?G>~dXI)TM$cj(S:2&S5Ej\l6&n}׬ԨjD{>U/>$'(`&!Z§%QSN2-ZII'6$Ē&lVbj9C5F,,ejeaYMjUa=ZUS-_f IEV%#hD,dŏ -jS MH|Rd3hm([( rߑKCbxԓU - A\Aŵ(-56D<Ɠjb2&1iDMNMV63eĎKt7y@l(|HJ~C_vC!!xCaqBu9r9EḷUw,.[/8I"52"]C5(&DW Nj8oh2M2IeO*1%-WhRz%5($鰂.*(㻜&t9 MD!5x.q4je16Y1)S}/5Z}R+.= e,yg#'\J|1l1m1˯f.l0Cл;Bj%oÜGj&h:ɆHOǕLlIč+,;*и -.r[N,,erA++l: -!q|ԫpmeF=l,<ԃ9=BVrRe_ť_%SɅSp*YBk!!r2xv[ǛuIj2y?( -I@v䀍EԆ6B[/ȉF,l =8z»C C=oeYx/w2۞6ѺSu,&\ E'u>[Vr v5UJf'j$E`&80Á/@*tfUd7̒Lm''(w3y{mZ[K.VYZYΐ Y?s@}pm O2<##s A+` 
0t\mej= Z*=rUùb&e=OIna?o. S`0w!/oyy D-}P& -f*{wAFu-&$:r7ѓ'tw+P4wwUߡuo)qvB|R5u*4hkhNK69p :u & 2z}TUTv\v츌kcp:{@,@usoZa:qpaC'`r%§3# ).s9]٥R_S軓zI ms\~~X>SWJ -bǍp$s~-2s9{6gЪ%_ -uoӫ -G?H! SD^;9`[p(m{[mU/o0\)ر4QNh Z i-@U ok ӊmx_ 51D7S? /YӉz<S -2ήR5_X'i1] -/Us0} urO| ã4_?“OcR(>ʴ -&HlIxUD `d.ׂuaBܣ\w-\K͓;^'qZbHZH},cRs|љT *.ȡoG;YN2J=gy1_͊֕T_,\CaOF2e+óA6c ǕihSNwRkY֯X}є[blVŞ*qBwe\ihpgeqi1]yXY,gҽ5ɫI&cJjDJ:3\\{є Hbk, ;@پ7\~cT{LT&g?KEX*%*!h⃎i|PA_@]:ur+ /ك{ {Pq{/`p;b_AW0_tT3MB3Q RaHCR5<ԡ +VvX'^̈:#+ݲJJa I8c"wS/vfac| / g1-kȾ* K,ʱ*˒(G]9Gc -#5z3Hi6l+؍F\lOSLL)6Hx<)guEHltLAG9Zl3kV_٭QJ+ŚaFR\a/56ք9Fш-7lo>#v]QSx|Gk~]{*G_"QJ2jq${)-qR![UII&gyFLrRnDTYFDB#,mA洷zRzJo -J8̚s"8s125h{5)V>}g*=T1hEmd+"#ie!2g*ȱ@ uc|'4$|3o/k1tQpnmM'R+l!9X&^I9l_EdМxsRB䎕hHsi@^Wܿ;{ Ⱦ'c?<oco-YWei4Z dzRA -*P@A bW!\8B PX!WT/YF} -wTD77Gyoe&l *hAK5Ur5=K`B+AqRgp˙y8]2&W_F'f{7er,EyخhMdzi*GK!#Ġ%Zqy5H@.Sąs.ĺX*aܞ8}H}vz| 4af-cJy)-hcyOL9@! -pTqWqTQ̕l1N&%L"SH]KV-AR lHM_bd8̲L\ \ԐrQ[C԰y3=MN_TM Mez^Lݬw+^lha^@^밅S2-їBn,êyIhbo xzz.$h:6x[|YuulYFOlXM_b~rRg^= b}hf.E C"{h&4^3DR!XKuX=k3#:8QJIh -_! @O6J>Zrt,gOZ9LZE+~,#-[C G&O&i> F%-Z'(>/X8<h>ڨ6.6zd=uN.H:$8R]{ -y`'mXA bكql'';wEfOpؽIӿI;O6Ky<LX]y4`l=p{[n{oNtٮj.z8U?Ojp$;;݆w`?8v.1Q.`:Ef=P{r.Y|7?<^YM` p xLG8۬? W o8op |t|^Zp!IYY8?g ;oo|ゾƅ}Jq\a.Pt IiG xqC}\)p'q'hwi(ů0lA 5 ȫXXy/:Ul]л~ Vg%߅=(Wd &=HTa_`sdKB)|/h:EML}q.ju8eX` oT`Snzn~%#R#:6G";qTa8pJ<=ĥW;.+S;pJ[1mMeZPF bnf~K+͝ܣJjj+i;k Om‹h7P t뱇˥{VF-*6è;wigV;e ,/X`UfZa~suDZKNTUd`%Yg'>Md! rg"/3S 62^`UjְYUr1J|h /gy#71?}h2%W;!jz-NQ͒LU/YJ+Vש$Y* -'pXAG2A7@AkEW>tȮ#\ƐCaF?U"8N!* IZ7PRW+?Qer-Rn~D*G)eG^Wf#~'{ʈx9}\<`.y\Dz!x ?F{UQ%Z%OrDYe;)WtrW)'RΘz9b*+v2e7M*tR)kWrgJ6}%ksYc q_$yYC.2[Yr ;gܦpLZaiJQv|]\\ts6+%al}JL(,weS7Zn+`N3|o}u xtzXt4fJRFbhWJRlIJN.UF֍ضl۫1RRbl+kE۾T!:gܼzAzdUn~{\ Gj+ {jRRɚTSS9ͭŊOޤX{31i-˸hW -Ea' -O3Wa9LM`4H1')%dʜ,L6fg*&ۥhG"p4*٦PnRPΔPK*0{N0#OƷ1YWI;7.E͉O} -Rtn"]&E+,/GK -W -pݭ\F nS$?gۘǰvը ^piBW' $0{bߓ*?C+-['vayE,k^/܃ -7 8wn U4|b'ZR,(-*H(rU2\ʷczIXp/aJC]G#6k:#s6Me $Yi Zv;ObV555I"YQ ;Vߥ5O.wk>8@064@oໍp7%f;ߍ~dlf43liM4&Ip=ai T=:c~{wXÎ i` - s |.с/p vHFѷ8C[kmRz܈mzK,'G0I=|jҩ.4Ҽ2{xI!1EEwA|'܁"~ǂ 78?ӅCp T'Пxd}q\Jq -' -~ooĈg6D~>O^6=Dp&:>O5|V'gLp -Gq1?C0xqD` _Sc0=)dA|>Ai }[̨7~s`xa7a/GN!c2EL3)i:Ń'/COћ(WRlNS|˫WF|1Kg^e,1HDR_$3G<N,!Uzx݂UcX*\$0ۥOj]ȍ ;].˃ӫBS;ƙ}َüX!9pcbL2rga)>H{D2?f?aZ}H<"GO5 ;FJrc~O -c_,/ur `a@`F"2TDToiF-oi^G-]M4m۳g3vsMۭsZ[]O{ǵ6=?f<}W|U,FiYAx{{\a9޸ʬ^cISl?CE-t7h3 3lgw5sjT G(Pi)؟Hֻ{Mq oo W?b/ogn1?Q-^C64{>~ѫ=Z*My= N?)yUs旤l@|'~ B3DW4A]FGG..ɭP&Poc~x<ΒWqUv} -2Ct~W/dGCس>qbK_B62Y@ .Wo$N|Zu{|C&xYX#=Tv7ێ}_bkרit)rq8"w WF|Ow#ۉYڋn.tji+o|.8볍$Ũw!N/ ?S)`LK$ѫ3z YGkjbѮm*6@ }^yߘ!c?M.3#(20_ Z]]᱊b&eVlb2Mz>s>#_O6QJ,QMpƁN+8xXd>ey.g6@W3-?\5xn' -n,%8S*il2 3AaRxjkn'SIt$i7&8BUA&U',$]#BrtH(Vq*a3+e,q2"N)-o(#UT3&VpY*5ڴQFVѬe*T1^d9"3BFdVveL-.մ\)MJ6SRIc>tK0Z5g[s5{ fc22F80)YqʌRF\K5%'NSyeJ*޼W&/k qw!4[#7b -?1~i@2TfI&ghlfS%I)IJNU -9y.S\Ŧ.UtFE1S&cwLω }h B1ިȌ6msnQh R *(cg|!:Fur3w_[zjSANLƜ=DqYQJPtv"3e+^0{Bs*$UAd]eeQaaK%z?;![Z9ѳ+axK9iK=\ʩ[9Icn#Ђ -)P#OGn;WX8-b.ZBV؇8ckɿ#Ӥ{.5N \Y}XWqѸS5m%MS9ϼ䆼' 0ywe/gK(}%s{ -In=|6M5*(Yqk"'!GhL}ACK1u4UAnpf0 -aP>Nqip9DwE;,p1RҬ i.f.f&&nxbx4X6-Aqx}[s FH%& .& >QY;ڲ 3\ w18Sh)4['k&!"'-\^v(>󉒭uzx&u|.b!/#o\4hJ/}1m}E_tquq̤g2\3 =@t?"owD}acFATT((@E -V.(֋jUmlScS趱m665kjulݮiibɆ<<{}{>xsO'I*ޯ2{*~G!1/9cNc;: h /E=SOXttsuqw+Wڅ;ORx&}_ihepBl oA>'g 0 \Ѓw~r:6,&Msx3g\ 8M~Zp1B~4sy< L=ڍY [ -13 MF]Șk!:Q/H/HLS#'lc-('sٽkrw.<.%'X#%|Y&>^V?o7q?+?6p>%;-&?3EIs߱Y|F 'l!S,>bq}>`%/. 
W8x | d%3|1bk.< Y Oح{lSWhVﰁ-tLAAb\y`I6I\F/@WpvkwW*E-f96+9G56f-aʙ](qp)ttCzMl$'<?:#|1%ÓDvK[ح"ErJ6bhDzG%!n^'#0Qu7!c$ۈKZ~ـuxQNxxTSmUTj.]b$(2usknι᠝:mFd!6$%sEZ?֡c-QC5߬GE%V`?L+'eTB)NƸ^NRz-\pq#B \1)3p&lʅ%((pnvANxS)ETp!_H< -4\b `:?8x,kmngǪ$77at!i)K[/Eb$`\[J+,f×"J~ f2'9"9nYƭaF%h5080_'ݕl`4-ZnJwOR{fyd*#OIJ4+P)K1^z*kLS>,^[=YVH=n/N-kWɛfҼdofX5.O5[(_V)¿V)sL?  -:ǘug\`=b=n[JȗM92$觘&Y+:ЦDE*;bD1"US,w5iZص8"+*I'Y"aR`d"c$ sm-BZybc~1Yy[R s}k@-[ѲV_|;#%]FcgWMDyƧݞ#7;δc2;d:C/FgzG{yb_7z'h)B|̢vb3>1&sFܒح)d(eC2IuLndn*i}c=3 6.Ѳ --%Z29Z:" |bpp8hu$Ee0f'C{:qe*f)=A3z_zT3 gI(G̡g6ͦg'Y0ٵy>-e"%iٮફ; -I 1*(K,A6AEUEDDTDaPZwetڎ3utNt*N;M?!ϩ:swFu Kp麈 2\ʦj#[Ʀ)c1]zVifG( Z!‘Lj':fMjNKj9R8J˕E%AɌK¥;I=&D'&*O[5Ǭ0g+#֬ғbrseUx:[\s DHtUrQ%U*4=1M3M@[u`k}jvw5~w-3xqch%ۢI4'Llx"Of؎qqTo3 }qBOpS_;br{kC[]rͤ="[f\`.oow=C}ʜrV:Dw47/T﹄8$~EcHOr@' ۱oi7 { W}r|R _'Q/_ŧў -8~K M3CQcyx!/:^e~O 'xAJcxGg!ESÿ]gM;AJHM|s^OO''4r_em/OxH40?K`'NO ^q/%kIgcL3ׄǣ\^U#Ӷp>-\_ȷVn9mkq9 M^5yۅCxe9vޮǣN]ƪآ7ɓ`Q&M5|zĝ+x&Fo/+rPwܯ&uVy`PlYƎf hUʓ-g\O&]Wnuzt?8ePǐN]ZuE`] eO<9+b^.ÓLood3ݮusJ~L^g:[."X= k*aMu%8ǞiG -'AOR5,~gG˖<E9` 5$.5/Gf'+b{*NNAhdn$ 5]BPV``v|XEz:rPu&|Q'sNn5}߮yWJ-IŤ\9AM!MEfe7x=ΆU kO>9}Ul= ԗ8mKLdӱYqnS^k=$Z-w.""7dXR*L9́^ >PkorN^I#?>u1d7_,6q7w.@N%Ƒ#ȑtJR.'z+^ɥQ7ڗEQaDwy)"'薺$EVZ]d6Gc1d~,{زȋƑB/( e /lEiFZgE^gENs[Aixtθ422YՑH6:t4Er#)뗑Q2Mrngo!Ǘu=~3f~7B9gvr#';-"K^ddD~H64RsD)H5]Dy.Ku'zz9y샿k\̢x]&.d.aEizUꑡrr~\{DznfuE'(,洐Y-dz:mzotdS-tCK ōqv#(;;ڋ,+-H[0)ǷrCKoᲞ]v?uC_a/GxptC73w[fU/'̓䂼H`/.΋=9܋֋eыeɓŠ q+&%£D,f 4~:nrG ˕-KIsL -2(a =ը7tpҘCUэٕՕǥ @hpV$?!SP^B^bj~%b .f4R7OtI*[w|זܗ a9 c-}.zEo#cK6r'r` f Kewny]T,b ⛩CgL@'@]o 6&R%4(9.͍Me֟ //%L'}YTBQLtRh#z`qr ŧؽ:WudAGp$Fr'#pd8y1Q$P(9@!,s2:+5|#Ĭd?}fT+|iObdPc,&xƏhq4q $cb,EaE>GӧFm3}[-|"ek/M.8/QM:ݤӤ2q/qq4p1>bqUO$OsM2\$4VO Id=8`ɉYLzDe=dcB` <~9l&MSPߧI~dhE\,B 2mN,evCm6AkJre\.6/1-(< ">GNð -.V+/gZvRG{o~6{Xf:gIL \EAФkt0.KeIj> f;yH\熣}N8ױwMg%k4,[tr&3,J'FW􎃼G<'HBPh)UV^=]>W٥ 5~i-ZѪWv}:ԩ@nt+>}a9)R3s-ygUkֽGjJR!)dV$Q&q(^Hv\BW⟈[ {˃>DM4D_#c/c[h&OܿPj߾-wK=%9Enc&hJGv&'ڱ_v &O_Dc̚hɯo4};懻t#=D޽ǧT avfD'Pm[|ps/ב_6D6n:Sn(g^=9Ke{)#]ʀ}edM+گj(wE$*F6D+v˝75ZU3*QŢI6_QMckbv6ܟIe(ߎە%%KoqŊ ݘPQ=*WR+˿%@@%jHhsNм`#ƕ֕K -G7FsRewdBFf_tN 6hb!?Yv%?+a@U;5d/Xz%Y`_He]L-FfP|6W ;mC5߳ Yhc_l ]s,"~ -zx[n}6Sաv#/41#9i+6^pZp>y ``,PX\ b$q/b8 u֫)iĜGTXG'Ds "j"@D0:削-J N8}??<;B( VzQ -~T 7_l;oIRYW~WqWq#|G@.Dp((n+Gǟ/2v.;k0~6v>go1Ij p5If;pd| PMu>(YN$V|k.'.g`aN{+!8ژT -l H9"~s -Dۜiы)VD$f6>d (sb nck-_gMxE7m~@e9mբ֍̌y UoQ6=-;iբyǫXͶV+*K y6Hs5H07d6H|L{b+EF^S03!-sK275UجK{;'_/%2$nMHP`@`m1]V»tKu$^ ء٦"r`.Cl۠f P8r$`1Au5 <*ur$@2glY+vJCܟvhC$f]8}`=vFt:6-۶nָq&f[&4'9 b1Wq=)\Lڜ5R1]iMNY񾛏ҭmXͤ}ϡS}^*2d9mhůϟ>{.IXt񂹳= `q-=U᧨|Elw` $̀=KY+f ,@~o5:x" c0`y -g0W`;2 endstream endobj 89 0 obj <> endobj 108 0 obj <> endobj 109 0 obj <>stream -%!PS-Adobe-3.0 %%Creator: Adobe Illustrator(R) 16.0 %%AI8_CreatorVersion: 16.0.0 %%For: (MariaJose Barrera) () %%Title: (TUF finales.ai) %%CreationDate: 2/17/16 1:22 PM %%Canvassize: 16383 %%BoundingBox: -11 -877 1326 10 %%HiResBoundingBox: -10.6055 -876.8867 1326 9.09863 %%DocumentProcessColors: Cyan Magenta Yellow Black %AI5_FileFormat 12.0 %AI12_BuildNumber: 682 %AI3_ColorUsage: Color %AI7_ImageSettings: 0 %%DocumentCustomColors: (PANTONE Process Blue C) %%RGBCustomColor: 0 0.509359 0.790461 (PANTONE Process Blue C) %%CMYKProcessColor: 1 1 1 1 ([Registro]) %AI3_Cropmarks: 0 -870.4004 425.2002 -445.2002 %AI3_TemplateBox: 658.5 -435.5 658.5 -435.5 %AI3_TileBox: -93.3999 -1053.8008 518.6001 -261.8003 %AI3_DocumentPreview: None %AI5_ArtSize: 14400 14400 %AI5_RulerUnits: 4 %AI9_ColorModel: 2 %AI5_ArtFlags: 0 0 0 1 0 0 1 0 0 %AI5_TargetResolution: 800 %AI5_NumLayers: 2 %AI9_OpenToView: -397 136 0.5 1196 636 18 1 0 77 134 0 0 0 1 
1 0 1 1 0 1 %AI5_OpenViewLayers: 73 %%PageOrigin:352 -831 %AI7_GridSettings: 72 8 72 8 1 0 0.8 0.8 0.8 0.9 0.9 0.9 %AI9_Flatten: 1 %AI12_CMSettings: 00.MS %%EndComments endstream endobj 110 0 obj <>stream -%%BoundingBox: -11 -877 1326 10 %%HiResBoundingBox: -10.6055 -876.8867 1326 9.09863 %AI7_Thumbnail: 128 88 8 %%BeginData: 12182 Hex Bytes %0000330000660000990000CC0033000033330033660033990033CC0033FF %0066000066330066660066990066CC0066FF009900009933009966009999 %0099CC0099FF00CC0000CC3300CC6600CC9900CCCC00CCFF00FF3300FF66 %00FF9900FFCC3300003300333300663300993300CC3300FF333300333333 %3333663333993333CC3333FF3366003366333366663366993366CC3366FF %3399003399333399663399993399CC3399FF33CC0033CC3333CC6633CC99 %33CCCC33CCFF33FF0033FF3333FF6633FF9933FFCC33FFFF660000660033 %6600666600996600CC6600FF6633006633336633666633996633CC6633FF %6666006666336666666666996666CC6666FF669900669933669966669999 %6699CC6699FF66CC0066CC3366CC6666CC9966CCCC66CCFF66FF0066FF33 %66FF6666FF9966FFCC66FFFF9900009900339900669900999900CC9900FF %9933009933339933669933999933CC9933FF996600996633996666996699 %9966CC9966FF9999009999339999669999999999CC9999FF99CC0099CC33 %99CC6699CC9999CCCC99CCFF99FF0099FF3399FF6699FF9999FFCC99FFFF %CC0000CC0033CC0066CC0099CC00CCCC00FFCC3300CC3333CC3366CC3399 %CC33CCCC33FFCC6600CC6633CC6666CC6699CC66CCCC66FFCC9900CC9933 %CC9966CC9999CC99CCCC99FFCCCC00CCCC33CCCC66CCCC99CCCCCCCCCCFF %CCFF00CCFF33CCFF66CCFF99CCFFCCCCFFFFFF0033FF0066FF0099FF00CC %FF3300FF3333FF3366FF3399FF33CCFF33FFFF6600FF6633FF6666FF6699 %FF66CCFF66FFFF9900FF9933FF9966FF9999FF99CCFF99FFFFCC00FFCC33 %FFCC66FFCC99FFCCCCFFCCFFFFFF33FFFF66FFFF99FFFFCC110000001100 %000011111111220000002200000022222222440000004400000044444444 %550000005500000055555555770000007700000077777777880000008800 %000088888888AA000000AA000000AAAAAAAABB000000BB000000BBBBBBBB %DD000000DD000000DDDDDDDDEE000000EE000000EEEEEEEE0000000000FF %00FF0000FFFFFF0000FF00FFFFFF00FFFFFF %524C45FD2FFFA8FFFFFFA8FFFFFFA8FFFFFFA8FFFFFFA8FFFFFFA8FFFFFF %A8FFFFFFA8FFFFFFA8FFFFFFA8FF52F8F827F8F8F827F8F8F827F8F8F827 %F8F8F827F8F8F827F8F8F827F8F8F827F8F8F827F8F8F827F8F8F8FD2CFF %A8FFFFFFA8FFFFFFA8FFFFFFA8FFFFFFA8FFFFFFA8FFFFFFA8FFFFFFA8FF %FFFFA8FFFFFFA8FFFFFFA852FD2AF8FD55FF52FD12F805F8272727FD13F8 %FD12FF7D7D5253527D7DFD23FFA87DFD0552A8FD11FF52FD0FF82852A8A8 %A87DA8A8A87D52FD10F8FD0FFF7D5227FD055228282752A8FD1EFF7D2727 %275227522728272752FD0FFF52FD04F827F8F8F827F8F8F82752A87D7D27 %27F827F82705527DFF5227F8F8F827F8F8F827F8F8F827F8FD0DFF7DF827 %52A8FD07FFA84C2727A8FD1AFF7D27F853A8FD07FF5327F877FD0DFF52FD %0BF827A8A827FD0BF8007DA852FD0CF8FD0CFF5227A8FD0DFF7D007DFD0C %FFA8FD0BFF282777FD0CFFA80527A8FD0BFF52FD0AF852FF52FD0FF827A8 %7DFD0BF8FD08FFA9FFA82727FD10FFA8F852FD0CFFA8FD09FF2727A8FD0F %FF2727A8FFCAFD07FFA852FD09F852A827FD12F8A87DFD0AF8FD0AFF2752 %FD13FF277DFD0AFFA8FD09FF5227FD12FF5227FD0AFF52F8F827F8F8F827 %F827FF27F8F8F827F8F8F827F8F8F827F8F8F827F8F8F827A852F827F8F8 %F827F8F8F8FD09FF5227FD15FFF87DFD0AFFA8FD07FF7D27A8FD13FF2728 %FD09FF52FD08F8A827FD15F827A827FD08F8FD08FF7D05FD16FFA800FD12 %FF277DFD15FFF8A8FD08FF52FD07F852A2FD17F8777DFD08F8FD08FF277D %FD17FF2852FD10FF7D05FD16FF5227FD08FF52FD07F8A8FD19F8A827FD07 %F8FD07FF7D27FD18FFA827FD10FF277DFFFFA8FD07FFA8A8FD07FFA9FFFF %A827A8FD07FF52FD04F827F8527DFD08F827F8F8F827F8F8F827FD04F827 %27F8F8527DF827F8F8F827F8FD07FF5252FFA27D7DA8FD06FF7D7D7DFD05 %FFA87D5985FFFF27A8FD0EFFA827A852F8270027A8FFFF7DFD042752FFFF %FF27270D85FFFF277DFD07FF52FD06F87752F87D7DA27D52FD04F852A87D %A85327F8F8F8287DFFA8F8F8277DFD07F8FD07FF277D522727522727A2FF 
%FF5227522727277DFFFF5227273536FFFF5252FD07FFA8FD07FFF82727A8 %A8A84C27A87DF877A8FF7D2727FF27277DAFFFFFFF5253FD07FF52FD06F8 %7D7DFF52272752A2A8F827A8A82727277DFF27F87DA8525252F8F8F8A8FD %07F8FD07FF27F852A8FFFFFF52277D5227A8FFFFFFA8F87D52277DFD05FF %2777FD08FFA8FD06FFF8F8FD05FF52F8F8A8FD05FF27F84CFD05FFA8F8A8 %FD06FFA852FD06F852FFFD06F852A8A852FD05F827A8A27DFD06F8527DFD %07F8FD07FF5227FD06FF7D2727FD07FF0027FD06FF7D27A8FD07FFA8FD07 %FFA22752FD05FF2752FD06FFA827FD05FFA2F87DFD08FF52F8F827F8F8F8 %27A852FD06F8A8A8FD05F827F852FF27FD05F852FF27F8F8F827F8F8F8FD %08FF5227A8FD05FF277DFD07FF2752FD05FF52277DFD09FFA8FD07FFA827 %27FD04FFA2F8FD06FF5227FD04FF7DF87DFD09FF52FD07F827A87DFD05F8 %287DFD07F8527DFD05F852A827FD08F8FD09FF52277DFD04FFA827FD06FF %A827FD05FF5227A8FD14FF2727FD04FF2777FD05FFF8A8FFFFFF52F8A8FD %0AFF52FD08F8007DA8FD05F8A827FD06F8FF00FD04F87DA827FD09F8FD0A %FF7D2777FD04FF277DFD05FF2852FD04FF2727A8FFCAFD14FF28F8A8FFFF %5227FD04FF5227FFFFFF28F8A8FD0BFF52FD0AF877A8FD04F84C7DFD05F8 %527DFD04F8A87DFD0BF8FD0BFFA82752FFFFFF5252FD05FF27A8FFFFFF27 %52FD19FF52F8A8FFFFF8A8FFFFFF277DFFFF2727FD0DFF52FD04F827F8F8 %F827F8F852FF27F8F827A827F827F8F8A852F8F8F8FF7DF8F827F8F8F827 %F8F8F827F8FD0CFFA82727FFFFFFF8A8FFFFFF7D27FFFFFF0052FD1BFF7D %F87DFF5227FFFF7DF8FFA82727FD0EFF52FD0CF827FF27F8F87D52F8F8F8 %27A8F8F8F8FF52FD0DF8FD0EFF2727FFFF5252FFFFFF27A8FFA8F87DFD0E %FFA8FD0EFF7EF87DFF27A8FF277DA82752FD0FFF52FD0DF827FF52F800FF %F8F8F8A852F827FF52FD0EF8FD0CFFCAFFFF4CF8FFA827A8FF7D27FF7DF8 %7DFD10FFA8FD0EFFA8F8522752A82777F852FD0FFFA852FD0FF8FF52F87D %52F827A8F827FF27FD0FF8FD10FF7D27FF287DFF527D7D00A8FD10FFA8FD %11FF27272752F8277DFD11FF52F8F827F8F8F827F8F8F827FD05F8A87D52 %7DF8527D52FF27FD04F827F8F8F827F8F8F827F8F8F8FD11FF7D005227FF %2752F8CBFD12FFA8FD10FFAF27F8F8277DFD12FF52FD11F87D7DA827A87D %A8FD12F8FD12FFA8FD04274CFD23FF27270DAF4BF8F8522727FD10FF52FD %12F8A8FFFFFF7DFD13F8FD10FF7D5236A8F8F8F8535252A8FD20FF2777AF %A9FFA8FF527DF8A8FD0FFF52FD0FF8527DFF7D52FFFF7D7D7D52FD10F8FD %10FF275236AFFF527D527D277DFD20FF27A8FD04FF7DF82727A8FD0FFF52 %FD04F827F8F8F827F8F8F827F8F87D7D7D52F8FD042752A800F827F8F8F8 %27F8F8F827F8F8F827F8FD10FF27FD06FF7D7D7D52FD20FF277DFD04FFA8 %F85227A8FD0FFF52FD0FF87D27FD05F827A827A8FD10F8FD0FFFA852A9FD %05FFF827277DFD10FFA8FD0FFF27A8FD05FF287D27A8FD0FFF52FD0FF87D %28FD05F87DFF7D7D27FD0FF8FD10FF27FD06FF52527D52FD11FFA8FD0EFF %277DFD05FF277D00A8FD0EFFA852FD0FF87D27FD06F87DF8A8FD10F8FD0F %FFA852FD06FF537D7D77FD10FFA8FD0FFF27A2FFFFA8FFFF7EA827A8FD0F %FF52F8F827F8F8F827F8F8F827F8F8F8277D52F827F8F8F8277D277D27F8 %F8F827F8F8F827F8F8F827F8F8F8FD10FF27FD06FF7D527D52FD11FFA8FD %0EFF27F827272705272727F8FD10FF52FD0FF8A227FD06F87D00A8FD10F8 %FD0FFFA827FD08FF5253FD21FFFD08A8FD11FF52FD0FF87D52F827F827F8 %27F828A8FD10F8FD10FFFD07274C272752FD20FF7D277DA87DA87DA85252 %FD10FF52FD0FF852FD0A7DFD10F8FD11FFFD04A87DFD04A8FD22FF52A8A8 %52FF527E277DFD10FF52FD04F827F8F8F827F8F8F827F8F8F827F827F827 %F827F827F8F8F827F8F8F827F8F8F827F8F8F827F8FD06FFA85252A87DFF %7D7D7DA87DA87DA8A8FF52A2A8A87DA8A8A87DA8A8A852A8FD18FF27FFA8 %527D527752A8FD10FF52FD10F82752F827F827F8272727FD10F8FD07FF52 %522727A8774C27525227522828A85252522727275227272752522777FD07 %FFA8FD10FFA8FFFF7D53A8A8A8FD11FF52FD10F8277DF8522727277D7D00 %FD10F8FD08FFA8FFA8FFA8FF7DFFA8FFA8FFA8FFFFFFA8FFA8FFA8FFA8FF %A8FFA8FD09FFA8FD17FFCAFD0FFFA852FD11F877F8522877F87D27FD11F8 %FD2BFFA8FD29FF52F8F827F8F8F827F8F8F827F8F8F827F82727F8F85227 %2727F8F827F8F8F827F8F8F827F8F8F827F8F8F8FD55FF52FD2AF8FD2DFF %A8FFA8FFA8FFA8FFA8FFA8FFA8FFA8FFA8FFA8FFA8FFA8FFA8FFA8FFA8FF 
%A8FFA8FFA8FFA8FFA8FF7D52525227525252275252522752525227525252 %275252522752525227525252275252522752525227525284A984A984A984 %A984A984A984A984A984A984A984A984A984A984A984A984A984A984A984 %A984A984A9A9FFFFFFA8FFFFFFA8FFFFFFA8FFFFFFA8FFFFFFA8FFFFFFA8 %FFFFFFA8FFFFFFA8FFFFFFA8FFFFFFA8FD2DFF140E1414140E1414140E14 %14140E1414140E1414140E1414140E1414140E1414140E1414140E141414 %0EFD56FF0D140D140D140D140D140D140D140D140D140D140D140D140D14 %0D140D140D140D140D140D140D140D14A9FD55FF1414140D1414140D1414 %140D1414140D1414365A8584855A3614140D1414140D1414140D1414140D %1414FD56FF0D140D140D140D140D140D140D140D6084FFA8A984A984A9A8 %FF84140D140D140D140D140D140D140D14AFFD55FF1414140E1414140E14 %14140D145AFFA98514140D140E140E141485AFFF14140E1414140E141414 %0E1414FD56FF0D140D140D140D140D140D14A8FF14140D140D140D140D14 %0D140D145AFF84140D140D140D140D140D14FD56FF140D1414140D141414 %0D14FFA90D140E140D1414140D1414140D140E1414FFA9140D1414140D14 %14140EFD56FF0D140D140D140D140D14A8850D140D140D140D140D140D14 %0D140D140D140DAF84140D140D140D140D14A8FD09FFA87D282827270028 %527DFD42FF140E1414140E141414A9AF0E140E1414140E1414140E141414 %0E1414140E1414FF5A1414140EFD0414FD09FF522727527DA87DA87D5227 %277DFD40FF0D140D140D140D145AFF0D140D140D140D140D140D140D140D %140D140D140D1414FF0D140D140D140D14A9FD06FF7DF8287EFD09FF7D27 %27A8FD3EFF1414140D1414140EFF14140D1414140D1414140D1414140D14 %14140D1414140D1484A90D1414140D1414FD06FF7DF87DFD0DFF77F8A8FD %3DFF0D140D140D140D60A8140D140D140D140D140D140D140D140D140D14 %0D140D140D14A9140D140D140D14AFFD04FF7DF8A8FD0FFF7DF8A8FD3CFF %1414140E141414A9610E14143D14140E1414140E3636140D1414140D1414 %AF14140EAF5A1414140E1414FD04FFA8F8A8FD11FF7D27FD3CFF0D140D14 %0D140DA914A9A9AFA8FF84140D145AFFA8AFA9AF14140D14A8FFFF840D14 %5A850D140D140D14FD04FF2752FD13FF2752FD3BFF140D1414140D14A9FF %84140E1414AFAF1484FF36140D1460FF363CFF85143614140D85A9140D14 %14140EFFFFFF7D27FD14FFA827A8FD3AFF0D140D140D140DAFA9140D140D %140D85FFAF0D140D140D1414FFA9360D140D140D14A8840D140D140D14A8 %FFFF2852FFA8A8A8FD05FFA8A8A8FD05FFA8A9FFFF277DFD3AFF140E1414 %140E1414FF841414140E1414FF5A1414140E141414A8AF0E140E140E14AF %A914140EFD0414FFFFFF277DFD0527FFFFA8FD0527FFFFA8272E14FFFF53 %28FD3AFF0D140D140D140D140DAFA8140D140D145AA90D140D140D140DFF %14140D140D14FF840D140D140D140D14A9FFFF27F852A8FFA852007DF852 %A8FFA852277DF853A8FFFFFF5252FD3AFF1414140D1414140D140EAFA914 %0E140D14FF140D1414140D85A9140D140E60FF610E140D1414140D1414FF %FFFF2727FD05FF52F87DFD05FF52F87DFD04FFA8277DFD3AFF0D140D140D %140D140D140D85A8140D140DA984140D140D14A85A0D140D60FF360D140D %140D140D140D14AFFFFFFF2727A8FFFFFFA827A8FD05FF5227FD04FF7DF8 %52FD3BFF1414140E1414140E1414140D85FF3C0D1414FFFD041436FF1414 %0DA9FF140D1414140E1414140E1414FD05FF5227A8FFFFFF5252FD05FF27 %7DFFFFFF77277DFD3CFF0D140D140D140D140D140D140D3CFF600D14A860 %0D140DA95A140DA9A8140D140D140D140D140D140D14FD06FF52277DFFFF %A8F8FD04FFA8F8FFFFFF5227A2FD3DFF140D1414140D1414140D1414140D %36FF850D61A9140D14FF140DFFA9140D1414140D1414140D1414140EFD07 %FF7D2753FFFF2852FFFFFF2752FFFF2727A8FD3EFF0D140D140D140D140D %140D140D140D14A9850DAF14147E850DFF84140D140D140D140D140D140D %140D14A8FD07FF7D2752FF7D27FFFFA827FFFF2727FFFFCAFD3DFF140E14 %14140E1414140E1414140E140E14A9AF84A90EFF5AFF841414140E141414 %0E1414140EFD0414FD0AFF2727FF27A2FF5252FF0052FD41FF0D140D140D %140D140D140D140D140D140D147EFFA985A9FF36140D140D140D140D140D %140D140D140D14A9FD0AFF27275327FF277DF852FD42FF1414140D141414 %0D1414140D1414140D141485A8FFFFFF5A1414140D1414140D1414140D14 %14140D1414FD0CFF52F8275227007DFD43FF0D140D140D140D140D140D14 
[binary file content omitted: raw compressed PDF stream data, not representable in a text diff]
G@Ng}wE~Ꙓ2XW'RI[htDT`7Hnrtǃ2ټ, NS+%E+߻js  -VuެNjtj<):|*B@eUZ]|2nbS p9xJEZ:!)d!3Ȩn;|M nѰ*Hz``FLzcW&M/Uޢ#G}yFǃ=oC7 a6ı0:]Wqks ;+5/M0o[AgD<;DwJt(lb ڨy-p М[0'΢m]:vr3ݮػr Fs j ״ EeL -:]s6uӤDV o@xp&0pBj淓Bk7: 0켋9r0͏U!ogO -8w -ZՅ~k7H дNnG;4b#JmKҢQ*mgߵGpB*>ZYq Ϭ|pd=Y^)p*yX]n_}4*J8b\Wܠ#֯ypO84BگCe,Բ2۶J]׬rI7{ttQ͊JX}p9مew00#IH[1B^~ݬj ez7暇R>`{av&wkN/Vܖ~t\\ #hK=MŭC-e?^C -Uld&5ؠWny4 pخ wnJS.P0G 5HW;Izs[FKliZ F? =g}@GF1Qg1C=%M[,f} 6Zl<6h%0P(3Ƿ㚿kd߻ Czv{&){AcU FTɮ`< -julwl 0#qY unc;Vu s^چ*%௛h5܏ֶN( -\Eg(/Zfy {qzYRFw+B`zm9(hm+f?B7a_Z=^mB= (J6w6Vnim8y|S/yY酫Ss` G_{ij~~qš3:n(@ogv6Zic`KUZ~+w8m=^)BmR!k6%A_\nb=Wֹa=ƾ1!g*\|F8>n^-'9RӶӨP>?F}SvķnιYXv!.DiRK0֊6.烽}MO -ta4(r^+m fۤVZokaC[mgcUH7q]0Tn􍊓zSm*^jsVρ:'l2mA\ހ^(/UPwXptcSKۏUo-u>[j]! D>%/ՠNMfPnHe8@fEoџN/}6[h&VњZ{{eηS__eS薜`*Ijaǀi&|c -hSt(_ -IF~<:sdLWVHPGko+ReE6kk[G{W{yTO> uGP8axb 1=%_2V$Ɖ7׺AЦf.kLr>>X|I_ukqЋ; # -<~أ_UÃS{eʩ@m`oeI=xTKɩXMvFXNٝiwAw6Ċ WgMΠHsmvT]'eT0oQk>A7*[oe󤦪Yomxk&S Eu^ynW38A:#T7~dmW{רSNJ~sVWZQ^bt1K0k[|u`XC' Be0j:!EP _1npB=i G>J giAWGjxyfjũ!ϓDʫ=t~jQXYvǭ*^Z!Ai=:'ki\[v[ dv@}Q?mn^A>b{lsyTܫ:!sdv&CV9 aQs2S{p^ ʢR .R{/̀=_'?Ro:iO -A -se ZDZ6cgjehDY{+Ѿk'U84NWo P_|E]FZ sdݧ88OgYn},[O@L mQRNoz hm7ADaԽ=Z 11 ~e24qq@O:+e-^aQ핈4xT[υ6[#ܫM".뙇oI{M%Q8*tTF>㴿m)w34"L36^q֝6im gA0[AK N~.ק#kiy5[#K/f+fq}**!+iJ&DĹm~LřF=3w:_U;xgWn| wy*[I 4OژgϥB<' ^'y~j~[ ;*v:n?RlJUv swvK1't0?"}y՜LJ X WFWN]8ST-$e&ӗ͗]__l'A٘ޣjdPb!11Z5F낙 [,^{JzŢ&l?Y( \ *B3Ɋv̔ݜ͛xC7zVUNz:i~܋cɩ^Z-N4-9lE¥oS? KaͰU#rn?Ki_^kYMKBΕUZtJ3+Mܿnd6;^4N MK^7I+8@?:0)o}:w&LӪ +𡷏ξ"3> v.hi' > o›$dȏ1Zܓ{zNPwQ~a3>>~aUs mB,Y)(n{~|~'.x-R\\`trR+gEqp7gMEL+$>~9y7zUA#/QuYsGb}Z^> N^!yAZLNw5 $-x1>$ޅ.`ec2aJQu`]'U͞s 0oN{Qa?+oYZC3â| -T{ PU.C$)0=,0)fڝ>3UEU?hRqvɌ -*aĄ3];pP9x;GXX83Pkw޶n^:*dRck{SUAQ ٯfH2d yvъa|T][jeSfD&|zM3o_$,YPie;P&@A4,{/XRweܖ7`S z81D -ʴ.kۗ e0ky'QQM! Uo:18}|4;<~x{{c/o +П6\2;hQq+\kq|G?}-UcAc7,Ͱޚ5vq* ʄk-=b}!-oZ]# X&J@뮺u,qKn vrZLEgFq9in1uBO:Ѱkyk?heW\Q#õ=Z`/>65z'cA-ɑ x 26h ƽ -VX~:'m%OgJB„k/3aUM->tm1Ef9"MP_n1|xri ݲv Vd6m_dWzh.b:0ARы"R:&x〨(WѤO.*#U G>j39 0 k1 }Q9QuK{o))~S-X b06u%LVeo,.G#aԯm{ܓfe6&t7j9H" d=\ ez(Rqx@J. 
QO;h*ymI]?6'rUe [kֵPL)Y]@KU}ʡ6ngLCMĥ屦I^l_/rS.TݖWg;%!/;u}/m繩q%kg[ir@3KIKhGvw`l457y=h븲ظ[~jr\bQ`|Ƶ4rGRe;5ˇ$I?ttml7*c0\IQCޓ#ʫ@q_:Sz0J0^X+F9iw$ԝ5?` A>+7_2V^Rp&"A+ۥ;Z 'wW r҈ׇ.mX 6st̏1ꤊX>q$ܣ݊YC93meͩʦj8g%Vd;Mn2 -OP0V -f]~ޅ=^0jVA;YרIeq?Cf:+rz.K?(fߗwY" MMH,Hhs6SDEُ*Ol 0E_2YjiY=Shw^đLX8k5@n3֑ta[i1kJSlg=yͮ᪫T:!6cX*Fψ<](eS]QvlӜ5o<_y iӹknS+%ĪUao2cD=5b)ԗδ%g*̐:O`?Urj]zuь]aqg&[OqD&jf(V&FF&\&Mvw)*GAX9&&Yg{C {ɘHPp p[q!WMKJmPzOR3:UPk_mzF[qtZ_vp`D-#7>p\&Mr -)d UA??hDLN۫ug#΀4K3i>M_c&23.`Ho{Bj9qg/q_QoOlC/E9;Yc*4腎"&}1Is3f<ދs7vAzgz4<ŸwH>d|FԚ9qޮh~9ttzOνcK覗qJɏ;~iRMfIiD=ɋIH$yiB1w)~u8#oGHQA9=8namH3DlƤeP{N5[5|llOemMtHGS׷*3mJQ39.Ggm_,J̈́di=(K롿v--#kC3Tj:lSq' -ڲCuK|E lS˃Gv/#d'm`18Ƥ>`P\[-{Ӗl/9뻖Mo:iLsެAʻ U?X]<,TԤ[[-{Xje4;U@ު' TWZf[Ug;ˉUkzGTjK3w-^Ut0-ӇU![މ{ ^1?ۧH Ƽ -{ʔi~G㷬Gh)*N5qI˼إ}o-cⵋTq*rU6pi6syinˆݡφԣ蠟xIZb߸S]Pp%Ps%2j});JF^ˍ"If<煰Q]w:6DB6j}L2 -dݼV=z Mq6䪌ԊE/e4g+F+L_\*Ф,WxvRxh4,SK'oBI;#?W3p4=ywV,~9&hgG./&ڲ_Y5}oLn*HAEn=ľ%ïu+?&eeA'$ >qoetȲZS8䷮ʯv~d+dz L'tݞP]| e5NOӞ0mO͘+goeDM~2~d4HƯ*x ݷTgΕ]+;$tr)ӂcqml]FZZ31Jkn1^Tصb`OGq dbB>Ⓑz1'n߅kܼp8NA=[6Ӈ7g@@zNWHѱOj%!w"Tbit18 E ^TI)lb WA͊wJ{eצG&1jRtz{e:-hڞďtMU*-zh5mئg #[$T@ ?1))@~0q*bKT!w7=F,ߺzZZU!n.#7X&XFqctuo9YG>bDuۉ3,૦΍sْ[ Ӭ K -踯ܶ# iϣwm7ېoc; -{Ps>&OWt{%g_N~<>Bx~=\O5Jn.n%D۴߾yMqeIJ&\j mhA2ϙ %TDFpp"#7qaZsz6-~ۋfX+!Q}cjY[ [y>12`t.:^l.8ND+(D mGV[ ocWwo1AZF∁Κ7{]<VtY׋Mh7?ܵ[M+,'82JK!G䉙f9>ɻzճ)J~łOMKT6|lA4ZGJx>|t^Ǐܻd~!I E̳(,ڡnx>w4k2D5+2"΁nLw,Befe*jڤL?\5]Aw꺧VFeWgsMTߗ?*!AgXDijd ARCe{7}KUŠ1z -fIy;m$E&""jWMC¬y43n2/7cgˬEft;㰎vRj3XQwWD>o~D7-Zӝb0ZuN۵1]%Zbv.H=V\c7iuVuwE^a;_?Z̸~Uw֢$xj_c'=f.\ԛy"jkA$\3V>Ꮟ ~G\aFN=;qsu{ڪq9K3m̄| t< / jAO'/mNm3^ujP 4mZexj|:ٞ_4jy(C@cU)@m?oV?x#Z4(q>?&o)B4~ Ve&{,iuٗ3W;O͙Xxu-6iJ6>Ͳ~|9@cc8 # ?Í`Ek=|e-ԕVI6/9EYp]j)6UZA:ӝg>Cu6Nuz؅83-Sx ? -neQ)TywvپXs=KsE׫b0)8Y>z+ñ-Y5;n(cxNVg~HݺgjC,<~ov݆{2m MpNT;2~/570NvF};,7@o 4brO~z 8mv-[j%c[rǛ꥘ND^P7-?m 5 -ߪ|trmjV.v;y{n>)"+$3% B3R pT2356AѨ:T\N^ 8{i4 N5m̘Vy *[h R7l}h`g, mE(~yanູVg6,:o@ŗ"-[b Re'߭IM>>&d!ͺU@p!{oIeL>'K-: Sek>|I_F7QډhZpzͯpConw{-lԜWo$uZk5ܪ)o)#fw7iC%D[n5?7\Xl6/v\O(q2H#5,؇gsiagJO]IYs%l Ӻkc|kuw|ZC;'EYG4 >{V]Lf׍E9VIeqD+JRoKD7/RMJQ8l ,yFq蚤u–%㽐)^: }Dޒ|U5*HuyT7yt̘?F|H'08"r&9_G|06+Vћh~DEFcץ[~Vʡ w-`_@Ek&!t٪gˮV>!raI%+;/G~rAxI/t2<ݝƗ˅ldm:X;}w 7aTsչr[#pJ$|üv{zrˣ+;GיWpa@%Qsvy5?j>_CuzP co#_ȫA6|w`U8'b-qHZJIM1<5fMuJ¢UlI9My/q([?ǘJs{ r[!n2b;a:T4=i;( yN'JkϬl䙎TӦw,:׿ՊV|ݰb#b,a GU4VI;fsOΚʅkNQs{ͧ -_e%q`6#&zM:L:j[]3n~(O<" -mdPHRsuL?NQ ]#7vG`f-b ni6Y.+U %ZH񇵎ˎ#i'JG8'.#[8ͅOKF:'M~hBT>mmWrS+ t5=yK?OCKofHhFKDGdCø%1K -4&>=?m3<{aE'@ɫ`C:܇t^yhOq*6mZ}ŖaR dQ[o\F1mw$[vTÁϨ>Xt/q7~]&& ^0xxϘH)µyTPmyG?)'fj⡉:w/ߞPqc7MV FUKK(:9ks>\[ 97u;)"MoD^MUzXzrA{MUJYRy2|7ZVZyq -wpsV ߛ>1aw%I*,]Bm;I_̊;*Tr;cD?$_c ~'R7k]"HvȜhRXYgCp%HwYI!K̡GDP}:g~hYĸQ}zf߳ -a1`~Gl$mƩ{?\PAY ( :>d,?Mf4N'{=!^&{?? 
-{2|%7Į's_M%7cӽe-*Q79V^Z -3S7b4 J'1Uq!e`zXw]b6d(vt-˭tyi>\]$5yӨz$畴UIxDubfz#AJ>K`Ts\iv眛2Z]Tk?kh*9<̣S8->4!56IiiJM:΅F󸹍s Nݟ%K}xlw9 8bYkd~qݮE~t'J 8Nzw:U&6͌ @WӋVc;WlY+/Xyl'fGtҶg~Xb mC^'zZ$_~f|jwEq9l)K6T[UnڕYWyT4o{HS >׷S~\d<6F)ԝ'LEYY<4bbi4wJ|Nvi}Zݫ[-le71676o^,E@%GztV;ǯȺCZN5_6;AMMM/m2,>?q9dT.X) Ga'[jQPOv/uz9/WcGV$ea-noЛzb+m}X85#hϹ` F=s|!oNgUNIw3+x%`FvV6wZv1brV-R ZkEvԙgZZH@E(N@ ]C{˖]1aZ/^mT v5(.;zdطili#dt?h Pގ›  gR ^K0y6݅io%h%Z]$7naPQ74.!Ԩu` ꇨ'+W[ӄ'[gU|ZI;3wV/Z'-wN KN>G98_ux3B3fSMr*zw[djmVdzC Yw*n$D{:ely>`+D?gVEJͿiUݻZho>7D0BlyBґ`h0;|KoFf?7>3On6y>[PKǚXpJ:fyZΞ<*HUО fR`\ʬe_7V\nO\,ey=ȢdXڈjJvC]ܹx5~}F܆K2Ռݩߌb6BEǬws2فLU\#fiN~6 `>ֽbo5}1Wvj$L>;1Uo3[rVݓ$B7S77om f -:(a+ ^.lo9Ҽ([|=ڮ._ؾ`Vz$pm`wi; JL_G][&GYQXX=gk|mvFiu'>X|u\#ףm*zkc8eHٟ Vʹ`ZWw_ hRh/HJHnX+hlG1ww2J<^m!"#c7##'X\ѳ#'eoy{ln(w_}N~ ->2EC7PgG>̶`2L^]{Hkm1qw\BZnky089d3qq!&[?{OHjM7[6 __N[g|(*ٳN ]g?]*55!~CrrlͲWO+בde#Iqj&qmu/g^{+r7݉j); -TR$`h2,>mzhYǢʵOw=ĝkk=+Wpwɞ-MT0HyY%î7> -46|)Nzxf.P-|p9eTgukyœnݷq p}&%.0y|^.yՂZ~VӸDxjYC|sϟ!$}sꚴЙ:kMƽb#"gų߿*__jhsFe ap/.k@>̿K#逼?O1F7 o,YpZ)TKsp8bD ꉋ ŎaO -οE-ai<):`:M?v%y.aېԭOӭ~ lCDj% B'jT@X^yKAԴ|y菝œI̙{%s0 dutL`6r@l!4mgPhSY]za$=zAHPOLVba$:CL ~ĆxJf{1o&ռ:ovʹMzb꿶a߶gGbR'e^v.ֆr7tϛbܬ<8E \е2=EA@ˢ[Gk!( q7ۗ0T-enΓCo.*1WwiΜ?#3di`iusݚ/Sk.wKޑCE \V|k׎oJJNɾ!ZA߉a%54 Q~u^=ndBx&h1dAkFR"^=A\~€{yog}4&!qJknwaO"к K UOo}m^ε'N8sa\͉?~:^uM"̍{rઃ Dj(<^0D3Tr{cF-~aehd6{k!٭3Ty2 9<ԅ2u۳p滅iGv 5KetPke&[a@{M;QqǵCΩNE|jcDfP R oJNZY%v7eLQo#pu1y]g6+A{$%qׄf=-ޭ 5ٝ3~b])6I4)!KGؼڭݺ}z;%q(r k%k/1w*?o=T}h/+\O+aAx#eK:z_Cm;mDؿO'+y AK/#*AE˹Y5qݫjt׃Z?h:?w&`sRf0Y|_ʱ[F\g_ޱd! !1z1 IC_b$*9Clsb]Eί挪C{i]C< g%>=Q+Rs͹vF!vy.1ְp@T鹬F8u rc`~/mq -h$<_q+jm nW q)Ą]EU׬i7%2o -a4i*;)';6ؐa]S¹^JM+7/"ΜֲȲpΧlAtvtjUa?N_ J^Wƍ2GQ*tK8[Ǫ #9۳/XG~d[V -,uƫؾ4suJ]؇nҪµBci颱N 7A)T^Ci\DžOBЌFfEccXK -Ъ/y@j} -ê!+O;uEe3iNR!v+uE=MwsČ0]0i|̅ෂi}r[yl-kj^G2c0w˰m2͠ٿĥJ[CLmauYFر3nQP͵aj&fцPiF%MӖݝTk_]A8Ee8kx]zӰ<9gtkҾv y\I#v&!ls&\^++,gJu}%(s<'ۦsx>yhM|^1"7o^,U*Yuux52+.e_rӵFlýˆ"<.r4[,땸M=EO9 G)Hod)ۡud(V;|;Npo#Xsܝ&[wqBoخKvuEwB9H8gln^0k*(f':iʡ,N~9q[jrd롻VIDuR dU'9ck<?4K`.ō:3mp&fbiN@dx=r5vI'X ?p*@YvlNճWX -͐-5K|{:)Zdqb&za4؈sxCt[a=86vEr%tDc$p_\ϐ2K=߲>WJ=a}1枝ī7|N,^l{4]"M}LM煳zï)j]H,%ˊӮrʄȺcw:\Gluysj],hL'%(ʎln^3řn_iw԰e6) -16^I+#;|)LxpNῈv]tf|е5:!5>+xiOIx uB>0ohze`LE_py6{۬ 1/Ί]5v3SO3fʖ)=ʧ.QċQlV;=NaE;7"bk/)yz~ŚugbqT-T%r<\2v 'ɵ\} n; -\ʺ);l səN_J ER݁גҮq?.wޕyb4Fz9ckcy_c[w?j&kxnri 2pnj2BIG]<]BL4Et>/ӞS}B.[VDnEђa-ʈ*C-Q/kIݛ[\KXuaf^Jٝz׼uG^z-V Nh#E^4^;ႏ=mY_c":uYq u8መYh ۷ VV^qpإX Ax쪗%Rck} -mWOhX|3#Oi*_y՜$xp1`N瑠eCcgk%|4NbQ$˄߹U.Z-ɴRA^7+sX~gl}8sJmof&Ӿ{zdG~] N LT y&q^<^ RA m9VR]OgWhD**G5"Zc.9h;inL~m sFLF #\r1=yT𦝂Bc}M` ?^N;+/x;7U.w7TTQնHEVK; }g*6[C30epOjiNHדIRj)-ͬoIՌT85_7ډD58g>gVʕ^(m&$9|ݺ>6YrrƣSh> _+^A[ (xNu*/qΪP5۩N,YRp[ \{XSwurĹxa?Oֽa7`2t{rяQD+kzCxkml9a/MRmbdNb>3Z&i7enl[-.T/5G9^{Qۚ*f@r*{LW?&sKʮcRcY2,bkZw)UEm)ѣ}h3gahzseȯx'tܸT5u{5Es2$ZZ =2_ ^N - |4p:g骦m*7ʜxǜZ璕,[czV/ [dۋO3$(S_U#V!F/[)q4هrvljK Æn||0Sbֻ2x-Vm\>S`x>2Vgܙk4s&jg}'wUM`j:\} rtʄu;E`3͜T6Xcd`y)G AY~ N#Ljq,IÌyl#S:gr+EN pj`t.PVC vj}Qj˭oN?gp[/-:͓A!.6KZOo3\ꀌ|OAZSPe~2 Bkqwd$|o6:;ҝu/۩XD>o{j~\‚5~dMK1v0weKs.Ssn,TY#tg_iOC;guP̉͜ӛni%1.F/)"JoJ, D<>P6-d6i Ȑc~FjÏCwqq&8Te0ݣbT]=rm" 4; )xC4!`޺ڙ*VW..=wԸٛ^v+V\ںcЇ&}c6g3cE::94˾l_h(Kpy͠% _`&r}5…F<ʦsC)%W瓃BxĶeoh6մupZvRE@nv)C83[~.#ugd8m։WV>VkU -څ;yٯ{1J*1/&#'y9!Q:5j;Yl+82"'q^Hl|;i~kBMg o@oUJ rsˣ]!џ3--_δc{0s2Δ7zDkqeO-{all=ڈdK}LXî^<5с[Es~Ҥ S2xs9aw5Rttb{E\fAI;M=?Lhy{kgpluԡ -r2$ A\{3w=|mfi$NqOV8qk!SN֏X(2oQ' mng$U9D>lT!>ΉQ|0&q6#0UbSZ 3^tel2m\wdc绹f5Ks2H}zYSC?iIS@Xhԁe"C2w zjEy:i|-{e\M3msck9b8VH۴n@BY֯G DBM( +9:,ꦷ߬#4M'&r!x9{Dcљu-UIX3ρ| {F$"$KאL4ax]T\Ro;ޕDZ.bۘ:]ZٗVass(Ŕj+DS8uSمAߜplj\Bm}pO&-~IK?]vRHh9н`q?y[A}0piSEX~TͶnLZ/~C)NZT,-ףknsR4)l #+.tK} 
T=<2?;Uqc5O;,?m8܇)lLL34EL*Xӆd@Q&qUBxN*_?y2fc03mspV;{kJU/<[5[8me0|E"%Hww -vwa{-723S2̻ln0 IZo/86]e+K5=26nj Gp+zAJZ4ȿfZ9?BVOÃSlPl8nAݎW`~8\qn0>W^9|i嗳Cw٬.A=Ջ\ٱυnZՕ~/5`~^-'!.3J+&^O ˉ7?׳ ?Y*շa2-W릇g%\O -®@kMze"\zruDN?{݀u# q^I/^$zźx3\wF -z5$|y徇VKڛCak=מ5M&=xۙ 7 z<]\*_*YfB!|ձ}kJC&hMze_EܭմmSߍ' o,vZfPܗ6uQڂiPf=z`˞ϮGv{5#L#=eŶKw͋cQVxi߁^?2N0?[^ [؅Xn V3Z.[6%+sEq7^L;>8z/ȵ7t&t-3!ַn}RIK+DON'fd?-TW1 Q qUc PV^i#DiL}ì9d~jz?-1ya@gQmKpƂp('2~.5L -BbMZ[w,u9e~u ꗊ?aq8G!˛ŐsWƯ7K8քy}ĈX_S -E }!s?w~1 -nqNǿG= [}~݅z(z߬8 >Cwr0DoA3Uw3> WU yPXۇ<؝|3>?$3`z_X̸{M2=nT:{}t )^*W<Hrٱ=_\ځze[\>oD0b ȋW9~{%$}'; _*Ȏ_W655OjGz΋|Rq1|6R`0Lm9g<lU%XN0eyH:*S?(f:eٗH+%z\:yHQ"Fp,rs#mUxN}-kp1AUmq{oXE~;͝X~1i!(cfݝ -,Xc~+"ЃtҴb͂30|Oír\aPLi ؋Tn!>vSX%7:\5ȫ֮Br4i6K(`U} d@N -[w@t[omdxƩ11‘+V؆\oe29b;m﹮l1:8U]a)L+q${k=eyК 7* s uCwכt&c8nl{7-y7M+5l"i[=!9ػ K ³ ԟ}4L5{:UҚE%%)ٓABGm^U^(ڟ4+n2\ !=GM -%{7d ϹvA5sJEК$aǃ[9YWK wG&q=,I`^R -)`i[ /p;fQ`0jFdWB -y?zT&{*UIa1a+ :S~$6 - ^ě G@3;n߸ФW=~Iq#&_H0r]X:;;wExGFK |]))7uȀ -pܳ"'([i#!gWmb'Ra dх7ܩpJރX8VZ^|L|ds]x*8@oL뻱g/-wNu5y|bZٕ@opH -mO[$Ӵxaō 6ڐ\ͶiKY#Pp36)B?Ns}xr$b`5=~5s<*"PnNr$ÿK +G#ueVe1?ڏDu*0yr#riZz+*y솱.|vXWnPh/[[+(CSFH3gW2ᄢĵbb϶zGqqlR8oOnUv$*`՚4ݓȚv4&igG٥3hhcufH1ڪBl]+Ӆ3UI -#O/__9K9c 65B1sa"oϚd-_+%ں|&.[ߊ;8;P+6ۡ~LizkgWZDwNP̸ݖtY0j;(.p&E -hQ'uNFRf/LWB\3#R,_ / CW?[S -I.S٬I9t1jp2.ț]jaDX|GѶBrF02Np޷|qԷma*gd 3@^=6p/bPWb5~m?x>stream -7vɒhPE;8OۑbA bw f&.Ҙ-;`EJ&as󯦎MY5ORŽvKUdkæО0{PpҎ۽WבZ\n624yi^2[M/k)P)L3 -Q?ZrZ}9 27a5<8cuW>',4?TX VgତcqaźOVYk-)L,\(6tX -oɼ5[-]U)ܾT}O AEDQf\>R9ؼ8f)ΥGGVuwH=}<}久 /:DV~?gMts$ʇcT9^S"(rB"78NJ 1gY%!EN3A#gR) 1e`%|ِAsQׅ澡 12mk)8<W8d){3=Z>pzr79/k41BUsr_h:%k& >a7SL|@eQc˨q[)%3nWh)g1M9k@-[qh~ÄXa[x[;sy'm#ۯ3$UcNiGM--L;dufq<ā1#GL%+Ȧ_nb犷<"Tuyq1]/6R>Bצ@o:83R{ .vV=@+"1hf=*% -ˮ7&˿_hiEמu=؊qJbgA;_]mMRZBDW %ޜWӔO цP?jogwxշ:qrB#f]s-F3cZ:۽Yoٷ;{ww*;Y_GQrr越ՆXz.f ur[n:[N:>=[|$ mlzR#01٢uvم̲%۪+v7Nޡomv*&aAGDu8`+. -B8+y,\2MOf5pVuomk+X˒Wgz5ÌH%v#7~ <=^[l~vcbkD'l-C} hq&,29"n&UJJ|IR:5M_IH9 zBMk[4 O g$7vM.\]xP_ә;i崴lIqiI7sB++5 ,Mat6w?!tz90YZa/xK,g@-`v'w,k <`K&Wy` gA率y. _}xaE| -zs 1MdmyVc]ihnHOP[[mD d*xc̤OkZվ]U -o-^\g8@TH瞛´z9\-~q1d@Vze.pG36EHxT,_>x5Wk}VY{ zp){GIevw8|XQˮ30e ?;1*B{Λ_#^v7 {tE>V 76x1rܦ%] p .wB r<.̽cQ>t]Π큎7iwCg3 {vV;FZv{c;Xvh?W|dC*f),pz8LN"|qH9W'>å|vwDSH"=m7!'WfsP^~d] Z)3 ‡[[ MP CW{םD^.3i:ꎶl4z]ƙ|i} ވ+;_B= yy?p&y?3W67]q 4eA f6# -,lV=0⺒G$h8mYt(/ywn])]QǨC99J9\7*?s5*[ 뒂5ijż AشmOr500cVіېQ~h&!XO^*<`m ģӶg\GA߾ ڵX17Cכ3ujQ2YZUO{xd Eֳ%k0` yֲ}_@OHϾޓ,UN)iC -Ngɥ:5D|q7c[>Yd&!m@.{åc/@]-Fۨž:о}Fhj6s- '8ʄkS:EObk4ՠRṂȨ.tD Wf ^k̎Va23TXz鈰85Q6b3PG8}CK*fZϞ{{xTl!Dl,g% %Uɹ?/h:P8 ˪{ ^U@hr -fO}&RVN8sJ1 WS0 -Iq-{AfIurnQj< uLo -:yo-wyPmxԽ`u6*ox̅?ж`=|ȽTg=d&{g!FN+.7_WK61)#dv1k!ӱ*X|mieDaSPӛz?~iuŇa2y89Vf6wx<*.vl"_.y|^فȮ* WePu~ڭ!t -5/qĽ9]0L4xԦ=c!z 8KYXfgBͱxEea}g+{_#E8?-.]o~YX /GmQy;kf>++zkgSj*U"Wɛ8| }: =Uc.lO_0qrWg;|C:W.YhҸ'ֿ^mh ^Ǐ[KiJJKG͈;:M{YP_|g؛>=Y񷖡Ovcc! n[z#]&#݋hְO!q8Qw] -CbmC,5Ft^Pj,v6!Pm{#Rz8Ne Hư={G+X+betܧQ 9Vhn`;۾J2ۙ0pAP/~^_Gϸ^,5MA -ަIZcь;y kt=eK>*/e0rg~NrlHWa?yqV s+_ 쇼u9U-W*`lwϋ=yB@kt'['>*VV&}bKN8i95OO CwHBkN*_N/bӍ ck[ubձ v n×+o~gl)-tr2`=Hiq#h,7#7p{Ր6;^[ass賝mDJ'ӛ26,%˹53пZtQd^<"y%{enV&ʘ8Pfx%Ǭo+e?Ǘˣ7r7%fzǕ8оKϤQkwga6B0r*o{pW Ov-|@'i'-[l]jK'JҤeI5rSΊ_~U [] 0=(dNoICih_#Uqޥ-mXMԒf7c;cSڪkK+s)޶FP'V-ŵ4XвWVUf"尭҂jd]"GΞmgm#4Bfȍ"iU@ӛtE"n~<_]S5W3W1܈i< e7o=o=cy]6܏6_aL&]O[ -G nBN q2S'~; -ev&LiQ}=)=\`5j'x'"Mx ~Rbnq ~XhkS??|Qc^y˨½DdGxΜNpۄP0] NnSYsz*~?;R.@Heo+dۥ](gcX̑x3*-ՈObn-ez0OW9\,B:6f"gk3."tãRDSWnVw#Vd&l9I}>6^^v4v[DpŪjpCƯ$ڬfjb1:8Hd}I=t! 
&HՇ.z jCJ9zHGn+P,+:3ca&v@?S0)~^]1enR*que\o-bOkY 02$xpjX|7=W^-ѿ+Bp;riY-wR, p3fd`vO88 Z85?G"ǂ-Rd0) [z2U8 !ЙeXUU aH4nsra@] 8ر*3+E8ffIQWS$b`9GuM5k 'yۄ-bsϽvu:1Ǩㄖ(XLC7W ڸ}|Y|}Ȩ.A>Ap/uᾜ]{|+{g ެWu|jVB^qN,=I<9s?x&4}SސW=8SNʟ.^t~LtF Tc -_~bq ]NJ Zh՘H:齇G+51i/mun+(hdt^^/as;iKeY8sN1DLJLD0mRb~P_klǽy.saK \`_]tG9tD̈/ޝNgDrnBdjӬѶ;j^fC[SVO2\lXmQ)ڷQd6Ղ}{2F:GjQU0y\݆8l+asu7ӕv$4#^KrEFvY-FR #0U|ppStYS{F^QKo՛,J?Z*J|Yiwb)4 ]pVC9X**y} Ӏm+%qrj`76S7q0Jֱ;ySk -ec"6NEk\>UӶ2GEn{H{IՀ^} d\Xoș`Ső_TbyOV%1EY7J~+PBVp=]nY"|OdN Acu}|CQ^kB{^*JGֵ|'_pxoIԖn@G dǏƗ.bó0@jX^lަ/ƯnI?&}mgF92!jpCmbAۻK WޙҋVS(2\ey[WÿxHIf]NC=eK^sz:ZY*Q_FHmp-FmƑ1uY+>Y#R9[al,)jhR0G~_i7UޖsYti? fi'0PTz<:34MH@sa_3^7%{?YZ2ȻӪNՃ Fto o%p}I\~ \gxS""Q 5*o/;)n(y$r՞,sR5  ]7c9˂;Rv2"0d]>GCW$ [u 8KByYz̳0\!k+jR{L=Pu#XnUZcAwGGv,;.}7~y{0A&o*v w!Ͼ@$W ?B$.oBy:׎3X;'-fN{Nb舿~.Q%O= sfΏ`mG@ݲ-H[!3o6zuK0X"rxW⸷wJ -yTՇ"QGe؅f)͇+irnn7 2s-K~zT6*Aŋ -6>iGwDʯN.j3؎z8{o[e-Ɨ2d+Kة\Š1c%/ݪ֖%v %~MJ_njF~s Ek1,{Q;Rۙ, R2̣=$ˣG-$5iavb*}t$ pu,̯zZzS%_ X$%- Br2ZV>"SZVKWf3{n/b~~4:KehϥzM+O4BU-0b> sg=cW9d{7kBҒf@-EӪ -(. 8VÄ0mޙΉa=uxaɲ1?U0:R ->^&DoǞWꗥӉI[+%F(1.A^݃r| $ΐFV9O '1+<㴬Xc52tϕ>W&ؔ:Be#IL*dš㓂(;:sJ]^ T6uu8.y45oͅӐR?+jΜdlTe-$TxRǴDXV7ߤWw_"3kv$K@cd*{s،ߥ- (;KW4zMEeYG6L`%tJHF8KNjldz6SxgQfz0{ ]Gq|iηb|^L*;)}ZAs/[T~w\=剱ߥFH3| -UfUD GF?ߌX3jAS=Vy'9pd^YԠ:??c`uN[2݂bzҭ4p A$ KXT!茎%<0N]Fav6 ,xo A9b0X.M/gُK@)pa5Vy̤=o|yob؄%Af=KX=D }K*&n{5km,U*gux -)ۑV0Y?T!mm3f1PIwiOը۞w1YT9ǷƐv=~}\ūxkC VƦɋF^benYv&2qN?)VNh3EĴ6qOɳt~´A'1Gw" 5~\b]e\lm2ǿ52ͮvK[v61gaTA[Ҧm$:"N'utL_oױY֚oБG -3/#[fGwd"eD%7W5œTtq4w2]\@WJ7^n^A֥,*gV{ᶣm@&ُ9O26 =[b8cT ;f/bytL*1e'MR P?P:֡ׄ/y(Sl'g՜s s $`-ǯ R9yxʮ ;&.}It!n%b"3པ4ZҌJKX Rm&&@^aL pǿvF1QVHt,!f/ Fqeˑuo}u9}Oz'wT*vC5 XGBǜhMwk'Q n{ rUԼnEnt* -RQR |d[G8 "ʗeeYE^{`A:Y% m^x/t3AIڑo|9YTT;~;2zzz@g5j׷e3Jի -uyk_oJ%ȸ'V0U')ѻ9;։Zj7KH%FMM-&O-aFmKPFGcq^PgA훊"fWEoYK<)&3^w{,|",ugubly,@ -c\]Y~Y'fwGH+E<ʌq./+0%|D5KgL6\t H5v.+ĥs-J>B%L(ymB8NM -Qaw+]O)=Epv&1_4Gز@ŋ͢kz$&+ j6mNu>J2E;YۃEYkM׆[C~PE$ *K$PgXFC_1N{-~ 2>Y}zlxZ|[OqW\ZwXcBW[_/xXÆ9j_ a/jlaZ7{l\FsEUP̠4A3{9yX7Q%m2{cՆ݉:s |9)Qz26 =!4*"սseDVP:cnRD Yp']3`v&j>޿ é(9(2kiv[ljxqo%XsQm:.¶h_,n|"z.*c`-\Jn>T+""T~4f vj2k9H%ΐ&-f7eh0cNnJsA:2_)X DZLf]Y&gU߾{qzT -:w>u.G*q]HһcYQc1cv:YZһA'oQԍF'.ʫ3耳U+ f饹Umne&v-=V+3ĭdZ8 3K9M_̶9â%vn/EWO[?2(v5 4{\ O#$s|&iqQ3<>pqR -&8" e9r3P99zVl-F);@0Ot{f8| [ѳ1Di]rwղSLl//:~}>a3|2&;Mvį>߉-}fazUnXE֮k;vRk˩(/'̝[レ;<&hU A\MPKMAU۠k:CqP(i-ugiUاuHc, n]W`k{^MW'a3iGpC ]4(IdAmcaWcxmmuyQMkv(]x˸tM3ag}zw1%&Q q *4ZGk6yTksĩIHMvDx؉R)c>wĬyKmc6Wso"YMv. mֺyNr LT~㸒/W){TbG=Ѭs6͏F;=6~ydpCߙu0_ yX.>]njrZxHnۑ1hz&9U86e -*a7y^y}dߚ@; ߏ-T 1ǖp?jT64mªkSW_5oN#Et|pgˠsC[Uy%VūȾlНkvX^΂;K?$ۡ:`{K3'Y;vVޙrLer^!/ָs۔FT=@[_jbtXwPߖh!q/7"-hvY0N. pN0ٽ h+' 6:l))wG 'G:ϛkxayEj[c$duZy(V-b -dž69rtt?41춿Zw787Ջ :ﺡ}Kc._G<(7MW[7\3%EQ͗,p(~G0 @Gߗt#Dy3foMPi f9զrp.IHFb7uZ?6WCV("\v~SLyĉ{8OxR9A 3?IwqU'w ATZPhXi57dKw~J=v<@r$I7js#3- xhPmTQ84݉M!H/A0 (9K>dy<{b1 -7*S$\Տ+t5 3?VP[QUx5f/n3w^TbG~6R?r%L*RF/tʍ[Np<#7h9\egꌦLT⧀# -6~zHEϮFhYl-5`ݟĵ"^18*ϋ_aCa²ueQ;sϽxmc:}q<֪{lU0wt/m W ‡X/m. Y3ץ,ira]qR_8CS}FOvNgXO{Hnc< r.F ,f\&ѩ+<]mz"cA[R])JG͞ ^%_ (}ϠH+m9?Q-Nv64s-uLƒ?N.:;-!P}5+vm r5OKx}1YW{M߶0ҟ9m٤WA_ >WɾƼMoVZ9|Z -Ό.]u;4~+n(zv8zaʿ2 m#)dz6ܧh` ΐ5x-dx4WbXp\(sNxWsI'5BV>LYmf4Ǘ;Ʀ2j:C?_9b~qFIxޘAwp: }--m}uܻZ?iw=#XL_5'(|Ӵ`M|Rq${1 !v\}NS^Ltt -x'&J+/C9.=qS`ۡh4%VlU)TkћdnyJǦ\}*}e D=Ov: Oxa]=-7A؊s˲PV -ۺxp><;~ouǙx}zIܰLNn(Ҷ]wGrq.-/7) sCC xrB.9Z\]0`m§yOUvocc*`&ֹ,-",q[/1}D;!3/rާw0J}$7zs+JhA)-UE}U@X];77kKv˩8q<%{܇&2;Vgzm.7ç?O=t\55Ūt?5f#\,ȟJ{ a&ԸU}?dLx%3dFb/%/+e[C:w;ut(e4M1Kc&lIUI?Z)]d3BDѣn -7y*w}l,"T5\0̶.ggM bߢ~$~a~(8j*QyT}Po~eꈡ)N8=+HNת7ʻnl!ji8L3^aUG;U{WM+1v`vrrgBA>yghh[bJ#Lu͒SkSWK+id}9I vGɒELoZE:-{Q9އVnz%k?i -SX,2ތ. 
ҍ]kּf -N:˧= Wb [>WVlO㒄s|18?]Lmv(S+LY>uݟ}5$qy nc4Jȉ#;<|xeˉUEW>WJ\F[5U-e }B1s|F{y@C]\&=r2Î ׉E[uopl~o_* :ҋ -EFx,Pk_e(%.YqQ ,5k_q͙iFw}(3=E=pQq SѧO_0Ts\AO (SklhVrC"Ftn 3cU 2T?wR%DOv}-6!}QSGTkXڭ}>I qoG!Ic{i4fMcг PUK)퍇gWiy8\2U}z.ò d gދ&j'$sZhs O+w&L[K/ kәDjP\92w~G7$ﴋ>z3M l= 4IrzܘϟOq5+d)8l>-!ZSKhQ z<ȑexf÷3<4VՑgjGR:)W_-D-El6aWuk<>0oTXNB_;w[LF t4:1o8~D6B3ѣU )")EzjL&K/>]yh u C<xdg*aUV`Q}4+ 9 L,*uDѷ[}hnVsGQoS wiT귷'%aqp> -)^FF,*N5)&Qͷ$o 2]ӹ\d.HG|Y1~G):KWύYL62R+> G\meefZvva<_'qI._F%{9J' XH'zt+ c~H:Cj۟{2c S_[wU_J[kѰs8Np $:E :|d{eAOx)B{W}e]0_9QTYpXL" :`r1m-sib/J?8fYfe6 -S4RZXS(M&=;3EL# 2EU-!vfT"^EF^2Fyhq Q4Ϲ#4|!שI`C2n^[0XL7j`ֿ-WV~$)MͧH ~ ,c@&x%8WݠC]6jp6̯3ϺFD幃jх=F۾qiJ@$yOc Z9;熣U1^KEp3mZ5A>2\JQjV.Kk_Q[18=x^I.#mQ2c}Ek݋ -Z{S uh\ag#vr= i1k%Sb6VkKETޯ|'2_@b^^,.۹X{m*_dr*+\xX!#6Y N]?67eywgn/ b+'kݛu9NًMRϓb+"Re<:d~.8ʃ>k\nםO}6܏ !}xx-E*qE_0z s+ՙ8!&\ZpȡK ^P.^8GKۦ%*`AqC5(eM8QɎnE;}瞅`vߍ:;O"#'Y-ɏl>Wl9\d@TBPA#JL=B](Å @%|R8ez #:|5x^oq0Q-zFǎe:x7cVڽ6ȰWT+3q6~${hi[8%j vNot׸\ 80hiB=>I7~236%y)7VZ7LX'"cxt.xRK4k*.4ӱRѹu? `Jxَ9s?#xӎux!d)lV\܆$,SXϰ3؅F탷".J=w.t -7l<{l- -9=43P  -˷) O%>ҜCqnЈ{DPȖR1w`}ebugS@ݪf VpcuᐼvB#E{" !ĉ4Aa;_[.eOzS\wT8$ML!"-A39O^Fã&W?`gd9~r… Kԣ~(}'WY" -6t]Q$mGA8 8 -8" -<~3ۻ2ς -U.ˆH.D`QZYZVQ\-~l#pV>֡ z/g?Qr3jxMT#(dIQ.5B{9j3@۴ %H|v62¦0mlZ-Be_95:1d顐Q+*XKk}+wd]I_:OH0&FyT/>4Jt< -ABVawccC~l<ä橕2e9mrO7yW6%KD-ɦo|$NDjGyz#"+WF;RQֺrb: 9zIp55 ä=lHyCύ86#x]n٠Nb4#RJK %R_@l2Ȇ1w%2+kPuz1&tXݪf#.52fk{b*b<# w*uUA?b QTŐqNJΡY\Bt;^<'zʡLK+g7ArSLu8߈Oh~{_kƠ_|~E ?s?\~O=cCg 1;x#Š?&Hk17{^ ??NA8yH3ɹKѦِ8:M:dՔ`4[Z(R },=(n -ݯfC#*[tEvy:c`Q4VUSo -H{:;]-7&ݓ$W5;| b Zhzt.d<8NAzSIKJYR-+epJ\n஦??H>_|=G곈A6 Jf%ʚŋ6B9#i.,lSg pJ⎙,Qh 1cV娠qS.vMǝ\Et?9\}[R3i2m.9 -shs6CLXaG650{*Œ&L.;ծv<6tozԥ1Y@O)/!}g[(l㐥SV%i1OCoZ wůmanޒkE'Pi:p{j+G2Yww [lť-;/[u><@醋գۃq8H}/hGP>4YI_= z<@ /l*wfl)!$:J(񻎒@Nj-A[jj9A}L~~K ج򣂚r#)} &!<:bbly69pB/_WtYz#Ġ|~/7|>o¡]IGKvrez -&6ɨa͆vǶ(͵}fVꭔT)opC3hJ^9,4.`fCȺ>bztCى%F}f[5J(g>w4xkճbaJFs.),T(70 ?'dA=DAuNj)ބ'_P7$&z6qw/j!4;-$Yjfn-EM1?:EoIt{{•`OSeq{4;Q0 -g?he$l-4Lv%jIr/&ӥ3}H67Wyv+?hkb+IWH;t>El󵱽ׅu"`m|,6ιuA?j(@1 >Q…Meh3Mݿu\'˜nx);]v+V_|< */}g4ަhHd] -\Ewu9Oqe4m%\vDb?8)ޢzB?p4S*~w=Y%ry< --ҐWJmOIx$z4ev*laylCĠR BNQs4+WB H[U?f8^z|%9U߃,Vi2g t Y{5.qҙw[q$F1hfz dDJ֪j@?E~TYmҌvZys(21?4q:%cJ>LLK)\ݳi/![D0kg6&S/YmyޏJG1\%($vBNj8Gw tz;c)y+vZ*͒uB]A`3~B^7@))'GH 3+敢r¯K|kA(l ?NuppZU,_ -GH?@X\-ږ+eMW՟W'Z;ƕ&s;~ši̤9 b~&mIG2b&Z -p{U؏r+LC7PX>j#m٣^dv\isR3ʼe!#F wAFz(;ݤvVr ;8p(N@o(=(7S&=rzZMvnQNh- ĠOߙyWĦA ^7b.y2~U9Eg5sxnaF׉Df-7kMɝ'1%1̇ڳOj`YĠ?JP4[4~&!Q7e}53Zg2m̙$DCRBkPA[T5NHۤR )M --7>4s_f eOh9~s.'M C R=co7 Cg1shL-\c_(2Ebu"=fRv=" z6S{NMsdC<5\k>ZД<#6 !h >DEY>l x;r#esdcYV[jxx/]j1c ~oE{X78o]t¹ =؞u&Mc{/ä;k?擝^ /ΌKߚ"J -i[~zp| vdeĨELJHF##>t@֞iA2ۭo0Aau̯6DsL WD%UՎi,c_1^tA7N fLI.5޽fR<4Lwj뫗#]oE{ .Ǯv.l-(ۜBCrEx Z);9+PL1in3ٛi/sӶ.}wG-!3olѿTwا{=f 5bѦJEP{&BJwG]䲵VN;?p.?;\dGzkTPJ%6H˞Y~-q-:Nx"n_wvk̞g˅ktt}pUu/MO㠘C,]phKÓ:w au !VApy8W*Y0A  dP\sB8D-%DHriPu{7rZq]nM |{ ݐWx˟ImlxQfqxHg/HrsI(WX}|7RZӌ?_G= \sԟiʹgf:{d'Ʒ *&0ti;}ka\;vUɩ, Emnf_52jfJ)MWVioiՋB J3jWY2ח0鞳R;i۷hoxb@s'V鐘~PPL -Y/ȗ~4Uq}㳨v9SjDr -cW jγG@9ՙUIs9P:~\:u,x z\9>?\(('5H]u;5jwbpnnLPm -ϕFR zOqk @3*'xqi/c -L3Sdk`>6H-=ߐ3+΍Gg+$ nN:/XV%P|cF)X̑wv{`_QhMn, +)3ziGZnkR4(9g` GA?=MtL[jBC*C)]ux;@.idN\+ȽGOMĈIzsvC>4ɿDѴU>o =/g4Ke,ɚ -.Tz_ԗ)~TfLrPIB!0w0ԄW]A#B`!{}Cs a 2ߎjYÌM,U !x[e똈f΢%oK}uİ6$3bЏZ[TzN}^f %{.ny}>aS%T!źrX|R 'KVksQ>|EH5;}GPrn{ XY=h0O(ަrbˋ{IHy 塚1gNЇ(pUQ0[?CWS!ˆ7Zg"[NQlx@qR8XS l MmS:YĠ[#$UaX%p~'.6T% es"Ks%fIvmz{"%54< e8m~~ii"o^FJդq#AP6Hw6U^RW9^_bw pqfwҠA -  &= sA}oLe߀ny]6<ƃ3)׹-,4EY{K& Dg\ JI|~^_2tȚx5#5#i{oz?{li.D>&q1.<[&0u*4z{6t!95i%?ZpGX6{7|sj\%E_<}ޟTw0C Q.Zٷ(9eɌXK,jm^mj@ӏ(Ѿnn%*bv5\mƃyUY;*qo^m"5p_7@Sl3Cϒi"HqCS>UwKr0t 3{ea2G~1 -zFǗ4Xԟ_/d>o4[NGeRVCWAdQCzur~OGR 
-jMjԮR^!Kۋ|A@?]N!,J$Z)": pFfDT~LfwmyB9[?kiєg&r;O8}QaNq sRI SA?=Vþ=2;%N}g6qdS9Vy96E^gEbM(_F(rS ܲYFHNZLb!눪TcЕ$/\T8Ce ڞr&F0өW菉ģg7F[VM<;fPeaSF*>6_64q`3Q4k!Y&9lJyM m੧I\̬I荊ympX+UiupH/Ġ~^|sTjFGD_>{}rq%S.z29a~1seh`vQ|1- -jWr( zI z%yJ<RZE`!fa7iɍ3Z-5+3 3h;ƽ'{)wbӜLԏAܲ!VRců2&;Zu0W zvm w;s͇=Ⳗo1 ݇1G5S.hv$lEIz[9oۢf?hDsrJ7$4 -?|؄F(ve]/}' bk[>H 9QezQstp즲QwoyCGo7Q{D8gy5~?m˾E%wwLHO,%srpY>w+YHc3N^l;J:Vߚv!9i 2_͏Ui۳ -r䌳-͚])C֤ʚ~#"b³#A91R{U{9<C&ԕou 2*# ߫%bsǓ=YWn&ŜG* -T߃'d1 (}S9̪BW)ӃՓ^!ZZ@%z46~; kgovY\0QSO -Qtr$ TW_"{+E`&ٙK{]!!@{kz.@sQx@ߟ=A@<)[7^$KJ^nFx̛Ξg{M&$3; -<ƃIS(z! ۡqŀȦI`#=疊ƚj!JWlIHGp7P{1|' -hK4 7*eF3 .Ь$Q̂bNsªa9%譨++-C7pӴ)4WLbY$(Mnߞ'|$}wPX<`.(1ςKe.5;"t.S1G n,.AMܠavA}?؃P4:tg+<8^j{L‹ހ92=>+N52R}36JBy:&A|$6{B؆UgX,mLJK;K='| \n"O!=$/&ޚQkR+/Ih;ɘ3XY&͗!8 pTU\}{)о^S.Ǥp"5 MoYXUw N1B6%}r{֝p|b Ġ"˦!ێJ2uv!]̅h3ڦ_y>:POn|H]!}CjXGq|7ijKO~<؀գA]#U_DSyw=}|EA&XpX%O?~h&=/ArJoƽr@Wt2|eh6}ȊK^3t҆_s{ذ2#F-1"*HZN#igP؞| -ŢjiK%>pOFzÙ -onkZ9$kpM -tF퉫Hı٩fwbw ?pVQ}9tY)2\!n]]:Ә`$ѠO6~R{ij؝sCmz0D0040S@Kxl"T6Au ÷ mfOc"܁Z;v\4.N󑼌|'ұ00n0;-kg A` 7VCq^Q̍il6hMaqMU!2s;}NܘWI&0|T -{>5Xݐff/H:=~/v{8>"݃`ͺ1A}`Ϫ|W~J/X^~_ⱚ\D]{Fv¤XElXsXJhiAEЎHKjgJ3-hPӦx0&35#qN=2?{AF -NgNZY+6 g{-1֝X@~>*dTD5#*rTP8QWrTPqzPb+#wÜNqNӮvw:@DN>vBLMc>*F;ĠO~B3'4ɿ( M0UOh9 C#?[sNCTWb{KPZh=bZ ՚JNw7!=,{`H v:+3!{#g?Ŋ1;mA&._plfTjT<NuoK؂L1(shunŅ~}gfg㓔%kԴFq8)KjIh~VAGK*1<(Z6(e&Z`Zrbz\hdjmCncxIlict 8u:"׹3g.fT mGw\Ԟ["UD?DD|pq@E߫T-V~ w79wrSkvmi'b[3C)>r?A#T "/4N\k -ےQ@^wTMuTA -Sjυ#doǝ^#_ӂvֵMp)=@C ZL#8IlF6o{n+Be7-U7#}21ήe<#>;n-/l.m0nZpOu М%Y{([t#4M ;>d.ݒ-#=5;e/A0? l-{\2u鯥0Aڬnѥ ïd>A#c"h]h{Ƿ79]yt.N.޵ZKa)sgp{;:L*A}Y]^6Jh_A5(!"tjm|!dG/ToBG^ 2`4Bʬ /o_&xC;GvKY"?woY`ʠCBo;qJYҼHMNvrM#z=WPriZO5QvshqY"r/N}[ -fYX}\ 4g 0J@m00נ|vwnT`fuE]%j.g7TP8OkXŤB=Eg}ֿm%׃m߂7;w~'V)\m)i6='fFJhh|LP/Gf55a}Q\oGdIQ>| ?U h=7ʱ1:H䴒x7qhѢ?Nk4:gC*Kn],uM 1w.M\ |ߐ,]S /3.@RHf% ޥZ:MY +tutiqr9C|y?e5K=4kW]j-ChW>9?d9߱S׷PKXq(4"Y%+fn%DHS OXPK|pS"b/-=nŠݠ~'%VNxO8a -!zFc?0p4 L~7u]Nu̸pP,!^;mU"/ )z*Lw]7fM'z䜹jn%փ*lv>{0C}'+_B\v}Pʌh߬seo|plWc k ',ӵʑ/nf8qmƃU;7O,l/'o}  -hf*^WD!ퟘx wgZ.6}2[Ruǹľ=r}P;mx>N>)i7d7j!G -a^MK+x,S$SbӳIfkݧZ~پWf{;1t-NnY쌚ځіvRs%(y!\;gsE-*#J VW]n7ǫnѸ˭,p 6d:(J+a~,fhwUPa2 Q!LP+/:@jyɤ:Xo:i4YP'l ^VSusy~Qx }&LQ;WGiԎ -T**ekt79>[9|n{tvؑ2p`dLf߆V_$o Fe/Ġ7ѥЈ -8822~^"?τ-{zqe,.텴R1/VO2Wc)jNP8r}薾]^W8S /rs? `X}da'^F{+&YQpÐOYM+E7-g5,\jȾԎn8uLa/t%нZ>/?BKU^e!=}??őh!)ZUuf ~{w}pVs4']xxn"X#z@oΘk֕fݳڳb Ph"\CԡP9GrU݊c K0 mDD]>wq$ƟrQuf))Ao, [IuR~3L^3ܱ0U_g5tGD; ㄝ"\4P71}_ZobcF}+ȹ-G12 ~t^IĖڄ,Zi.aY/ ɼy´S{Cu_\&_P5ٜ?ޜrZbZdK06(U.o88 ʥ -*sT%}Rc{}[LS.Ԩ-WO'Ҍ,fn&Z -粨tFL3 ozh293 h_~ PPz:K}O -Ӏ/:C[ -n.7)?VԶn})cM/#1T -4,1z~; =4֠Pn}GAW̽Qyxw!mmڧR=ǃsDQ}h]Ms]uz=GEXyQ63<2MtJJ6? 
-n1_~AlU:׷-}hyro[":Hm&,9#oQ^Oτ`qJ˜JaȤt~Qzxw1t9imi2%-=MAEIGg±ZI{An/`pEX ?DW6JWfRݖ{rŽ'-pʰ.y){3NѤpGRXəgXД^ !`"Qk'QO|)ͽq WrOڍW;lVC4 -3Ev*Y†y8o$J<LR9\WSAn| $˫ys߹~#3SLrT3+Υo\ܿE)}Q15[Ƀj%;o\Z)^㱨~Jc܊^M%"5-+$D(LXE\ y {LN@."ҹc"o(?LpX](6YaIT%'I*U3k4->l3K3شK%\7qYh9zsm¹M{/a>:(?j 0yqW W5MуTҵO~zCDH -jD3Ȏ*+:8Gyj/YϢ:>8I0MH>+ug*5xcʕbu/ɢSo[{waԥ+SmjCM2:Q rG?km,^8ު1OJ|Kj|uڂo`؅$O/?tʙVfg+o9sb:ÚxC :(\UVB۷*"(N̶zN P9=a FEKvf4̬΅-7YJ 9S} ujA$a۝7kEGl&A4؝i`| `y]6?v2@Pi/.ЏkdC?QC'{r#W?D2'ygs*=Q؏C%9:x)6-K ;-SacF5.gnp!< ix#~п_Т GAAo|JutGKʍ{W;8b6c_O맘g j܇~;uKd_fi EuxZVHٻ?{:˘,-_3@SW8_ }KA8IPW- JAi_cǨQQ̞DpU>Zk8Ctߝ#|vWhֹ|ͷ -u5={ u$R/h;4r -LeP0/g9z䫧0r ERY;xQ47(5P>UAu,j&M255aԚJp;BK; -^G@@1olMra/'4]JR}6]Q+Mn&}f/H •nuYv1?F˿)T@jjܾn`Ww G,ftɫIyqߵ bZDyK=Q>ZFB@R閫(on򊾞kO[W -'0da_{D]oc iNm`nGPJt.ϕb|bA홉.>ǖs1?r7BPl(S|J(qA4R^c!ch̾~nq],U,щMR)>:%z\|6Gg!*B{i=(3%,^~6wn-&[Zi1"' P/wwQa -bکnVmg/ٜQ j[g!{t'σʮaHjݫSpY~n &m gڄ~dF N߆Vs_/ w Trp|3,PQl F0Õpաk6@'zey ] -+)p)Hgk=ZY'˪y9߮%Fbiwb5#b7=b5{/ETg= FV| -޳oB,‘zL.ΧhՕwvGP6BK\ɉyFpӇ1oGt`b8Šrw5F?}FefJlRuwDjgw -͍4E EqsiёOa:܄m7E Z<##Ch_!ɻs! gLw/'(c  ''i|6 yad\h8IG6`P4۳;?`qNOqd}jX.ㅱ͐[0Ox3_'It)gьy!.;"Xk4nC{>7jmMmVuRGVxj^i#}ţEY( sb,_5_V {4#JD籽TY{|ą}};.v=#fD3(ZkKZh5opLZ$1V㨸oG*oA f*'4+1;4ק&VS:!u ÷>XbG$GV#{vxs  Q}u%LDpRFVt4[[6W#H-\GSlu,OC7Ao\j]O2_ד4R ܵD$A$% ɿ(u(O-al&uo3KyVa aT;QAy*44n@\D1nhÕ@Jdٝ"Bً6(TF܊aqg lMH2il~i&dP"r_])sLs[v[G+esڏ\b@1I1< 0zmy!~PюOg!7mJ^[N35YйDTIc.cļ[ -Z4:wr8ʪ[Pn6UPxK{Sg#^k )rkp6s>9΍ba4Z -*\/A 5zm7֫tکn5f;lR4ZۿIahEJkA\)+=p2Y]O&OrJl1Cz4'll1t LZlqy{wxN4~N"Iz5}7/ -Q~>danx<D=S%6>ңX\֜Z=x4k.԰C/(+Y* ra4g$A*@bIZ@  ш⒅H{]<)@@Wa:Gײ0"/Xa[ZXO ~![O? ?NwJr֒yҸC^YMl2PTV+= -Ay:İ`Sw+r2{3- zIz -b|iym(|G?\ >K` %chTm *Ш -~<+?r9 P :}/qוr2>N [/0^q3X" ͞sP7\v MP1:d $&A6ײ^e_L=Qxݲܳ@36>Ժ,C9n=@N@eCb{%[K\EzZ/^ӨAMP$>cخ[{+eZVOk1kvB( (>JUK\WJuSQ^ jGj0C?Q|ڿl? l*Z8ف%Xk_`5'5S)_SP8<$PL}p+UP2Y h17NaGi -LHSsTlDӵkޣ% *vw|!ejn- ?Xu-οk?BS`_hȚoPj] ПG֒Φpd|y(Rtb+sZ n95w<\J9TzAvU_`yhoJPJt%m ip嶫6qxrzV$X~U@L|*~fzBWm.|VS" 6A{z0 -} ɒ;5⶗C4,Y>8;g-F!~]W╫g28FVZq"Zq{I EJ˲[*hr` VKHH< W#u$OMޤ]ZٙXʶw4Eyr%n]Kl<ut^b~mqG\B.F+$2taP -SVԗ414t:hpW턼Ӱde7UC/nff}={t8P㻚oUc>]CSN P(/Xv.N|?ƴVͶYxCpxbL\ -KESIpqc8KP]#~п[PRe@͙IA{r@y\\Xwc!`NݥrHemǻlc.e-NQ1?'W<0rLj~d|2̵=k_0{H?Bc~&uè6cIV7&O-8HG +ǃآo{3tMԄ"lsNB(Iq8K'CNKFJe}=H"}my5}n-wNvEL=n,lyVxp&xUxLV0^e(4l@+K>|<ϾUHLaZce~"l/ʆy}>fMfWʦ>m֯= n%n{tzrHZrM}oPȠpQDENT #XSxn@$nVphKé $_T Zk6#@UΥ?(Oa~(vp \ -SU?yfֶ4 ޔ4:j}\̷´ |ʯB~B_B. o7G7HRT -SĮS?Ge<az)=S9xL4ɓMNRUkrywjS^ov5Px$Q-PL6}{Ԛ[Ny]P(!r{kso0BsL.KxqCk0!hRfz׸1D%:=>Ui;1 -+r\~Prlu!הtF{a W^g>G[ Ϝ,*+Un{z*lU;MS hvwF#b\|?8rЩ7΃N߽16Ooq -l ?:(vKTKw0{iVNz?LͭK^_zʬ)6ZzTž6yҫlmx2qx}OV<zpg= @Q٠X{NV65Vd18b=tO88e`q쬵h\6n0u#t >5蛩hxH<ǧd| |Kax! Y|@ɺ޶n} -%d(|"4@6*@2LC4IU-!@~ - 6@ xebn"i;K$ս=orh ٲt+}}O9~46 YɱUj@ - e zwP  atbfy$_Nq -'O -]FCu{d=y -== Gk$I#Mzz6k@W:e;gl?*@.s eAI|M쵐خp?s,ָ#~?[P> Uj(ȧ#omyغ >՘Ȧ @Qf>ҭZWq,#Fߓ(*y6jG$1=9w٣vwe*+) L@~ETWZ`p~:1lRyvT70|Jp?]AN~x]8=,NΖ:&N.u7`|fv  ^g=jT_#Ӊ>2 MyDZJךiS 0bgFNi}r -rg-SObu֕ņw|׋A1]T (̩/AKEBZ+ b<e]>~f(]=msWކs̝u8AkBI+fѣZ$r;\/r AfN - -}OHt5lv*~tӏ; -93a{ܠo2>㷊u&؟f .yByt||sGzG ޶j3J5yabf-V>8Yņ"?/w|:o]}-ϯ)M 6_ZGewU>VasPXoTcƬsp7%jjr7M;.۾sGuL3t{\-+AMSA1G\øO -xtu|o6xe*>~Njruzōe~m/LwHя =w2fF&.mR90|2<LN>hc&+n#p;N75IEK#.M6 -"R(mWy=h<EM׺;tڣI 9d$E r '3k+ONp-P|[[Q{t^4ך/Sz3'\gg#6ct+e~QuiLHGt?F2FzdTٱ0gg`&?m-.L3L:UI{|8t'Z)qk)'\c.w -wLCKm𚡥] Tn<QGykft ܴ4՚hNLr;[fFjDIRFۺ`]yyؗñ(p=bG`?[KM2zf=m^a{7f{I&WI&iЪâΖ[si&:2{*3Fn.k~oœ /m3P*7m%-!cUyDѨP&k7v=Or`k}ʨ6? 
( szp=Vy+wS+U f"6Osr*뿑+#Bye|?4S9G쒍l' d%@DI?C˯Dv heƚ.{}2Gpo},}N7zQ9.kc!9@ -8heA@&q ->`A%>Y>24pQ|\t5|L}϶E,;@щe hݙ7+.EJ -kKo'apB8o>ˊ6'vі0Jy} !9R >Z+|x\\jzsfGqQפſzKoLk)kN?Z v{1mHT"~6#gb1ٷ\6.]z[i/+7;gl^ ީkd9Njp99ŕ$w134+sfI;-z= >Mf(G^Y%1OXfvqYk9mEb\Ғ˰?i2[U6?E˜8[ݘ{u"IߎlFdRtՠ -Trզڔk$נۗsc L{;4@oފqi -CCKFuyyo'5Bl[5e_n%pqr{vԐ3;zڪas)Q/Ń I>UtmwRlbsu*QPOHso 6*&?ZVKUr.{]-ӬPL0:2Htm^t M,ۮy%zёWVki텶-1`F>0VQ]R~~I*T]fyBիE~b?_A'G]l/MiYYp}w6M1S$;{Z{aGM:\h1Ϫ>^e!2; ajH7Eh !`ϐQ6L:9@8t@z -O "2K=̐D@}L!FS 03Є2Ǜ 2R>{~UA&Z|H&Xٽgjm w [T$2RvLC ̳̍BVdC c5.Kdt=HH }%SA:ˡ}qt~пZԮl2V >cBh|ҴQ2c -[ C -d>@ -sHAdOqr*UL}H[>Pp=%[QDuyderZΈ^Q1ChDIedj$w\//}^}Ӌ0rB.RVwYOeo ]Њ]8 @M drU@jUR_9q%A祏ұ9c,Dٰ#h#DN%  X?y{ͧCYƩ5/ٚf, dOrGrai -r\,ۈ|lRb?#q%z˚; l9!HhN_?/O簿\<&Kn.{߾ŗsՋN@ ;w{jS=|0_+U}G>ǰtt?7[}=5sdr/韑9Nl ~^txҾ0ڤrTh_Nq5!| -%If)v4/=Ozؤ^VU74X]닗1dκvHx*dHI厳e5:CNoᕋ)jEE…8ma@*uN3eg P{v_z:yQ=Y'6ie&/۞J4oS} s(+!t[ }Vba=e"f~A>w}weG'%cGEQ}Ƿ3(@KǗ/<#K{PЎ -}Swtk42.s^fjƧa ;mv*7g -ݨcϥ\M0*QG\ȃ)x9 ïbgr3v-u_Z -U~1&|K*;Gr5 $DiE'u1w$h9nQK<6i8֟nwހMC.?pW(r7-ל;\zuwO)N?&оW5G<qp:8td~2 -0ovOW~ܣ[,:oyr UD~O`lePkxQ4m ՛`ې(V{7QepIzڦkMkX!=SyKc}T5Y@hs;ue(=YXΩxcOv+ANO4=ڨYkd^uGU,.*+\t3 $d4/S7:^AZhnL.m"9lh1!!Q*||p6>loW?pMaj`-U,gz]*S8SZp):?a#[QyOܾ+:C7O -|)q2DV0Bc  -FÝh}HB/jQgTz/v1HBy~ʤH+)I1 W3Bqd?ch -woH.T-=gW}.]d/*F]@9KT#k{\ ^5O恍y9RO|wZ*(C: r5rСͧҎj.8)j4Rx]@ -@/btKl>UVDպʽńX77@`S-r -rux rr69CO<t3u ΀XEE Y5%hSg=rD nO[,IL=ը7lx+dd 7ڤjr -hog=-r(}z]zx+UҌ/GC:vKc$>*:wN4|^ gS zΉyDfzT'@ncvY ; #}B -E5ȩ37R4<\QuV^Th(gK oJte?zF8 z8pfmŋw<]؂>@VA~t@յo\w[UKj(Ձ4!Z.x:@~9܃B@a{KJE[ݹ\ch>o;1rϊ| jv_=WףC^DT8k*{"[ G83om - 7ҥ瀏ԡ; -sX;ToMH]|7\S;_C>k1??sӫUޞR';wL[#4nѮf;ΰa7Cn!]w_9@2  M3NP⣳>SEdqnpo>|OO&%Bc.DQ%ȽumABC?g8ī'ɬNkoo/Rk=ߦP+w'Ys49I1L:Q{2P|>߮xn# ??Uv}ԞvļQ2{"M7δL7+Q -_G^KxNes#,h_IJ:ׅ"u͑ -PEҷAipȢ>!S~n^26;'Xݲmάr\y/-G 2sx3rCH4sVҳai1IOW4i_bO~' T񻋣"]ܔ=֪xrk1 -|:j[5w|0BiTfjJ썡w<䓾|:<-6d0sI(xozJԠ(fRU<(j{P.Lͼ-rN~g8ZAc1&+l7^lU-sVx?7t!)T-kʃMhEr?FA~Iÿ)sMRlbkqֿrWBSγ^Xlmegzh - }^S}i 핇39_6&I2Zd!t f[?ppح^ȟT}k;p]V[U>x"ܝLմ -|VĶ/Q監![y]^FhaWޞx ʥV"y4ܸͭ)؎T4\27ɻoH3o{u>tA9ޟָ^oⴍan\,)^Z=MH_u_vH!LcA9S7w&t_I,̻Fݸ8HUU;q"sԮݵ{Zζ򌺽wuL/rhv\wW\:Ψ-q'`RAC {}^OۛaL=V[>M?+;`3nJ]Yyh艓`vZ Wm -ĈF})z6{=bfnTs촊 qϪo.i5u`UC -\AcDl/r ?̷rQ\TP6'I\Ղ-z*}rT#w9b‡%>O'5nlb36Z;fC70gT(0n#r ]b}yψp0&}jOԶAzFJ't:\wj6Gv[-6d9!?0KDf?7)"7,<,|aE. 5] {tDfjjYhjA^ӫT-W~Cf^wUhR#&γ0?U+A3yH(n -`mF9,kD;'N҇pig<r`{^]Eioi,B앸M4s9(u|G,f (5mh::hI"FMG& W\uhk,y Q+sSy)FLb]b -HpփiL6Ul"vߝى-r0ƪ|PJQh)՚{7W!UF1Z*P z59$+RXOtJ -LU>N[)Tu'͵&ff-uBLH^ocV&VShHbuo=+YSX]kdn'W'|Yy,[!J:'嗏n]#iڤ'yυMϨК)f"hD j<{՚|?˘EC뛄@c({nnTꄫ ]k>) \)Gj& D f] -Dtlw{p I {u.WʅANٷ,4I5zri֝ @lP(;l\K} zp dw QbKqߛMFu~j"Ѫ|3]*!A+UMy9!՛ii Vd} Il%yax:'7RqD#V$< 6MZEM8#*fš=yz, @tJMh]<ȍaBs`(w3]9eD\'Bg?%`F::Q5[^ -_ _ dY'C5$z>%ꩇ$vA{' od  ^ - -#x=iF:-1 9Et,wFBTu[ʫ1o!vO#;?xPq6}Ut=Pܭ npmjW;y^FjGxVϒ?U oЪ* |n<#/Gw;*qa^͠=痍n*mk ?j ʵn?}Z2:ߐ|A|a -jS+-amamYpAϻi#,h -{8U;S_ono)[kW{Xl6tŚ)p}(ui;qθv1-! 
Mb^r2!pxz r/Ηk㊥ch[Ͷ}Wm$}f( sO:m]֥9\zi~e97nojx^̵&GzfN0A~1b}|igx)Y!ZvG尼H#lGnV}z)zLݦZZθd0Eqli)pw gkKafd7xZAN4+ޞ&X>Gd uCvѼװ,{6Ƴoq]yˮ紃Ma' V婃SD'vc7c}+EM'oE*('_mlߑcu{95{Ǿu.A[ڮn -_] -k<̽0O@ꌯQ9pdP\41IGKj⨪uA*EtUfbgO|CR4 - !,ցK[vz[G64^|b8vjA]K8+I]ZUMXa0f$#TF^6b-?E:b= p6ݳAgWzN@4Z do-̋7E<ί;il{IDfVXnIS166J1b̛=y D4 &22V֊܏V^7_R=OWSHdY=AA$K%Ьcx9 CODzrϺqeÁPNe^IިJk?odMqۊW6b Co@b>Ayσ(OVꇇbF*xcƠIy0lz|HMǬҞQuC;*?'Gt&QM=CF:G!\<8y؝d $w*U Ze}eou~ӑm a$ZM-G)@=vYż¯;[Tw:|};vA4F9'K|ù aEf}{ƞ'U͍UZ86!2fnh?>@6aޫw+~ów\ݽϔ%ʦ2BFb[umϧ wT,.*%O^$aj/_+7:=hh0x.ҡ;$ttVInTX{:QJe7lSsJWw]^i*j qwNa)jܥi & -1Nv֣%[WU^v-Lj~ͭ}v1Y6 gk{V'hYKL`}8c;dW, atX$clifQnEa}D;햏fkxs{)opp8Բdrۚ^8@Ϩt}~`iwQoVqnl6vR+C{V=cLE2Qѵ;S95epϧ7Rdk>8BB2f35%'߿gGaIA~sLZVnڏ1/*pI{=#)lb7އ#?3]EPnC/+1a:oqiJ|k -sYU+*el 2URr5ZLKIeir"ѕqaW\0p\V+ߔccEľnYF)==TnUta@ J1Г"B{6+&Y|޿X E?w 9M'q^M@^_ }u敠ep ӱ~}HZc7_.(S%>.~ŸPp -7r>2DF,Z_v2,qouoYL>2T K!2k2"ڹ fRИ@5dd[07Ȟi*VuZ| o^'7^׊}?𠊧=y6znax^}pJ#x#8WR=F*arw契":ܹ&)*§|'QY8jiDߍ;;Z ro֫wuIosAanD\jFY`?-TS.\OE~?H٭h}h.RH^ -y =3[!~ 8e[z8axn:Gְ3y/:]cazs_:Q~φv@^,|+/k 9<\vUԗjD>{OV٩mH+/G-ǬR;J`̸ -~3hURĻ&~|-=F+tMh-mg=vۏ*bk=`L޾CcK탵[ ~}cm icvl~tO;7{ixح] [j0 ܾ.pxehi,ܘ?H:DFBk]go,śvg:3~-e]toz\*歌B+d.ҹ2$|K)vH3-y3hÝ[ץđz[Tkk^m;9]~[K>Xyj$[NC{]/ƣ/V-}Wu2M=qy?JơT~q^0F+؏Ⱥ733|q -"ޥx4Z5 -x$#݅S;Jԉqo?Q0TlZ׶GpnLpl`)LEGII?Y;Ȳ<ح m~o*Ҳ.(UZe f) 7AWhBTx&yw|[H98=LeZB -]vR9AP`'N_ -,nR8e5|r% :hƫ?s -f}MjG.0}qŠ}ȦCS`K||cKs+B]kԅ|zk -PX{Au6@Utink=|޳ouӊj&Hw NQI_ֱz/3nu=ʎ9a¼uݻ#Ρ!N~` &!׭ ' JU^2̈lJvcu[=cٻjnD:jACE#ʽAB})-vwBC-ڑ77Sb֯KvtR6̵ K5ۙ2txg'[5~P,Of#gS -}Vay^"ûhLԉW=oH]Ӣ7Ty6ۓ1k\.zZNN2Fֳ)s,Cqr_gc>}C˱@Wm0Pg3wh=#0k *jߦU.L endstream endobj 121 0 obj <>stream -QPT!׬ټM8gwZ]c]*VjS G+,nAss"ޫS[?:CmLRqS83jR}P++yf52[& ħIߊ+lwl[̻W.[Cʩ?ĔLXVveOt[ZXnWjgNL_?Dۼ)3)it'-޴nᆐB0 &/z|:97QBvol  -Ze;h뵈Yk@/w+}MH[-qQJ]mĜ,EԚ@EAv˹[ETT'WJN.\\bzU$h^6\pp9- ]!+D4?4x͝{C>4d"hW夵On7eևnY LfUYgV-mND÷6:&"=Qŏ Kj)dIrOI>XN>(b^J*u~ҫ-= Vɧ?ޓOI>DjqGOI "-l粪J(+9'PT ( -$H=UYZrajZay͹oks*~[)8 g}oS'6ozҢ1"yK -Jک9ShT Sh~n6eEQ}92j m5B}PRW2@+0cU3oS`)<ΒG_%'$㓒ָrxW=ŵBxoL#.H6 -c^{ ->0oY)+x++S2+.+ Ͽm Pdf@r -*ג2du3kNVuż$EnC?G;SιB{T d8ʒdxȯxξ~?I1Ԋ@Y3[scV{Vx"n.:xmD.F ٹLa\-/ltP8)}"sTq=SM=૯ /:f`,쭦s֫zd [ ,_*G4+2QZ-Qà/ZǞ#u_׼f^P].nN5ءӨeVG!4(p>_?dBV#y ;tyV |!jTqvۡ8b;zb3HYB[ϧ&ˠ]:jӧiձgfOXQ YOGSww+n>Ӽ6eJt< - -yA\D-L|~W U(e EjhJ7A“k*΁{:/ R"W.7&_svZA&Q4/TB=7ZUaO j|lrL\t>[|| E Z -Kvf8,ԹW8A2qU{[ʳWi=M.7@QisK$Wl[|1Lzb[L=/f }X-lUso~ 0VT@%ϗJᰰhh>xrHr%<@//{hj uJu]uN-ѭCXX .4RFs';dd8ţ~*+{LMz_4AՂUe}W[Gu[d,Ll q3\B!oCj/)MuاO weƄjǍ_73}j/[TJYuȯa}0xlOcΤY-e<|?3Ӕ SLBR$aj, FܑC[q{fdv+Sd%B_pS"m R@lI[{ژz aȌ.Wa#wpyO Ie|dbrR퐻_9ikn -IPsc`45(]. ˖_W[D4; X"u.&տ[]p2|3F2DbA3nNC1\ƞj<-Ti[OZvOe)T^`1M WY.S歒nʢ93 rZx0#*&c_ncжcAG,FHX[gy8~- L[t|(@$P(&W?&2c3YW_k|t(SoY4GtƁ~ ;r9zx6H瑸A^T AQ,\Q(\-U&5g}=ǰE -o9{{Z#W} 6v73k.X7&gӬxYnUFSG3?@,X,QD495.>9vSQ_[3Y8ٗ6Za8;+eTP,2_jLgsK#]OcܒG j(AS,%}f~sbmPpQkò `Ŋ~Ly+m 'Y!K] =܃/lv}acAyhBD64Iێ Ѹ 6ߕ*CƋB{!ʭU}j>D?ƎpqPϯA-‚UMr &{WT/^T <3Ft-}pZOڍ%TkKT|r z$+ԃRJ *uk~U.>_ҖyqM*E Ezs?HN3٧\dM_ۢgP(pmFf3l7ڡmgnҴE8LΏ8jͺS\|R%Sؾ=谔kMYu8GG?☪pr]أ uh/NcbAB.Q73]vwӦ]Ӎnko^޺rAdJKT9iB/ρ] -AΑK ߍdɡm_Yy9QiHQOM1OK3uJ3HI,y2 -[1?p^gCCMe|yM Y LbuYsX{v3NIJA1< ιHQP:I?Ϝ5ϩ8Bs)8Q3nJ`4g:IʑJnIf\&Ew5Xf?]3(j-%ܛ@'@?? 
GE/YO0yv;;,xw!N$}9)FV/^WAoټľL`n< -Á^EziE[.<0T3@;\&0z3 Pi-IqOV)rD#bu7jKg$&*Z4{6L8>i=I{ R-j^㱣:?C5Ȁ=DYa0*e5<+ tJmO~'nfmG: -"@b'LJ(lMuCk./2Ev(ot}#==~PZ<01 \iq(B?| { kF6@W}Kb7ouֲ:u>m0XG^T^}$2rh\VGZ#)݁!+ ]zdw^x:oFBo/ _3=7:tLY/XݔrSy~>]Mlf86`x)}աfJ!VeA=u;Í{{KVqv(6D9p?;2MzՆm?fjݾnd3Jy!-j[[GP xkG3w9m{ގU1\cI]i"gDf+YG լX:"B׉hcQbտ:t8kgk?}z\A[M_{ -olZExM4ylRiZ.e2!N+";啩Բ Κxw1\ T-W ӢorIKCkPfkŠufrA$srAZҬ^ ۣeў/.ez<۹ކV]PmyC:0=M#]^WWp}lNS[Ѯ[~geb+ܡ) اxZԚ6; b3|U߈KĝCwoƋE8Ƞ*c# -j+hts9^R&N7fW`l^*aG:T]6>xhʉ0͢e}M.!wJ ^əx&бh=" z:}"u8uVKzp5+/kEsYcS-8.ݳt06]N!?P^zVFjA<=V,ṕa:I.-7\e=gR϶Ѳ6U2v:1q~yWGGΏ.yQHL~lL]4Po -eiǰ "nnИ]A2[4RXZa!1X_6x[4AukVr3v^LAʐ}2dG4ɏ8EqhO^E}=ulc.lumIN&N}T`=R+j x^W@f?$+r|E8.*G4j2|`OyZ?y#uL[ $XC= - 361F/s~:yo-7MrXO׍wfg>wq sqj|GX%& v*o{bX|=Ds)c(=}>F:s<JH(W". zr-tL{6z\=WuC1+oP}f-C`}4 1jgӯ', ˋm -c]!ǃS afPaЛ|F/;^3̚|ln G#|G?+cx mw6l?jdz(Qɰ%k=4` X{G{}¨ -Fp볋TҤRڽ1ml,s d؞QZ7فKYڵJn(W;Od=P>W8Ժ銭7EYs|΅8zq߶oM7ha4Z\/I>kwڴUz%N a Z[#Fp=>:1pu^?xqqWՙlQfY{z[*`[j Z%,[e,497םv֬z%F3+uw1s:C[NC{ -:幡Ge.lfuZ dh}OD:a`.Ъ$kQI+&zZX6jC.E1B,'嫣eTCU&MUYakyXx&$@qv]o;ڪ9(ٿό0 -@{gﳣ;ґjqӪ膋z>8ׄTVU)د { bTmBn4AIC] g8>8ӢXgv!Gu 7d`D LY=Fi>v3v~D\ҕC*?dwC,3UUH}9]8Z尯(I>`F찞푤R8?wƬD#Q ;dDCBnTϣIBdq!hiHFAnMQNx8]ۜZ~򠆅BɣNs[C#5vf䑦RdAӔU4k8M t:dr|n --gfMԊ44}/*E7S|$oc@ T? -ѱ"3hv|KjzF9<^97Juw4xxUG1v[(5މ_@0n9~FL.w5SonOx?A'152~דɉILghH?EN@H$0bpb%qp~5kVm[f|н{GOB_E՝Ga > -@~~U?^N{waEn'Ww59_-jm*vjv{F fYU8(׊^M_D&wYl^,J}*mŲ/[QrȀd]ˀO[f7e7p<:A{}z{0OC b%A =G_1gWN_#[Ofhـ̢k"ls*i`qz2KY06 -K4$W((z.uQ4ge j ={"ߖ#3k?O>BGX3Gߪ^06k E(=Cc5hef)2PŚIA%gfd-{s@"7Spvq7%~o%^Z6LՊx=@3`ϻ+we+KV+co/y=zVm#c+ ~[JyKw -e̺On,7d9I-r7Ǘͫﻝٝ9M*!a -kܫ]#~QHNHP>T!+.z^A'Q>7_/ůhWC'_ 9}<9}w~Ч۱#ޔ5h55z KFI0y<؁x)sf~v UѹAc<}xp=T}oJ>7vvVCцz:mQjdL;{N}_dؙDA.eNT,F, rqӷ6>MŒ4;j7vLAw-9X3# boP4BOU/H5}Rdj-E7Jc,AcՒcP/d(l٦ykJ#Z܉0wzN/Ch` /ٵX)jT=?Y؋i0U8hwX8aUW&4n]#~;QmjtVA\bh܊ wPw:]9W%W%͡K-]cU=DRA/=*ؕ/afx v:?NХ9q,ujz6$Qlr!`mMԥO>8)[R_KSE|]f_RbVC*O uNӸMe6."SI{4QFi]x3a0*l2QK9L7?%rP7ፋmUՋ>Ɯ~Ks,a3uMG+-&IE/yWZڮlyзRZ@pmwj2Ƙx6I-PXQa_=lcB8%Fم GD^c;[?V~bb$ yNwU^'Yq{ۦـ{]imePИg|qhɩ@&+aiόa)FO=ykVjL -v*+CH*WpT9q犬80"Vr7Tˉ/(nóK֞Pczmz(#r^d"+a o!F-)x6p8}7僣/5H!Unx&[8Mկiu)Y gR!:$0-4(;) ɦM|!)oN6a~1.UQISZCbH-հ\m9u?`]dSl jmcK38Y=Q?%>U*.ҦOHre$}n`G.`5}|F8N:A>?hhXTPsPǔAfdMmWjB{^&.q仾T(~Aug%v>^sj?pwtu{ Z[BbaVEm%mx+pԩZtQ2lJ7Xbd> SdWS>|3= ue?gVc5MbtUPdW[j;֢P}ɧ"MFHayEEy?V ?rPG]8]`l<$E`|Mm` 0? -(wN}6S>k3y9b ,e|!A7AfFk{oU(_sVR+RѬ-PގA>Po0F<{8TWG!d0׾ L3?=:uLQ -5yN,Ѧ3:3pcGZi_U6.C ?V:d o+ /xoTaZ96 -4gDgPϜwo8]ט.yǙҌOI3`s~m {|iJR8M *]o/i:KZɵ:pI?p|oqbx#<:q}ptlf: 5w=O=̾n]vϦdSL%Uj$P%$jdy[/<D?j GUKw%%5ګ!4urہtӊgkR1;BR}_,T] 5(+(>>^r8ٽeǹwxwrgh],p*\&Ev]SV+pi/Mnɩ?~y\8G~vGֿM1-oJлAw ,ndYQvcms~y{-&X=|3AS7cThKGVFY }Mw[QkWf9fUoS?Q= Yar׳1/|[Q_RH]6{է_ f~F1*+]nU$E9톣'm̮Y)XcGǺk%wKv:\F޺Q4Uد91W>ͿdR |aR0xU5[$mɋ|d8kբN6;Nfc4O}Y-o9ߺM P^^l1()cYKWK|KgQIAN֎EZ_B2Kj$v%v{y7ٌlE^"%/A7A*X d]J;+Tڹ,Nt17~-ǃ -wQUy#dK z:ܼͮݎ3'ʹ$gg)Wv M3vyFsp:5>":r;u*gKv|# 5wڕ$nYڬBv2X_U҆"ӑ֞M~"gUrrsz? 
-A깶zuN덊n+%K P|y\o5n ;@-,/jkz9PR}.Q#<\{ V&-[x:!CpoFsUff|>1d$d:qt~/`n}5Z[ g쬮)HqK< >@U`W8nJܯ~2\+5xUn 9{}wyZ<,ۅڞFupf3E?W5&O'*_DJlJqr.ɕ4ۗ440ORclnrg,O 6zC0z;kP={.]3w֪eZu,X-{ғX\m>\Y;>|p&pugp͗n_ܐe~hOu)O;Ĩ>O^W99ĉ<"fRdK';Y> | –KՔ6egOpWpd}p֡Kƻ76τ;*Ȗ{o}Ozg) #XdCy0F/[Y96zd+cݱrkTxmLĈjll\b'\eOl2]8if|D"[?` +4 qƷa?<;3+7: jh>uI*7ggu'&Q->&Kz[Yf7HnWn -K7{ǣcFxTr}c&cY~wQYPVbhu9_J5`_K0xQ[JbTU:E!@k)^\$$fC&M!K!B~æ'QN3D3GD}p{A ,Ze9綠"@*jmfr8"w$clkT-'{ĨG7`.VΙ26EK:4ļ <+mwnր쌀GQ'$T֗;#@;6c3r;T6Ƅ6{t׀ϋaBQ_-p~@N/u kx鐨Xm% Pfhhޚ:{%o{͘1\1 !\ancۨ#s2Zzu[*I-a -)g;|zPt}S\G1۬Zі [x륙ڟصf} %&Fw7Jvkǐ[Ӥ=/}r|pіOgV} DݏZ-]ZT}A#yn\@\@$DYW̑H}\ϥ_fq60KK|+C#TC)=?ٻti?E|$hriᒰ/—KcS1kpٕ/u#V\Q -n`X̢Bs.;Rnؚn~ "[Y&h3dp_v|϶+ҷT@ a%7~@*` Wz8jvBoc-#xUh$wٝ9zd)g@bp H#Ѭ =I;D< :l#WhֈfZbW faE.75.=J3N 0"#ʗ|:5O:lS]i -"CtcTY|F0,$ $~NѫMjv{w/?A0%@='^/RN_vVF=C5#'7%- ъHS(yKS.7OO My!he0rJyɭ#n`Σ؇CnkԻ=똱~\woumkښpr9rUx~iܕ/r=dU'㖽 {ؿ>"1IB>U=R/ځr؁qSouGmrd~>Xcuf"0ϳS JgtY@NwEe\WL2ىAo}?tREk{X[`~3U8_Yyw.mr&\|g&toɫTG?>$R;L]' g'H_`v+K_8 r5tYBo{p٦Fх^9pB2<\ZQl r;S)&I3V["A>~̅_djay@+6Z|Ҥa0t|n)vP%#'WeTD+8CNJqP5k?Wc%luKtZk1JW^N@v~`L=?θDrꇀ*cM%ؕy\v?TׅhoeqP7)^r9qY]En0o26Ív6{VC u)]3,Eٻ/&溷6?7WWqRY.(9i-N]bkwzV`3?_u(%>qp$Йvk'2;t"SClܱ{U);Nnv9AsL{j%tN׽ԠF50$vQ4ׄ+W(\4r21̘O?N.6.VyȊcL93n3εaW,GlO^j٦A$TA[*`G%7tgP*t+h*&ĖoYB#'wKUb#S5[~=XEkzͷf˞ -'7>b4Nʰ(VsѾyΰNOrCU0#yqVa^Ap<X޾\4Mpht3?SR}QSJ$O%M"Ƥ:zLC\%Stbʍ<wVKcbD})iT+gڻ`漛֮kc;.>fANg‚a$$OT|K]m'KcwrNͽP1~@H-eb5Bl*e&RaJ$>#b3ړ"Xꄞ^ okp8#MɌMOMqj:'0B{ cH0oŃq@+~,h< @l5ja۬%t3ѱWݽ7̎8+XX䱮c`d;~@4!* -wZZkR{RM"X KLIMY׮2b/J|#CD} )xlLzԥLHBCcj *ZJj;8jRox{NvVgC@1/=Az0F5h+=ݏ;ߟ|/j= -3uD -s&_1VBY Fn +z!'v+ -,@fǜs|QVJ>a"޸KGa⸩5m!MfBvCP>tn.^؛ezp+Њl-?fO=a2S(2mB0^y|"s[$-i!ܩT6s9[ uA0eJȍjB8U&5P7O?76D*n`Rf=+j;ʕnjmENUWo-;en7h3ֺW$"I>W޷Yzam@yhU{Tn(_!j'ФR-RUK؃;yVO,ay=mR'oޑXs6?N-*i'CsJ#gBVI1^mw6˝.g;p~Mv]Q4=){9Nޭ>z ̮ƫVGla/Gv_@q}vJ&+B~_|{(f`O惟Zg$\Wj~gښˑ^4ϗn>;`gtFTpKJr*XnG)>9LW]asyWj}/Է߃J|˳Q"[ֹj<})/ܽqF{tYD1U!HZcawxM-}fV˭tnv=ij-جs=hE!MTݦnķC.Pߟ|L{z7a|Bs/\nНl5]emӞn̂f7YCjui{?1Kԩsi gWW<3v+.cum,nɷjm}Lmbxpjc ʅ1KkWRrlYaH-/I"P4\06̣:j"c<^l}ҍ#}R<}hW}i~˺{Yyz Oݨl>VnxSOfz{SLÍ0"x#ɋ$0)9%}UȤ"?{\\5zb}ÝtVl-HK΋-8_Fv!:ug>^)WbHP-vP;`M r֘;)d,E;UԨJQǎ=#ǷM[bOܙy`r`viaW -f=Q7p;{vp-q=d4롺7ǣqz noawEݦ 7[6>ќF#] nRKANQA¾9Շs?38#!K~Eޑ*?Fema$Hfa>oeN4=HjWjmWރpbQ>*r98 -Js6?E0ڢy9c ow2yRoK#⮕&.?GX/8 x١\f\M"-T  R ;10rͼUhz>hForU TλLћm-+5fq*rbYtO0D/1Ѓ%gX0suоyqƵ :IMC%O^7.cqWbK{A*/ 7M4{pSLpfc:);ɢӞ#0,__vYk&lѤt}A2*{<2^M](kO -58rJɒ\=t_=xTvlD[q̳0^E.kF5D\] -e &R &1V."T,\hi^?#KI JeWCX)F'^|aՠ}mBJzۢ&J-Mn 8WߛgIY=ʟݪ\2mJHJ`<v*H4Nvڗq~=Ɗ6gxkt.m1guEY 4zj'#!hjy:ZLr\ LTzQ,pByDyn!ҺuFi?|Mfpsfvz ;*UjPm UcݭjY֪mY%?W;oj]SV-!\g%e:'LS<;hݠ!flm"z%jk2|7wuz<5ؠUr'" -ܲ7$P?U(N)R`I6g_͚,GRc4ow^/ ܿ6;Bh*\C;^*t0lI+^]ǻz)تV)ĜbγWXrªH,U 7'8n u y4yLl\Z^x,GVUf{źdV;,& TTeB -TACLИ73+ 鐱}h6hVٌ軐ψ.t?g_8Y;Tgnj`(mΑUTV3aff%?ꥹ>9#g%`!nx"848Ӽ<)]|Rg,@J@#c{#[Ų2 -?Ysձ' 645>.3(;{b @,֓@ @n 6P@|ؘU`}S-W,:s)*4`b辺~Z%p٥˦bzGG%:8@w:6P2<$%Mo[`o%[`mo%4+o#(5K&uw[#T޺ʭ>A胫ε p;ΧX4FS(o\(r<+icV_#ĺV.td*hn -&n_[7y֋p͎VOB/P·s~XI^\&%q%qi7-?kJ/;Q{6BS6טft԰a|vZ^[ES./Ԯ^N/t|agWDc 6sդ\qmfq-ϣDPry3< ik>a}G5?τ "̏+g[+eӀGΓ8W="׭Bm.~y ,Q@vCR?8q +w>?ݝL0^6KN6z䮍7׫k7sMDžB.CoGح-;=*{9]<{0B >'&'я/!`8 *4qMl̽mxM;h:߭{&kscg/QgR<R+tcE.-s|˚u[P{0 ٫ ZߥrmW@ -STt`>XbZuo몣É Dt.Jg-/oiusN6i;<6v'"yZuYYӯ fi,b4|fP8Kb YFw# n.Ss4<9##(=.$xOom3ݎl|L^kp?Xh렲G/eg_.KQݑ]O{*Dv). ]ƦȂG ʰHVhM_MSV#'*bC9y[^<]YF+9h8'(IZT}C)EHZ .-\hyOwː [D3-J? S!>zI]Z nC7ulӦx hlSB] Dl|s2%z̸N]+b!JomyJvI,Li!pE32uWh N( -iGÑQ$.6GB+EB/Yl T[T-Yy5r'AneFS&R[D:X -׌/xoQb߬ՅfuǴy٤ Y^Ikn 0Ⱥ΄3& Q2U 8)o$s v2&Гob&M3n]ęxw(h? 
/r:6SSx'mC;rk 8i9BuŰxjzi|҈k/c -:&+^Xc Ib|6C#}|RѨAYüt2=XqS SEdxוE^;٬|2S:û v2 -Mw>22VSD܈w]2k#V9ͲMm+{NkdG?\){ 3{̺BœA1u4(TeO}LJ%=s(W;е%0/zи),:\%[pvqj3M՘s#[!(/Ṯ\@וKCs:u fk냦iAijGx5/ZyV ہlE/6xEz#LTU(}u (2)(8"™c}k=OddĪeto x,Y%L1z!!dTx5sF~ <.m[{8XdƘ+VfQӰ9JfЅD~1Oqg͋-,^F؍w_?L6Ok j20˺4*6H00e' [&+uev I:,1-iTFǫ/fvyo{A1khƥCT/k^fx -vh؛j8Xu?PeHZx#U*KR6-\2=t^ɹګ=\J(P]yMad}bb4+O/rP=/yjljr᥶n;ᱹ/USwu[k%>.]kSbq1VVAB\ ҾL%+r 6r!wWs%jn9Tη G)AcˌWv1qN"}0.6fVK2[\Ӈ<2# >oQYL/1.z< ~pv@UCC)I1ur)< PJUpDLwd@MsePHn~Uv˱j=J2#J=?VRK|m;E~~"X=ƇKVqkTi#&R *).` -0! C;S:)S)-C/ܜkcmhKlcT)]zB=ix/=u:+ -r:`)NVlmY[/{{_-;W eF!4Er:)K).]&9s_9~ eh(n6]>_j[,{Q>_vVᚖl_PiG,5[W׀;w<.LX]`Q Xt냵v} tE -k U4ڮ,cWnfyxz5pܓՓM -.VaXd -S0 F $rLqIJ@R,'ETh_㶕J櫄ҫDZv_%ey}Kk Ggv6~6E\'nօYeX-&hp]~ʺm\XW-mQb}Я8rMy[iskMWsF1tu縙g~}eI9Ns+|,(QOYd;<9IV/=A9;Piw4}_%;<26+_%姟w[bhrxrXٞ|ݣ |}J9e{A\CNZ#8iGYcu5ʖ\Uc sC"X&+NVWθ6[%\ɉS; MJN䣣qI!d^{56CqaZuKx3d Z'NH ^JVѥ(` llCnqYZg_j\ -;B}fBB$mTePmh3(eu5O >lzfwR(]a {,K *V:: ;ٖoc}3:}6Mbf12_6̶;b_nFw -U֝ˁ\i֗%]l v:O~U|H͝fO~#3.sVoXQME|O^`5vI&!#<ċZAv[W{Y,s"ynlYίcIx^v=ڸ`N)cDO23ox!&[V5^Xq&#u_Jfg>bN8D&w6?'ndjkvws&=><xQؾ޳;: =נ팗ژc4k -p4^{hܞUmܦYϋ垥:pfRrmµp^)Ԡ fUQevתhۡr_ܢ{)c8T4&n>Na+Q熕6Y] J'˴j8ufEs6^ o7^]|t72j-/ڷ̢hZ#0{h=6Pd{1k`Lclc#.t5iXi^):O63OtqֵlUI+#S^dȁ˖sU^Nav~]@zgڹr 32жIPø}˭6QKg[;s)i/{y;#njhڮ_DJkYm -wlu$N#)Qz=d֯0ٸ;`+y.u+&U7(ctQtA(+tjӵ4N9N;9&CSmO.sZ@c!4MkױR -ũ7t`\z+.=F@M7ipKZeJB4ś58f?k] cL3ɍ|Rlߵ._A-\maWetFۻ2@2Ɓdrv*{6<3b(F[e^zYUB\Mbzh 'mg7$əZiUkOHQ8?9{PI -6{Ֆ~2gбNo7vOlO]6/ݳ6z0ԥnec;ݻ04&7x- eZfuefxb|.SW_iiò^ {lCz\llפ\]^\P{ -G󒺹,oYIsUlo ?Y0(KLD -ŧ b= -$ %EضȢN"?+hbW8R i]Uz1M굷yMsؽY);$ |F^V6jT4QPQeW2ћ=" $ bo0": #I".*w=L"JmU2*Ol.ʧ-Jm>a\k鏞fU"XqC-c-&\ ^ -5{?ak?dLuR/]^F۶E5SW .ԜDZ%2s]'ZXVdG*_6SP݁0pq@'>g7GR_|02_|!$5?d3Iu(;1o) #V@އ/sk]/Z0V}7==Jq/ c'RX|'vE;R?(J((Pv VO%j5*+s_ P4)H 哏k^┵qI5cޭK_'$g)?Fzkajk]eN/_%4Oy(ĥnzT^߮Mk::J}M`v}Or\2A}V>޻,FLikʕ͵S&ױ k> %C6w`oVr@+%A3#A~3P)пG(U^r\𵗔sȰyʧuoBddY`6ڐ7\UofaZiCtS8!fէ?Wr<~0~a֮زr 7z-D5J|ϧſ:Se/WGsT"?מuj3( -zpJϒgA~By˰ʎmPۆ\ +6~m ui_Ԕԛ 'GڦJqzV, -52ӧ Fp^JQ ?zcvϭ©:Oŷw'o}6֌9gi)/N&G,&Rm~`Sa fڰ]?аBokN)_1V˅SU!t=] h֜U=`v-T9Lr2=7P FlTl̰ aY;<#60'1o .4I3w%_^yȼ#!3~9ÖŵogKVo$W=\Σ,jҙ>LJDq^BrEẅ́kgjRnѥ4o;I:oN59griag佸O|].\~nufieq:?F OϠl5SO_S;ڃ>.<|=p?^y倲Sp{&Kv=tGp+ZSiwiYndC4l۾8]86G4s =q||c̯ejMgAf:V=*ׅw=nVӚ.liR? XِzŞQnH.a9kUFs5M0>IRvB@%qҢ5rMuRl*371s_S_mxN-K9^ӝ{YNϊED<t @tW5 -bK|"iYĉlCgP:h Ym^Һ>T ɱX@%jbpI>jGʵ12eϑq\UfL؅T뢗gtfpH^4*߽**H:Ci+gSip30`,Sպڪ<\g9lkI/ԵXmMZVҚj[}@[ -!Ӆ}{Lk7 ٷWtPW3/}Y[NME\4Z|Rh̿68j𬋕G\+u6׻Dj0:+Ֆ[R> ]DCJ)ܳ^ GύR_lj9Ry -2~o3e_=$v 57:MXr[fjx`[͓\[Zcv{TKALTˬh,i9&tkW %aOYNTiν-^,;LRt_)7aҬ>#kS&XezJ#|ݦL~ydqI+Ķ4U nk\[uᆓ %Fcfנ E+bLіj`xEE|2!u\ڐi-_Ӧ6,z;b;! 
N;LV 5Y^{ZX\xyqLp ݎ/y!;u'd fqMBF=1Npi9M物Prςd8ZeP8˜MzLTeAm<#i+;_BxWp簘(z?* Y8UiHn#l:51+?o˹ >i[r“87UpUL[_IƤtzz83 68kmN|1T&1 WCa_CK@}gYTQB߰i946pIoWor1'XtS\2-X&"go✌XǕo!t~׶h Y=^1Hhi\[@\t<{(8h  R8%|J6+]13N5NyP1z͖妵42{BdPA0!H:em<)`?3.VȩDʳBBy@C8,APf~6ڝrSTGnb+y|̸9Q^ƕ G+:a͈Q֪odŧPbʥ"p]KW]h05=t MZ6o8$p*D;r0vd1&EuuעǢ>Q,W- G!F&˄FB秶ή/4Н( 4Ma)Ԋyw[J.L@j|RRmΈR ϵ]ᢃ2_+US<ްwT|$-G[7/1Q̩)^%l^slh -d$nOqr"R/@NuӨȾ YwowSrnkFl>g?N΍lZ._2fʤH(QPHZ/2v 8cIJPl):nzVjw;ڝ@P1:IwPOcNq@Mi\]@Kn_;Fr*`$d@DwU[sк7H}8>޺}Ķ4#|0W\A V/;&#:ڵ{'S-`Zۖ3`Vw0h)f^?bd 9@kaRF[n4ʧzZbAZ#F6(Btsag4  ֠B鲀1w@ -`X4gE<g_ dydR|u&.qSg&8 -it^a#6AUG![V"BL_=_GY; o/ hnB9Sqz)q@ 4p'[Z-cӲ^"-Dz +w|5•]դ5BžMV$Gk>9g:*wrpyzGiglwuuږ]4/JhU w6H̪VQ%|+\p&p>29`]QCȮ8_Ŗ.ken;^V1 _C_|Y/,DпWjU3gȗ:QGy%aL!YBmGWn>aEJ|O*UFV~z#O - xmO d[̤p&FsfVc)-vj^ڹ:bgb/+!u'~VhYǷ./v@fMLcMcO!.}M -};=7fn)`]lS]&Xr oeo( =h/-;6Ǐ4md̟ j;ݔ.)=~-3z]Ѳ9G0jɟ1Ja4oq8 m->_ <9,3R%a߰d!$gi6.hYfNVo9,mf{o=c92yE,'hZ ""WO 4TUe%u&X-aGa1tHlɄ ˯cv bC\rXZv - {0)"D>?>iwIFFز'}hO,E%#noAa:y5ΘH$Ψ;N+h>Vpw a>wN=Rǵn"48s\q`?kȀM2} -Cpu;?hmUKJ{}k}V}.+*@Gu[lm8Xܞ>Gga*f9f<+or{>vO|~@_o>}8c)uOh -b(Gؙc)DCLq=jvUywKҺo!u0ebu0zG5^oү%E޻3Wɝ#X"=Dw5b]E0+ܑܓ݌n&2zvCk-%K]*yS̤% 5 vz?tGah fJlu&fةve -o>4|Wgԁә6]u 7 y՞#wФe^Vvsgp4;gODuc>fq;SLmƸ=wMO5թ6[}:Jhup30iiǕzfzC/Yྟ*}zZKe{Amt;ӞiU̞kvf8q߹^OCY{JX_3Z:5WjC)}PF-V{X7Mt+TVzdP>h\ kbŮߺ--fRyu2|b++?"i4{&1 -uF$!'7l9; 6K!,ܟmCV9ݣB6zZzdnu2KrR,"7Ӿ<SY-y‹Q'ˇzg_ k˹t`a-a\!kK#v~8Z ->@Mlb:y(|n\;H=n:crZYz.$Ձreq$XU^';*ubR$w:]/79&lڧbYM4% :^ )cBQ`u=>>ux\{i:jW^WڹU-#m_(JK6!'ʸ'4H…a #C(` .(&uf} Μv1G5pvՅ;ڼ6ֵ\v6ZurlQqC:tCHJW)t.^Y\ڛ -e־6GDh /CcrנYW-zoWLp3!Os|;, Op]Әr3ɴ.(tcHh4uC>sa% -o>r2ziџi|ZN+&u($tUfYZeUf!S4'{G3әw gFF'9Z - ><*ʫ>U+r1I -U6Q"5x마Jc^jٿ9֙foCCd_ܔjMBUK(!SCĒ&P`esXsdOOHXl)5JTifmFH=$$ 8*4Kk7`$Mӫbhm^QXFׁkyKK*/\! Nl`[1 fe4ZTIanG1:,6&N%:q5ZyfzCh+"A\E_`S>:E$\V!Whgfɥ꛲nQ7V=kWN̢;DUߧb+lfm=e/a @gV)>T6W$JϢEg"2fL"Nk(H1@ˀG.WE)pto̝B_m<?TPg!9R -LEF YtG\ ePz<`3)R-"Z)N6,NT2'YG A8K7SF?|}z:KrMMo 1RxYKP],a'G@LGa6ŵ ID@\N~ĕ1 * - 4buG蹡vVR{xFƮB6۔`7be- @&JPT%\] u>ټ5ᡮw>GWЂ?*1)#O/WO֙$.[kWt@WG-@;dw C|F0sq英kc"*vܟi"bٰkX-C)_)W}5o3af95ipts8'x8Ok喓lWGoxuJnI# upٻYGJ.Up6?ܯtCM8@'&nfČn[b;@da7E}W; O5ddQ.>_ʼnMjR:#Ռ_]ul#*9ۖTK(aOͥqvnQZn=l;b>lUII?U___;DFN/^;2<9gx?Q*Wc|B0ㅛ}#~mvIA''7iKG(وB6a^FB,y/g?/jy$`jNlmaqvیt ktA/gXVc]gΕݟX϶'4۪ -ϥ#3)/'!3'jUYqO s[3נ j8N.q&>G@FFݵ7v XWYӫY6jf]b_عl8a۟*^> 0 ked"Hlxj n܄(n*Q4֌Ct 3=ֺ[駣kx}W^;,[n-,#V.C Z蹝ۇ~P.Mpގbz!k:9D iK?8eV NL?U=wY+^#/nrrCݠ\hݨt2~_ha?:eQ -^ތՙ8־\/]Z 2=ꧽ˶ݎI>{#s^RxQp:*vxX6˫؉cA^cL.jnI=%K8}SϬOTcX7@ 9xۿ.X%Y%ty뫋8.LjYWf]E?~R6UR4wԟMlqS1֛'A+DjnʿE){6O< -SUNd/?ֈkkjf:GyGO-",:OMT9 ->wo{4o,eRDV~:xV(_k;ZVZr}W$b5eSLQL#/ϢčWi:n xm&IG:8Qu}m*+ԾVZ9_PY67\ؐ=٭. (i7j`DJuifSs[-TћRqԺ z/L^=oL60% T^zݪ}ZEB{tuT:߇W>%ZWu^,r.м0tV-;#Eq#AeOkլ)Ry6*!Tqե:6f6l |, %o‘+p, -|G6ܰl̇%/rPӆ *x9񶹸!栂v&^quDcb7lus-qW2iv)ڡ܆]asEZ.2T*4:\i`{uN I [lm6g~lkaFpLTwyސ|ˌ8olηJJTrnm -f׏`0 ,XDpN@.|c*JB=vC--2jC7 -TQHb]3T! i|+^Bm!$h.,FّY$< IT2tz Sd 7-D-@VO+}vc{k6E1h0it':[,!(4 whAc ȝ%)rPQ)ÉGT% . PvTc3lM@5) P$,~^diJ5l:j\i Bv~5,(L6")S+e9ST7dPFj -e;7wP8"?ޚ@54=i^]C@ b' -b;zN39]TS 勂8-ܒVo2RP|E8ARf90]`90W XpUfGZe:XV`F=C6+,qjVF^E)-!>BQP/U g^(#%& -?z"࿎_S+x g p#\rwOW>1˩ׇXwMF'> /M {":en`!5_ -oviTW -ȧc W .(#Jb2O{T|ʫSm_w8:#Ӳrt9ߛ,)pc G#vġ0(2,V:&0Kl1^<ܖ_w 4dm?){%k)qxW8Hw9eG,Itlr';Zɕ K3ӢX)"deOt]ߏ-f{zGzA;#tPqoe/{㗾n}FqLV!OW0/WEqcG[l6O3wX)<THCezg󌻹 Wu_{-Y ⇇1_lzTQBUȨR{\+SNԚ,z(tRg>Y͸:{T kw.7 >Qrg<`:Z1?cӠ=I5/~J,fhZla2Oak=0~azwTH<^_~DT|ǩ4 k/m8R'z%LFFna& vCSƯuu@de`~G*^/imɗkoÓJ> ܁kEw:0x~g֓PhH\1.,Q薇vyݔ$%? ,'S2'Ѯ'1fzj3ů}píRҡ֋k^ -؈Ri.nbNyN;UyWߞJV,6N4 {\EĬٜɧOI3iwܽ| WPz wP%.Bnnߝؖ?9pg:qćk6OӤu\ҹgvCTtؐkla߃$>}{c{4[!BN6ɫ-|]'a} a6g䬬NNڤ^mogfwe{NbG޳ѸXLS>Wj{hػWUT~MwMSx~5.H"U+kmJx^ڬkscI4fSRc!%}Rzj4ZFn9ths"Y9fR2:(9ZI%!ea1y:Ϯ? 
{^R LΠ,GomvфS ղT;;sE-Y#A:Shb/T͚> <EB{©XG:4*7CmwXq/SsA% zRِԻ|ɭXȚ~Xyk22{yaGzрUR/yNM"\T꾕,97Y7T]Qt%&xBVyЫ#ujlqopĬR.L:iS^P6IDқFO6(a \8khjEL_WXGOFoO ){q}]rW _U0ѐp]i16̮\6TlK]5u^exv|Eُ]9Ű #dHlsᤓ0o/[T -֮SV{)s}_7)%k\X"'UKV}H%xztiR ޡp꓇7'aOR5I?S8*G&V)~}nkVY;}vvS^5Z38noգudA[:ɰ@a{f~>| %XMb_흢aQͣzGQyPH+GXɶbE-'`.O[s[mB~˫B?i|]ފ8Qۑrf!'Oe45ǂb1Pphy|oWSiPizRdr|þee2/#۵Cl!+lmB^k+4Ȳ>HiW U KJKu46LYa#,Mc0WFg gk6<"I_^j+D֐ V7O/ʽ?0j˫5Ȣ5'2}4xpqM2_;xۑ3mN2L[wif pu8o m}O,_w}Hv+\0h1zRê) Jz'׋ {ae@>'w1 s6fBg HU\зgEdIWb }8c_"Y-ϳS _e|Q:L NqχQjxdd9*ܿ8@uGO_<@> ^/L)v֖~Op@ڸlXVoؼ\"'^OV?O`.)2{Ny*2NWOh(&Yߓ_0_ PtiMnW@rhYZ)m^~4Β&+OZw2&j 8;6J~dXOoMЇBa VM v=,Q 0(CSY6u W0gEm1LOn>C\aSbK#aR*=v0VmGOfK8C+q,c_&F{C3q^p%.EڳVUZ:s|;&r^tz5)'nG\#osێ/֠^@UK",lHr@,؟ JgJ Um #c#f]F '$K*EW·f$拷{~w;ۓ)mb kwLm-8`ˋw}BIW65-U_!]|Pb#o^zM?~Z'Kw4:փMGn+-#L__]7Z}*f|斑x*- 3B\=BEgaxX$$ZW?(;C3~[m'nRv1~*dW -[Ky0ɵ߳!_lha]DJqb'6!}>{.ϸNG|Lgg;ŏnW7k?L-gSnWrZ-G,O†4dXx2TU4.8 1N׿uN{??wYL@m܄gq!VGFፌJMG>)/g6j1e eޭgNAHoxT$ipgP$ 4+H.&DfiJW"kEnMڕ;H:! uW?-ӑUjw׺zC=pwx끧Ͼ֗THotDVjAV&'P MКb!vn~nAbO[>aO#ӗco} s=S{Puy*%&q͵BHFm]Z뼫Գa{'KC>'(v%|nC9 zm k~ N\>]hn 9q(+p:6Su2m'Ck;Cj;hehv~A 8]N9OOʽZus t%0Qt 6p 8H"\ulٵsl;lxOk{"@tJҖ/ysy:9jQoS,SDJҖ~5ĩx{b2޿Xһk5tb =ٶ/l?3f}dkY)fqļ)Z.Ft~-^Ca"W[MJZY 2fpt҉ccS;_^isl/-I8A5S--4g]}^Y6]֒+Z4b,W֮|H K̬s}[BĜrh-,=ND]Q}ݨm ΰk}*]Zڽ{{3ǣdHfۗ"VEj憭M;^s} hl*uː}(\_o񍑅J'|Cix'^ -<*C\d3igɿ".) 0>v3%ܥ9٠s[#z[z77'D#uj$q+6~]vL eaͥzz5B2~Srۯ!If0h'Υ?pXtwUj~ja~5\b(3m7ӬɪmߏZ2S -Iy,(KC@h6ͨPףkZo.Mm].5cje_D1QḒ}Hڂ2H~ΰk^)+3нpioG.lƜ1WSy,K]z@j\{GԗҎrWvR3Iwӆ+%o,R:@zVe9_+/]uWO&W1jÖ3bzFpȋz23ζi!?+TǦk{~Unb,4zSȯzbZ׷)wbtW2q% :z\2ӲR=y 'Hӆ~\Ђө.[0'XfF3H&';ELIW&;qy]$Tѝ\QC;VS"q  Ϫܷbs*ˍ:,Of - ([  -+Xċ阹1qV77b #9<O!+_1Jk?R)ǴFӬhmƈT~}#Zfyv? DA al5M*Նig &)]O -y%$qy#~m[6>W?0ΟJglrؘmӘx ĥh hpIwۮѺHm^)#z6jRiq"l* b*}TOiQ89TUƯ_;ŞG\@?N%*A=/(zW`WyY-lJ_KjQ[~/Kc5QRnzY Xؑ{^.!I9ѯmEp!.$ `ssk h⑕E?UG)iYûr~!"@F, V~B̷C̳_ Wsvjwآ:WkLW.9kG<7lH[&#ɨCpZo hBN|ZOـpD6=AסTJ̯]rP9[%"YVVJCHZ!嗽B"~Ew,y-fmӍL6)/WPjʻZ4Wou:5ch1'福X8w*E+brb/MPhKNlߊgf-zCp>x D ~7@%@ R]@T6 @RBx<|P-GĈZ5:VUZ`WKW? sVot qvʈIy Ucx I(e@ -["̰%w@= R$ Y@BH>Ìm^pZhl6' )B;I+YiOO0 \VL\OՓUrx\) -N@65O(f˾Td8 qHu@ݳ:P@-7>M-ym0#r]̷ -lapV:L/G?2.]-@;<Ǫ -mF=gfl _N?"3V#,3YԠu>(܊ [qhϠe Sh~;_+okd9ʯ+r2.[]7}w+hW\Cg7/ 89)mWޒL)x/mWOkM{*_\o;ꏞ+Z -2Ud7+{*@@+)ri-_|+ Xm)LveC[Gwoã|Nڥ7ObJQw.u<٦$IUq6kTi/ϴ1[_AS n?oK > -vv% -T/"@ -=7Aڣs+6iw=B~rN?t`=efJ}+h$K퍜)bE//ډ]kA;lN$?>(o-1=/pu:(zP}}7zTD/g"L$/JϓV͚;%Q,u˟~]ʷ鈫}IAo/MB˰z@[GǺn\?2QD"8RGqKGFinchc}l[ ?a/~Bzf͸dmOGzIpr1ۉfHb.B!12eV8HRe&9 =AͧӵOw=A>V=^<$@ӭ :~U_Y1:29־(V,e,]cn:=GGf7Y}vf׻H\z5rH.jVVPqluĝ1HTspA_i]b:"D~ _#nH?.E7b? m7Tm߅޸,9na/ -b@㽃$饳BzvW*miW\WV +׸,SJfG#ҟwŒquӻ{/J˷ -npt krJw=U.wl'^l;O6q5=۫hV:.,~ؚq6G*2^+`6墢wv:HQWHɰζ>K8Z>{to3'j9vnv=*Y~@YM"+T#hʆ(Z=4;=]&ZM2ӫ3Qphm;2[]+O37 2y5 HTx! -<?k1۷JطO %oۥŽ;s8ekx?7u/Ю6I -Gm./4To4dSΕӖ+=|!-ɾ7òBzTo؛qi7~ݿ7n$NKXZ115o Xxpq csrWyi;ϸqE?>%8'u#1|iDMәjH덺:ULNڰswlomVTۢ"9g(0]Osڽv d:}>YrƆNӅ۠0bW48uH&^V -^fͮԔx5,djD7t$n~\,У3z3Ip~i42]szi5R'2ma_E&ۯY rܶjɝl>xa5Woʽ=?{:\=KJǒ7 5ۆZ ={+u?:=T 4JdZuERGxrtnSy|N^ uѫ>wUsdbպ%1`]&>iKK۬K,/34@"+] IgtTTҙv回^șعJOK}HJ0{lx>IOS;zpu(;N?TjYZ5AiUQzY?cmk-7YoՃs9rS(ʕ[[g,ZQblIt%pUbiE}&ϋ0]Q`UK㛀t[K0g -_褊/otuz•4#c4W^$-3 &TjIo53?)m!gFϵdz]Jô8:IνaȆ"_0l:JsL(;(;y?cw~5%$ea<Ҳ*Vk\Y:b1 u%V4O*Ou+>:@űOD87olz=Kcq -ZXWKn\?:! 
diff --git a/docs/images/avatar.JPG b/docs/images/avatar.JPG
deleted file mode 100644
index eb49e95f94..0000000000
Binary files a/docs/images/avatar.JPG and /dev/null differ
diff --git a/docs/images/banner_readme.JPG b/docs/images/banner_readme.JPG
deleted file mode 100644
index f5898fdf17..0000000000
Binary files a/docs/images/banner_readme.JPG and /dev/null differ
diff --git a/docs/images/banner_readme.SVG b/docs/images/banner_readme.SVG
deleted file mode 100644
index 5d3afa89b0..0000000000
--- a/docs/images/banner_readme.SVG
+++ /dev/null
@@ -1,60 +0,0 @@
- - - - - - - - - - - - The Update Framework - - - - diff --git a/docs/images/banner_readme.png b/docs/images/banner_readme.png deleted file mode 100644 index 66ee8a17fe..0000000000 Binary files a/docs/images/banner_readme.png and /dev/null differ diff --git a/docs/images/banner_website.JPG b/docs/images/banner_website.JPG deleted file mode 100644 index 992b4c7ab0..0000000000 Binary files a/docs/images/banner_website.JPG and /dev/null differ diff --git a/docs/images/favicon.ico b/docs/images/favicon.ico deleted file mode 100644 index a0e51a0bb7..0000000000 Binary files a/docs/images/favicon.ico and /dev/null differ diff --git a/docs/images/favicon2.JPG b/docs/images/favicon2.JPG deleted file mode 100644 index 15cd735aaa..0000000000 Binary files a/docs/images/favicon2.JPG and /dev/null differ diff --git a/docs/images/favicon3.ico b/docs/images/favicon3.ico deleted file mode 100644 index 43bd0ed664..0000000000 Binary files a/docs/images/favicon3.ico and /dev/null differ diff --git a/docs/images/favicon4.ico b/docs/images/favicon4.ico deleted file mode 100644 index bc4a1e7bcc..0000000000 Binary files a/docs/images/favicon4.ico and /dev/null differ diff --git a/docs/images/repository_tool-diagram.png b/docs/images/repository_tool-diagram.png deleted file mode 100644 index 6bfbdeb0b7..0000000000 Binary files a/docs/images/repository_tool-diagram.png and /dev/null differ diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000000..7e88abc5b7 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,5 @@ +--- +title: "Python-TUF development blog" +--- +This is the development blog for the [Python-TUF](https://github.com/theupdateframework/python-tuf) project, welcome! + diff --git a/docs/index.rst b/docs/index.rst index d62737f5da..a158b70422 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -13,7 +13,8 @@ systems. .. toctree:: :maxdepth: 1 :caption: Contents: - + api/api-reference - CONTRIBUTORS - INSTALLATION \ No newline at end of file + INSTALLATION + Usage examples + Contribute diff --git a/docs/latex/tuf-client-spec.pdf.old b/docs/latex/tuf-client-spec.pdf.old deleted file mode 100644 index 4228832d12..0000000000 Binary files a/docs/latex/tuf-client-spec.pdf.old and /dev/null differ diff --git a/docs/latex/tuf-client-spec.tex b/docs/latex/tuf-client-spec.tex deleted file mode 100644 index 7acc33f0a4..0000000000 --- a/docs/latex/tuf-client-spec.tex +++ /dev/null @@ -1,423 +0,0 @@ -% tuf_client_spec.tex -% This document has been deprecated. We may later update and include it in -% the supported documentation. 
-\documentclass{article}
-\setlength\parindent{0pt}
-\usepackage{listings}
-\usepackage{hyperref}
-\usepackage{color}
-\usepackage{textcomp}
-\definecolor{listinggray}{gray}{0.9}
-\definecolor{lbcolor}{rgb}{0.9,0.9,0.9}
-\lstset{
-  backgroundcolor=\color{lbcolor},
-  tabsize=4,
-  rulecolor=,
-  language=matlab,
-  basicstyle=\scriptsize,
-  upquote=true,
-  aboveskip={1.5\baselineskip},
-  columns=fixed,
-  showstringspaces=false,
-  extendedchars=true,
-  breaklines=true,
-  prebreak = \raisebox{0ex}[0ex][0ex]{\ensuremath{\hookleftarrow}},
-  frame=single,
-  showtabs=false,
-  showspaces=false,
-  showstringspaces=false,
-  identifierstyle=\ttfamily,
-  keywordstyle=\color[rgb]{0,0,1},
-  commentstyle=\color[rgb]{0.133,0.545,0.133},
-  stringstyle=\color[rgb]{0.627,0.126,0.941},
-}
-
-\begin{document}
-
-%--------------------------------- Header --------------------------------------
-\title{Secure Update Framework Client Key Management and Trust Delegation}
-\author{Geremy Condra \and Justin Cappos}
-\maketitle
-%-------------------------------------------------------------------------------
-
-
-%--------------------------------- Intro ---------------------------------------
-\section{Introduction}
-\subsection{Scope}
-This document specifies the required trust delegation and key management routines
-for TUF clients.
-\subsection{Relationship to Other Documents}
-Much of the behavior specified in this document is partially laid out in the
-core TUF document. The system's behavior with regard to freeze and replay
-attacks is covered in the document entitled "Software Update Security Framework:
-Client Library Replay and Freeze Attack Protection". The repository-side counterpart
-to this document contains a large amount of information on the response that
-subsystem will demonstrate in many of the same circumstances.
-%-------------------------------------------------------------------------------
-
-%-------------------------------- Overview -------------------------------------
-\section{Overview}
-The Update Framework is a Python library designed to allow software developers to safely,
-securely, and easily update clients running their software. In particular, it
-focuses on the issues of timeliness of data, rapid recovery from a key compromise,
-and ensuring the authenticity and integrity of installed updates. This document
-describes the behavior the client must demonstrate in order to provide those
-properties.
-%-------------------------------------------------------------------------------
-%-------------------------------- Example --------------------------------------
-\section{Example}
-The examples used throughout both this and its companion repository-side document
-are designed to be easy to reproduce, both to verify TUF's behavior and to emulate
-it. The following subsections demonstrate how to set up a small but fully
-functional TUF system in which to do so.
-
-\subsection{Setting up the Repository}
-You'll need to run the following steps to set the stage:
-
-\begin{lstlisting}
-#! /bin/sh
-
-# create the relevant directories
-mkdir tufdemo
-cd tufdemo
-mkdir demorepo
-mkdir demoproject
-
-# add a file to the project
-echo "#! /usr/bin/env python" > demoproject/helloworld.py
-echo "print 'hello, world!'" >> demoproject/helloworld.py
-
-# run the quickstart script
-quickstart.py -t 1 -k keystore -l demorepo -r demoproject
-\end{lstlisting}
-
-This will prompt you for a password for your keystore and an expiration date.
-Choosing your expiration date is something of a balancing act: on the one hand,
-you want to make sure that all your clients have had a chance to update before
-your keys and metadata expire, but on the other hand you want to choose a short
-time so that keys you revoke expire quickly. A range of one to six weeks is likely
-to be reasonable for most applications.
-\\\\
-After running this and choosing an expiration date, you'll see that it has created
-an encrypted keystore and a repository for you to use, and that the repository's
-contents match those of the demo project we created.
-
-\subsection{Running the Server}
-To actually perform an update from this repository, you'll need to run a web server through
-which the client can access the files. Fortunately, Python comes with an easy-to-use
-module to do this for you:
-
-\begin{lstlisting}
-cd demorepo
-python -m SimpleHTTPServer 8001
-\end{lstlisting}
-
-\subsection{Setting up the Client}
-TUF isn't designed as a replacement for package managers, so it doesn't provide
-a mechanism with which to perform the initial installation of our demo project's
-metadata. To do that, open up another terminal and run the following:
-
-\begin{lstlisting}
-#! /bin/sh
-
-mkdir democlient
-cp -r demorepo/meta democlient/cur
-cp -r democlient/cur democlient/prev
-\end{lstlisting}
-
-
-Once we've installed our metadata, getting the software is a simple matter of
-running the demonstration client, found in TUF's source at
-examples/example\_client.py.
-%-------------------------------- Details --------------------------------------
-\section{Basic Client Behavior}
-When trying to update, the goal of the client is to efficiently obtain the most up-to-date
-legitimate version of the package. Doing that means three things: first, that
-the client has to be able to get enough metadata from the repo to determine
-which files to update; second, that it has to be able to verify that metadata;
-and third, that it needs to be able to verify the files once it receives them.
-\\\\
-To start with, TUF downloads the timestamp.txt file, which tells it the last
-time an update was made. To verify it, we pull the last known good public key for
-the timestamp role out of our copy of root.txt. Assuming that the repo has been updated
-since we last checked, TUF will continue by downloading the release.txt
-metadata file and, like timestamp.txt, verifying it against the release role key
-stored in our copy of root.txt. When combined with the timestamp metadata, the
-release file will allow us to determine if we're receiving the appropriate version
-of the other metadata files.
-\\\\
-Since we now have enough verified data to ensure that we're getting the proper
-version of the rest of our metadata, we can go ahead and obtain and verify the
-root.txt metadata file. As we've already seen, this stores metadata about both
-other roles and their keys, and as we'll see later, this is also how we handle
-key revocation.
-\\\\
-Assuming everything else has checked out, we can now download targets.txt, which
-allows us to determine which target files (i.e., non-metadata files) we will need
-to update. Since we have all the hashes of all the target files and know that
-those hashes are authentic, up-to-date, and valid, we can fetch the matching files
-and complete the update.
-\\\\
-If, at any point in the process, we cannot verify a file against its signature
-or if it hashes incorrectly, the update process will terminate with an error.
-This signature verification process is positive in that the existence of a
-threshold of signatures from the appropriate role is both necessary and sufficient
-for the signed file to be considered valid.
-
-\section{Key Storage}
-As we've seen, the proper operation of a TUF client only depends on the ability to
-verify that any results it obtains when polling the server or mirrors have been
-signed by all of the necessary keys. Since this only requires the use of public
-keys, client key management reduces to the task of properly associating roles
-with their keys. In TUF, the mechanism for doing so is via its metadata files, and
-especially root.txt.
-
-\subsection{root.txt}
-This metadata file is responsible for storing all the trusted keys for TUF,
-along with the key metadata needed to do routine key management. It must be
-located at the base URL of the repository's metadata files and signed by the
-root role's key.
-
-\subsubsection{Format}
-The format of root.txt is as follows:
-
-\begin{verbatim}
-  { "_type" : "Root",
-    "ts" : TIME,
-    "expires" : EXPIRES,
-    "keys" : {
-       KEYID : KEY
-       , ... },
-    "roles" : {
-       ROLE : {
-         "keyids" : [ KEYID, ... ] ,
-         "threshold" : THRESHOLD }
-       , ... }
-  }
-\end{verbatim}
-
-The format of each element in the above should be consistent with that described
-in the TUF specification sections 4.1, 4.2, and 4.3, which is to say that the
-TIME and EXPIRES values should be in "YYYY-MM-DD HH:MM:SS" format, KEYID is a
-64 character hexadecimal string, and THRESHOLD is a (normally small) integer.
-\\\\
-If the current date is past that specified in EXPIRES, the update process should
-end with an error. The same is true of all other metadata in TUF.
-\\\\
-The KEY value should be of the following format:
-
-\begin{verbatim}
-  { "keytype" : KEYTYPE,
-    "keyval" : KEYVAL }
-\end{verbatim}
-
-where KEYTYPE is a string signifying the encryption primitive (e.g., 'rsa') and
-KEYVAL is a canonical JSON mapping specifying the appropriate key parameters for
-that primitive.
-\\\\
-The ROLE value should be one of 'root', 'release', 'targets', 'timestamp', or
-'mirrors'.
-
-\subsubsection{Verification and Validation}
-In addition to validation of the format as specified above, the client library
-is required to perform the following checks upon receiving an updated root.txt:
-
-\begin{enumerate}
-  \item Check the 'ts' field against the current time and date
-  to ensure that the file has not been replaced with an older version.
-  This is to ensure that an attacker can't replay metadata from
-  before a compromised key was revoked.
-  \item Verify that each of the top level roles is correctly
-  specified in the "roles" field, with the exception of the
-  optional "mirrors" role.
-  \item Verify that each keyid matches its respective key and is unique.
-\end{enumerate}
-
-In addition to the above, the client library must also take care not to trust a root.txt past
-its expiration time and to ensure that keys specified in it meet algorithm-specific
-standards of safety. \textbf{Clients must not be allowed to trust or install an improperly signed root.txt.}
-Doing so would allow an attacker to forge arbitrary updates, effectively removing
-all of TUF's security properties.
-
-\section{Trust Delegation}
-Trust delegation (the process of a trusted user allowing another user to share
-some or all of their privileges) is built into TUF's trust model, particularly
-with respect to the Target role.
The goal of this delegation mechanism is to
-make it possible for individual developers to be trusted on some portion of a
-project, but not the project as a whole. Note that since authorization is positive
-in TUF, if a valid signature for an update exists under any role with permission
-over it, that update will be accepted as valid.
-
-\subsection{targets.txt}
-The targets.txt file, signed by the target role's key, is responsible for providing
-the mechanism for trust delegation. It must be located at the repository's
-metadata base URL and have the following format:
-
-\begin{verbatim}
-  { "_type" : "Targets",
-    "ts" : TIME,
-    "expires" : EXPIRES,
-    "targets" : TARGETS,
-    ("delegations" : DELEGATIONS)
-  }
-\end{verbatim}
-
-The TIME and EXPIRES fields are formatted as for the root.txt format.
-\\\\
-The TARGETS value should be a list of elements in the following format:
-
-\begin{verbatim}
-  { TARGETPATH : {
-      "length" : LENGTH,
-      "hashes" : HASHES,
-      ("custom" : { ... }) }
-    , ...
-  }
-\end{verbatim}
-
-Where LENGTH is an integer and HASHES is a list of the cryptographic hashes of
-the path's destination. The 'custom' field's contents are application-specific
-and have no impact on the delegation behavior.
-\\\\
-The DELEGATIONS part of the targets.txt file points to a list of items formatted
-like so:
-
-\begin{verbatim}
-  { "keys" : {
-      KEYID : KEY,
-      ... },
-    "roles" : {
-      ROLE : {
-        "keyids" : [ KEYID, ... ] ,
-        "threshold" : THRESHOLD,
-        "paths" : [ PATHPATTERN, ... ] }
-      , ... }
-  }
-\end{verbatim}
-
-With the exception of PATHPATTERN, all the variables seen here are formatted
-identically to the variables of the same names in the previous listings. PATHPATTERN
-itself represents either a literal path in UNIX format or a path ending with
-a wildcard represented by '/**'.
-
-\subsection{Example}
-We can use the tools built into TUF to see how the above translates into a full
-targets.txt. While in the demorepo/meta directory created by our first script, we can
-use signercli.py like this:
-\begin{lstlisting}
-cd ../..
-signercli.py delegate --keystore=keystore ROLE KEYID PATH
-\end{lstlisting}
-to modify our default targets.txt to add a delegated role named ROLE associated
-with KEYID that has permission to modify elements in PATH. Here's the (scrubbed)
-output:
-
-\begin{verbatim}
-{"signatures": [{
-    "keyid": KEYID,
-    "method": "sha256-pkcs1",
-    "sig": SIGNATURE
-    }],
- "signed": {
-    "_type": "Targets",
-    "expires": EXPIRES,
-    "targets": {
-       PATH: {
-         "hashes": {
-            "sha256": HASH},
-         "length": LENGTH}
-      },
-    "ts": TIMESTAMP,
-    "delegations": {
-       "keys": {
-          KEYID: {
-             "keytype": "rsa",
-             "keyval": {"e": E, "n": N}
-          }
-       },
-       "roles": {ROLE: {"keyids": [KEYID],
-                 "paths": [PATH],
-                 "threshold": 1}
-       }
-    }
- }
-}
-\end{verbatim}
-
-\subsection{Delegated Targets Metadata}
-Delegated targets metadata is stored in /targets/ROLE.txt, where ROLE is the
-name of the role to be delegated. This file must be signed by that role and
-be formatted identically to the top level targets.txt file. Hierarchically
-delegated trust is, appropriately, handled hierarchically: if DELEGATED\_ROLE
-delegates trust to ANOTHER\_ROLE, then the metadata file for ANOTHER\_ROLE can be
-found at /targets/DELEGATED\_ROLE/ANOTHER\_ROLE.txt.
We can create a simple
-example with the following command:
-
-\begin{lstlisting}
-signercli.py maketargets \
---keystore=../keystore \
---parentdir=targets \
---keyid=KEYID \
---rolename=ROLE \
-TARGETS
-\end{lstlisting}
-
-And here's the result, stored at targets/ROLE.txt:
-
-\begin{verbatim}
-{
-  "signatures": [
-    {
-      "keyid": KEYID,
-      "method": "sha256-pkcs1",
-      "sig": SIGNATURE
-    }
-  ],
-  "signed": {
-    "_type": "Targets",
-    "expires": EXPIRES,
-    "targets": {
-      PATH: {
-        "hashes": {
-          "sha256": HASH
-        },
-        "length": LENGTH
-      }
-    },
-    "ts": TIMESTAMP
-  }
-}
-\end{verbatim}
-
-\section{Key Revocation}
-Key revocation in TUF falls into one of three cases:
-
-\begin{enumerate}
-  \item Revocation of a delegated target key
-  \item Revocation of a non-root top level key
-  \item Revocation of a root key
-\end{enumerate}
-
-Revocation of a delegated target key is simple: the key in question is simply
-removed from the metadata files that delegated to it. Similarly, revoking or
-replacing a non-root top level key is just a matter of replacing it in root.txt
-with the new value. For example, suppose that a role ALICE delegates trust to
-another role EVE. ALICE can then revoke EVE's trust entirely by deleting the
-targets/ALICE/EVE.txt file and removing the DELEGATIONS data structure from
-either targets/ALICE.txt (if ALICE is a child of a top-level role) or from her
-parent role otherwise. The next time an update is generated, EVE's trust will
-have been completely revoked. Replacing EVE's key can then be done by adding her
-like a new delegation.
-\\\\
-Revocation of the root key is only slightly more complex. Merely replacing it would
-leave older clients unable to update, so the better way is to simply sign with
-both the new key and the old key until you are confident that all the relevant clients
-have updated. Once you've done that, you can stop signing with the old key.
-
-\section{Future Work}
-In the future, the format for keys may be opened to support OpenSSL-style keys.
-Support for skewed clocks may also be added as noted in the core TUF spec, since
-many clients seem to be operating under substantial clock drift. Support for
-automatically integrating TUF with other projects using distutils is also a
-potential future direction.
-%-------------------------------------------------------------------------------
-\end{document}
diff --git a/docs/latex/tuf-server-spec.pdf.old b/docs/latex/tuf-server-spec.pdf.old
deleted file mode 100644
index bdb4893857..0000000000
Binary files a/docs/latex/tuf-server-spec.pdf.old and /dev/null differ
diff --git a/docs/latex/tuf-server-spec.tex b/docs/latex/tuf-server-spec.tex
deleted file mode 100644
index 142cd0b020..0000000000
--- a/docs/latex/tuf-server-spec.tex
+++ /dev/null
@@ -1,215 +0,0 @@
-% tuf_server_spec.tex
-% This document has been deprecated. We may later update and include it in
-% the supported documentation.
-\documentclass{article} -\setlength\parindent{0pt} -\usepackage{listings} -\usepackage{hyperref} -\usepackage{color} -\usepackage{textcomp} -\definecolor{listinggray}{gray}{0.9} -\definecolor{lbcolor}{rgb}{0.9,0.9,0.9} -\lstset{ - backgroundcolor=\color{lbcolor}, - tabsize=4, - rulecolor=, - language=matlab, - basicstyle=\scriptsize, - upquote=true, - aboveskip={1.5\baselineskip}, - columns=fixed, - showstringspaces=false, - extendedchars=true, - breaklines=true, - prebreak = \raisebox{0ex}[0ex][0ex]{\ensuremath{\hookleftarrow}}, - frame=single, - showtabs=false, - showspaces=false, - showstringspaces=false, - identifierstyle=\ttfamily, - keywordstyle=\color[rgb]{0,0,1}, - commentstyle=\color[rgb]{0.133,0.545,0.133}, - stringstyle=\color[rgb]{0.627,0.126,0.941}, -} - -\begin{document} - -%--------------------------------- Header -------------------------------------- -\title{Secure Update Framework Repository Key Management and Trust Delegation} -\author{Justin Cappos \and Geremy Condra} -\maketitle -%------------------------------------------------------------------------------- - - -%--------------------------------- Intro --------------------------------------- -\section{Introduction} -The goal of a TUF repository is to store a collection of targets (the updated files -to be served to the client) and a set of metadata with which the client can -ensure that the updates they receive are complete, timely, and authentic. Meeting -those goals requires that the repository be able to leverage digital signatures, -and so TUF provides a set of tools that helps manage the complexity of generating -metadata and improves the security posture of the repository. These tools -(including mechanisms for key management, delegation of trust, and key revocation) -form the subject of this document. - -\subsection{Scope} -This document specifies the required trust delegation and key management routines -for TUF repositories. -%------------------------------------------------------------------------------- - -%-------------------------------- Overview ------------------------------------- -\section{Overview} -The Update Framework is a Python library designed to assist software developers -in the task of safely and securely updating their software after its deployment -with an emphasis on resilience in the face of key compromise. Towards that end, -the issues of key storage, revocation of keys and their accompanying trust, and -the delegation of that trust have been given careful attention. This document -exists to provide guidance on both mandatory behaviors and best practices with -regard to those. - -\subsection{Relationship to Other Documents} -Much of the behavior specified in this document is partially laid out in the -core TUF document. The system's behavior with regard to freeze and replay -attacks is covered in the document entitled "Software Update Security Framework: -Repository Library Replay and Freeze Attack Protection". The client-side counterpart -to this document contains a large amount of information on the response that -subsystem will demonstrate in many of the same circumstances. In particular, it -contains an overview of how to set up and run an experimental TUF repository and -client. 
-%-------------------------------------------------------------------------------
-%-------------------------------- Details --------------------------------------
-\section{Key Management}
-Where TUF clients are primarily concerned with appropriately associating roles
-to public keys, those maintaining TUF repositories are additionally required to
-keep the relevant signing keys private. Mechanically, this means storing them in
-a custom AES-encrypted keystore, but other precautions (detailed below) should
-be observed in order to minimize the risk of a key compromise.
-
-\subsection{Key Storage}
-TUF's keystore takes the form of an AES-encrypted key database with two backing
-data stores, both Python dictionaries, containing keys (both public and private)
-and their roles. In addition to the interface provided by its KeyDB parent class
-(primarily responsible for associating roles and key IDs with keys and delegation
-information), the keystore also provides three methods:
-
-\begin{itemize}
-  \item $set\_password(password)$, which sets the password used to generate the AES key.
-  \item $clear\_password()$, which clears the aforementioned password.
-  \item $save()$, which encodes, encrypts, and writes the database to disk.
-\end{itemize}
-
-Let's walk through each of these in slightly more detail.
-\\\\
-Setting or clearing the password to be used does nothing more than set and clear
-a password field in the keystore, but note that if a keystore does not have a
-password field set when $save()$ is called, it won't encrypt it: your keys will
-be on disk in plaintext. Also note that passwords must not be used directly: at
-the moment TUF clients use RFC 2440 password mangling to derive a key from
-the original password material, but see the future work section at the end of
-this document for more information on development of this system.
-\\\\
-Saving the keystore is done through TUF's usual method of serialization, which
-is to say Canonical JSON. Encryption is done using AES-256, after which the result
-is written to disk to be decrypted at some later date. Due to recent attacks on
-this key length, clients may also opt to use AES-192.
-
-\subsection{Best Practices}
-While TUF places a great deal of emphasis on the ability to recover from a key
-compromise and uses strong cryptographic techniques to minimize the attack window,
-avoiding such scenarios in the first place is always preferable. As a result, a
-few best practices are recommended for those responsible for storing keying
-material:
-
-\begin{enumerate}
-  \item Store the root key offline: while a root key compromise is recoverable,
-  revoking a root key is more troublesome than revoking other keys.
-  \item No single point of failure should permit access to both the target
-  and release roles' keys.
-  \item The timestamp role's key can be stored online for automated timestamping.
-\end{enumerate}
-
-For some, the above will prove to be difficult to provide inside an
-organization. Those users should note that while TUF by no means requires an
-external PKI to operate, its design permits the use of one.
-
-\subsection{Command Line Interface}
-To make repository management easier on maintainers, a set of easy-to-use command
-line tools has been developed that works to simplify most of the tasks above.
-In particular, the signercli.py script provides an easy way to perform the most
-common tasks a maintainer will face.
-
-\subsubsection{Generating and Listing Keys}
-Besides generating the initial keystore using the quickstart.py script we saw
-earlier, the signercli.py script provides an easy way to generate keys using the
-$genkey$ subcommand. Using it is extremely easy:
-
-\begin{lstlisting}
-cd demorepo
-signercli.py genkey --keystore=../keystore
-\end{lstlisting}
-
-This will give you a good deal of output, ultimately including a line something
-like the following:
-
-\begin{lstlisting}
-[TIME] [tuf] [INFO] Generated new key: KEYID
-\end{lstlisting}
-
-Listing key IDs is similarly easy:
-
-\begin{lstlisting}
-cd demorepo
-signercli.py listkeys --keystore=../keystore
-\end{lstlisting}
-
-You can also get more information about a particular key using the dumpkey
-subcommand. To print information about a public key, do the following:
-
-\begin{lstlisting}
-cd demorepo
-signercli.py dumpkey --keystore=../keystore KEYID
-\end{lstlisting}
-
-If you pass the "--include-secret" option, it will also print the signing key.
-
-\subsubsection{Changing Keystore Passwords}
-
-To change the keystore password, simply run the signercli.py script with the
-changepass subcommand, type the current password, and then enter the new password.
-Here's an example:
-
-\begin{lstlisting}
-cd demorepo
-signercli.py changepass --keystore=../keystore
-\end{lstlisting}
-
-Note that there is not currently a mechanism for providing these options on the
-command line. This is done to prevent the password from being pulled from an
-unguarded shell history file.
-
-\subsubsection{Delegating Trust}
-
-TUF makes delegating trust quite easy, and an example of how to do so is provided
-in the companion client-side document. Note that the folder a delegated role's
-ROLE.txt metadata file goes into must exist before running the script.
-
-\subsubsection{Revoking Trust}
-TUF does not currently provide a command line interface for revoking trust;
-however, doing so is simple using existing tools. To revoke a delegated
-trust, just delete the accompanying ROLE.txt and create a new update. The
-revocation procedure for other keys is described in the companion document, section
-7.
-
-\section{Trust Delegation}
-The TUF trust delegation model is described in the TUF spec section 4.5 and in
-the TUF client key management specification section 6.
-
-\section{Future Work}
-Three major areas of future work remain here: first, accounting for improved
-cryptographic and cryptanalytic results against AES-256 and the RFC2440 key
-derivation algorithm; second, developing additional tools to provide for
-automatic revocation of trust for all roles; and third, improving the mechanisms
-for automatic integration of other projects with TUF.
- -%------------------------------------------------------------------------------- -\end{document} diff --git a/docs/repository-library-design-ownership.jpg b/docs/repository-library-design-ownership.jpg new file mode 100644 index 0000000000..68eaafc8e4 Binary files /dev/null and b/docs/repository-library-design-ownership.jpg differ diff --git a/docs/repository-library-design-usage.jpg b/docs/repository-library-design-usage.jpg new file mode 100644 index 0000000000..9eb7ca711b Binary files /dev/null and b/docs/repository-library-design-usage.jpg differ diff --git a/docs/repository-library-design.md b/docs/repository-library-design.md new file mode 100644 index 0000000000..5a9b0fde48 --- /dev/null +++ b/docs/repository-library-design.md @@ -0,0 +1,226 @@ +# Python-tuf repository API proposal: _minimal repository abstraction_ + +This is an attachment to ADR 10: _Repository library design built on top of +Metadata API_, and documents the design proposal in Dec 2021. + +## Design principles + +Primary goals of this repository library design are +1. Support full range of repository implementations: from command line + “repository editing” tools to production repositories like PyPI +2. Provide canonical solutions for the difficult repository problems but avoid + making implementation decisions +3. Keep python-tuf maintenance burden in mind: less is more + +Why does this design look so different from both legacy python-tuf code and +other implementations? +* Most existing implementations are focused on a specific use case (typically a + command line application): this is a valid design choice but severely limits + goal #1 +* The problem space contains many application decisions. Many implementations + solve this by creating functions with 15 arguments: this design tries to find + another way (#2) +* The Metadata API makes modifying individual pieces of metadata simpler. This, + combined with good repository API design, should enable more variance in + where things are implemented: The repository library does not have to + implement every little detail as we can safely let specific implementations + handle things, see goal #3 +* This variance means we can start by implementing a minimal design: as + experience from implementations is collected, we can then move implementation + details into the library (goals #2, #3) + +## Design + +### Application and library components + +![Design: Application and library components](repository-library-design-ownership.jpg) + +The design expects a fully functional repository application to contain code at +three levels: +* Repository library (abstract classes that are part of python-tuf) + * The Repository abstract class provides an ergonomic abstract metadata + editing API for all code levels to use. It also provides implementations + for some core edit actions like _snapshot update_. + * A small amount of related functionality is also provided (private key + management API, maybe repository validation). + * is a very small library: possibly a few hundred lines of code. +* Concrete Repository implementation (typically part of application code, + implements interfaces provided by the repository API in python-tuf) + * Contains the “application level” decisions that the Repository abstraction + requires to operate: examples of application decisions include + * _When should “targets” metadata next expire when it is edited?_ + * _What is the current “targets” metadata version? Where do we load it + from?_ + * _Where to store current “targets” after editing? 
Should the previous
+    version be deleted from storage?_
+* Actual application
+  * Uses the Repository API to do the repository actions it needs to do
+
+For context, here’s a trivial example showing what “ergonomic editing” means --
+this key-adding code could be in the application (or later, if common patterns
+are found, in the python-tuf library):
+
+```python
+with repository.edit("targets") as targets:
+    # adds a key for role1 (as an example, arbitrary edits are allowed)
+    targets.add_key("role1", key)
+```
+
+This code loads current targets metadata for editing, adds the key to a role,
+and handles version and expiry bumps before persisting the new targets version.
+The reason for the context manager style is that it manages two things
+simultaneously:
+* Hides the complexity of loading and persisting metadata, and of updating expiry
+  and versions, from the editing code (by putting it in the repository
+  implementation that is defined in python-tuf but implemented by the
+  application)
+* Still allows completely arbitrary edits on the metadata in question: the
+  library does not need to anticipate what the application wants to do, and on
+  the other hand the library can still provide e.g. snapshot functionality
+  without knowing about the application decisions mentioned in the previous
+  point.
+
+Other designs do not seem to manage both of these.
+
+### How the components are used
+
+![Design: How components are used](repository-library-design-usage.jpg)
+
+The core idea here is that because editing is ergonomic enough, when new
+functionality (like “developer uploads new targets”) is added, _it can be added
+at any level_: the application might add a `handle_new_target_files()` method
+that adds a bunch of targets into the metadata, but one of the previous layers
+could offer that as a helper function as well: code in both cases would look
+similar, as it would use the common editing interface (see the sketch at the
+end of this section).
+
+The proposed design is purposefully spartan in that the library provides
+very few high-level actions (the prototype only provided _sign_ and
+_snapshot_): everything else is left to the implementer at this point. As we
+gain experience of common usage patterns, we can start providing other
+features as well.
+
+There are a few additional items worth mentioning:
+* Private key management: the Repository API should come with a “keyring
+  abstraction” -- a way for the application to provide roles’ private keys for
+  the Repository to use. Some implementations could be provided as well.
+* Validating repository state: the design is very much focused on enabling
+  efficient editing of individual metadata. Implementations are also likely to
+  be interested in validating (after some edits) that the repository is correct
+  according to client workflow and that it contains the expected changes. The
+  Repository API should provide some validation, but we should recognise that
+  validation may be implementation specific.
+* Improved metadata editing: There are a small number of improvements that
+  could be made to metadata editing. These do not necessarily need to be part
+  of the repository API: they could be part of the Metadata API as well.
+
+It would make sense for python-tuf to ship with at least one concrete
+Repository implementation: possibly a repo.py look-alike. This implementation
+should not be part of the library but an example.
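+
+As a rough sketch (illustrative only, not part of the proposal), an
+application-level `handle_new_target_files()` helper built on this editing
+interface could look like the following. `TargetFile.from_file()` is the
+existing Metadata API helper; the no-argument `snapshot()` call assumes a
+concrete Repository implementation like the git_repo.py example discussed in
+the Details section:
+
+```python
+from tuf.api.metadata import TargetFile
+
+
+def handle_new_target_files(repository, filenames):
+    """Add local files as targets, then update snapshot and timestamp.
+
+    'repository' is assumed to implement the Repository API sketched here.
+    """
+    with repository.edit("targets") as targets:
+        for filename in filenames:
+            # from_file() reads the local file and fills in length and hashes
+            targets.targets[filename] = TargetFile.from_file(filename, filename)
+
+    # The application decides when the snapshot content is final
+    repository.snapshot()
+```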
+
+## Details
+
+This section includes links to a Proof of Concept implementation in
+[repository-editor-for-tuf](https://github.com/vmware-labs/repository-editor-for-tuf/):
+it should not be seen as the exact proposed API but a prototype of the ideas.
+
+The ideas in this document map to POC components like this:
+
+| Concept | repository-editor-for-tuf implementation |
+|-|-|
+| Repository API | [librepo/repo.py](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/librepo/repo.py), [librepo/keys.py](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/librepo/repo.py) |
+| Example of repository implementation | [git_repo.py](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/git_repo.py) |
+| Application code | [cli.py (command line app)](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/cli.py), [keys_impl.py (keyring implementation)](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/keys_impl.py) |
+| Repository validation | [verifier.py (very rough, not intended for python-tuf)](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/verifier.py) |
+| Improved Metadata editing | [helpers.py](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/helpers.py) |
+
+
+### Repository API
+
+Repository itself is a minimal abstract class: the value of this class is in
+defining the abstract method signatures (most importantly `_load()`, `_save()`,
+`edit()`) that enable ergonomic metadata editing. The Repository class in this
+proposal includes concrete implementations only for the following:
+* `sign()` -- signing without editing the metadata payload
+* `snapshot()` -- updates snapshot and timestamp metadata based on given input.
+  Note that a concrete Repository implementation could provide an easier-to-use
+  snapshot that does not require input (see example in git_repo.py)
+
+More concrete method implementations (see cli.py for examples) could be added
+to Repository itself, but none seem essential at this point.
+
+The current prototype API defines five abstract methods that take care of
+access to metadata storage, expiry updates, version updates, and signing. These
+must be implemented in the concrete implementation:
+
+* **keyring()**: A property that returns the private key mapping that should be
+  used for signing.
+
+* **_load()**: Loads metadata from storage or cache. Is used by edit() and
+  sign().
+
+* **_save()**: Signs and persists metadata in cache/storage. Is used by edit()
+  and sign().
+
+* **edit()**: The ContextManager that enables ergonomic metadata
+  editing by handling expiry and version number management.
+
+* **init_role()**: Initializes new metadata, handling expiry and version number.
+  (_init_role is in a way a special case of edit and should potentially be
+  integrated there_).
+
+The API requires a “Keyring” abstraction that the repository code can use to
+look up a set of signers for a specific role. Specific implementations of
+Keyring could include a file-based keyring for testing, an env-var keyring for
+CI use, etc. Some implementations should be provided in the python-tuf code
+base and more could be implemented in applications.
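+
+As a loose illustration of the shape (not the proposed API itself), the
+abstract class could be as small as the sketch below. The method list mirrors
+the one above; the `Signer` type, the exact keyring shape, and the concrete
+`sign()` body are assumptions:
+
+```python
+from abc import ABC, abstractmethod
+from typing import ContextManager, Dict, Set
+
+from tuf.api.metadata import Metadata, Signed
+
+
+class Repository(ABC):
+    """Sketch of the minimal abstract class (illustrative only)."""
+
+    @property
+    @abstractmethod
+    def keyring(self) -> Dict[str, Set["Signer"]]:
+        """Private key mapping for signing: role name -> set of signers.
+
+        "Signer" is assumed to come from a signing library."""
+
+    @abstractmethod
+    def _load(self, role: str) -> Metadata:
+        """Load current metadata for role from storage or cache."""
+
+    @abstractmethod
+    def _save(self, role: str, md: Metadata) -> None:
+        """Sign metadata with the keyring signers and persist it."""
+
+    @abstractmethod
+    def edit(self, role: str) -> ContextManager[Signed]:
+        """Context manager that loads role for editing and handles
+        version and expiry bumps before saving."""
+
+    @abstractmethod
+    def init_role(self, role: str) -> None:
+        """Create new metadata for role, handling expiry and version."""
+
+    # sign() can be concrete: re-sign without editing the payload
+    def sign(self, role: str) -> None:
+        self._save(role, self._load(role))
+```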
+ +_Prototype status: Prototype Repository and Keyring abstractions exist in +librepo/repo.py._ + +### Example concrete Repository implementation + +The design decisions that the included example `GitRepository` makes are not +important but provide an example of what is possible: +* Metadata versions are stored in files in git, with filenames that allow + serving the metadata directory as-is over HTTP +* Version bumps are made based on git status (so edits in the staging area only + bump the version once) +* “Current version” when loading metadata is decided based on filenames on disk +* Files are removed once they are no longer part of the snapshot (to keep the + directory uncluttered) +* Expiry times are decided based on an application-specific metadata field +* Private keys can be stored in a file or in environment variables (for CI use) + +Note that the GitRepository implementation is significantly larger than the +Repository interface -- but all of the complexity in GitRepository is really +related to the design decisions made there. + +_Prototype status: The GitRepository example exists in git_repo.py._ + +### Validating repository state + +This is mostly undesigned but something built on top of TrustedMetadataSet +(currently an ngclient component) might work as a way to easily check specific +aspects like: +* Is top-level metadata valid according to the client workflow +* Is a role included in the snapshot and the delegation tree + +It’s likely that different implementations will have different needs though: a +command line app for small repos might want to validate by loading all metadata +into memory, but a server application hosting tens of thousands of pieces of +metadata is unlikely to do so. + +_Prototype status: A very rough implementation exists in verifier.py: this is +unlikely to be very useful._ + +### Improved metadata editing + +Currently the identified improvement areas are: +* Metadata initialization: this could potentially be improved by adding + default argument values to Metadata API constructors +* Modifying and looking up data about roles in delegating metadata + (root/targets): they do similar things but root and targets do not have + identical APIs.
This may be a very specific use case and not interesting + for some applications + +_Prototype status: Some potential improvements have been collected in +helpers.py_ diff --git a/docs/tuf-horizontal-white.png b/docs/tuf-horizontal-white.png new file mode 100644 index 0000000000..6698641c82 Binary files /dev/null and b/docs/tuf-horizontal-white.png differ diff --git a/docs/tuf-icon-200.png b/docs/tuf-icon-200.png new file mode 100644 index 0000000000..f18c0b3361 Binary files /dev/null and b/docs/tuf-icon-200.png differ diff --git a/docs/tuf-icon-32.png b/docs/tuf-icon-32.png new file mode 100644 index 0000000000..d19274a3e8 Binary files /dev/null and b/docs/tuf-icon-32.png differ diff --git a/docs/tuf-spec.0.9.txt b/docs/tuf-spec.0.9.txt deleted file mode 100644 index e5bdeb8368..0000000000 --- a/docs/tuf-spec.0.9.txt +++ /dev/null @@ -1 +0,0 @@ -The TUF specification file has been moved to https://github.com/theupdateframework/specification/blob/master/historical/tuf-spec.0.9.txt diff --git a/docs/tuf-spec.md b/docs/tuf-spec.md deleted file mode 100644 index 7d8df7b60f..0000000000 --- a/docs/tuf-spec.md +++ /dev/null @@ -1 +0,0 @@ -The TUF specification file has been moved to https://github.com/theupdateframework/specification/blob/master/tuf-spec.md diff --git a/docs/tuf-spec.txt b/docs/tuf-spec.txt deleted file mode 100644 index 7d8df7b60f..0000000000 --- a/docs/tuf-spec.txt +++ /dev/null @@ -1 +0,0 @@ -The TUF specification file has been moved to https://github.com/theupdateframework/specification/blob/master/tuf-spec.md diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 0000000000..8ca5d7bf0f --- /dev/null +++ b/examples/README.md @@ -0,0 +1,5 @@ +# Usage examples + +* [client](client_example) +* [repository](repo_example) + diff --git a/examples/client_example/1.root.json b/examples/client_example/1.root.json new file mode 100644 index 0000000000..214d8db01b --- /dev/null +++ b/examples/client_example/1.root.json @@ -0,0 +1,87 @@ +{ + "signatures": [ + { + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" + } + ], + "signed": { + "_type": "root", + "consistent_snapshot": false, + "expires": "2030-01-01T00:00:00Z", + "keys": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC 
KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + }, + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" + }, + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" + }, + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" + } + }, + "roles": { + "root": { + "keyids": [ + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" + ], + "threshold": 1 + }, + "targets": { + "keyids": [ + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" + ], + "threshold": 1 + } + }, + "spec_version": "1.0.0", + "version": 1 + } +} \ No newline at end of file diff --git a/examples/client_example/README.md b/examples/client_example/README.md new file mode 100644 index 0000000000..399c6d6b42 --- /dev/null +++ b/examples/client_example/README.md @@ -0,0 +1,26 @@ +# TUF Client Example + + +TUF Client Example, using ``python-tuf``. + +This TUF Client Example implements the following actions: + - Client Infrastructure Initialization + - Download target files from TUF Repository + +The example client expects to find a TUF repository running on localhost. We +can use the static metadata files in ``tests/repository_data/repository`` +to set one up. + +Run the repository using the Python3 built-in HTTP module, and keep this +session running. + +```console + $ python3 -m http.server -d tests/repository_data/repository + Serving HTTP on :: port 8000 (http://[::]:8000/) ... +``` + +How to use the TUF Client Example to download a target file. 
+ +```console +$ ./client_example.py download file1.txt +``` diff --git a/examples/client_example/client_example.py b/examples/client_example/client_example.py new file mode 100755 index 0000000000..e747abef99 --- /dev/null +++ b/examples/client_example/client_example.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python +"""TUF Client Example""" + +# Copyright 2012 - 2017, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +import argparse +import logging +import os +import shutil +from pathlib import Path + +from tuf.api.exceptions import DownloadError, RepositoryError +from tuf.ngclient import Updater + +# constants +BASE_URL = "http://127.0.0.1:8000" +DOWNLOAD_DIR = "./downloads" +METADATA_DIR = f"{Path.home()}/.local/share/python-tuf-client-example" +CLIENT_EXAMPLE_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def init() -> None: + """Initialize local trusted metadata and create a directory for downloads""" + + if not os.path.isdir(DOWNLOAD_DIR): + os.mkdir(DOWNLOAD_DIR) + + if not os.path.isdir(METADATA_DIR): + os.makedirs(METADATA_DIR) + + if not os.path.isfile(f"{METADATA_DIR}/root.json"): + shutil.copy( + f"{CLIENT_EXAMPLE_DIR}/1.root.json", f"{METADATA_DIR}/root.json" + ) + print(f"Added trusted root in {METADATA_DIR}") + + else: + print(f"Found trusted root in {METADATA_DIR}") + + +def download(target: str) -> bool: + """ + Download the target file using ``ngclient`` Updater. + + The Updater refreshes the top-level metadata, gets the target information, + checks whether the target is already cached and, if it is not cached, + downloads the target file. + + Returns: + A boolean indicating whether the process was successful + """ + try: + updater = Updater( + metadata_dir=METADATA_DIR, + metadata_base_url=f"{BASE_URL}/metadata/", + target_base_url=f"{BASE_URL}/targets/", + target_dir=DOWNLOAD_DIR, + ) + updater.refresh() + + info = updater.get_targetinfo(target) + + if info is None: + print(f"Target {target} not found") + return True + + path = updater.find_cached_target(info) + if path: + print(f"Target is available in {path}") + return True + + path = updater.download_target(info) + print(f"Target downloaded and available in {path}") + + except (OSError, RepositoryError, DownloadError) as e: + print(f"Failed to download target {target}: {e}") + return False + + return True + + +def main() -> None: + """Main TUF Client Example function""" + + client_args = argparse.ArgumentParser(description="TUF Client Example") + + # Global arguments + client_args.add_argument( + "-v", + "--verbose", + help="Output verbosity level (-v, -vv, ...)", + action="count", + default=0, + ) + + # Sub commands + sub_command = client_args.add_subparsers(dest="sub_command") + + # Download + download_parser = sub_command.add_parser( + "download", + help="Download a target file", + ) + + download_parser.add_argument( + "target", + metavar="TARGET", + help="Target file", + ) + + command_args = client_args.parse_args() + + if command_args.verbose == 0: + loglevel = logging.ERROR + elif command_args.verbose == 1: + loglevel = logging.WARNING + elif command_args.verbose == 2: + loglevel = logging.INFO + else: + loglevel = logging.DEBUG + + logging.basicConfig(level=loglevel) + + # initialize the TUF Client Example infrastructure + init() + + if command_args.sub_command == "download": + download(command_args.target) + + else: + client_args.print_help() + + +if __name__ == "__main__": + main() diff --git a/examples/repo_example/basic_repo.py b/examples/repo_example/basic_repo.py new
file mode 100644 index 0000000000..8d61ba1a81 --- /dev/null +++ b/examples/repo_example/basic_repo.py @@ -0,0 +1,359 @@ +""" +A TUF repository example using the low-level TUF Metadata API. + +The example code in this file demonstrates how to *manually* create and +maintain repository metadata using the low-level Metadata API. It implements +similar functionality to that of the deprecated legacy 'repository_tool' and +'repository_lib'. (see ADR-0010 for details about repository library design) + +Contents: + * creation of top-level metadata + * target file handling + * consistent snapshots + * key management + * top-level delegation and signing thresholds + * target delegation + * in-band and out-of-band metadata signing + * writing and reading metadata files + * root key rotation + +NOTE: Metadata files will be written to a 'tmp*'-directory in CWD. + +""" +import os +import tempfile +from datetime import datetime, timedelta +from pathlib import Path +from typing import Any, Dict + +from securesystemslib.keys import generate_ed25519_key +from securesystemslib.signer import SSlibSigner + +from tuf.api.metadata import ( + SPECIFICATION_VERSION, + DelegatedRole, + Delegations, + Key, + Metadata, + MetaFile, + Root, + Snapshot, + TargetFile, + Targets, + Timestamp, +) +from tuf.api.serialization.json import JSONSerializer + + +def _in(days: float) -> datetime: + """Adds 'days' to now and returns datetime object w/o microseconds.""" + return datetime.utcnow().replace(microsecond=0) + timedelta(days=days) + + +# Create top-level metadata +# ========================= +# Every TUF repository has at least four roles, i.e. the top-level roles +# 'targets', 'snapshot', 'timestamp' and 'root'. Below we will discuss their +# purpose, show how to create the corresponding metadata, and how to use them +# to provide integrity, consistency and freshness for the files TUF aims to +# protect, i.e. target files. + +# Common fields +# ------------- +# All roles have the same metadata container format, for which the metadata API +# provides a generic 'Metadata' class. This class has two fields, one for +# cryptographic signatures, i.e. 'signatures', and one for the payload over +# which signatures are generated, i.e. 'signed'. The payload must be an +# instance of either 'Targets', 'Snapshot', 'Timestamp' or 'Root' class. Common +# fields in all of these 'Signed' classes are: +# +# spec_version -- The supported TUF specification version number. +# version -- The metadata version number. +# expires -- The metadata expiry date. +# +# The 'version', which is incremented on each metadata change, is used to +# reference metadata from within other metadata, and thus allows for repository +# consistency in addition to protecting against rollback attacks. +# +# The date the metadata 'expires' protects against freeze attacks and allows +# for implicit key revocation. Choosing an appropriate expiration interval +# depends on the volatility of a role and how easy it is to re-sign it. +# Highly volatile roles (timestamp, snapshot, targets) usually have shorter +# expiration intervals, whereas roles that change less and might use offline +# keys (root, delegating targets) may have longer expiration intervals. + +SPEC_VERSION = ".".join(SPECIFICATION_VERSION) + +# Define containers for role objects and cryptographic keys created below. This +# allows us to sign and write metadata in a batch more easily.
+roles: Dict[str, Metadata] = {} +keys: Dict[str, Dict[str, Any]] = {} + + +# Targets (integrity) +# ------------------- +# The targets role guarantees integrity for the files that TUF aims to protect, +# i.e. target files. It does so by listing the relevant target files, along +# with their hash and length. +roles["targets"] = Metadata(Targets(expires=_in(7))) + +# For the purpose of this example we use the top-level targets role to protect +# the integrity of this very example script. The metadata entry contains the +# hash and length of this file at the local path. In addition, it specifies the +# 'target path', which a client uses to locate the target file relative to a +# configured mirror base URL. +# +# |----base URL---||-------target path-------| +# e.g. tuf-examples.org/repo_example/basic_repo.py + +local_path = Path(__file__).resolve() +target_path = f"{local_path.parts[-2]}/{local_path.parts[-1]}" + +target_file_info = TargetFile.from_file(target_path, str(local_path)) +roles["targets"].signed.targets[target_path] = target_file_info + +# Snapshot (consistency) +# ---------------------- +# The snapshot role guarantees consistency of the entire repository. It does so +# by listing all available targets metadata files at their latest version. This +# becomes relevant when there are multiple targets metadata files in a +# repository and we want to protect the client against mix-and-match attacks. +roles["snapshot"] = Metadata(Snapshot(expires=_in(7))) + +# Timestamp (freshness) +# --------------------- +# The timestamp role guarantees freshness of the repository metadata. It does +# so by listing the latest snapshot (which in turn lists all the latest +# targets) metadata. A short expiration interval requires the repository to +# regularly issue new timestamp metadata and thus protects the client against +# freeze attacks. +# +# Note that snapshot and timestamp use the same generic wireline metadata +# format. But given that timestamp metadata always has only one entry in its +# 'meta' field, i.e. for the latest snapshot file, the timestamp object +# provides the shortcut 'snapshot_meta'. +roles["timestamp"] = Metadata(Timestamp(expires=_in(1))) + +# Root (root of trust) +# -------------------- +# The root role serves as root of trust for all top-level roles, including +# itself. It does so by mapping cryptographic keys to roles, i.e. the keys that +# are authorized to sign any top-level role metadata, and signing thresholds, +# i.e. how many authorized keys are required for a given role (see 'roles' +# field). This is called top-level delegation. +# +# In addition, root provides all public keys to verify these signatures (see +# 'keys' field), and a configuration parameter that describes whether a +# repository uses consistent snapshots (see section 'Persist metadata' below +# for more details). + +# Create root metadata object +roles["root"] = Metadata(Root(expires=_in(365))) + +# For this example, we generate one 'ed25519' key pair for each top-level role +# using python-tuf's in-house crypto library. +# See https://github.com/secure-systems-lab/securesystemslib for more details +# about key handling, and don't forget to password-encrypt your private keys!
+for name in ["targets", "snapshot", "timestamp", "root"]: + keys[name] = generate_ed25519_key() + roles["root"].signed.add_key( + name, Key.from_securesystemslib_key(keys[name]) + ) + +# NOTE: We only need the public part to populate root, so it is possible to use +# out-of-band mechanisms to generate key pairs and only expose the public part +# to whoever maintains the root role. As a matter of fact, the very purpose of +# signature thresholds is to avoid having private keys all in one place. + +# Signature thresholds +# -------------------- +# Given the importance of the root role, it is highly recommended to require a +# threshold of multiple keys to sign root metadata. For this example we +# generate another root key (you can pretend it's out-of-band) and increase the +# required signature threshold. +another_root_key = generate_ed25519_key() +roles["root"].signed.add_key( + "root", Key.from_securesystemslib_key(another_root_key) +) +roles["root"].signed.roles["root"].threshold = 2 + + +# Sign top-level metadata (in-band) +# ================================= +# In this example we have access to all top-level signing keys, so we can use +# them to create and add a signature for each role's metadata. +for name in ["targets", "snapshot", "timestamp", "root"]: + key = keys[roles[name].signed.type] + signer = SSlibSigner(key) + roles[name].sign(signer) + + +# Persist metadata (consistent snapshot) +# ====================================== +# It is time to publish the first set of metadata for a client to safely +# download the target file that we have registered for this example repository. +# +# For the purpose of this example we will follow the consistent snapshot naming +# convention for all metadata. This means that each metadata file must be +# prefixed with its version number, except for timestamp. The naming convention +# also affects the target files, but we don't cover this in the example. See +# the TUF specification for more details: +# https://theupdateframework.github.io/specification/latest/#writing-consistent-snapshots +# +# Also note that the TUF specification does not mandate a wireline format. In +# this demo we use a non-compact JSON format and store all metadata in a +# temporary directory at CWD for review. +PRETTY = JSONSerializer(compact=False) +TMP_DIR = tempfile.mkdtemp(dir=os.getcwd()) + +for name in ["root", "targets", "snapshot"]: + filename = f"{roles[name].signed.version}.{roles[name].signed.type}.json" + path = os.path.join(TMP_DIR, filename) + roles[name].to_file(path, serializer=PRETTY) + +roles["timestamp"].to_file( + os.path.join(TMP_DIR, "timestamp.json"), serializer=PRETTY +) + + +# Threshold signing (out-of-band) +# =============================== +# As mentioned above, using signature thresholds usually entails that not all +# signing keys for a given role are in the same place. Let's briefly pretend +# this is the case for the second root key we registered above, and we are now +# on that key owner's computer. All the owner has to do is read the metadata +# file, sign it, and write it back to the same file, and this can be repeated +# until the threshold is satisfied.
+root_path = os.path.join(TMP_DIR, "1.root.json") +roles["root"] = Metadata.from_file(root_path) +roles["root"].sign(SSlibSigner(another_root_key), append=True) +roles["root"].to_file(root_path, serializer=PRETTY) + + +# Targets delegation +# ================== +# Similar to how the root role delegates responsibilities about integrity, +# consistency and freshness to the corresponding top-level roles, a targets +# role may further delegate its responsibility for target files (or a subset +# thereof) to other targets roles. This allows creation of a granular trust +# hierarchy, and further reduces the impact of a single role compromise. +# +# In this example the top-level targets role trusts a new "python-scripts" +# targets role to provide integrity for any target file that ends with ".py". +delegatee_name = "python-scripts" +keys[delegatee_name] = generate_ed25519_key() + +# Delegatee +# --------- +# Create a new targets role, akin to how we created top-level targets above, and +# add target file info from above according to the delegatee's responsibility. +roles[delegatee_name] = Metadata[Targets]( + signed=Targets( + version=1, + spec_version=SPEC_VERSION, + expires=_in(7), + targets={target_path: target_file_info}, + ), + signatures={}, +) + + +# Delegator +# --------- +# Akin to top-level delegation, the delegator expresses its trust in the +# delegatee by authorizing a threshold of cryptographic keys to provide +# signatures for the delegatee metadata. It also provides the corresponding +# public key store. +# The delegation info defined by the delegator further requires the provision +# of a unique delegatee name and constraints about the target files the +# delegatee is responsible for, e.g. a list of path patterns. For details about +# all configuration parameters see +# https://theupdateframework.github.io/specification/latest/#delegations +roles["targets"].signed.delegations = Delegations( + keys={ + keys[delegatee_name]["keyid"]: Key.from_securesystemslib_key( + keys[delegatee_name] + ) + }, + roles={ + delegatee_name: DelegatedRole( + name=delegatee_name, + keyids=[keys[delegatee_name]["keyid"]], + threshold=1, + terminating=True, + paths=["*.py"], + ), + }, +) + +# Remove target file info from top-level targets (delegatee is now responsible) +del roles["targets"].signed.targets[target_path] + +# Increase expiry (delegators should be less volatile) +roles["targets"].signed.expires = _in(365) + + +# Snapshot + Timestamp + Sign + Persist +# ------------------------------------- +# In order to publish a new consistent set of metadata, we need to update +# dependent roles (snapshot, timestamp) accordingly, bumping versions of all +# changed metadata. + +# Bump targets version +roles["targets"].signed.version += 1 + +# Update snapshot to account for changed and new targets metadata +roles["snapshot"].signed.meta["targets.json"].version = roles[ + "targets" +].signed.version +roles["snapshot"].signed.meta[f"{delegatee_name}.json"] = MetaFile(version=1) +roles["snapshot"].signed.version += 1 + +# Update timestamp to account for changed snapshot metadata +roles["timestamp"].signed.snapshot_meta.version = roles[ + "snapshot" +].signed.version +roles["timestamp"].signed.version += 1 + +# Sign and write metadata for all changed roles, i.e.
all but root +for role_name in ["targets", "python-scripts", "snapshot", "timestamp"]: + signer = SSlibSigner(keys[role_name]) + roles[role_name].sign(signer) + + # Prefix all but timestamp with version number (see consistent snapshot) + filename = f"{role_name}.json" + if role_name != "timestamp": + filename = f"{roles[role_name].signed.version}.{filename}" + + roles[role_name].to_file(os.path.join(TMP_DIR, filename), serializer=PRETTY) + + +# Root key rotation (recover from a compromise / key loss) +# ======================================================== +# TUF makes it easy to recover from a key compromise in-band. Given the trust +# hierarchy through top-level and targets delegation you can easily +# replace compromised or lost keys for any role using the delegating role, even +# for the root role. +# However, since root authorizes its own keys, it always has to be signed with +# both the threshold of keys from the previous version and the threshold of +# keys from the new version. This establishes a trusted line of continuity. +# +# In this example we will replace a root key, and sign a new version of root +# with the threshold of old and new keys. Since one of the previous root keys +# remains in place, it can be used to count towards the old and new threshold. +new_root_key = generate_ed25519_key() + +roles["root"].signed.remove_key("root", keys["root"]["keyid"]) +roles["root"].signed.add_key( + "root", Key.from_securesystemslib_key(new_root_key) +) +roles["root"].signed.version += 1 + +roles["root"].signatures.clear() +for key in [keys["root"], another_root_key, new_root_key]: + roles["root"].sign(SSlibSigner(key), append=True) + +roles["root"].to_file( + os.path.join(TMP_DIR, f"{roles['root'].signed.version}.root.json"), + serializer=PRETTY, +) diff --git a/examples/repo_example/hashed_bin_delegation.py b/examples/repo_example/hashed_bin_delegation.py new file mode 100644 index 0000000000..c8bc3b34b2 --- /dev/null +++ b/examples/repo_example/hashed_bin_delegation.py @@ -0,0 +1,217 @@ +""" +A TUF hash bin delegation example using the low-level TUF Metadata API. + +The example code in this file demonstrates how to *manually* perform hash bin +delegation using the low-level Metadata API. It implements similar +functionality to that of the deprecated legacy 'repository_tool' and +'repository_lib'. (see ADR-0010 for details about repository library design) + +Contents: +- Re-usable hash bin delegation helpers +- Basic hash bin delegation example + +See 'basic_repo.py' for a more comprehensive TUF metadata API example. + +NOTE: Metadata files will be written to a 'tmp*'-directory in CWD. 
+ +""" +import hashlib +import os +import tempfile +from datetime import datetime, timedelta +from pathlib import Path +from typing import Any, Dict, Iterator, List, Tuple + +from securesystemslib.keys import generate_ed25519_key +from securesystemslib.signer import SSlibSigner + +from tuf.api.metadata import ( + SPECIFICATION_VERSION, + DelegatedRole, + Delegations, + Key, + Metadata, + TargetFile, + Targets, +) +from tuf.api.serialization.json import JSONSerializer + + +def _in(days: float) -> datetime: + """Adds 'days' to now and returns datetime object w/o microseconds.""" + return datetime.utcnow().replace(microsecond=0) + timedelta(days=days) + + +SPEC_VERSION = ".".join(SPECIFICATION_VERSION) +roles: Dict[str, Metadata] = {} +keys: Dict[str, Dict[str, Any]] = {} + +# Hash bin delegation +# =================== +# Hash bin delegation allows distributing a large number of target files over +# multiple delegated targets metadata. The consequence is smaller metadata +# files and thus a lower network overhead for repository-client communication. +# +# The assignment of target files to targets metadata is done automatically, +# based on the hash of the target file name. More precisely, only a prefix of +# the target file name hash is needed to assign it to the correct hash bin. +# +# The number of bins is the only number that needs to be configured. Everything +# else is derived using the mathematical operations shown below. +# +# The right number of bins depends on the expected number of target files in a +# repository. For the purpose of this example we choose ... +NUMBER_OF_BINS = 32 # ..., which determines the length of any hash prefix +# considered for bin assignment (PREFIX_LEN), how many hash prefixes are +# covered by all bins (NUMBER_OF_PREFIXES), and how many prefixes are covered +# by each individual bin (BIN_SIZE): +# +# The prefix length is the number of digits in the hexadecimal representation +# (see 'x' in Python Format Specification) of the number of bins minus one +# (counting starts at zero), i.e. ... +PREFIX_LEN = len(f"{(NUMBER_OF_BINS - 1):x}") # ... 2. +# +# Compared to decimal, hexadecimal numbers can express higher numbers with +# fewer digits and thus further decrease metadata sizes. With the above prefix +# length of 2 we can represent at most ... +NUMBER_OF_PREFIXES = 16**PREFIX_LEN # ... 256 prefixes, i.e. 00, 01, ..., ff. +# +# If the number of bins is a power of two, hash prefixes are evenly distributed +# over all bins, which allows calculating the uniform size of ... +BIN_SIZE = NUMBER_OF_PREFIXES // NUMBER_OF_BINS # ... 8, where each bin is +# responsible for a range of 8 prefixes, i.e. 00-07, 08-0f, ..., f8-ff. + +# Helpers +# ------- +def _bin_name(low: int, high: int) -> str: + """Generates a bin name according to the hash prefixes the bin serves. + + The name is either a single hash prefix for bin size 1, or a range of hash + prefixes otherwise. The prefix length is needed to zero-left-pad the + hex representation of the hash prefix for uniform bin name lengths. + """ + if low == high: + return f"{low:0{PREFIX_LEN}x}" + + return f"{low:0{PREFIX_LEN}x}-{high:0{PREFIX_LEN}x}" + + +def generate_hash_bins() -> Iterator[Tuple[str, List[str]]]: + """Returns generator for bin names and hash prefixes per bin.""" + # Iterate over the total number of hash prefixes in 'bin size'-steps to + # generate bin names and a list of hash prefixes served by each bin.
+ for low in range(0, NUMBER_OF_PREFIXES, BIN_SIZE): + high = low + BIN_SIZE - 1 + bin_name = _bin_name(low, high) + hash_prefixes = [] + for prefix in range(low, low + BIN_SIZE): + hash_prefixes.append(f"{prefix:0{PREFIX_LEN}x}") + + yield bin_name, hash_prefixes + + +def find_hash_bin(path: str) -> str: + """Returns name of bin for target file based on the target path hash.""" + # Generate hash digest of passed target path and take its prefix, given the + # global prefix length for the given number of bins. + hasher = hashlib.sha256() + hasher.update(path.encode("utf-8")) + target_name_hash = hasher.hexdigest() + prefix = int(target_name_hash[:PREFIX_LEN], 16) + # Find lower and upper bounds for hash prefix given its numerical value and + # the general bin size for the given number of bins. + low = prefix - (prefix % BIN_SIZE) + high = low + BIN_SIZE - 1 + return _bin_name(low, high) + + +# Keys +# ---- +# Given that the primary concern of hash bin delegation is to reduce network +# overhead, it is acceptable to re-use one signing key for all delegated +# targets roles (bin-n). However, we do use a different key for the delegating +# targets role (bins). Considering the high responsibility but also low +# volatility of the bins role, it is recommended to require signature +# thresholds and keep the keys offline in a real-world scenario. + +# NOTE: See "Targets delegation" and "Signature thresholds" paragraphs in +# 'basic_repo.py' for more details +for name in ["bin-n", "bins"]: + keys[name] = generate_ed25519_key() + + +# Targets roles +# ------------- +# NOTE: See "Targets" and "Targets delegation" paragraphs in 'basic_repo.py' +# example for more details about the Targets object. + +# Create preliminary delegating targets role (bins) and add public key for +# delegated targets (bin_n) to key store. Delegation details are updated below. +roles["bins"] = Metadata(Targets(expires=_in(365))) +bin_n_key = Key.from_securesystemslib_key(keys["bin-n"]) +roles["bins"].signed.delegations = Delegations( + keys={bin_n_key.keyid: bin_n_key}, + roles={}, +) + +# The hash bin generator yields an ordered list of incremental hash bin names +# (ranges), plus the hash prefixes each bin is responsible for, e.g.: +# +# bin_n_name: 00-07 bin_n_hash_prefixes: 00 01 02 03 04 05 06 07 +# 08-0f 08 09 0a 0b 0c 0d 0e 0f +# 10-17 10 11 12 13 14 15 16 17 +# ... ... +# f8-ff f8 f9 fa fb fc fd fe ff +for bin_n_name, bin_n_hash_prefixes in generate_hash_bins(): + # Update delegating targets role (bins) with delegation details for each + # delegated targets role (bin_n). + roles["bins"].signed.delegations.roles[bin_n_name] = DelegatedRole( + name=bin_n_name, + keyids=[keys["bin-n"]["keyid"]], + threshold=1, + terminating=False, + path_hash_prefixes=bin_n_hash_prefixes, + ) + + # Create delegated targets roles (bin_n) + roles[bin_n_name] = Metadata(Targets(expires=_in(7))) + +# Add target file +# --------------- +# For the purpose of this example we will protect the integrity of this very +# example script by adding its file info to the corresponding bin metadata. + +# NOTE: See "Targets" paragraph in 'basic_repo.py' example for more details +# about adding target file infos to targets metadata.
+local_path = Path(__file__).resolve() +target_path = f"{local_path.parts[-2]}/{local_path.parts[-1]}" +target_file_info = TargetFile.from_file(target_path, str(local_path)) + +# The right bin for a target file is determined by the 'target_path' hash, e.g.: +# +# target_path: 'repo_example/hashed_bin_delegation.py' +# target_path (hash digest): '85e1a6c06305bd9c1e15c7ae565fd16ea304bfc...' +# +# --> considered hash prefix '85', falls into bin '80-87' +bin_for_target = find_hash_bin(target_path) +roles[bin_for_target].signed.targets[target_path] = target_file_info + + +# Sign and persist +# ---------------- +# Sign all metadata and persist to temporary directory at CWD for review +# (most notably see 'bins.json' and '80-87.json'). + +# NOTE: See "Persist metadata" paragraph in 'basic_repo.py' example for more +# details about serialization formats and metadata file name convention. +PRETTY = JSONSerializer(compact=False) +TMP_DIR = tempfile.mkdtemp(dir=os.getcwd()) + +for role_name, role in roles.items(): + key = keys["bins"] if role_name == "bins" else keys["bin-n"] + signer = SSlibSigner(key) + role.sign(signer) + + filename = f"{role_name}.json" + filepath = os.path.join(TMP_DIR, filename) + role.to_file(filepath, serializer=PRETTY) diff --git a/pylintrc b/pylintrc deleted file mode 100644 index 402406d245..0000000000 --- a/pylintrc +++ /dev/null @@ -1,426 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code -extension-pkg-whitelist= - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. -jobs=1 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". 
If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=parameter-unpacking, unpacking-in-except, long-suffix, old-ne-operator, old-octal-literal, import-star-module-level, raw-checker-failed, bad-inline-option, locally-disabled, locally-enabled, file-ignored, suppressed-message, useless-suppression, deprecated-pragma, apply-builtin, basestring-builtin, buffer-builtin, cmp-builtin, coerce-builtin, execfile-builtin, file-builtin, long-builtin, raw_input-builtin, reduce-builtin, standarderror-builtin, unicode-builtin, xrange-builtin, coerce-method, delslice-method, getslice-method, setslice-method, no-absolute-import, old-division, dict-iter-method, dict-view-method, next-method-called, metaclass-assignment, indexing-exception, raising-string, reload-builtin, oct-method, hex-method, nonzero-method, cmp-method, input-builtin, round-builtin, intern-builtin, unichr-builtin, map-builtin-not-iterating, zip-builtin-not-iterating, range-builtin-not-iterating, filter-builtin-not-iterating, using-cmp-argument, eq-without-hash, div-method, idiv-method, rdiv-method, exception-message-attribute, invalid-str-codec, sys-max-int, deprecated-str-translate-call, global-statement, broad-except, logging-not-lazy, C, R - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable= - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio).You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -#output-format=parseable -output-format=text - -# Tells whether to display a full report or only the messages -reports=no - -# Activate the evaluation score. 
-score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - - -[BASIC] - -# Naming hint for argument names -argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct argument names -argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Naming hint for attribute names -attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct attribute names -attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Naming hint for class attribute names -class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Regular expression matching correct class attribute names -class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Naming hint for class names -class-name-hint=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression matching correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Naming hint for constant names -const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression matching correct constant names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming hint for function names -function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct function names -function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# Naming hint for inline iteration names -inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ - -# Regular expression matching correct inline iteration names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Naming hint for method names -method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct method names -method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Naming hint for module names -module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression matching correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -property-classes=abc.abstractproperty - -# Naming hint for variable names -variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct variable names -variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. 
-max-line-length=80 - -# Maximum number of lines in a module -max-module-lines=1000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma,dict-separator - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=XXX, - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=yes - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[SPELLING] - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. 
-missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_,_cb - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_|junk - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=future.builtins - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, _fields, _replace, _source, _make, _generate_and_write_metadata, _delete_obsolete_metadata, _log_status_of_top_level_roles, _load_top_level_metadata, _strip_version_number, _delegated_roles, _remove_invalid_and_duplicate_signatures, _repository_name, _targets_directory - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in a if statement -max-bool-expr=5 - -# Maximum number of branch for function / method body -max-branches=12 - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of statements in function / method body -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,TERMIOS,Bastion,rexec - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of every (i.e. 
internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=Exception diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..cd877c5edc --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,156 @@ +[build-system] +# hatchling pinned for reproducibility: version should be kept up-to-date +requires = ["hatchling==0.22.0"] +build-backend = "hatchling.build" + +[project] +name = "tuf" +description = "A secure updater framework for Python" +readme = "README.md" +license = "MIT OR Apache-2.0" +requires-python = ">=3.7" +authors = [ + { email = "theupdateframework@googlegroups.com" }, +] +keywords = [ + "authentication", + "compromise", + "key", + "revocation", + "secure", + "update", + "updater", +] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: Implementation :: CPython", + "Topic :: Security", + "Topic :: Software Development", +] +dependencies = [ + "requests>=2.19.1", + "securesystemslib>=0.22.0", +] +dynamic = ["version"] + +[project.urls] +Documentation = "https://theupdateframework.readthedocs.io/en/stable/" +Homepage = "https://www.updateframework.com" +Issues = "https://github.com/theupdateframework/python-tuf/issues" +Source = "https://github.com/theupdateframework/python-tuf" + +[tool.hatch.version] +path = "tuf/__init__.py" + +[tool.hatch.build.targets.sdist] +include = [ + "/docs", + "/examples", + "/tests", + "/tuf", + "/requirements*.txt", + "/tox.ini", + "/setup.py", +] + +[tool.hatch.build.targets.wheel] +# The testing phase changes the current working directory to `tests` but the test scripts import +# from `tests` so the root directory must be added to Python's path for editable installations +dev-mode-dirs = ["."] + +# Black section +# Read more here: https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-via-a-file +[tool.black] +line-length=80 + +# Isort section +# Read more here: https://pycqa.github.io/isort/docs/configuration/config_files.html +[tool.isort] +profile="black" +line_length=80 +known_first_party = ["tuf"] + +# Pylint section + +# Minimal pylint configuration file for Secure Systems Lab Python Style Guide: +# https://github.com/secure-systems-lab/code-style-guidelines +# +# Based on Google Python Style Guide pylintrc and pylint defaults: +# https://google.github.io/styleguide/pylintrc +# http://pylint.pycqa.org/en/latest/technical_reference/features.html + 
+[tool.pylint.message_control] +# Disable the message, report, category or checker with the given id(s). +# NOTE: To keep this config as short as possible we only disable checks that +# are currently in conflict with our code. If new code displeases the linter +# (for good reasons) consider updating this config file, or disable checks +# with inline comments. +disable=[ + "fixme", + "too-few-public-methods", + "too-many-arguments", + "format", + "duplicate-code" +] + +[tool.pylint.basic] +good-names = ["i","j","k","v","e","f","fn","fp","_type","_"] +# Regexes for allowed names are copied from the Google pylintrc +# NOTE: Pylint captures regex name groups such as 'snake_case' or 'camel_case'. +# If there are multiple groups it enforces the prevalent naming style inside +# each module. Names in the exempt capturing group are ignored. +function-rgx="^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$" +method-rgx="(?x)^(?:(?P<exempt>_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$" +argument-rgx="^[a-z][a-z0-9_]*$" +attr-rgx="^_{0,2}[a-z][a-z0-9_]*$" +class-attribute-rgx="^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$" +class-rgx="^_?[A-Z][a-zA-Z0-9]*$" +const-rgx="^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$" +inlinevar-rgx="^[a-z][a-z0-9_]*$" +module-rgx="^(_?[a-z][a-z0-9_]*|__init__)$" +no-docstring-rgx="(__.*__|main|test.*|.*test|.*Test)$" +variable-rgx="^[a-z][a-z0-9_]*$" +docstring-min-length=10 + +[tool.pylint.logging] +logging-format-style="old" + +[tool.pylint.miscellaneous] +notes="TODO" + +[tool.pylint.STRING] +check-quote-consistency="yes" + +# mypy section +# Read more here: https://mypy.readthedocs.io/en/stable/config_file.html#using-a-pyproject-toml-file +[tool.mypy] +warn_unused_configs = "True" +warn_redundant_casts = "True" +warn_unused_ignores = "True" +warn_unreachable = "True" +strict_equality = "True" +disallow_untyped_defs = "True" +disallow_untyped_calls = "True" +show_error_codes = "True" +disable_error_code = ["attr-defined"] + +[[tool.mypy.overrides]] +module = [ + "requests.*", + "securesystemslib.*", + "urllib3.*" +] +ignore_missing_imports = "True" diff --git a/requirements-dev.txt b/requirements-dev.txt index f8748752a5..2afa895fb3 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,8 @@ # Install tuf in editable mode and requirements for local testing with tox, # and also for running test suite or individual tests manually +build tox +twine +wheel -r requirements-test.txt -e . diff --git a/requirements-docs.txt b/requirements-docs.txt index 2c2d6e97a9..8b88ee9053 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -1,4 +1,4 @@ -# Install requirements needed in for the documentation build +# Install documentation build requirements # pinned tuf runtime dependencies (should auto-update and -trigger ci/cd) -r requirements-pinned.txt @@ -6,9 +6,3 @@ # install sphinx and its extensions sphinx sphinx-rtd-theme - -# Docutils versions >=0.17.0 have incompatibilites with -# sphinx-rtd-theme and fail to render some features. -# Pin the version until readthedocs release their fix -# (readthedocs/sphinx_rtd_theme#1113).
diff --git a/requirements-pinned.txt b/requirements-pinned.txt
index 90b36f2a75..1a8cd59303 100644
--- a/requirements-pinned.txt
+++ b/requirements-pinned.txt
@@ -1,11 +1,10 @@
-certifi==2021.5.30 # via requests
-cffi==1.14.6 # via cryptography, pynacl
-charset-normalizer==2.0.4 # via requests
-cryptography==3.4.8 # via securesystemslib
-idna==3.2 # via requests
-pycparser==2.20 # via cffi
-pynacl==1.4.0 # via securesystemslib
-requests==2.26.0
-securesystemslib[crypto,pynacl]==0.21.0
-six==1.16.0 # via pynacl, securesystemslib
-urllib3==1.26.6 # via requests
+certifi==2021.10.8 # via requests
+cffi==1.15.0 # via cryptography, pynacl
+charset-normalizer==2.0.12 # via requests
+cryptography==37.0.1 # via securesystemslib
+idna==3.3 # via requests
+pycparser==2.21 # via cffi
+pynacl==1.5.0 # via securesystemslib
+requests==2.27.1
+securesystemslib[crypto,pynacl]==0.23.0
+urllib3==1.26.9 # via requests
diff --git a/requirements-test.txt b/requirements-test.txt
index fed1a6de24..7de253e2cc 100644
--- a/requirements-test.txt
+++ b/requirements-test.txt
@@ -3,14 +3,10 @@
 # pinned tuf runtime dependencies (should auto-update and -trigger ci/cd)
 -r requirements-pinned.txt
 
-# tuf.api tests use python-dateutil
-python-dateutil
-
 # additional test tools for linting and coverage measurement
-coverage
-black
-isort
-pylint
-mypy
-bandit
-types-requests
+coverage==6.3.2
+black==22.3.0
+isort==5.10.1
+pylint==2.13.8
+mypy==0.950
+bandit==1.7.4
diff --git a/requirements.txt b/requirements.txt
index 84e209f3ed..8f7cc7f09b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -26,7 +26,7 @@
 # 1. Use this script to create a pinned requirements file for each Python
 # version
 # ```
-# for v in 3.6 3.7 3.8 3.9; do
+# for v in 3.7 3.8 3.9; do
 # mkvirtualenv tuf-env-${v} -p python${v};
 # python3 -m pip install pip-tools;
 # pip-compile --no-header -o requirements-${v}.txt requirements.txt;
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 5baa2d2173..0000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,27 +0,0 @@
-[wheel]
-universal = 1
-
-[check-manifest]
-ignore =
-  requirements-dev.txt
-  .travis.yml
-  .coveragerc
-
-[mypy]
-warn_unused_configs = True
-warn_redundant_casts = True
-warn_unused_ignores = True
-warn_unreachable = True
-strict_equality = True
-disallow_untyped_defs = True
-disallow_untyped_calls = True
-files =
-  tuf/api/,
-  tuf/ngclient,
-  tuf/exceptions.py
-
-[mypy-securesystemslib.*]
-ignore_missing_imports = True
-
-[mypy-urllib3.*]
-ignore_missing_imports = True
diff --git a/setup.py b/setup.py
old mode 100755
new mode 100644
index f77fabb46a..607b26e4fb
--- a/setup.py
+++ b/setup.py
@@ -1,122 +1,4 @@
-#!/usr/bin/env python
-
-# Copyright 2013 - 2018, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  setup.py
-
-<Author>
-  Vladimir Diaz
-
-<Started>
-  March 2013.
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  BUILD SOURCE DISTRIBUTION
-
-  The following shell command generates a TUF source archive that can be
-  distributed to other users. The packaged source is saved to the 'dist'
-  folder in the current directory.
-
-  $ python3 setup.py sdist
-
-
-  INSTALLATION OPTIONS
-
-  pip - installing and managing Python packages (recommended):
-
-  # Installing from Python Package Index (https://pypi.python.org/pypi).
-  $ python3 -m pip install tuf
-
-  # Installing from local source archive.
-  $ python3 -m pip install <path to archive>
-
-  # Or from the root directory of the unpacked archive.
-  $ python3 -m pip install .
-
-  # Installing optional requirements (i.e., after installing tuf).
-  # Support for creation of Ed25519 signatures and support for RSA and ECDSA
-  # signatures in general requires optional dependencies:
-  $ python3 -m pip install securesystemslib[crypto,pynacl]
-
-
-  Alternate installation options:
-
-  Navigate to the root directory of the unpacked archive and
-  run one of the following shell commands:
-
-  Install to the global site-packages directory.
-  $ python3 setup.py install
-
-  Install to the user site-packages directory.
-  $ python3 setup.py install --user
-
-  Install to a chosen directory.
-  $ python3 setup.py install --home=<directory>
-
-
-  Note: The last two installation options may require modification of
-  Python's search path (i.e., 'sys.path') or updating an OS environment
-  variable. For example, installing to the user site-packages directory might
-  result in the installation of TUF scripts to '~/.local/bin'. The user may
-  then be required to update his $PATH variable:
-  $ export PATH=$PATH:~/.local/bin
-"""
-
+# This file exists to keep dependabot happy:
+# https://github.com/dependabot/dependabot-core/issues/4483
 from setuptools import setup
-from setuptools import find_packages
-
-
-with open('README.md') as file_object:
-  long_description = file_object.read()
-
-
-setup(
-  name = 'tuf',
-  version = '0.17.0', # If updating version, also update it in tuf/__init__.py
-  description = 'A secure updater framework for Python',
-  long_description = long_description,
-  long_description_content_type='text/markdown',
-  author = 'https://www.updateframework.com',
-  author_email = 'theupdateframework@googlegroups.com',
-  url = 'https://www.updateframework.com',
-  keywords = 'update updater secure authentication key compromise revocation',
-  classifiers = [
-    'Development Status :: 4 - Beta',
-    'Intended Audience :: Developers',
-    'License :: OSI Approved :: MIT License',
-    'License :: OSI Approved :: Apache Software License',
-    'Natural Language :: English',
-    'Operating System :: POSIX',
-    'Operating System :: POSIX :: Linux',
-    'Operating System :: MacOS :: MacOS X',
-    'Operating System :: Microsoft :: Windows',
-    'Programming Language :: Python :: 3',
-    'Programming Language :: Python :: 3.6',
-    'Programming Language :: Python :: 3.7',
-    'Programming Language :: Python :: 3.8',
-    'Programming Language :: Python :: 3.9',
-    'Programming Language :: Python :: Implementation :: CPython',
-    'Topic :: Security',
-    'Topic :: Software Development'
-  ],
-  project_urls={
-    'Source': 'https://github.com/theupdateframework/python-tuf',
-    'Issues': 'https://github.com/theupdateframework/python-tuf/issues'
-  },
-  python_requires="~=3.6",
-  install_requires = [
-    'requests>=2.19.1',
-    'securesystemslib>=0.20.0'
-  ],
-  packages = find_packages(exclude=['tests']),
-  scripts = [
-    'tuf/scripts/repo.py',
-    'tuf/scripts/client.py'
-  ]
-)
+setup()
diff --git a/tests/.coveragerc b/tests/.coveragerc
index dd9c57e8ab..2c8c989206 100644
--- a/tests/.coveragerc
+++ b/tests/.coveragerc
@@ -2,9 +2,6 @@
 branch = True
 
 omit =
- # Command-line scripts.
- */tuf/scripts/client.py - */tuf/scripts/repo.py */tests/* */site-packages/* diff --git a/tests/aggregate_tests.py b/tests/aggregate_tests.py index ad87769e3c..835ffd10ba 100755 --- a/tests/aggregate_tests.py +++ b/tests/aggregate_tests.py @@ -29,13 +29,16 @@ import sys import unittest -if __name__ == '__main__': - suite = unittest.TestLoader().discover(".") - all_tests_passed = unittest.TextTestRunner( - verbosity=1, buffer=True).run(suite).wasSuccessful() - - if not all_tests_passed: - sys.exit(1) - - else: - sys.exit(0) +if __name__ == "__main__": + suite = unittest.TestLoader().discover(".") + all_tests_passed = ( + unittest.TextTestRunner(verbosity=1, buffer=True) + .run(suite) + .wasSuccessful() + ) + + if not all_tests_passed: + sys.exit(1) + + else: + sys.exit(0) diff --git a/tests/fast_server_exit.py b/tests/fast_server_exit.py deleted file mode 100644 index b54b7b9230..0000000000 --- a/tests/fast_server_exit.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2020, TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - fast_server_exit.py - - - Martin Vrachev. - - - October 29, 2020. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Used for tests in tests/test_utils.py. -""" - -import sys - -sys.exit(0) diff --git a/tuf/client/__init__.py b/tests/generated_data/__init__.py old mode 100755 new mode 100644 similarity index 100% rename from tuf/client/__init__.py rename to tests/generated_data/__init__.py diff --git a/tests/generated_data/ed25519_metadata/root_with_ed25519.json b/tests/generated_data/ed25519_metadata/root_with_ed25519.json new file mode 100644 index 0000000000..9c6758d1f0 --- /dev/null +++ b/tests/generated_data/ed25519_metadata/root_with_ed25519.json @@ -0,0 +1,71 @@ +{ + "signatures": [ + { + "keyid": "5822582e7072996c1eef1cec24b61115d364987faa486659fe3d3dce8dae2aba", + "sig": "9589295d2b23b2e4747eba36e782f5f5775af975cee67ae75be7e0170556788253ca6919be9ed36e52a6023c49314a7d2b65a06541db6c3d06f84eaee585480b" + } + ], + "signed": { + "_type": "root", + "consistent_snapshot": true, + "expires": "2050-01-01T00:00:00Z", + "keys": { + "09d440e3725cec247dcb8703b646a87dd2a4d75343e8095c036c32795eefe3b9": { + "keytype": "ed25519", + "keyval": { + "public": "250f9ae3d1d3d5c419a73cfb4a470c01de1d5d3d61a3825416b5f5d6b88f4a30" + }, + "scheme": "ed25519" + }, + "2be5c21e3614f9f178fb49c4a34d0c18ffac30abd14ced917c60a52c8d8094b7": { + "keytype": "ed25519", + "keyval": { + "public": "0e6738fc1ac6fb4de680b4be99ecbcd99b030f3963f291277eef67bb9bd123e9" + }, + "scheme": "ed25519" + }, + "3458204ed467519c19a5316eb278b5608472a1bbf15850ebfb462d5315e4f86d": { + "keytype": "ed25519", + "keyval": { + "public": "82380623abb9666d4bf274b1a02577469445a972e5650d270101faa5107b19c8" + }, + "scheme": "ed25519" + }, + "5822582e7072996c1eef1cec24b61115d364987faa486659fe3d3dce8dae2aba": { + "keytype": "ed25519", + "keyval": { + "public": "b11d2ff132c033a657318c74c39526476c56de7556c776f11070842dbc4ac14c" + }, + "scheme": "ed25519" + } + }, + "roles": { + "root": { + "keyids": [ + "5822582e7072996c1eef1cec24b61115d364987faa486659fe3d3dce8dae2aba" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + "3458204ed467519c19a5316eb278b5608472a1bbf15850ebfb462d5315e4f86d" + ], + "threshold": 1 + }, + "targets": { + "keyids": [ + "2be5c21e3614f9f178fb49c4a34d0c18ffac30abd14ced917c60a52c8d8094b7" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "09d440e3725cec247dcb8703b646a87dd2a4d75343e8095c036c32795eefe3b9" + ], + "threshold": 1 + } + }, + 
"spec_version": "1.0.29", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/generated_data/ed25519_metadata/snapshot_with_ed25519.json b/tests/generated_data/ed25519_metadata/snapshot_with_ed25519.json new file mode 100644 index 0000000000..177e48b91b --- /dev/null +++ b/tests/generated_data/ed25519_metadata/snapshot_with_ed25519.json @@ -0,0 +1,19 @@ +{ + "signatures": [ + { + "keyid": "3458204ed467519c19a5316eb278b5608472a1bbf15850ebfb462d5315e4f86d", + "sig": "259dc96079f9b49e811b69b79253914ac5a963333eb24e5ac2d779a2d5d9d305c96c745613da59c1c48ae92d6f01a619b25066e51fc028a6ec8b56d744387006" + } + ], + "signed": { + "_type": "snapshot", + "expires": "2050-01-01T00:00:00Z", + "meta": { + "targets.json": { + "version": 1 + } + }, + "spec_version": "1.0.29", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/generated_data/ed25519_metadata/targets_with_ed25519.json b/tests/generated_data/ed25519_metadata/targets_with_ed25519.json new file mode 100644 index 0000000000..8a10ac3fc2 --- /dev/null +++ b/tests/generated_data/ed25519_metadata/targets_with_ed25519.json @@ -0,0 +1,15 @@ +{ + "signatures": [ + { + "keyid": "2be5c21e3614f9f178fb49c4a34d0c18ffac30abd14ced917c60a52c8d8094b7", + "sig": "0a83294ac2930655def327a010be1c22f15b2b229d26792e63ff974eeb3c529f4ede60fafffabbcd5db5defae63e76067781e745164a4d00af09d5a3d4cc560c" + } + ], + "signed": { + "_type": "targets", + "expires": "2050-01-01T00:00:00Z", + "spec_version": "1.0.29", + "targets": {}, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/generated_data/ed25519_metadata/timestamp_with_ed25519.json b/tests/generated_data/ed25519_metadata/timestamp_with_ed25519.json new file mode 100644 index 0000000000..c9aac6df3b --- /dev/null +++ b/tests/generated_data/ed25519_metadata/timestamp_with_ed25519.json @@ -0,0 +1,19 @@ +{ + "signatures": [ + { + "keyid": "09d440e3725cec247dcb8703b646a87dd2a4d75343e8095c036c32795eefe3b9", + "sig": "0f57e85b060f06d778c0e2bf131be3dc828beb39ffdf12776f60a9787cc5f02fdf126eb0875a5953b451bc1e88cea9b720cf180475ed0fd54615c1434de07503" + } + ], + "signed": { + "_type": "timestamp", + "expires": "2050-01-01T00:00:00Z", + "meta": { + "snapshot.json": { + "version": 1 + } + }, + "spec_version": "1.0.29", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/generated_data/generate_md.py b/tests/generated_data/generate_md.py new file mode 100644 index 0000000000..649e2bab74 --- /dev/null +++ b/tests/generated_data/generate_md.py @@ -0,0 +1,123 @@ +"""Script for generating new metadata files.""" + +# Copyright New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +import os +import sys +from datetime import datetime +from typing import Dict, List, Optional + +from securesystemslib.signer import SSlibSigner + +from tests import utils +from tuf.api.metadata import Key, Metadata, Root, Snapshot, Targets, Timestamp +from tuf.api.serialization.json import JSONSerializer + +# Hardcode keys and expiry time to achieve reproducibility. 
+public_values: List[str] = [
+    "b11d2ff132c033a657318c74c39526476c56de7556c776f11070842dbc4ac14c",
+    "250f9ae3d1d3d5c419a73cfb4a470c01de1d5d3d61a3825416b5f5d6b88f4a30",
+    "82380623abb9666d4bf274b1a02577469445a972e5650d270101faa5107b19c8",
+    "0e6738fc1ac6fb4de680b4be99ecbcd99b030f3963f291277eef67bb9bd123e9",
+]
+private_values: List[str] = [
+    "510e5e04d7a364af850533856eacdf65d30cc0f8803ecd5fdc0acc56ca2aa91c",
+    "e6645b00312c8a257782e3e61e85bafda4317ad072c52251ef933d480c387abd",
+    "cd13dd2180334b24c19b32aaf27f7e375a614d7ba0777220d5c2290bb2f9b868",
+    "7e2e751145d1b22f6e40d4ba2aa47158207acfd3c003f1cbd5a08141dfc22a15",
+]
+keyids: List[str] = [
+    "5822582e7072996c1eef1cec24b61115d364987faa486659fe3d3dce8dae2aba",
+    "09d440e3725cec247dcb8703b646a87dd2a4d75343e8095c036c32795eefe3b9",
+    "3458204ed467519c19a5316eb278b5608472a1bbf15850ebfb462d5315e4f86d",
+    "2be5c21e3614f9f178fb49c4a34d0c18ffac30abd14ced917c60a52c8d8094b7",
+]
+
+keys: Dict[str, Key] = {}
+for index in range(4):
+    keys[f"ed25519_{index}"] = Key.from_securesystemslib_key(
+        {
+            "keytype": "ed25519",
+            "scheme": "ed25519",
+            "keyid": keyids[index],
+            "keyval": {
+                "public": public_values[index],
+                "private": private_values[index],
+            },
+        }
+    )
+
+expires_str = "2050-01-01T00:00:00Z"
+EXPIRY = datetime.strptime(expires_str, "%Y-%m-%dT%H:%M:%SZ")
+OUT_DIR = "generated_data/ed25519_metadata"
+if not os.path.exists(OUT_DIR):
+    os.mkdir(OUT_DIR)
+
+SERIALIZER = JSONSerializer()
+
+
+def verify_generation(md: Metadata, path: str) -> None:
+    """Verify that newly generated file equals the locally stored one.
+
+    Args:
+        md: Newly generated metadata object.
+        path: Path to the locally stored metadata file.
+    """
+    with open(path, "rb") as f:
+        static_md_bytes = f.read()
+    md_bytes = md.to_bytes(SERIALIZER)
+    if static_md_bytes != md_bytes:
+        raise ValueError(
+            f"Generated data != local data at {path}. Generate new "
+            + "metadata with 'python generated_data/generate_md.py'"
+        )
+
+
+def generate_all_files(
+    dump: Optional[bool] = False, verify: Optional[bool] = False
+) -> None:
+    """Generate a new repository and optionally verify it.
+
+    Args:
+        dump: Whether to dump the newly generated files.
+        verify: Whether to verify the newly generated files against the
+            locally stored ones.
+    """
+    md_root = Metadata(Root(expires=EXPIRY))
+    md_timestamp = Metadata(Timestamp(expires=EXPIRY))
+    md_snapshot = Metadata(Snapshot(expires=EXPIRY))
+    md_targets = Metadata(Targets(expires=EXPIRY))
+
+    md_root.signed.add_key("root", keys["ed25519_0"])
+    md_root.signed.add_key("timestamp", keys["ed25519_1"])
+    md_root.signed.add_key("snapshot", keys["ed25519_2"])
+    md_root.signed.add_key("targets", keys["ed25519_3"])
+
+    for i, md in enumerate([md_root, md_timestamp, md_snapshot, md_targets]):
+        assert isinstance(md, Metadata)
+        signer = SSlibSigner(
+            {
+                "keytype": "ed25519",
+                "scheme": "ed25519",
+                "keyid": keyids[i],
+                "keyval": {
+                    "public": public_values[i],
+                    "private": private_values[i],
+                },
+            }
+        )
+        md.sign(signer)
+        path = os.path.join(OUT_DIR, f"{md.signed.type}_with_ed25519.json")
+        if verify:
+            verify_generation(md, path)
+
+        if dump:
+            md.to_file(path, SERIALIZER)
+
+
+if __name__ == "__main__":
+    utils.configure_test_logging(sys.argv)
+    # To generate a new set of metadata files this script is supposed to be run
+    # from the "tests" folder.
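Right before the `generate_all_files(dump=True)` call that closes the script below, note the verification path: with `verify=True` the script re-signs everything in memory and compares the result against the checked-in JSON, raising `ValueError` on any mismatch. A hedged usage sketch (the import path is an assumption based on `OUT_DIR` and the "tests" folder comment above):

```python
# Run from the "tests" directory, mirroring the comment above.
from generated_data.generate_md import generate_all_files

# Re-generate in memory and compare with the stored files instead of
# overwriting them; verify_generation() raises ValueError on mismatch.
generate_all_files(dump=False, verify=True)
```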
+ generate_all_files(dump=True) diff --git a/tests/repository_data/fishy_rolenames/1.a.json b/tests/repository_data/fishy_rolenames/1.a.json new file mode 100644 index 0000000000..a55173a269 --- /dev/null +++ b/tests/repository_data/fishy_rolenames/1.a.json @@ -0,0 +1,15 @@ +{ + "signatures": [ + { + "keyid": "f9b50dd62b5540788b5c5cde0842124b64fa467261bc349dd77de49568eed0ef", + "sig": "a36aa69e0c35d8b5b9578bc656ce5d8a76ea05a2c814f59cc710a11f5e3fe6c7bcbef2bfba4812e3b2936f99e89f10862f6320c901e213f1343e79525474920a" + } + ], + "signed": { + "_type": "targets", + "expires": "2050-10-22T11:21:56Z", + "spec_version": "1.0.19", + "targets": {}, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/fishy_rolenames/metadata/1...json b/tests/repository_data/fishy_rolenames/metadata/1...json new file mode 100644 index 0000000000..e5ae82eeb1 --- /dev/null +++ b/tests/repository_data/fishy_rolenames/metadata/1...json @@ -0,0 +1,15 @@ +{ + "signatures": [ + { + "keyid": "80a5bda93ec130c2fda8ce0c619d7b122b24cc2e0743afedf98a8e368d32019c", + "sig": "8fff438c2347dd7c4fb94c43ec347bcd6b0e79521bd11d95121cb8cc25723efa38565a959a6123da0a2375a2093e53f13a5412df9e51397e06b313837d0d590c" + } + ], + "signed": { + "_type": "targets", + "expires": "2050-10-22T11:21:56Z", + "spec_version": "1.0.19", + "targets": {}, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/fishy_rolenames/metadata/1.root.json b/tests/repository_data/fishy_rolenames/metadata/1.root.json new file mode 100644 index 0000000000..69cc04c4a4 --- /dev/null +++ b/tests/repository_data/fishy_rolenames/metadata/1.root.json @@ -0,0 +1,71 @@ +{ + "signatures": [ + { + "keyid": "72b70899257dc30b596af3a9fe141a924af821aff28ed58d1aea0db9f70a70f7", + "sig": "53ae844137dd04abf9d3ed10380ba46fa2726f328963ffe006aa955804afa3b0d100bc59610c1584234a9598ab4b9af762b533174b8b8d8aaf2be8e413c1b304" + } + ], + "signed": { + "_type": "root", + "consistent_snapshot": true, + "expires": "2050-10-22T11:21:56Z", + "keys": { + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { + "keytype": "ed25519", + "keyval": { + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" + }, + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { + "keytype": "ed25519", + "keyval": { + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" + }, + "72b70899257dc30b596af3a9fe141a924af821aff28ed58d1aea0db9f70a70f7": { + "keytype": "ed25519", + "keyval": { + "public": "3ba219e69666298bce5d1d653a166346aef807c02e32a846aaefcb5190fddeb4" + }, + "scheme": "ed25519" + }, + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { + "keytype": "ed25519", + "keyval": { + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" + } + }, + "roles": { + "root": { + "keyids": [ + "72b70899257dc30b596af3a9fe141a924af821aff28ed58d1aea0db9f70a70f7" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" + ], + "threshold": 1 + }, + "targets": { + "keyids": [ + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" + ], + "threshold": 1 + } + }, + "spec_version": "1.0.19", + "version": 1 + } +} \ No newline at end of file diff --git 
a/tests/repository_data/fishy_rolenames/metadata/1.targets.json b/tests/repository_data/fishy_rolenames/metadata/1.targets.json new file mode 100644 index 0000000000..285b3f4009 --- /dev/null +++ b/tests/repository_data/fishy_rolenames/metadata/1.targets.json @@ -0,0 +1,75 @@ +{ + "signatures": [ + { + "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", + "sig": "b390c5d9d5355b963e94dfa30ce04520c462fd869fad968d01f0a3b185db5895807b14435e725ff376adc793fd21ef8f01890ac722c94e9c05ab3797c4887101" + } + ], + "signed": { + "_type": "targets", + "delegations": { + "keys": { + "426edf0d9fa383688c5b40b7b7d15a7cd11a991f12cc20da87f1b48dd6c036a1": { + "keytype": "ed25519", + "keyval": { + "public": "d38eef769f6dee77b6d898dce548c0ea0f90add0072dc28a20769b6421552ec3" + }, + "scheme": "ed25519" + }, + "80a5bda93ec130c2fda8ce0c619d7b122b24cc2e0743afedf98a8e368d32019c": { + "keytype": "ed25519", + "keyval": { + "public": "bb256c0b6d5226a5a9ae8377c0bf68e958fb668d063971f48638b9bae5251f3b" + }, + "scheme": "ed25519" + }, + "f9b50dd62b5540788b5c5cde0842124b64fa467261bc349dd77de49568eed0ef": { + "keytype": "ed25519", + "keyval": { + "public": "da1b8586dc0cdd5fe0d8d428bde62dc63e06138f58cfc39770c424a4636f59f4" + }, + "scheme": "ed25519" + } + }, + "roles": [ + { + "keyids": [ + "f9b50dd62b5540788b5c5cde0842124b64fa467261bc349dd77de49568eed0ef" + ], + "name": "../a", + "paths": [ + "*" + ], + "terminating": false, + "threshold": 1 + }, + { + "keyids": [ + "80a5bda93ec130c2fda8ce0c619d7b122b24cc2e0743afedf98a8e368d32019c" + ], + "name": ".", + "paths": [ + "*" + ], + "terminating": false, + "threshold": 1 + }, + { + "keyids": [ + "426edf0d9fa383688c5b40b7b7d15a7cd11a991f12cc20da87f1b48dd6c036a1" + ], + "name": "\u00f6", + "paths": [ + "*" + ], + "terminating": false, + "threshold": 1 + } + ] + }, + "expires": "2050-10-22T11:21:56Z", + "spec_version": "1.0.19", + "targets": {}, + "version": 1 + } +} \ No newline at end of file diff --git "a/tests/repository_data/fishy_rolenames/metadata/1.\303\266.json" "b/tests/repository_data/fishy_rolenames/metadata/1.\303\266.json" new file mode 100644 index 0000000000..e6aa8023ca --- /dev/null +++ "b/tests/repository_data/fishy_rolenames/metadata/1.\303\266.json" @@ -0,0 +1,15 @@ +{ + "signatures": [ + { + "keyid": "426edf0d9fa383688c5b40b7b7d15a7cd11a991f12cc20da87f1b48dd6c036a1", + "sig": "faada7f8c9a238955d5b27dbd88032a6c9068742cb114a66f97c730235a8033dd1ff0647f4bbc2b49210c33655a3d7755e754e245799683b3f4e00a59f3da006" + } + ], + "signed": { + "_type": "targets", + "expires": "2050-10-22T11:21:56Z", + "spec_version": "1.0.19", + "targets": {}, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/fishy_rolenames/metadata/2.snapshot.json b/tests/repository_data/fishy_rolenames/metadata/2.snapshot.json new file mode 100644 index 0000000000..bf91cab320 --- /dev/null +++ b/tests/repository_data/fishy_rolenames/metadata/2.snapshot.json @@ -0,0 +1,28 @@ +{ + "signatures": [ + { + "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", + "sig": "5b00100e9cf1c083f8347371ab840cf60124780305124ed7a53fe31bf43473c90b1d2c802ed2f11f5057ba21e6b7a05118b1907f737d2e29c9692aa3345f9801" + } + ], + "signed": { + "_type": "snapshot", + "expires": "2050-10-22T11:21:56Z", + "meta": { + "../a.json": { + "version": 1 + }, + "..json": { + "version": 1 + }, + "targets.json": { + "version": 1 + }, + "\u00f6.json": { + "version": 1 + } + }, + "spec_version": "1.0.19", + "version": 2 + } +} \ No newline at end of file diff --git 
a/tests/repository_data/fishy_rolenames/metadata/timestamp.json b/tests/repository_data/fishy_rolenames/metadata/timestamp.json new file mode 100644 index 0000000000..6bde92c126 --- /dev/null +++ b/tests/repository_data/fishy_rolenames/metadata/timestamp.json @@ -0,0 +1,19 @@ +{ + "signatures": [ + { + "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", + "sig": "f7003e848366c7e55f474df2c0c68471c44c68a87c0d3c1aa56f64778c91e9c8f22c3adc4dd9ec0535b6b4dc04783f7fa4ca992bed2445c7395a58acff152f0d" + } + ], + "signed": { + "_type": "timestamp", + "expires": "2050-10-22T11:21:56Z", + "meta": { + "snapshot.json": { + "version": 2 + } + }, + "spec_version": "1.0.19", + "version": 2 + } +} \ No newline at end of file diff --git a/tests/repository_data/generate.py b/tests/repository_data/generate.py deleted file mode 100755 index e131329ec9..0000000000 --- a/tests/repository_data/generate.py +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - generate.py - - - Vladimir Diaz - - - February 26, 2014. - - - See LICENSE-MIT.txt OR LICENSE-APACHE.txt for licensing information. - - - Provide a set of pre-generated key files and a basic repository that unit - tests can use in their test cases. The pre-generated files created by this - script should be copied by the unit tests as needed. The original versions - should be preserved. 'tuf/tests/repository_data/' will store the files - generated. 'generate.py' should not require re-execution if the - pre-generated repository files have already been created, unless they need to - change in some way. -""" - -import shutil -import datetime -import optparse -import stat - -from tuf.repository_tool import * - -import securesystemslib - -parser = optparse.OptionParser() -parser.add_option("-k","--keys", action='store_true', dest="should_generate_keys", - help="Generate a new set of keys", default=False) -parser.add_option("-d","--dry-run", action='store_true', dest="dry_run", - help="Do not write the files, just run", default=False) -(options, args) = parser.parse_args() - - -repository = create_new_repository('repository') - -root_key_file = 'keystore/root_key' -targets_key_file = 'keystore/targets_key' -snapshot_key_file = 'keystore/snapshot_key' -timestamp_key_file = 'keystore/timestamp_key' -delegation_key_file = 'keystore/delegation_key' - - -if options.should_generate_keys and not options.dry_run: - # Generate and save the top-level role keys, including the delegated roles. - # The unit tests should only have to import the keys they need from these - # pre-generated key files. - # Generate public and private key files for the top-level roles, and two - # delegated roles (these number of keys should be sufficient for most of the - # unit tests). Unit tests may generate additional keys, if needed. - generate_and_write_rsa_keypair(password='password', filepath=root_key_file) - generate_and_write_ed25519_keypair(password='password', filepath=targets_key_file) - generate_and_write_ed25519_keypair(password='password', filepath=snapshot_key_file) - generate_and_write_ed25519_keypair(password='password', filepath=timestamp_key_file) - generate_and_write_ed25519_keypair(password='password', filepath=delegation_key_file) - -# Import the public keys. These keys are needed so that metadata roles are -# assigned verification keys, which clients use to verify the signatures created -# by the corresponding private keys. 
-root_public = import_rsa_publickey_from_file(root_key_file + '.pub') -targets_public = import_ed25519_publickey_from_file(targets_key_file + '.pub') -snapshot_public = import_ed25519_publickey_from_file(snapshot_key_file + '.pub') -timestamp_public = import_ed25519_publickey_from_file(timestamp_key_file + '.pub') -delegation_public = import_ed25519_publickey_from_file(delegation_key_file + '.pub') - -# Import the private keys. These private keys are needed to generate the -# signatures included in metadata. -root_private = import_rsa_privatekey_from_file(root_key_file, 'password') -targets_private = import_ed25519_privatekey_from_file(targets_key_file, 'password') -snapshot_private = import_ed25519_privatekey_from_file(snapshot_key_file, 'password') -timestamp_private = import_ed25519_privatekey_from_file(timestamp_key_file, 'password') -delegation_private = import_ed25519_privatekey_from_file(delegation_key_file, 'password') - -# Add the verification keys to the top-level roles. -repository.root.add_verification_key(root_public) -repository.targets.add_verification_key(targets_public) -repository.snapshot.add_verification_key(snapshot_public) -repository.timestamp.add_verification_key(timestamp_public) - -# Load the signing keys, previously imported, for the top-level roles so that -# valid metadata can be written. -repository.root.load_signing_key(root_private) -repository.targets.load_signing_key(targets_private) -repository.snapshot.load_signing_key(snapshot_private) -repository.timestamp.load_signing_key(timestamp_private) - -# Create the target files (downloaded by clients) whose file size and digest -# are specified in the 'targets.json' file. -target1_filepath = 'repository/targets/file1.txt' -securesystemslib.util.ensure_parent_dir(target1_filepath) -target2_filepath = 'repository/targets/file2.txt' -securesystemslib.util.ensure_parent_dir(target2_filepath) -target3_filepath = 'repository/targets/file3.txt' -securesystemslib.util.ensure_parent_dir(target2_filepath) - -if not options.dry_run: - with open(target1_filepath, 'wt') as file_object: - file_object.write('This is an example target file.') - # As we will add this file's permissions to the custom_attribute in the - # target's metadata we need to ensure that the file has the same - # permissions when created by this script regardless of umask value on - # the host system generating the data - os.chmod(target1_filepath, 0o644) - - with open(target2_filepath, 'wt') as file_object: - file_object.write('This is an another example target file.') - - with open(target3_filepath, 'wt') as file_object: - file_object.write('This is role1\'s target file.') - -# Add target files to the top-level 'targets.json' role. These target files -# should already exist. 'target1_filepath' contains additional information -# about the target (i.e., file permissions in octal format.) 
-octal_file_permissions = oct(os.stat(target1_filepath).st_mode)[4:] -file_permissions = {'file_permissions': octal_file_permissions} -repository.targets.add_target(os.path.basename(target1_filepath), file_permissions) -repository.targets.add_target(os.path.basename(target2_filepath)) - -repository.targets.delegate('role1', [delegation_public], - [os.path.basename(target3_filepath)]) -repository.targets('role1').add_target(os.path.basename(target3_filepath)) -repository.targets('role1').load_signing_key(delegation_private) - -repository.targets('role1').delegate('role2', [delegation_public], []) -repository.targets('role2').load_signing_key(delegation_private) - -# Set the top-level expiration times far into the future so that -# they do not expire anytime soon, or else the tests fail. Unit tests may -# modify the expiration datetimes (of the copied files), if they wish. -repository.root.expiration = datetime.datetime(2030, 1, 1, 0, 0) -repository.targets.expiration = datetime.datetime(2030, 1, 1, 0, 0) -repository.snapshot.expiration = datetime.datetime(2030, 1, 1, 0, 0) -repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 0, 0) -repository.targets('role1').expiration = datetime.datetime(2030, 1, 1, 0, 0) -repository.targets('role2').expiration = datetime.datetime(2030, 1, 1, 0, 0) - -# Create the actual metadata files, which are saved to 'metadata.staged'. -if not options.dry_run: - repository.writeall() - -# Move the staged.metadata to 'metadata' and create the client folder. The -# client folder, which includes the required directory structure and metadata -# files for clients to successfully load an 'tuf.client.updater.py' object. -staged_metadata_directory = 'repository/metadata.staged' -metadata_directory = 'repository/metadata' -if not options.dry_run: - shutil.copytree(staged_metadata_directory, metadata_directory) - -# Create the client files (required directory structure and minimal metadata) -# as expected by 'tuf.client.updater'. -if not options.dry_run: - create_tuf_client_directory('repository', os.path.join('client', 'test_repository1')) diff --git a/tests/repository_data/generate_project_data.py b/tests/repository_data/generate_project_data.py deleted file mode 100755 index fc93fd594e..0000000000 --- a/tests/repository_data/generate_project_data.py +++ /dev/null @@ -1,109 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - generate_project_data.py - - - Santiago Torres - - - January 22, 2014. - - - See LICENSE-MIT.txt OR LICENSE-APACHE.txt for licensing information. - - - Generate a pre-fabricated set of metadata files for 'test_developer_tool.py' - test cases. -""" - -import shutil -import datetime -import optparse -import os - -from tuf.developer_tool import * - -import securesystemslib - -parser = optparse.OptionParser() - -parser.add_option("-d","--dry-run", action='store_true', dest="dry_run", - help="Do not write the files, just run", default=False) -(options, args) = parser.parse_args() - - -project_key_file = 'keystore/root_key' -targets_key_file = 'keystore/targets_key' -delegation_key_file = 'keystore/delegation_key' - -# The files we use for signing in the unit tests should exist, if they are not -# populated, run 'generate.py'. -assert os.path.exists(project_key_file) -assert os.path.exists(targets_key_file) -assert os.path.exists(delegation_key_file) - -# Import the public keys. 
These keys are needed so that metadata roles are -# assigned verification keys, which clients use to verify the signatures created -# by the corresponding private keys. -project_public = import_rsa_publickey_from_file(project_key_file + '.pub') -targets_public = import_ed25519_publickey_from_file(targets_key_file + '.pub') -delegation_public = import_ed25519_publickey_from_file(delegation_key_file + '.pub') - -# Import the private keys. These private keys are needed to generate the -# signatures included in metadata. -project_private = import_rsa_privatekey_from_file(project_key_file, 'password') -targets_private = import_ed25519_privatekey_from_file(targets_key_file, 'password') -delegation_private = import_ed25519_privatekey_from_file(delegation_key_file, 'password') - -os.mkdir("project") -os.mkdir("project/targets") - -# Create the target files (downloaded by clients) whose file size and digest -# are specified in the 'targets.json' file. -target1_filepath = 'project/targets/file1.txt' -securesystemslib.util.ensure_parent_dir(target1_filepath) -target2_filepath = 'project/targets/file2.txt' -securesystemslib.util.ensure_parent_dir(target2_filepath) -target3_filepath = 'project/targets/file3.txt' -securesystemslib.util.ensure_parent_dir(target2_filepath) - -if not options.dry_run: - with open(target1_filepath, 'wt') as file_object: - file_object.write('This is an example target file.') - - with open(target2_filepath, 'wt') as file_object: - file_object.write('This is an another example target file.') - - with open(target3_filepath, 'wt') as file_object: - file_object.write('This is role1\'s target file.') - - -project = create_new_project("test-flat", 'project/test-flat', 'prefix', 'project/targets') - -# Add target files to the top-level projects role. These target files should -# already exist. -project.add_target('file1.txt') -project.add_target('file2.txt') - -# Add one key to the project. -project.add_verification_key(project_public) -project.load_signing_key(project_private) - -# Add the delegated role keys. -project.delegate('role1', [delegation_public], [target3_filepath]) -project('role1').load_signing_key(delegation_private) - -# Set the project expiration time far into the future so that its metadata does -# not expire anytime soon, or else the tests fail. Unit tests may modify the -# expiration datetimes (of the copied files), if they wish. -project.expiration = datetime.datetime(2030, 1, 1, 0, 0) -project('role1').expiration = datetime.datetime(2030, 1, 1, 0, 0) - -# Create the actual metadata files, which are saved to 'metadata.staged'. -if not options.dry_run: - project.write() diff --git a/tests/repository_simulator.py b/tests/repository_simulator.py new file mode 100644 index 0000000000..ae1ad3e6ca --- /dev/null +++ b/tests/repository_simulator.py @@ -0,0 +1,397 @@ +#!/usr/bin/env python + +# Copyright 2021, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +""""Test utility to simulate a repository + +RepositorySimulator provides methods to modify repository metadata so that it's +easy to "publish" new repository versions with modified metadata, while serving +the versions to client test code. + +RepositorySimulator implements FetcherInterface so Updaters in tests can use it +as a way to "download" new metadata from remote: in practice no downloading, +network connections or even file access happens as RepositorySimulator serves +everything from memory. 
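Because `RepositorySimulator` plugs in as a `FetcherInterface`, the only contract it has to honor is `_fetch()`. A stripped-down in-memory fetcher, sketched here with the same exception and interface names this module uses, shows the minimal shape:

```python
from typing import Dict, Iterator

from tuf.api.exceptions import DownloadHTTPError
from tuf.ngclient.fetcher import FetcherInterface


class CannedFetcher(FetcherInterface):
    """Serve pre-baked byte responses from a dict instead of the network."""

    def __init__(self, responses: Dict[str, bytes]) -> None:
        self.responses = responses

    def _fetch(self, url: str) -> Iterator[bytes]:
        if url not in self.responses:
            raise DownloadHTTPError(f"Unknown path '{url}'", 404)
        yield self.responses[url]
```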
+ +Metadata and targets "hosted" by the simulator are made available in URL paths +"/metadata/..." and "/targets/..." respectively. + +Example:: + + # constructor creates repository with top-level metadata + sim = RepositorySimulator() + + # metadata can be modified directly: it is immediately available to clients + sim.snapshot.version += 1 + + # As an exception, new root versions require explicit publishing + sim.root.version += 1 + sim.publish_root() + + # there are helper functions + sim.add_target("targets", b"content", "targetpath") + sim.targets.version += 1 + sim.update_snapshot() + + # Use the simulated repository from an Updater: + updater = Updater( + dir, + "https://example.com/metadata/", + "https://example.com/targets/", + sim + ) + updater.refresh() +""" + +import datetime +import logging +import os +import tempfile +from dataclasses import dataclass, field +from typing import Dict, Iterator, List, Optional, Tuple +from urllib import parse + +import securesystemslib.hash as sslib_hash +from securesystemslib.keys import generate_ed25519_key +from securesystemslib.signer import SSlibSigner + +from tuf.api.exceptions import DownloadHTTPError +from tuf.api.metadata import ( + SPECIFICATION_VERSION, + TOP_LEVEL_ROLE_NAMES, + DelegatedRole, + Delegations, + Key, + Metadata, + MetaFile, + Root, + Snapshot, + TargetFile, + Targets, + Timestamp, +) +from tuf.api.serialization.json import JSONSerializer +from tuf.ngclient.fetcher import FetcherInterface + +logger = logging.getLogger(__name__) + +SPEC_VER = ".".join(SPECIFICATION_VERSION) + + +@dataclass +class FetchTracker: + """Fetcher counter for metadata and targets.""" + + metadata: List[Tuple[str, Optional[int]]] = field(default_factory=list) + targets: List[Tuple[str, Optional[str]]] = field(default_factory=list) + + +@dataclass +class RepositoryTarget: + """Contains actual target data and the related target metadata.""" + + data: bytes + target_file: TargetFile + + +class RepositorySimulator(FetcherInterface): + """Simulates a repository that can be used for testing.""" + + # pylint: disable=too-many-instance-attributes + def __init__(self) -> None: + self.md_delegates: Dict[str, Metadata[Targets]] = {} + + # other metadata is signed on-demand (when fetched) but roots must be + # explicitly published with publish_root() which maintains this list + self.signed_roots: List[bytes] = [] + + # signers are used on-demand at fetch time to sign metadata + # keys are roles, values are dicts of {keyid: signer} + self.signers: Dict[str, Dict[str, SSlibSigner]] = {} + + # target downloads are served from this dict + self.target_files: Dict[str, RepositoryTarget] = {} + + # Whether to compute hashes and length for meta in snapshot/timestamp + self.compute_metafile_hashes_length = False + + # Enable hash-prefixed target file names + self.prefix_targets_with_hash = True + + self.dump_dir: Optional[str] = None + self.dump_version = 0 + + self.fetch_tracker = FetchTracker() + + now = datetime.datetime.utcnow() + self.safe_expiry = now.replace(microsecond=0) + datetime.timedelta( + days=30 + ) + + self._initialize() + + @property + def root(self) -> Root: + return self.md_root.signed + + @property + def timestamp(self) -> Timestamp: + return self.md_timestamp.signed + + @property + def snapshot(self) -> Snapshot: + return self.md_snapshot.signed + + @property + def targets(self) -> Targets: + return self.md_targets.signed + + def all_targets(self) -> Iterator[Tuple[str, Targets]]: + """Yield role name and signed portion of targets one by one.""" 
+ yield Targets.type, self.md_targets.signed + for role, md in self.md_delegates.items(): + yield role, md.signed + + @staticmethod + def create_key() -> Tuple[Key, SSlibSigner]: + sslib_key = generate_ed25519_key() + return Key.from_securesystemslib_key(sslib_key), SSlibSigner(sslib_key) + + def add_signer(self, role: str, signer: SSlibSigner) -> None: + if role not in self.signers: + self.signers[role] = {} + self.signers[role][signer.key_dict["keyid"]] = signer + + def rotate_keys(self, role: str) -> None: + """remove all keys for role, then add threshold of new keys""" + self.root.roles[role].keyids.clear() + self.signers[role].clear() + for _ in range(0, self.root.roles[role].threshold): + key, signer = self.create_key() + self.root.add_key(role, key) + self.add_signer(role, signer) + + def _initialize(self) -> None: + """Setup a minimal valid repository.""" + + self.md_targets = Metadata(Targets(expires=self.safe_expiry)) + self.md_snapshot = Metadata(Snapshot(expires=self.safe_expiry)) + self.md_timestamp = Metadata(Timestamp(expires=self.safe_expiry)) + self.md_root = Metadata(Root(expires=self.safe_expiry)) + + for role in TOP_LEVEL_ROLE_NAMES: + key, signer = self.create_key() + self.md_root.signed.add_key(role, key) + self.add_signer(role, signer) + + self.publish_root() + + def publish_root(self) -> None: + """Sign and store a new serialized version of root.""" + self.md_root.signatures.clear() + for signer in self.signers[Root.type].values(): + self.md_root.sign(signer, append=True) + + self.signed_roots.append(self.md_root.to_bytes(JSONSerializer())) + logger.debug("Published root v%d", self.root.version) + + def _fetch(self, url: str) -> Iterator[bytes]: + """Fetches data from the given url and returns an Iterator (or yields + bytes). + """ + path = parse.urlparse(url).path + if path.startswith("/metadata/") and path.endswith(".json"): + # figure out rolename and version + ver_and_name = path[len("/metadata/") :][: -len(".json")] + version_str, _, role = ver_and_name.partition(".") + # root is always version-prefixed while timestamp is always NOT + if role == Root.type or ( + self.root.consistent_snapshot and ver_and_name != Timestamp.type + ): + version: Optional[int] = int(version_str) + else: + # the file is not version-prefixed + role = ver_and_name + version = None + + yield self.fetch_metadata(role, version) + elif path.startswith("/targets/"): + # figure out target path and hash prefix + target_path = path[len("/targets/") :] + dir_parts, sep, prefixed_filename = target_path.rpartition("/") + # extract the hash prefix, if any + prefix: Optional[str] = None + filename = prefixed_filename + if self.root.consistent_snapshot and self.prefix_targets_with_hash: + prefix, _, filename = prefixed_filename.partition(".") + target_path = f"{dir_parts}{sep}{filename}" + + yield self.fetch_target(target_path, prefix) + else: + raise DownloadHTTPError(f"Unknown path '{path}'", 404) + + def fetch_target( + self, target_path: str, target_hash: Optional[str] + ) -> bytes: + """Return data for 'target_path', checking 'target_hash' if it is given. + + If hash is None, then consistent_snapshot is not used. 
+ """ + self.fetch_tracker.targets.append((target_path, target_hash)) + + repo_target = self.target_files.get(target_path) + if repo_target is None: + raise DownloadHTTPError(f"No target {target_path}", 404) + if ( + target_hash + and target_hash not in repo_target.target_file.hashes.values() + ): + raise DownloadHTTPError(f"hash mismatch for {target_path}", 404) + + logger.debug("fetched target %s", target_path) + return repo_target.data + + def fetch_metadata(self, role: str, version: Optional[int] = None) -> bytes: + """Return signed metadata for 'role', using 'version' if it is given. + + If version is None, non-versioned metadata is being requested. + """ + self.fetch_tracker.metadata.append((role, version)) + # decode role for the metadata + role = parse.unquote(role, encoding="utf-8") + + if role == Root.type: + # return a version previously serialized in publish_root() + if version is None or version > len(self.signed_roots): + raise DownloadHTTPError(f"Unknown root version {version}", 404) + logger.debug("fetched root version %d", version) + return self.signed_roots[version - 1] + + # sign and serialize the requested metadata + md: Optional[Metadata] + if role == Timestamp.type: + md = self.md_timestamp + elif role == Snapshot.type: + md = self.md_snapshot + elif role == Targets.type: + md = self.md_targets + else: + md = self.md_delegates.get(role) + + if md is None: + raise DownloadHTTPError(f"Unknown role {role}", 404) + + md.signatures.clear() + for signer in self.signers[role].values(): + md.sign(signer, append=True) + + logger.debug( + "fetched %s v%d with %d sigs", + role, + md.signed.version, + len(self.signers[role]), + ) + return md.to_bytes(JSONSerializer()) + + def _compute_hashes_and_length( + self, role: str + ) -> Tuple[Dict[str, str], int]: + data = self.fetch_metadata(role) + digest_object = sslib_hash.digest(sslib_hash.DEFAULT_HASH_ALGORITHM) + digest_object.update(data) + hashes = {sslib_hash.DEFAULT_HASH_ALGORITHM: digest_object.hexdigest()} + return hashes, len(data) + + def update_timestamp(self) -> None: + """Update timestamp and assign snapshot version to snapshot_meta + version. 
+ """ + + hashes = None + length = None + if self.compute_metafile_hashes_length: + hashes, length = self._compute_hashes_and_length(Snapshot.type) + + self.timestamp.snapshot_meta = MetaFile( + self.snapshot.version, length, hashes + ) + + self.timestamp.version += 1 + + def update_snapshot(self) -> None: + """Update snapshot, assign targets versions and update timestamp.""" + for role, delegate in self.all_targets(): + hashes = None + length = None + if self.compute_metafile_hashes_length: + hashes, length = self._compute_hashes_and_length(role) + + self.snapshot.meta[f"{role}.json"] = MetaFile( + delegate.version, length, hashes + ) + + self.snapshot.version += 1 + self.update_timestamp() + + def add_target(self, role: str, data: bytes, path: str) -> None: + """Create a target from data and add it to the target_files.""" + if role == Targets.type: + targets = self.targets + else: + targets = self.md_delegates[role].signed + + target = TargetFile.from_data(path, data, ["sha256"]) + targets.targets[path] = target + self.target_files[path] = RepositoryTarget(data, target) + + def add_delegation( + self, delegator_name: str, role: DelegatedRole, targets: Targets + ) -> None: + """Add delegated target role to the repository.""" + if delegator_name == Targets.type: + delegator = self.targets + else: + delegator = self.md_delegates[delegator_name].signed + + # Create delegation + if delegator.delegations is None: + delegator.delegations = Delegations({}, {}) + # put delegation last by default + delegator.delegations.roles[role.name] = role + + # By default add one new key for the role + key, signer = self.create_key() + delegator.add_key(role.name, key) + self.add_signer(role.name, signer) + + # Add metadata for the role + if role.name not in self.md_delegates: + self.md_delegates[role.name] = Metadata(targets, {}) + + def write(self) -> None: + """Dump current repository metadata to self.dump_dir + + This is a debugging tool: dumping repository state before running + Updater refresh may be useful while debugging a test. + """ + if self.dump_dir is None: + self.dump_dir = tempfile.mkdtemp() + print(f"Repository Simulator dumps in {self.dump_dir}") + + self.dump_version += 1 + dest_dir = os.path.join(self.dump_dir, str(self.dump_version)) + os.makedirs(dest_dir) + + for ver in range(1, len(self.signed_roots) + 1): + with open(os.path.join(dest_dir, f"{ver}.root.json"), "wb") as f: + f.write(self.fetch_metadata(Root.type, ver)) + + for role in [Timestamp.type, Snapshot.type, Targets.type]: + with open(os.path.join(dest_dir, f"{role}.json"), "wb") as f: + f.write(self.fetch_metadata(role)) + + for role in self.md_delegates: + quoted_role = parse.quote(role, "") + with open(os.path.join(dest_dir, f"{quoted_role}.json"), "wb") as f: + f.write(self.fetch_metadata(role)) diff --git a/tests/simple_https_server.py b/tests/simple_https_server.py deleted file mode 100755 index 29b6e40ab7..0000000000 --- a/tests/simple_https_server.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - simple_https_server.py - - - Vladimir Diaz. - - - June 17, 2014 - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Provide a simple https server that can be used by the unit tests. For - example, 'download.py' can connect to the https server started by this module - to verify that https downloads are permitted. 
- - - ssl.wrap_socket: - https://docs.python.org/2/library/ssl.html#functions-constants-and-exceptions - - SimpleHTTPServer: - http://docs.python.org/library/simplehttpserver.html#module-SimpleHTTPServer -""" - -import sys -import ssl -import os -import http.server - -keyfile = os.path.join('ssl_certs', 'ssl_cert.key') -certfile = os.path.join('ssl_certs', 'ssl_cert.crt') - - -if len(sys.argv) > 1 and os.path.exists(sys.argv[1]): - certfile = sys.argv[1] - -httpd = http.server.HTTPServer(('localhost', 0), - http.server.SimpleHTTPRequestHandler) - -httpd.socket = ssl.wrap_socket( - httpd.socket, keyfile=keyfile, certfile=certfile, server_side=True) - -port_message = 'bind succeeded, server port is: ' \ - + str(httpd.server_address[1]) -print(port_message) - -if len(sys.argv) > 1 and certfile != sys.argv[1]: - print('simple_https_server: cert file was not found: ' + sys.argv[1] + - '; using default: ' + certfile + " certfile") - -httpd.serve_forever() diff --git a/tests/simple_server.py b/tests/simple_server.py index 74e84f0d80..08166736f5 100755 --- a/tests/simple_server.py +++ b/tests/simple_server.py @@ -3,62 +3,16 @@ # Copyright 2012 - 2017, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 -""" - - simple_server.py +"""Simple HTTP server for python-tuf tests""" - - Konstantin Andrianov. - - - February 15, 2012. - - - See LICENSE-MIT or LICENSE for licensing information. - - - This is a basic server that was designed to be used in conjunction with - test_download.py to test download.py module. - - - SimpleHTTPServer: - http://docs.python.org/library/simplehttpserver.html#module-SimpleHTTPServer -""" - -import sys -import random import socketserver from http.server import SimpleHTTPRequestHandler - -class QuietHTTPRequestHandler(SimpleHTTPRequestHandler): - """A SimpleHTTPRequestHandler that does not write incoming requests to - stderr. """ - def log_request(self, code='-', size='-'): - pass - -# NOTE: On Windows/Python2 tests that use this simple_server.py in a -# subprocesses hang after a certain amount of requests (~68), if a PIPE is -# passed as Popen's stderr argument. This problem doesn't emerge if -# we silence the HTTP messages. -# If you decide to receive the HTTP messages, then this bug -# could reappear. -use_quiet_http_request_handler = True - -if len(sys.argv) > 2: - use_quiet_http_request_handler = sys.argv[2] - -if use_quiet_http_request_handler: - handler = QuietHTTPRequestHandler -else: - handler = SimpleHTTPRequestHandler - # Allow re-use so you can re-run tests as often as you want even if the # tests re-use ports. Otherwise TCP TIME-WAIT prevents reuse for ~1 minute socketserver.TCPServer.allow_reuse_address = True -httpd = socketserver.TCPServer(('localhost', 0), handler) -port_message = 'bind succeeded, server port is: ' \ - + str(httpd.server_address[1]) +httpd = socketserver.TCPServer(("localhost", 0), SimpleHTTPRequestHandler) +port_message = "bind succeeded, server port is: " + str(httpd.server_address[1]) print(port_message) httpd.serve_forever() diff --git a/tests/slow_retrieval_server.py b/tests/slow_retrieval_server.py deleted file mode 100755 index 06ec495213..0000000000 --- a/tests/slow_retrieval_server.py +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - slow_retrieval_server.py - - - Konstantin Andrianov. - - - March 13, 2012. 
- - - See LICENSE-MIT OR LICENSE for licensing information. - - - Server that throttles data by sending one byte at a time (specified time - interval 'DELAY'). The server is used in 'test_slow_retrieval_attack.py'. -""" - -import os -import sys -import time -import http.server - - - -# HTTP request handler. -class Handler(http.server.BaseHTTPRequestHandler): - - # Overwrite do_GET. - def do_GET(self): - current_dir = os.getcwd() - try: - filepath = os.path.join(current_dir, self.path.lstrip('/')) - data = None - with open(filepath, 'r') as fileobj: - data = fileobj.read() - - self.send_response(200) - self.send_header('Content-length', str(len(data))) - self.end_headers() - - # Before sending any data, the server does nothing for a long time. - DELAY = 40 - time.sleep(DELAY) - self.wfile.write((data.encode('utf-8'))) - - except IOError as e: - self.send_error(404, 'File Not Found!') - - - -if __name__ == '__main__': - server_address = ('localhost', 0) - - httpd = http.server.HTTPServer(server_address, Handler) - port_message = 'bind succeeded, server port is: ' \ - + str(httpd.server_address[1]) - print(port_message) - httpd.serve_forever() diff --git a/tests/ssl_certs/proxy_ca.crt b/tests/ssl_certs/proxy_ca.crt deleted file mode 100644 index f079e58b7c..0000000000 --- a/tests/ssl_certs/proxy_ca.crt +++ /dev/null @@ -1,17 +0,0 @@ ------BEGIN CERTIFICATE----- -MIICpDCCAYwCCQCFr/EhHmzVajANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDDAlw -cm94eTIgQ0EwHhcNMTgwOTIwMTkyOTQ2WhcNMjgwOTE3MTkyOTQ2WjAUMRIwEAYD -VQQDDAlwcm94eTIgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC/ -rVOeqSzJb01Vyliw3dnfLJsWfDfs/Lq5HLn+Xqnzl6MqnYirDqHzTErD3vl8lo/o -OJrziO0vYCWGXEylRQlZp+P37bLToSWiVqWZ8pH6CAh+AhA3WtegN5JwTgIUSP7A -aDlxuZrXlJM50QVlXJIPkc74M8ALz0nu5zmyWkGFvmTYS8503T8cXs9Alr4Bo++9 -Ilixv6lW4QS7FKTeQXlI49K4TeGGGsfmEO6Uj4WTUkwMZym9wfiqtaWc6I9ZMese -WmU3LuufY+pFCdjsdMWDJpYc+HabTSrbgXSF5Iq9a84Xuum39qhVpYhBwBtLk3ye -cxZmIxde1vnkWAitJFETAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAKV09r/x3WyO -McH0RU4WRVzvQN5F0e7swpDlLUX7YnfvpPEkavqQfmrL1cYyEDgsm/347Gvcs1Aa -iaT77axYroXOvCEJ3DxZdzUErKH6Jr3MmHKcZ/L35u6ZXKnmx/edFjdWr6ENkjuZ -NVvKbTrm4cl6Wy4bXkp6b24rBa9IFJncOouSkIvHENEcH//OD4xeTK8vSJTJ9nmw -TiJ0TjCRujtJWC6yb03ZV32VbeiHa1zLlZhcyKqUtt81dLti5t5+L2hAAVCcnEgI -DBWQdlRs/wilHGWVBo/9srOoMNsmvecTBpLH2JyC5VZ1+faYLPrNlgkWgHIFOTTi -h4ByR95Wbi8= ------END CERTIFICATE----- diff --git a/tests/ssl_certs/proxy_ca.key b/tests/ssl_certs/proxy_ca.key deleted file mode 100644 index 0e08b82d76..0000000000 --- a/tests/ssl_certs/proxy_ca.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEpQIBAAKCAQEAv61TnqksyW9NVcpYsN3Z3yybFnw37Py6uRy5/l6p85ejKp2I -qw6h80xKw975fJaP6Dia84jtL2AlhlxMpUUJWafj9+2y06ElolalmfKR+ggIfgIQ -N1rXoDeScE4CFEj+wGg5cbma15STOdEFZVySD5HO+DPAC89J7uc5slpBhb5k2EvO -dN0/HF7PQJa+AaPvvSJYsb+pVuEEuxSk3kF5SOPSuE3hhhrH5hDulI+Fk1JMDGcp -vcH4qrWlnOiPWTHrHlplNy7rn2PqRQnY7HTFgyaWHPh2m00q24F0heSKvWvOF7rp -t/aoVaWIQcAbS5N8nnMWZiMXXtb55FgIrSRREwIDAQABAoIBACxJObbA064+3xlh -RRioSXx86+BIFwvUYLgAYSDacl3rvTFNcJRFLznteKDE1dPpXZqD6Zk3G8YEauce -UD8nMj/awJs5+kVXSEC30E8/cmbYkE284E5J2OQVsunrvCM/skx2SD90aMhCdbm4 -B40h1EVwpOdH3alc3XIrTnNc0yK5MWAu41qwkxYxXHmW9Y0L8AjZve9JBrnKsJMB -ETEZFhHgi/IWtfh5PLbJO2dbSe7Nqo4ikyWo3r5b3yvuphFz1il88ZLjJ5nDmtlH -is7sk7pd0tYNsK1Di5G1ku50XvcbOE4F7mOVCxICTwjN+sdyG8o+AVlgbTKBo/JF -uEhthCECgYEA/3YXS9mAEujlstrV4VOksYWtySSrLHC56tLjj8cHVPJ1qkzT4OOC -X9TsWReDG4J8/t0DOHn+5dnhnqGcYjMMAQx095KHU1bQGrcRdmi6cjnNLTvfEbge -IcJTYG5P7NpLfLjB3DOGqFR4o0iz4K9ZLTYJc+BaCB9qJBEw6nuoP+sCgYEAwBTN 
-WpRDrmch0+LFPQwboLwtEPiFscTj8SInV0KsI/MK8+5Sm+tXS8PQHYJYcECEQxQM -2gfyM8vy33UP4yn4edJGWlaz7a4hyDxn944vv2fBQ3vjJTNz3X3skkhZ2/F+ZW9e -SFxPj+Vbif8VTEU+wK0f5SUmpRec4E7y3fq+kXkCgYEAib8ZbLLI1mlygfBx51/8 -rCRSwuTcz8ew2CgCwGInV+ys+bkXfmnuwNHE531AGrNPxvVRaUCO602C1NB7zI+N -53raDyyZf5yN9fnElr592l3EfqGL9Lf8t2NbJeIVgrdqgMP29E9sSpPRwOnQ5FRo -l3JNwoe0xDB8QRpr7+PhoyUCgYEAp+GGmmR7wzLgnhDV00WB4DqYKP0N3RH5KAhx -2hKr4b/LEuh5y00mP1Il06TZJ0M8VmRv1yCa0CqxXB00hZdpVRAz7UFagaJwZFJn -jDb6BJDqmdDt9tXBrxUgb7pMz6+CiaWNAjGsWFheaX5JXyAmeMDX369Y13KL6oEW -RG2jogECgYEA/1vLZcWNK/0yd4ClU+Xbu8xC29q82JUMsaazHtbgSNlOfo9LMQlH -z6xBiMYfHZ/SiHCy9RsO8GD4caXiF0RsTVnhqjSRJf3EARamufelNsu2ApLclkSN -fzSoB7ZHddGaYKYpXkGzcwFcKd/QjAlHm1yIsZu4B52AhCxC/WS2X54= ------END RSA PRIVATE KEY----- diff --git a/tests/ssl_certs/proxy_cert.key b/tests/ssl_certs/proxy_cert.key deleted file mode 100644 index 7693865610..0000000000 --- a/tests/ssl_certs/proxy_cert.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEpAIBAAKCAQEAzZO36nZvb9wLxBNB2cZyHqcX5poChJd1YnFBtxbtQwiISxid -eGdiWImQE80vpUyTQbI7TxM+w1xZeEeu4PXuYrOgdTDRFEnjM2mteG+3WpHQBN4H -xoah0msp3046fMkYqcEvhvHbsc5DAWgLK4JFHQPtG/+CIH0ZY+lBBPQhFIhBLYkt -YxNVqwpsXOGreASSw6mO6cVehCuVFJQO5NnI1sCAvp3SeosMKeIcDZxpZWmZhSwH -n3Rj6RMNM66C8zG4YlpvIniGzgV4UiW8XrTUG8HmzQ2295IcfB4No2DZeJDSR9oq -jOkyqJXll+tSiAMuzBRtTQKvGZ5bpZWW4XELEQIDAQABAoIBAQCAfW2cjD4GimCI -QwkLlq9JXWLg7S3ZtdjWmLdcOmY9WZ3mYhI6aVPcxs5Ysgyvonb/vui2+e5mqNf7 -B8LUNKK06lTGKqbjqXLqdYjJF/pgD3cXM7dkbE3EeNqJChogWIijwW11SMHqFmNn -A6LHpPqRshyHPWIV8FroSagr8nKio5BjUEuUiQUUAmSJPGN5qUhdIWXcQu8R1JB8 -9qqqtwPR4FELbFVGI2vYHaSWGnf9V0boPOsfFXWbSq/Ksj3Lm3gAqMtlAeOFu84l -fhP9RkgeXfaCXq0VaOM83UDgLqXm4Ni4wAMKRLwNs4LzumqMM/dfUTn+mGncj33q -idp5qnDhAoGBAOXkwuf60F7aBbo98A0vWZli2CbkspsJz2J573pf+lVWI+ZHBZLI -MOM2DgCOEIUfa2TIMkwFr2t9x6uXlACEwFbEtEBpM4J5qUHgGtXZIsnTsv3qUg/C -L89cNrMddOuuRkxQbyK1QMYZZmZQjSKG2jW6m1KING+shtkOzQ/P9ildAoGBAOTs -DLyyPeEZPj1UMqxVNmeYYRfWnt+YyTPulOIbSuFN0DhZPNLsjrhSxvDwe/3sYH/p -nKdjnlFlx8frz9wtkCt0hWvY0pG2Zam4IBCvreFN7rSvpzHwUAK3oXic2TRKKu1m -xUPZqMJwnWAPX+XxGFn0m7UJj+95VTEOJ2d12ClFAoGAdexXMgmM8uqg/3yf8xNz -wWNbfu/W0gJBN8FWXw52aWmrNob9y+IWeaYTnqNAxBhuzR6H9kkAR4IYduNkzrNJ -ufhigZu1CVuAv8LF4SXlW2PVL7wPZff08Efb4xrcC7y0YJbtuv8Af90tkpQFIU3N -Brx2yeoGA7aa4SJfe5nwKh0CgYAo1yP+lh4MBqDf+CGCNUGbgcfwpM17PprGtQ3C -uPPG9kbrhqAfUSy1Ha94VK8KQh2FNHxKMK+R/gKCXEOdGFPcLNGQyAHpFQ1WFg9C -atUumOS5P40oj6L2mSQpjHIDrieyat9Ol4pQBh9Nf/Cv6S9a/RS6W5ZeNttIASpu -fsutsQKBgQCq+BFeDYJH4f+C1233W3PXM0P1ivj+9TJMRUP63RRay6rv2ZTZXyPc -Rx6Lv4OVWh9VMfv1kHRloJ1GKEBo/uD3nid1WqoNxpXv1iwxeGtjXkFHfvCB7Ruu -vTyQhJQQ7WSCJJOfarstusIn0udOG3MLRgG4X1pPQghyS1AT8NUglw== ------END RSA PRIVATE KEY----- diff --git a/tests/ssl_certs/ssl_cert.crt b/tests/ssl_certs/ssl_cert.crt deleted file mode 100644 index 4812078bcf..0000000000 --- a/tests/ssl_certs/ssl_cert.crt +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIE1TCCAz2gAwIBAgIJAKqz8ew7Z44mMA0GCSqGSIb3DQEBCwUAMIGAMQswCQYD -VQQGEwJVUzERMA8GA1UECAwITmV3IFlvcmsxETAPBgNVBAcMCEJyb29rbHluMQww -CgYDVQQKDANOWVUxKTAnBgNVBAsMIENvbXB1dGVyIFNjaWVuY2UgYW5kIEVuZ2lu -ZWVyaW5nMRIwEAYDVQQDDAlsb2NhbGhvc3QwHhcNMTYwMTI3MjEyMTMxWhcNMjYw -MTI0MjEyMTMxWjCBgDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5ldyBZb3JrMREw -DwYDVQQHDAhCcm9va2x5bjEMMAoGA1UECgwDTllVMSkwJwYDVQQLDCBDb21wdXRl -ciBTY2llbmNlIGFuZCBFbmdpbmVlcmluZzESMBAGA1UEAwwJbG9jYWxob3N0MIIB -ojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAxyFVeRsWnb1UlCKBks2azM9W -9K+J/ZkzdSb6eCxOIxv79M/Ug54CfWqkySSaQejsu0U/gJxkFYRvwQAy5lATrspY -2kyiWYiggWXFDWz+i8ETPkL9zn59v13sNIpT/IXQj0S3Mr9ZnsUn1qCyEOOIxJxZ 
-lyuV/M/XP1DP4tArhEvrex12V6MQIK+8fYzEjHG/W7vIIet+wTStIR8ArvVQi0Kv -PbbGCfrZ+e+gq+UpBLBuAfMzM95TW+YJ5duMchie2n6LDmOeegA4jMEv2ppeOr8Q -JJtZuKpXWVbJvLg81yrDjr1rAwJR/WQrnk8GQWPCyPLneAA4mJbi75LqjLxn0AoJ -b3kzLfGEMJJEWXspxNg06bLQU948hB4L7nKARq6s7KoESjEV+/L4koMPWJoNq6fx -OUVw2+S3ITNrDctecRQ1j3RGVPaj5l6bn03C7KV9uRrfqFY3OUjn7A0kDczvRnmr -e1BZIpe+mfGFB+Uu7JiQoBv6I6fqyrdH9rX1LUKlAgMBAAGjUDBOMB0GA1UdDgQW -BBT8LvRkvodP9bR/bBs/aI+AydRIvTAfBgNVHSMEGDAWgBT8LvRkvodP9bR/bBs/ -aI+AydRIvTAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBgQC6kwuSEF0Y -5yLMf1TKfVkeBaZ4tOqR2kzpggzPPog+JcfIQgVmI2QTUDritHWFIM4YUwQ/00WU -uol2BCUpgaLci5gNgyTw8p538Q5cZHXE3kK/CWJA4zKag+oHdmXzGjMalqzvPuVJ -9VdtPrwHhB0Xntf72iWWhE2dIn1QZqVmJ/8hhIU8cQ91pIqTjYjhrYE/GhGH7HMW -bRiRolt37VxbzfXjEBMqVH6fOQq0piTRxwTNPBFp6JO5mRakRmWRvN3dnR8J9qXi -6tQhNNn2uQIpPlKlqVQnh5j5YxFrb50b0FCjDw+eNilXP93yjV4+lWK2QZychcGl -6/7Wu8snZkJCImPbwmcT80XSKesf918zIkauekWiaJE02+ljNtbM7MUAE+XLsKJy -NFGzpyZJ9LihGC/eeVl7K+xqC41jGVOXOOHtbDMbIQfaEZd1nPvy3+V/tublv+am -jPSlj/FW3bLTkjF0OspFjHvJeCeAJdM9kJdYfZoahd6kcejGJc+vjXE= ------END CERTIFICATE----- diff --git a/tests/ssl_certs/ssl_cert.key b/tests/ssl_certs/ssl_cert.key deleted file mode 100644 index b483851d7b..0000000000 --- a/tests/ssl_certs/ssl_cert.key +++ /dev/null @@ -1,39 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIG4wIBAAKCAYEAxyFVeRsWnb1UlCKBks2azM9W9K+J/ZkzdSb6eCxOIxv79M/U -g54CfWqkySSaQejsu0U/gJxkFYRvwQAy5lATrspY2kyiWYiggWXFDWz+i8ETPkL9 -zn59v13sNIpT/IXQj0S3Mr9ZnsUn1qCyEOOIxJxZlyuV/M/XP1DP4tArhEvrex12 -V6MQIK+8fYzEjHG/W7vIIet+wTStIR8ArvVQi0KvPbbGCfrZ+e+gq+UpBLBuAfMz -M95TW+YJ5duMchie2n6LDmOeegA4jMEv2ppeOr8QJJtZuKpXWVbJvLg81yrDjr1r -AwJR/WQrnk8GQWPCyPLneAA4mJbi75LqjLxn0AoJb3kzLfGEMJJEWXspxNg06bLQ -U948hB4L7nKARq6s7KoESjEV+/L4koMPWJoNq6fxOUVw2+S3ITNrDctecRQ1j3RG -VPaj5l6bn03C7KV9uRrfqFY3OUjn7A0kDczvRnmre1BZIpe+mfGFB+Uu7JiQoBv6 -I6fqyrdH9rX1LUKlAgMBAAECggGAEogMn0ehFC7xdxO7AUF3HYZSLlVDv0EJo+Zr -utFMuEG7ce4Bdfo3exp4mWt5m5akqUzpevuS6Nm5WLm/AuYC3upf2Hj3RuPLJB+n -dfdlvPXL56huXFAzPaLs/3q8FC0T2rFnZyadnYP1kCjGSYITUVDHmaTpwWxKOM85 -eX8r/ZTfJkb4o3E+Z/xSy1BVXkibqVrRZi63Th2r2wA6nQ2hYERlcJXY2kbpEDR3 -vGeIKLKOmknawwH2uf+vfh+vc1LNE7p9C5w16ex0OcmCo6G1ln7/dcwmXmcS3M0S -Bax5Jzu5ozaJFL9G59o0AUGJoZj9Gj9leeKPZvShsGcA0JmBMQiLIdhgRwj0B83x -HrYXTZ6P5BjJmwrIv4mGdv2bHV20pbWKAATUwo8EVBzylipexhhAtQJ5B6OsPDPS -HTluaEC2niD6lE613uRnzzbjw4SlwkoMLE0aqOhQyWIPS9/8oRjTzQi4otL7Dt69 -oMrVhmSfxUqZhh2R3KMHDcMKt5nBAoHBAOXkDovYOhTMD3ei0WbKpbSB1sJp5t2d -/9gVil4nWLa4ahw7/TsZi3Co+c9gD2UJku1L9JbOy6TVZ2LoXOybLvIJfeAjNdYH -vi/ElG7498fgsSyw6bua/1VEd7VtbtpWJIQt1LdJG1+O3ZbJNTY6tbLbYVuy4FIO -e/484F8kdZ9PtRsn+I0I7kfoYJ2IFoM0UWgwQETOBguBCua43ZnHoxrvyHKABAO+ -Iuvw4RBZKphGVxMCEjvTCB9S/CpGCRAkkQKBwQDdvu3reA/lVdFDN56VNUn0u3vr -zPSoiOjojlHDyWVAWiLB9I0qaE61UMvVgChM8VkmjhHYQEW6Cj0XMZMkCnsfKDQn -TYF16jt/sTteWSTcx0PTeiCGs3yM5wK4B8q9coOlzSqDd39mjDIFiUz4e+44OIcU -+ISc8pGbwxw0W8qRwIUJPTSVoaUZDnupuR/IE48q8CTPT1Gf00sMLWuv3SYuFHKX -djpcMLWVf4HclIY6y3BqNIZ0JaUAOd+OZT2kdtUCgcBLWPwLics/lcJcC9lmP3Ug -PI4PGna4nFiGkkjPo0XIXZkpt9+/xxeUzU1TUsC49PJbJFH+O7kzRV6lZFNQmWxB -mCrRk7jJdbA4J84esStFL7fiVfnFq3+UiuRRapSyqxk82WimyidWopSuHzR5mbSD -8rNuQqqTOnwZUAqaJHEIzi8lv2wPjaXLm7ZO65O1XShxZZ8q7fu9OYZBKMY46N3k -rkKchKjMMT1w53pcyVzUm/leGYewY/J9kc1kbZ/60oECgcEAj/qdzwt4/sa3BncB -wA4GxCJL9zJwFVI4MG/gRUjqNluQP/GDC2sI2A/rGeiJwlPfN/p9ObWZ0I8/VWT6 -DifEA9n96xsXGTIKigHQ85TcK4Iy1whwQCYgk/iXOljM2i+VrT1HAm+/yBz1icS5 -ton5hoWlqAcpTCLwSnvoP1Lud67ScspL73Aym89cmjo6mZWhmxasP/NXo3f1PaXs -SxdD6B2cvh2lDSEPdk+BSXEiquBXUI5kUtvyg/AP6Qxxdu01AoHAO05qTh9zokkT -yg0sZf4Z5i01em2ys4ZhQjhhbw+I5lIO76e/ZyUWpEZusBVd9TV5BHgiATOHw4yr 
-nbjEZKwLEb3SXoHl3/CD/l9vWk4gKAYDJdW+oPZttDlkp6dfPJVDupQwLhrxXYmE -fgs4WFmY3Q5b1wut2pnSs1UEPDqJBvykt59gFgn7yVwyTy8VLihNVtH4mwVPYXha -jz2T6BzRAPlYqx/FpkK2YHHNcyj+HFtnBUMMzacnSl/aXpJgHTKw ------END RSA PRIVATE KEY----- diff --git a/tests/ssl_certs/ssl_cert_2.crt b/tests/ssl_certs/ssl_cert_2.crt deleted file mode 100644 index 6d6fb63a53..0000000000 --- a/tests/ssl_certs/ssl_cert_2.crt +++ /dev/null @@ -1,30 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIFOTCCA6GgAwIBAgIJAO+bbero+zKtMA0GCSqGSIb3DQEBCwUAMIGAMQswCQYD -VQQGEwJVUzERMA8GA1UECAwITmV3IFlvcmsxETAPBgNVBAcMCEJyb29rbHluMQww -CgYDVQQKDANOWVUxKTAnBgNVBAsMIENvbXB1dGVyIFNjaWVuY2UgYW5kIEVuZ2lu -ZWVyaW5nMRIwEAYDVQQDDAlsb2NhbGhvc3QwHhcNMTgwOTI2MTgwMDAzWhcNMzgw -OTIxMTgwMDAzWjCBgDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5ldyBZb3JrMREw -DwYDVQQHDAhCcm9va2x5bjEMMAoGA1UECgwDTllVMSkwJwYDVQQLDCBDb21wdXRl -ciBTY2llbmNlIGFuZCBFbmdpbmVlcmluZzESMBAGA1UEAwwJbG9jYWxob3N0MIIB -ojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAxyFVeRsWnb1UlCKBks2azM9W -9K+J/ZkzdSb6eCxOIxv79M/Ug54CfWqkySSaQejsu0U/gJxkFYRvwQAy5lATrspY -2kyiWYiggWXFDWz+i8ETPkL9zn59v13sNIpT/IXQj0S3Mr9ZnsUn1qCyEOOIxJxZ -lyuV/M/XP1DP4tArhEvrex12V6MQIK+8fYzEjHG/W7vIIet+wTStIR8ArvVQi0Kv -PbbGCfrZ+e+gq+UpBLBuAfMzM95TW+YJ5duMchie2n6LDmOeegA4jMEv2ppeOr8Q -JJtZuKpXWVbJvLg81yrDjr1rAwJR/WQrnk8GQWPCyPLneAA4mJbi75LqjLxn0AoJ -b3kzLfGEMJJEWXspxNg06bLQU948hB4L7nKARq6s7KoESjEV+/L4koMPWJoNq6fx -OUVw2+S3ITNrDctecRQ1j3RGVPaj5l6bn03C7KV9uRrfqFY3OUjn7A0kDczvRnmr -e1BZIpe+mfGFB+Uu7JiQoBv6I6fqyrdH9rX1LUKlAgMBAAGjgbMwgbAwgZ8GA1Ud -IwSBlzCBlKGBhqSBgzCBgDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5ldyBZb3Jr -MREwDwYDVQQHDAhCcm9va2x5bjEMMAoGA1UECgwDTllVMSkwJwYDVQQLDCBDb21w -dXRlciBTY2llbmNlIGFuZCBFbmdpbmVlcmluZzESMBAGA1UEAwwJbG9jYWxob3N0 -ggkA75tt6uj7Mq0wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAYEAFWcl -1tAmt/3DJDjk0ppF62jbwcEOu1N9Nono9a70ojAQYYuMC7Ditw6rLbeXS8tP8ae/ -drlci3VxlE5PpmAjuP67Uv2CuGu/2iMqa99AWZ4mVN+x4YL6awvYs8ea6I1Xe8tQ -5+RqvNA+QtnjtfOeb6yWQBAGrc2eTX87IzqvV/EewkdKAs4GZUWG1Zjv3effqjTO -qRX94ltW1GWud7fVcqpZLOaK9U+4IaI2nNHuCtWODoyQmMoVApXyig/YQqFe0eyj -76m1T+2SZLRtn0xn1fTHuLZ2bdtTMZ7k5PTAKnBNEn1Rr9MAS+WEASN1ZyoQ3reL -VYrgkMTrrXPO8bdDTvP7z1Jzv5Cq9WMHFvOLfnj/vN9ZPH6w4QT3Zb97SAAOSPK/ -gzOzRtIe+hqCYBh/cwMoeeoAzes/nJgorj3IOTu8JXmtZrZGrdLIhu2Q8U+yKasf -+TUrr6xdcJI/fyVM5BVelpGhqHzzOQe1tO4VYQlAVaaVvFidDPHqTI2/S272 ------END CERTIFICATE----- diff --git a/tests/ssl_certs/ssl_cert_expired.crt b/tests/ssl_certs/ssl_cert_expired.crt deleted file mode 100644 index f0b79cb95a..0000000000 --- a/tests/ssl_certs/ssl_cert_expired.crt +++ /dev/null @@ -1,30 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIFOTCCA6GgAwIBAgIJALtyUsChEIJpMA0GCSqGSIb3DQEBCwUAMIGAMQswCQYD -VQQGEwJVUzERMA8GA1UECAwITmV3IFlvcmsxETAPBgNVBAcMCEJyb29rbHluMQww -CgYDVQQKDANOWVUxKTAnBgNVBAsMIENvbXB1dGVyIFNjaWVuY2UgYW5kIEVuZ2lu -ZWVyaW5nMRIwEAYDVQQDDAlsb2NhbGhvc3QwHhcNMTgwOTI2MTc0NTM2WhcNMTgw -OTI1MTc0NTM2WjCBgDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5ldyBZb3JrMREw -DwYDVQQHDAhCcm9va2x5bjEMMAoGA1UECgwDTllVMSkwJwYDVQQLDCBDb21wdXRl -ciBTY2llbmNlIGFuZCBFbmdpbmVlcmluZzESMBAGA1UEAwwJbG9jYWxob3N0MIIB -ojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAxyFVeRsWnb1UlCKBks2azM9W -9K+J/ZkzdSb6eCxOIxv79M/Ug54CfWqkySSaQejsu0U/gJxkFYRvwQAy5lATrspY -2kyiWYiggWXFDWz+i8ETPkL9zn59v13sNIpT/IXQj0S3Mr9ZnsUn1qCyEOOIxJxZ -lyuV/M/XP1DP4tArhEvrex12V6MQIK+8fYzEjHG/W7vIIet+wTStIR8ArvVQi0Kv -PbbGCfrZ+e+gq+UpBLBuAfMzM95TW+YJ5duMchie2n6LDmOeegA4jMEv2ppeOr8Q -JJtZuKpXWVbJvLg81yrDjr1rAwJR/WQrnk8GQWPCyPLneAA4mJbi75LqjLxn0AoJ -b3kzLfGEMJJEWXspxNg06bLQU948hB4L7nKARq6s7KoESjEV+/L4koMPWJoNq6fx -OUVw2+S3ITNrDctecRQ1j3RGVPaj5l6bn03C7KV9uRrfqFY3OUjn7A0kDczvRnmr 
-e1BZIpe+mfGFB+Uu7JiQoBv6I6fqyrdH9rX1LUKlAgMBAAGjgbMwgbAwgZ8GA1Ud -IwSBlzCBlKGBhqSBgzCBgDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5ldyBZb3Jr -MREwDwYDVQQHDAhCcm9va2x5bjEMMAoGA1UECgwDTllVMSkwJwYDVQQLDCBDb21w -dXRlciBTY2llbmNlIGFuZCBFbmdpbmVlcmluZzESMBAGA1UEAwwJbG9jYWxob3N0 -ggkAu3JSwKEQgmkwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAYEAW4I1 -TacdFv3L9ENFkSLciPb7zFMckLUZfk/P+4VjdapWrfuydO4W/ogMxA4DK09thTsK -N/BgcExyKjDldGUfUv57Tqv3v2E5kbygNcNtP53fwMz3y+7QourzkDE5HWciw1Lb -hmbnCBTzt/UioSBdJnAH29GWpSS+Jzu745sRaI48AS/J5ApH2aVEnNQTCE7v1LNH -2bTTPYl3eDXiD8yOhvyiW1F4y2BSFbQRH/3aE6Goe4A75m8sX50+JlOgjyyQnAMf -vbfvZsjGfqdXv9Qpci50qKCFxHJLXXNAUbX3fDgKE+RoZUNZnmn2VDgJYnToz6on -RcVnppV09kmSjHXZBT04XXUA0vG3p+oU0TO4puJlePVf4Oz23/DRCPHSfVWgMeB2 -c1PpKit4+Bz7mypnsWVw8kk//l0GJ1cHnkkZElKJtPEB7I587jgTCDcN811TGNBc -rLLd/JwtYAvi1CPFt2ICGDvA4AKLY3rBNg5z1DrSE/iom1NTC00SFZJztYiX ------END CERTIFICATE----- diff --git a/tests/ssl_certs/ssl_cert_wronghost.crt b/tests/ssl_certs/ssl_cert_wronghost.crt deleted file mode 100644 index df7bfa37a6..0000000000 --- a/tests/ssl_certs/ssl_cert_wronghost.crt +++ /dev/null @@ -1,31 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIFRTCCA62gAwIBAgIJAKY6b706lpuDMA0GCSqGSIb3DQEBCwUAMIGEMQswCQYD -VQQGEwJVUzERMA8GA1UECAwITmV3IFlvcmsxETAPBgNVBAcMCEJyb29rbHluMQww -CgYDVQQKDANOWVUxKTAnBgNVBAsMIENvbXB1dGVyIFNjaWVuY2UgYW5kIEVuZ2lu -ZWVyaW5nMRYwFAYDVQQDDA1ub3RteWhvc3RuYW1lMB4XDTE4MDkxMjE2NTkxN1oX -DTM4MDkwNzE2NTkxN1owgYQxCzAJBgNVBAYTAlVTMREwDwYDVQQIDAhOZXcgWW9y -azERMA8GA1UEBwwIQnJvb2tseW4xDDAKBgNVBAoMA05ZVTEpMCcGA1UECwwgQ29t -cHV0ZXIgU2NpZW5jZSBhbmQgRW5naW5lZXJpbmcxFjAUBgNVBAMMDW5vdG15aG9z -dG5hbWUwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDHIVV5GxadvVSU -IoGSzZrMz1b0r4n9mTN1Jvp4LE4jG/v0z9SDngJ9aqTJJJpB6Oy7RT+AnGQVhG/B -ADLmUBOuyljaTKJZiKCBZcUNbP6LwRM+Qv3Ofn2/Xew0ilP8hdCPRLcyv1mexSfW -oLIQ44jEnFmXK5X8z9c/UM/i0CuES+t7HXZXoxAgr7x9jMSMcb9bu8gh637BNK0h -HwCu9VCLQq89tsYJ+tn576Cr5SkEsG4B8zMz3lNb5gnl24xyGJ7afosOY556ADiM -wS/aml46vxAkm1m4qldZVsm8uDzXKsOOvWsDAlH9ZCueTwZBY8LI8ud4ADiYluLv -kuqMvGfQCglveTMt8YQwkkRZeynE2DTpstBT3jyEHgvucoBGrqzsqgRKMRX78viS -gw9Ymg2rp/E5RXDb5LchM2sNy15xFDWPdEZU9qPmXpufTcLspX25Gt+oVjc5SOfs -DSQNzO9Geat7UFkil76Z8YUH5S7smJCgG/ojp+rKt0f2tfUtQqUCAwEAAaOBtzCB -tDCBowYDVR0jBIGbMIGYoYGKpIGHMIGEMQswCQYDVQQGEwJVUzERMA8GA1UECAwI -TmV3IFlvcmsxETAPBgNVBAcMCEJyb29rbHluMQwwCgYDVQQKDANOWVUxKTAnBgNV -BAsMIENvbXB1dGVyIFNjaWVuY2UgYW5kIEVuZ2luZWVyaW5nMRYwFAYDVQQDDA1u -b3RteWhvc3RuYW1lggkApjpvvTqWm4MwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0B -AQsFAAOCAYEAvpBMce3kxwo9W0o4RqezkSxnNyax0ezbUNodIkx5kbzX09qQLqhK -SkhQY3CNmtrpsczUg1W2nldxioEouwfTlhi15H98E/8XytpGaHO7Rnbtq8nkOp3E -N1+DMfFR95OynbHSd7bfK9UEmH1CmCnttvCuQkLTxDCpEsQNAxvmU/yDONoDr+cu -jGo80XTnYTqHl5/UtGbCS4SAIdWgrXTIqVvY/eF+mR+3nQEYjBuqW0cNfXLyYLXH -XMc6qtfGX1P+NRWtlrWgGQmc0fry+GczRHMJuKtJMV2xZzPJAJqwwvj3Fjz8HNGu -ZX3kVdbkDjf8is2cWgyZqDecqPHDBW4Ey539s/5eurgOkEvhriS4/9RnVhgdzduj -nRdXkD10ficrFcBQO0KaTWT+iFBc9duuYPuLRyRTye5p3t0liOikH2XrRXs4IBfz -2mT4npXQl1liNixcCf/yUEUOSQAJDG6aRjDjD4SZBUPDLjfqKLid8M0BpLQrks9L -5hAg1WZXorY6 ------END CERTIFICATE----- diff --git a/tests/test_api.py b/tests/test_api.py index 4d8ef1f92a..a0eaf1e910 100755 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -7,62 +7,55 @@ """ import json -import sys import logging import os import shutil +import sys import tempfile import unittest - +from copy import copy from datetime import datetime, timedelta -from dateutil.relativedelta import relativedelta +from typing import Any, ClassVar, Dict -from tests import utils +from securesystemslib import hash as sslib_hash +from securesystemslib.interface 
import ( + import_ed25519_privatekey_from_file, + import_ed25519_publickey_from_file, +) +from securesystemslib.keys import generate_ed25519_key +from securesystemslib.signer import Signature, SSlibSigner -from tuf import exceptions +from tests import utils +from tuf.api import exceptions from tuf.api.metadata import ( + TOP_LEVEL_ROLE_NAMES, + DelegatedRole, + Delegations, + Key, Metadata, Root, Snapshot, - Timestamp, - Targets, - Key, - MetaFile, TargetFile, - Delegations, - DelegatedRole, -) - -from tuf.api.serialization import ( - DeserializationError -) - -from tuf.api.serialization.json import ( - JSONSerializer, - CanonicalJSONSerializer -) - -from securesystemslib.interface import ( - import_ed25519_publickey_from_file, - import_ed25519_privatekey_from_file -) - -from securesystemslib.signer import ( - SSlibSigner, - Signature -) - -from securesystemslib.keys import ( - generate_ed25519_key + Targets, + Timestamp, ) +from tuf.api.serialization import DeserializationError, SerializationError +from tuf.api.serialization.json import CanonicalJSONSerializer, JSONSerializer logger = logging.getLogger(__name__) +# pylint: disable=too-many-public-methods class TestMetadata(unittest.TestCase): + """Tests for public API of all classes in 'tuf/api/metadata.py'.""" + + temporary_directory: ClassVar[str] + repo_dir: ClassVar[str] + keystore_dir: ClassVar[str] + keystore: ClassVar[Dict[str, Dict[str, Any]]] @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: # Create a temporary directory to store the repository, metadata, and # target files. 'temporary_directory' must be deleted in # TearDownClass() so that temporary files are always removed, even when @@ -70,62 +63,61 @@ def setUpClass(cls): cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) test_repo_data = os.path.join( - os.path.dirname(os.path.realpath(__file__)), 'repository_data') + os.path.dirname(os.path.realpath(__file__)), "repository_data" + ) - cls.repo_dir = os.path.join(cls.temporary_directory, 'repository') + cls.repo_dir = os.path.join(cls.temporary_directory, "repository") shutil.copytree( - os.path.join(test_repo_data, 'repository'), cls.repo_dir) + os.path.join(test_repo_data, "repository"), cls.repo_dir + ) - cls.keystore_dir = os.path.join(cls.temporary_directory, 'keystore') + cls.keystore_dir = os.path.join(cls.temporary_directory, "keystore") shutil.copytree( - os.path.join(test_repo_data, 'keystore'), cls.keystore_dir) + os.path.join(test_repo_data, "keystore"), cls.keystore_dir + ) # Load keys into memory cls.keystore = {} - for role in ['delegation', 'snapshot', 'targets', 'timestamp']: + for role in ["delegation", Snapshot.type, Targets.type, Timestamp.type]: cls.keystore[role] = import_ed25519_privatekey_from_file( - os.path.join(cls.keystore_dir, role + '_key'), - password="password" + os.path.join(cls.keystore_dir, role + "_key"), + password="password", ) - @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: # Remove the temporary repository directory, which should contain all # the metadata, targets, and key files generated for the test cases. 
shutil.rmtree(cls.temporary_directory)
 
-
-    def test_generic_read(self):
+    def test_generic_read(self) -> None:
         for metadata, inner_metadata_cls in [
-                ('root', Root),
-                ('snapshot', Snapshot),
-                ('timestamp', Timestamp),
-                ('targets', Targets)]:
+            (Root.type, Root),
+            (Snapshot.type, Snapshot),
+            (Timestamp.type, Timestamp),
+            (Targets.type, Targets),
+        ]:
             # Load JSON-formatted metadata of each supported type from file
             # and from out-of-band read JSON string
-            path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
-            metadata_obj = Metadata.from_file(path)
-            with open(path, 'rb') as f:
-                metadata_obj2 = Metadata.from_bytes(f.read())
+            path = os.path.join(self.repo_dir, "metadata", metadata + ".json")
+            md_obj = Metadata.from_file(path)
+            with open(path, "rb") as f:
+                md_obj2 = Metadata.from_bytes(f.read())
 
             # Assert that both methods instantiate the right inner class for
             # each metadata type and ...
-            self.assertTrue(
-                isinstance(metadata_obj.signed, inner_metadata_cls))
-            self.assertTrue(
-                isinstance(metadata_obj2.signed, inner_metadata_cls))
+            self.assertTrue(isinstance(md_obj.signed, inner_metadata_cls))
+            self.assertTrue(isinstance(md_obj2.signed, inner_metadata_cls))
 
             # ... and return the same object (compared by dict representation)
-            self.assertDictEqual(
-                metadata_obj.to_dict(), metadata_obj2.to_dict())
+            self.assertDictEqual(md_obj.to_dict(), md_obj2.to_dict())
 
         # Assert that it chokes correctly on an unknown metadata type
-        bad_metadata_path = 'bad-metadata.json'
-        bad_metadata = {'signed': {'_type': 'bad-metadata'}}
-        bad_string = json.dumps(bad_metadata).encode('utf-8')
-        with open(bad_metadata_path, 'wb') as f:
+        bad_metadata_path = "bad-metadata.json"
+        bad_metadata = {"signed": {"_type": "bad-metadata"}}
+        bad_string = json.dumps(bad_metadata).encode("utf-8")
+        with open(bad_metadata_path, "wb") as f:
             f.write(bad_string)
 
         with self.assertRaises(DeserializationError):
@@ -135,144 +127,175 @@ def test_generic_read(self):
 
         os.remove(bad_metadata_path)
 
+    def test_md_read_write_file_exceptions(self) -> None:
+        # Test reading from a file with a bad filename
+        with self.assertRaises(exceptions.StorageError):
+            Metadata.from_file("bad-metadata.json")
 
-    def test_compact_json(self):
-        path = os.path.join(self.repo_dir, 'metadata', 'targets.json')
-        metadata_obj = Metadata.from_file(path)
-        self.assertTrue(
-            len(JSONSerializer(compact=True).serialize(metadata_obj)) <
-            len(JSONSerializer().serialize(metadata_obj)))
-
+        # Test serializing to a file with a bad filename
+        with self.assertRaises(exceptions.StorageError):
+            md = Metadata.from_file(
+                os.path.join(self.repo_dir, "metadata", "root.json")
+            )
+            md.to_file("")
 
-    def test_read_write_read_compare(self):
-        for metadata in ['root', 'snapshot', 'timestamp', 'targets']:
-            path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
-            metadata_obj = Metadata.from_file(path)
+    def test_compact_json(self) -> None:
+        path = os.path.join(self.repo_dir, "metadata", "targets.json")
+        md_obj = Metadata.from_file(path)
+        self.assertTrue(
+            len(JSONSerializer(compact=True).serialize(md_obj))
+            < len(JSONSerializer().serialize(md_obj))
+        )
 
-            path_2 = path + '.tmp'
-            metadata_obj.to_file(path_2)
-            metadata_obj_2 = Metadata.from_file(path_2)
+    def test_read_write_read_compare(self) -> None:
+        for metadata in TOP_LEVEL_ROLE_NAMES:
+            path = os.path.join(self.repo_dir, "metadata", metadata + ".json")
+            md_obj = Metadata.from_file(path)
 
-            self.assertDictEqual(
-                metadata_obj.to_dict(),
-                metadata_obj_2.to_dict())
+            path_2 = path + ".tmp"
+ 
md_obj.to_file(path_2)
+            md_obj_2 = Metadata.from_file(path_2)
+            self.assertDictEqual(md_obj.to_dict(), md_obj_2.to_dict())
 
             os.remove(path_2)
 
-
-    def test_to_from_bytes(self):
-        for metadata in ["root", "snapshot", "timestamp", "targets"]:
-            path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
-            with open(path, 'rb') as f:
+    def test_serialize_with_validate(self) -> None:
+        # Assert that changing one required attribute makes validation fail.
+        root = Metadata.from_file(
+            os.path.join(self.repo_dir, "metadata", "root.json")
+        )
+        root.signed.version = 0
+        with self.assertRaises(SerializationError):
+            root.to_bytes(JSONSerializer(validate=True))
+
+    def test_to_from_bytes(self) -> None:
+        for metadata in TOP_LEVEL_ROLE_NAMES:
+            path = os.path.join(self.repo_dir, "metadata", metadata + ".json")
+            with open(path, "rb") as f:
                 metadata_bytes = f.read()
-            metadata_obj = Metadata.from_bytes(metadata_bytes)
+            md_obj = Metadata.from_bytes(metadata_bytes)
             # Compare that from_bytes/to_bytes doesn't change the content
             # for two cases for the serializer: noncompact and compact.
 
             # Case 1: test noncompact by overriding the default serializer.
-            self.assertEqual(
-                metadata_obj.to_bytes(JSONSerializer()), metadata_bytes
-            )
+            self.assertEqual(md_obj.to_bytes(JSONSerializer()), metadata_bytes)
 
             # Case 2: test compact by using the default serializer.
-            obj_bytes = metadata_obj.to_bytes()
+            obj_bytes = md_obj.to_bytes()
             metadata_obj_2 = Metadata.from_bytes(obj_bytes)
-            self.assertEqual(
-                metadata_obj_2.to_bytes(), obj_bytes
-            )
+            self.assertEqual(metadata_obj_2.to_bytes(), obj_bytes)
 
-
-    def test_sign_verify(self):
-        root_path = os.path.join(self.repo_dir, 'metadata', 'root.json')
+    def test_sign_verify(self) -> None:
+        root_path = os.path.join(self.repo_dir, "metadata", "root.json")
         root = Metadata[Root].from_file(root_path).signed
 
         # Locate the public keys we need from root
-        targets_keyid = next(iter(root.roles["targets"].keyids))
+        targets_keyid = next(iter(root.roles[Targets.type].keyids))
         targets_key = root.keys[targets_keyid]
-        snapshot_keyid = next(iter(root.roles["snapshot"].keyids))
+        snapshot_keyid = next(iter(root.roles[Snapshot.type].keyids))
         snapshot_key = root.keys[snapshot_keyid]
-        timestamp_keyid = next(iter(root.roles["timestamp"].keyids))
+        timestamp_keyid = next(iter(root.roles[Timestamp.type].keyids))
         timestamp_key = root.keys[timestamp_keyid]
 
         # Load sample metadata (targets) and assert ...
-        path = os.path.join(self.repo_dir, 'metadata', 'targets.json')
-        metadata_obj = Metadata.from_file(path)
+        path = os.path.join(self.repo_dir, "metadata", "targets.json")
+        md_obj = Metadata.from_file(path)
 
         # ... it has a single existing signature,
-        self.assertEqual(len(metadata_obj.signatures), 1)
+        self.assertEqual(len(md_obj.signatures), 1)
         # ... which is valid for the correct key.
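
The hunks above exercise the new Metadata round-trip and signing API. As a minimal sketch of that flow (not part of the patch: the file paths and the keystore password are placeholder assumptions, not values taken from this diff):

# Minimal sketch of the tuf.api.metadata round-trip and signing flow;
# "root.json", "keystore/root_key", and the password are placeholders.
from securesystemslib.interface import import_ed25519_privatekey_from_file
from securesystemslib.signer import SSlibSigner
from tuf.api.metadata import Metadata, Root
from tuf.api.serialization.json import JSONSerializer

md = Metadata[Root].from_file("root.json")           # parse file into Metadata[Root]
raw = md.to_bytes()                                  # default serializer: compact JSON
assert Metadata.from_bytes(raw).to_dict() == md.to_dict()   # lossless round trip
pretty = md.to_bytes(JSONSerializer(compact=False))  # non-compact, human-readable form

key_dict = import_ed25519_privatekey_from_file(
    "keystore/root_key", password="password"         # placeholder key file
)
md.sign(SSlibSigner(key_dict), append=True)          # append a signature
md.to_file("root.json")                              # write back to disk

Verification then goes through the public Key objects held in root: key.verify_signature(md) raises UnsignedMetadataError on failure, which is exactly what the assertions below check.
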
- targets_key.verify_signature(metadata_obj) + targets_key.verify_signature(md_obj) with self.assertRaises(exceptions.UnsignedMetadataError): - snapshot_key.verify_signature(metadata_obj) + snapshot_key.verify_signature(md_obj) # Test verifying with explicitly set serializer - targets_key.verify_signature(metadata_obj, CanonicalJSONSerializer()) + targets_key.verify_signature(md_obj, CanonicalJSONSerializer()) with self.assertRaises(exceptions.UnsignedMetadataError): - targets_key.verify_signature(metadata_obj, JSONSerializer()) + targets_key.verify_signature(md_obj, JSONSerializer()) # type: ignore[arg-type] - sslib_signer = SSlibSigner(self.keystore['snapshot']) + sslib_signer = SSlibSigner(self.keystore[Snapshot.type]) # Append a new signature with the unrelated key and assert that ... - sig = metadata_obj.sign(sslib_signer, append=True) + sig = md_obj.sign(sslib_signer, append=True) # ... there are now two signatures, and - self.assertEqual(len(metadata_obj.signatures), 2) + self.assertEqual(len(md_obj.signatures), 2) # ... both are valid for the corresponding keys. - targets_key.verify_signature(metadata_obj) - snapshot_key.verify_signature(metadata_obj) + targets_key.verify_signature(md_obj) + snapshot_key.verify_signature(md_obj) # ... the returned (appended) signature is for snapshot key self.assertEqual(sig.keyid, snapshot_keyid) - sslib_signer = SSlibSigner(self.keystore['timestamp']) + sslib_signer = SSlibSigner(self.keystore[Timestamp.type]) # Create and assign (don't append) a new signature and assert that ... - metadata_obj.sign(sslib_signer, append=False) + md_obj.sign(sslib_signer, append=False) # ... there now is only one signature, - self.assertEqual(len(metadata_obj.signatures), 1) + self.assertEqual(len(md_obj.signatures), 1) # ... valid for that key. - timestamp_key.verify_signature(metadata_obj) + timestamp_key.verify_signature(md_obj) + with self.assertRaises(exceptions.UnsignedMetadataError): + targets_key.verify_signature(md_obj) + + def test_sign_failures(self) -> None: + # Test throwing UnsignedMetadataError because of signing problems + # related to bad information in the signer. 
+ md = Metadata.from_file( + os.path.join(self.repo_dir, "metadata", "snapshot.json") + ) + key_dict = copy(self.keystore[Snapshot.type]) + key_dict["keytype"] = "rsa" + key_dict["scheme"] = "bad_scheme" + sslib_signer = SSlibSigner(key_dict) with self.assertRaises(exceptions.UnsignedMetadataError): - targets_key.verify_signature(metadata_obj) + md.sign(sslib_signer) + + def test_verify_failures(self) -> None: + root_path = os.path.join(self.repo_dir, "metadata", "root.json") + root = Metadata[Root].from_file(root_path).signed - # Test failure on unknown scheme (securesystemslib UnsupportedAlgorithmError) + # Locate the timestamp public key we need from root + timestamp_keyid = next(iter(root.roles[Timestamp.type].keyids)) + timestamp_key = root.keys[timestamp_keyid] + + # Load sample metadata (timestamp) + path = os.path.join(self.repo_dir, "metadata", "timestamp.json") + md_obj = Metadata.from_file(path) + + # Test failure on unknown scheme (securesystemslib + # UnsupportedAlgorithmError) scheme = timestamp_key.scheme timestamp_key.scheme = "foo" with self.assertRaises(exceptions.UnsignedMetadataError): - timestamp_key.verify_signature(metadata_obj) + timestamp_key.verify_signature(md_obj) timestamp_key.scheme = scheme - # Test failure on broken public key data (securesystemslib CryptoError) + # Test failure on broken public key data (securesystemslib + # CryptoError) public = timestamp_key.keyval["public"] timestamp_key.keyval["public"] = "ffff" with self.assertRaises(exceptions.UnsignedMetadataError): - timestamp_key.verify_signature(metadata_obj) + timestamp_key.verify_signature(md_obj) timestamp_key.keyval["public"] = public - # Test failure with invalid signature (securesystemslib FormatError) - sig = metadata_obj.signatures[timestamp_keyid] + # Test failure with invalid signature (securesystemslib + # FormatError) + sig = md_obj.signatures[timestamp_keyid] correct_sig = sig.signature sig.signature = "foo" with self.assertRaises(exceptions.UnsignedMetadataError): - timestamp_key.verify_signature(metadata_obj) + timestamp_key.verify_signature(md_obj) # Test failure with valid but incorrect signature - sig.signature = "ff"*64 + sig.signature = "ff" * 64 with self.assertRaises(exceptions.UnsignedMetadataError): - timestamp_key.verify_signature(metadata_obj) + timestamp_key.verify_signature(md_obj) sig.signature = correct_sig - def test_metadata_base(self): - # Use of Snapshot is arbitrary, we're just testing the base class features - # with real data - snapshot_path = os.path.join( - self.repo_dir, 'metadata', 'snapshot.json') + def test_metadata_signed_is_expired(self) -> None: + # Use of Snapshot is arbitrary, we're just testing the base class + # features with real data + snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json") md = Metadata.from_file(snapshot_path) - self.assertEqual(md.signed.version, 1) - md.signed.bump_version() - self.assertEqual(md.signed.version, 2) self.assertEqual(md.signed.expires, datetime(2030, 1, 1, 0, 0)) - md.signed.bump_expiration() - self.assertEqual(md.signed.expires, datetime(2030, 1, 2, 0, 0)) - md.signed.bump_expiration(timedelta(days=365)) - self.assertEqual(md.signed.expires, datetime(2031, 1, 2, 0, 0)) # Test is_expired with reference_time provided is_expired = md.signed.is_expired(md.signed.expires) @@ -293,165 +316,104 @@ def test_metadata_base(self): self.assertFalse(is_expired) md.signed.expires = expires - # Test deserializing metadata with non-unique signatures: - data = md.to_dict() - data["signatures"].append({"keyid": 
data["signatures"][0]["keyid"], "sig": "foo"}) - with self.assertRaises(ValueError): - Metadata.from_dict(data) - - - def test_metadata_snapshot(self): - snapshot_path = os.path.join( - self.repo_dir, 'metadata', 'snapshot.json') - snapshot = Metadata[Snapshot].from_file(snapshot_path) - - # Create a MetaFile instance representing what we expect - # the updated data to be. - hashes = {'sha256': 'c2986576f5fdfd43944e2b19e775453b96748ec4fe2638a6d2f32f1310967095'} - fileinfo = MetaFile(2, 123, hashes) - - self.assertNotEqual( - snapshot.signed.meta['role1.json'].to_dict(), fileinfo.to_dict() - ) - snapshot.signed.update('role1', fileinfo) - self.assertEqual( - snapshot.signed.meta['role1.json'].to_dict(), fileinfo.to_dict() - ) - - - def test_metadata_timestamp(self): - timestamp_path = os.path.join( - self.repo_dir, 'metadata', 'timestamp.json') - timestamp = Metadata[Timestamp].from_file(timestamp_path) - - self.assertEqual(timestamp.signed.version, 1) - timestamp.signed.bump_version() - self.assertEqual(timestamp.signed.version, 2) - - self.assertEqual(timestamp.signed.expires, datetime(2030, 1, 1, 0, 0)) - timestamp.signed.bump_expiration() - self.assertEqual(timestamp.signed.expires, datetime(2030, 1, 2, 0, 0)) - timestamp.signed.bump_expiration(timedelta(days=365)) - self.assertEqual(timestamp.signed.expires, datetime(2031, 1, 2, 0, 0)) - - # Test whether dateutil.relativedelta works, this provides a much - # easier to use interface for callers - delta = relativedelta(days=1) - timestamp.signed.bump_expiration(delta) - self.assertEqual(timestamp.signed.expires, datetime(2031, 1, 3, 0, 0)) - delta = relativedelta(years=5) - timestamp.signed.bump_expiration(delta) - self.assertEqual(timestamp.signed.expires, datetime(2036, 1, 3, 0, 0)) - - # Create a MetaFile instance representing what we expect - # the updated data to be. 
- hashes = {'sha256': '0ae9664468150a9aa1e7f11feecb32341658eb84292851367fea2da88e8a58dc'} - fileinfo = MetaFile(2, 520, hashes) - - self.assertNotEqual( - timestamp.signed.meta['snapshot.json'].to_dict(), fileinfo.to_dict() - ) - timestamp.signed.update(fileinfo) - self.assertEqual( - timestamp.signed.meta['snapshot.json'].to_dict(), fileinfo.to_dict() - ) - - - def test_metadata_verify_delegate(self): - root_path = os.path.join(self.repo_dir, 'metadata', 'root.json') + def test_metadata_verify_delegate(self) -> None: + root_path = os.path.join(self.repo_dir, "metadata", "root.json") root = Metadata[Root].from_file(root_path) - snapshot_path = os.path.join( - self.repo_dir, 'metadata', 'snapshot.json') + snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json") snapshot = Metadata[Snapshot].from_file(snapshot_path) - targets_path = os.path.join( - self.repo_dir, 'metadata', 'targets.json') + targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") targets = Metadata[Targets].from_file(targets_path) - role1_path = os.path.join( - self.repo_dir, 'metadata', 'role1.json') + role1_path = os.path.join(self.repo_dir, "metadata", "role1.json") role1 = Metadata[Targets].from_file(role1_path) - role2_path = os.path.join( - self.repo_dir, 'metadata', 'role2.json') + role2_path = os.path.join(self.repo_dir, "metadata", "role2.json") role2 = Metadata[Targets].from_file(role2_path) # test the expected delegation tree - root.verify_delegate('root', root) - root.verify_delegate('snapshot', snapshot) - root.verify_delegate('targets', targets) - targets.verify_delegate('role1', role1) - role1.verify_delegate('role2', role2) + root.verify_delegate(Root.type, root) + root.verify_delegate(Snapshot.type, snapshot) + root.verify_delegate(Targets.type, targets) + targets.verify_delegate("role1", role1) + role1.verify_delegate("role2", role2) # only root and targets can verify delegates with self.assertRaises(TypeError): - snapshot.verify_delegate('snapshot', snapshot) + snapshot.verify_delegate(Snapshot.type, snapshot) # verify fails for roles that are not delegated by delegator with self.assertRaises(ValueError): - root.verify_delegate('role1', role1) + root.verify_delegate("role1", role1) with self.assertRaises(ValueError): - targets.verify_delegate('targets', targets) + targets.verify_delegate(Targets.type, targets) # verify fails when delegator has no delegations with self.assertRaises(ValueError): - role2.verify_delegate('role1', role1) + role2.verify_delegate("role1", role1) # verify fails when delegate content is modified expires = snapshot.signed.expires - snapshot.signed.bump_expiration() + snapshot.signed.expires = expires + timedelta(days=1) with self.assertRaises(exceptions.UnsignedMetadataError): - root.verify_delegate('snapshot', snapshot) + root.verify_delegate(Snapshot.type, snapshot) snapshot.signed.expires = expires # verify fails if roles keys do not sign the metadata with self.assertRaises(exceptions.UnsignedMetadataError): - root.verify_delegate('timestamp', snapshot) + root.verify_delegate(Timestamp.type, snapshot) # Add a key to snapshot role, make sure the new sig fails to verify - ts_keyid = next(iter(root.signed.roles["timestamp"].keyids)) - root.signed.add_key("snapshot", root.signed.keys[ts_keyid]) - snapshot.signatures[ts_keyid] = Signature(ts_keyid, "ff"*64) + ts_keyid = next(iter(root.signed.roles[Timestamp.type].keyids)) + root.signed.add_key(Snapshot.type, root.signed.keys[ts_keyid]) + snapshot.signatures[ts_keyid] = Signature(ts_keyid, "ff" * 64) # 
verify succeeds if threshold is reached even if some signatures
         # fail to verify
-        root.verify_delegate('snapshot', snapshot)
+        root.verify_delegate(Snapshot.type, snapshot)
 
         # verify fails if threshold of signatures is not reached
-        root.signed.roles['snapshot'].threshold = 2
+        root.signed.roles[Snapshot.type].threshold = 2
         with self.assertRaises(exceptions.UnsignedMetadataError):
-            root.verify_delegate('snapshot', snapshot)
+            root.verify_delegate(Snapshot.type, snapshot)
 
         # verify succeeds when we correct the new signature and reach the
         # threshold of 2 keys
-        snapshot.sign(SSlibSigner(self.keystore['timestamp']), append=True)
-        root.verify_delegate('snapshot', snapshot)
-
+        snapshot.sign(SSlibSigner(self.keystore[Timestamp.type]), append=True)
+        root.verify_delegate(Snapshot.type, snapshot)
 
-    def test_key_class(self):
+    def test_key_class(self) -> None:
         # Test if from_securesystemslib_key removes the private key from keyval
         # of a securesystemslib key dictionary.
         sslib_key = generate_ed25519_key()
         key = Key.from_securesystemslib_key(sslib_key)
-        self.assertFalse('private' in key.keyval.keys())
+        self.assertFalse("private" in key.keyval.keys())
 
+        # Test raising ValueError with non-existent keytype
+        sslib_key["keytype"] = "bad keytype"
+        with self.assertRaises(ValueError):
+            Key.from_securesystemslib_key(sslib_key)
 
-    def test_root_add_key_and_remove_key(self):
-        root_path = os.path.join(
-            self.repo_dir, 'metadata', 'root.json')
+    def test_root_add_key_and_remove_key(self) -> None:
+        root_path = os.path.join(self.repo_dir, "metadata", "root.json")
         root = Metadata[Root].from_file(root_path)
 
         # Create a new key
-        root_key2 =  import_ed25519_publickey_from_file(
-            os.path.join(self.keystore_dir, 'root_key2.pub'))
-        keyid = root_key2['keyid']
-        key_metadata = Key(keyid, root_key2['keytype'], root_key2['scheme'],
-            root_key2['keyval'])
+        root_key2 = import_ed25519_publickey_from_file(
+            os.path.join(self.keystore_dir, "root_key2.pub")
+        )
+        keyid = root_key2["keyid"]
+        key_metadata = Key(
+            keyid,
+            root_key2["keytype"],
+            root_key2["scheme"],
+            root_key2["keyval"],
+        )
 
         # Assert that root does not contain the new key
-        self.assertNotIn(keyid, root.signed.roles['root'].keyids)
+        self.assertNotIn(keyid, root.signed.roles[Root.type].keyids)
         self.assertNotIn(keyid, root.signed.keys)
 
         # Add new root key
-        root.signed.add_key('root', key_metadata)
+        root.signed.add_key(Root.type, key_metadata)
 
         # Assert that key is added
-        self.assertIn(keyid, root.signed.roles['root'].keyids)
+        self.assertIn(keyid, root.signed.roles[Root.type].keyids)
         self.assertIn(keyid, root.signed.keys)
 
         # Confirm that the newly added key does not break
@@ -459,27 +421,34 @@ def test_root_add_key_and_remove_key(self):
         root.to_dict()
 
         # Try adding the same key again and assert it's ignored.
-        pre_add_keyid = root.signed.roles['root'].keyids.copy()
-        root.signed.add_key('root', key_metadata)
-        self.assertEqual(pre_add_keyid, root.signed.roles['root'].keyids)
+        pre_add_keyid = root.signed.roles[Root.type].keyids.copy()
+        root.signed.add_key(Root.type, key_metadata)
+        self.assertEqual(pre_add_keyid, root.signed.roles[Root.type].keyids)
 
         # Add the same key to targets role as well
-        root.signed.add_key('targets', key_metadata)
+        root.signed.add_key(Targets.type, key_metadata)
+
+        # Add the same key to a nonexistent role.
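
A condensed, hypothetical sketch of the Root key management calls exercised above; the keyid and key value here are made up, and "root.json" is a placeholder path:

# Sketch of Root.add_key / Root.remove_key as used above; the key fields are
# hypothetical placeholders, not values from repository_data.
from tuf.api.metadata import Key, Metadata, Root

root_md = Metadata[Root].from_file("root.json")        # placeholder path
key = Key("abcd1234", "ed25519", "ed25519", {"public": "deadbeef"})  # made-up key

root_md.signed.add_key(Root.type, key)            # lists keyid for the role, stores key
root_md.signed.add_key(Root.type, key)            # adding the same key again is a no-op
root_md.signed.remove_key(Root.type, key.keyid)   # ValueError for unknown role or keyid
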
+ with self.assertRaises(ValueError): + root.signed.add_key("nosuchrole", key_metadata) # Remove the key from root role (targets role still uses it) - root.signed.remove_key('root', keyid) - self.assertNotIn(keyid, root.signed.roles['root'].keyids) + root.signed.remove_key(Root.type, keyid) + self.assertNotIn(keyid, root.signed.roles[Root.type].keyids) self.assertIn(keyid, root.signed.keys) # Remove the key from targets as well - root.signed.remove_key('targets', keyid) - self.assertNotIn(keyid, root.signed.roles['targets'].keyids) + root.signed.remove_key(Targets.type, keyid) + self.assertNotIn(keyid, root.signed.roles[Targets.type].keyids) self.assertNotIn(keyid, root.signed.keys) - with self.assertRaises(KeyError): - root.signed.remove_key('root', 'nosuchkey') + with self.assertRaises(ValueError): + root.signed.remove_key(Root.type, "nosuchkey") + with self.assertRaises(ValueError): + root.signed.remove_key("nosuchrole", keyid) - def test_is_target_in_pathpattern(self): + def test_is_target_in_pathpattern(self) -> None: + # pylint: disable=protected-access supported_use_cases = [ ("foo.tgz", "foo.tgz"), ("foo.tgz", "*"), @@ -499,73 +468,107 @@ def test_is_target_in_pathpattern(self): invalid_use_cases = [ ("targets/foo.tgz", "*.tgz"), - ("/foo.tgz", "*.tgz",), + ("/foo.tgz", "*.tgz"), ("targets/foo.tgz", "*"), ("foo-version-alpha.tgz", "foo-version-?.tgz"), ("foo//bar", "*/bar"), - ("foo/bar", "f?/bar") + ("foo/bar", "f?/bar"), ] for targetpath, pathpattern in invalid_use_cases: self.assertFalse( DelegatedRole._is_target_in_pathpattern(targetpath, pathpattern) ) + def test_targets_key_api(self) -> None: + targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") + targets: Targets = Metadata[Targets].from_file(targets_path).signed + + # Add a new delegated role "role2" in targets + delegated_role = DelegatedRole.from_dict( + { + "keyids": [], + "name": "role2", + "paths": ["fn3", "fn4"], + "terminating": False, + "threshold": 1, + } + ) + assert isinstance(targets.delegations, Delegations) + targets.delegations.roles["role2"] = delegated_role + + key_dict = { + "keytype": "ed25519", + "keyval": { + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519", + } + key = Key.from_dict("id2", key_dict) - def test_delegation_class(self): - # empty keys and roles - delegations_dict = {"keys":{}, "roles":[]} - delegations = Delegations.from_dict(delegations_dict.copy()) - self.assertEqual(delegations_dict, delegations.to_dict()) + # Assert that delegated role "role1" does not contain the new key + self.assertNotIn(key.keyid, targets.delegations.roles["role1"].keyids) + targets.add_key("role1", key) - # Test some basic missing or broken input - invalid_delegations_dicts = [ - {}, - {"keys":None, "roles":None}, - {"keys":{"foo":0}, "roles":[]}, - {"keys":{}, "roles":["foo"]}, - ] - for d in invalid_delegations_dicts: - with self.assertRaises((KeyError, AttributeError)): - Delegations.from_dict(d) + # Assert that the new key is added to the delegated role "role1" + self.assertIn(key.keyid, targets.delegations.roles["role1"].keyids) - def test_metadata_targets(self): - targets_path = os.path.join( - self.repo_dir, 'metadata', 'targets.json') - targets = Metadata[Targets].from_file(targets_path) + # Confirm that the newly added key does not break the obj serialization + targets.to_dict() - # Create a fileinfo dict representing what we expect the updated data to be - filename = 'file2.txt' - hashes = { - "sha256": 
"141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b", - "sha512": "ef5beafa16041bcdd2937140afebd485296cd54f7348ecd5a4d035c09759608de467a7ac0eb58753d0242df873c305e8bffad2454aa48f44480f15efae1cacd0" - } + # Try adding the same key again and assert its ignored. + past_keyid = targets.delegations.roles["role1"].keyids.copy() + targets.add_key("role1", key) + self.assertEqual(past_keyid, targets.delegations.roles["role1"].keyids) - fileinfo = TargetFile(length=28, hashes=hashes, path=filename) + # Try adding a key to a delegated role that doesn't exists + with self.assertRaises(ValueError): + targets.add_key("nosuchrole", key) - # Assert that data is not aleady equal - self.assertNotEqual( - targets.signed.targets[filename].to_dict(), fileinfo.to_dict() - ) - # Update an already existing fileinfo - targets.signed.update(fileinfo) - # Verify that data is updated - self.assertEqual( - targets.signed.targets[filename].to_dict(), fileinfo.to_dict() - ) + # Add the same key to "role2" as well + targets.add_key("role2", key) + # Remove the key from "role1" role ("role2" still uses it) + targets.remove_key("role1", key.keyid) - def test_length_and_hash_validation(self): + # Assert that delegated role "role1" doesn't contain the key. + self.assertNotIn(key.keyid, targets.delegations.roles["role1"].keyids) + self.assertIn(key.keyid, targets.delegations.roles["role2"].keyids) + + # Remove the key from "role2" as well + targets.remove_key("role2", key.keyid) + self.assertNotIn(key.keyid, targets.delegations.roles["role2"].keyids) + + # Try remove key not used by "role1" + with self.assertRaises(ValueError): + targets.remove_key("role1", key.keyid) + + # Try removing a key from delegated role that doesn't exists + with self.assertRaises(ValueError): + targets.remove_key("nosuchrole", key.keyid) + + # Remove delegations as a whole + targets.delegations = None + # Test that calling add_key and remove_key throws an error + # and that delegations is still None after each of the api calls + with self.assertRaises(ValueError): + targets.add_key("role1", key) + self.assertTrue(targets.delegations is None) + with self.assertRaises(ValueError): + targets.remove_key("role1", key.keyid) + self.assertTrue(targets.delegations is None) + + def test_length_and_hash_validation(self) -> None: # Test metadata files' hash and length verification. # Use timestamp to get a MetaFile object and snapshot # for untrusted metadata file to verify. 
timestamp_path = os.path.join( - self.repo_dir, 'metadata', 'timestamp.json') + self.repo_dir, "metadata", "timestamp.json" + ) timestamp = Metadata[Timestamp].from_file(timestamp_path) - snapshot_metafile = timestamp.signed.meta["snapshot.json"] + snapshot_metafile = timestamp.signed.snapshot_meta - snapshot_path = os.path.join( - self.repo_dir, 'metadata', 'snapshot.json') + snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json") with open(snapshot_path, "rb") as file: # test with data as a file object @@ -578,36 +581,37 @@ def test_length_and_hash_validation(self): # test exceptions expected_length = snapshot_metafile.length snapshot_metafile.length = 2345 - self.assertRaises(exceptions.LengthOrHashMismatchError, - snapshot_metafile.verify_length_and_hashes, data) + with self.assertRaises(exceptions.LengthOrHashMismatchError): + snapshot_metafile.verify_length_and_hashes(data) snapshot_metafile.length = expected_length - snapshot_metafile.hashes = {'sha256': 'incorrecthash'} - self.assertRaises(exceptions.LengthOrHashMismatchError, - snapshot_metafile.verify_length_and_hashes, data) + snapshot_metafile.hashes = {"sha256": "incorrecthash"} + with self.assertRaises(exceptions.LengthOrHashMismatchError): + snapshot_metafile.verify_length_and_hashes(data) - snapshot_metafile.hashes = {'unsupported-alg': "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab"} - self.assertRaises(exceptions.LengthOrHashMismatchError, - snapshot_metafile.verify_length_and_hashes, data) + snapshot_metafile.hashes = { + "unsupported-alg": "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" + } + with self.assertRaises(exceptions.LengthOrHashMismatchError): + snapshot_metafile.verify_length_and_hashes(data) # Test wrong algorithm format (sslib.FormatError) - snapshot_metafile.hashes = { 256: "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab"} - self.assertRaises(exceptions.LengthOrHashMismatchError, - snapshot_metafile.verify_length_and_hashes, data) + snapshot_metafile.hashes = { + 256: "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" # type: ignore[dict-item] + } + with self.assertRaises(exceptions.LengthOrHashMismatchError): + snapshot_metafile.verify_length_and_hashes(data) # test optional length and hashes snapshot_metafile.length = None snapshot_metafile.hashes = None snapshot_metafile.verify_length_and_hashes(data) - # Test target files' hash and length verification - targets_path = os.path.join( - self.repo_dir, 'metadata', 'targets.json') + targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") targets = Metadata[Targets].from_file(targets_path) - file1_targetfile = targets.signed.targets['file1.txt'] - filepath = os.path.join( - self.repo_dir, 'targets', 'file1.txt') + file1_targetfile = targets.signed.targets["file1.txt"] + filepath = os.path.join(self.repo_dir, Targets.type, "file1.txt") with open(filepath, "rb") as file1: file1_targetfile.verify_length_and_hashes(file1) @@ -615,15 +619,60 @@ def test_length_and_hash_validation(self): # test exceptions expected_length = file1_targetfile.length file1_targetfile.length = 2345 - self.assertRaises(exceptions.LengthOrHashMismatchError, - file1_targetfile.verify_length_and_hashes, file1) + with self.assertRaises(exceptions.LengthOrHashMismatchError): + file1_targetfile.verify_length_and_hashes(file1) file1_targetfile.length = expected_length - file1_targetfile.hashes = {'sha256': 'incorrecthash'} - self.assertRaises(exceptions.LengthOrHashMismatchError, - 
file1_targetfile.verify_length_and_hashes, file1) + file1_targetfile.hashes = {"sha256": "incorrecthash"} + with self.assertRaises(exceptions.LengthOrHashMismatchError): + file1_targetfile.verify_length_and_hashes(file1) + + def test_targetfile_from_file(self) -> None: + # Test with an existing file and valid hash algorithm + file_path = os.path.join(self.repo_dir, Targets.type, "file1.txt") + targetfile_from_file = TargetFile.from_file( + file_path, file_path, ["sha256"] + ) + + with open(file_path, "rb") as file: + targetfile_from_file.verify_length_and_hashes(file) + + # Test with a non-existing file + file_path = os.path.join(self.repo_dir, Targets.type, "file123.txt") + with self.assertRaises(FileNotFoundError): + TargetFile.from_file( + file_path, file_path, [sslib_hash.DEFAULT_HASH_ALGORITHM] + ) + + # Test with an unsupported algorithm + file_path = os.path.join(self.repo_dir, Targets.type, "file1.txt") + with self.assertRaises(ValueError): + TargetFile.from_file(file_path, file_path, ["123"]) + + def test_targetfile_custom(self) -> None: + # Test creating TargetFile and accessing custom. + targetfile = TargetFile( + 100, {"sha256": "abc"}, "file.txt", {"custom": "foo"} + ) + self.assertEqual(targetfile.custom, "foo") + + def test_targetfile_from_data(self) -> None: + data = b"Inline test content" + target_file_path = os.path.join( + self.repo_dir, Targets.type, "file1.txt" + ) - def test_is_delegated_role(self): + # Test with a valid hash algorithm + targetfile_from_data = TargetFile.from_data( + target_file_path, data, ["sha256"] + ) + targetfile_from_data.verify_length_and_hashes(data) + + # Test with no algorithms specified + targetfile_from_data = TargetFile.from_data(target_file_path, data) + targetfile_from_data.verify_length_and_hashes(data) + + def test_is_delegated_role(self) -> None: # test path matches # see more extensive tests in test_is_target_in_pathpattern() for paths in [ @@ -646,7 +695,8 @@ def test_is_delegated_role(self): self.assertFalse(role.is_delegated_path("a/non-matching path")) self.assertTrue(role.is_delegated_path("a/path")) + # Run unit test. -if __name__ == '__main__': +if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() diff --git a/tests/test_arbitrary_package_attack.py b/tests/test_arbitrary_package_attack.py deleted file mode 100755 index 02b4c24900..0000000000 --- a/tests/test_arbitrary_package_attack.py +++ /dev/null @@ -1,287 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_arbitrary_package_attack.py - - - Konstantin Andrianov. - - - February 22, 2012. - - March 21, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate an arbitrary package attack, where an updater client attempts to - download a malicious file. TUF and non-TUF client scenarios are tested. - - There is no difference between 'updates' and 'target' files. 
-""" - -import os -import tempfile -import shutil -import json -import logging -import unittest -import sys -from urllib import request - -import tuf -import tuf.formats -import tuf.roledb -import tuf.keydb -import tuf.log -import tuf.client.updater as updater -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) - - -class TestArbitraryPackageAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.client_directory = os.path.join(temporary_repository_root, 'client') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - - # Set the url prefix required by the 'tuf/client/updater.py' updater. - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repository_directory' with the temporary client - # directory copied from the original repository files. 
- tuf.settings.repositories_directory = self.client_directory - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Create the repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - - - - def tearDown(self): - # updater.Updater() populates the roledb with the name "test_repository1" - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Logs stdout and stderr from the sever subprocess. - self.server_process_handler.flush_log() - - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - - def test_without_tuf(self): - # Verify that a target file replaced with a malicious version is downloaded - # by a non-TUF client (i.e., a non-TUF client that does not verify hashes, - # detect mix-and-mix attacks, etc.) A tuf client, on the other hand, should - # detect that the downloaded target file is invalid. - - # Test: Download a valid target file from the repository. - # Ensure the target file to be downloaded has not already been downloaded, - # and generate its file size and digest. The file size and digest is needed - # to check that the malicious file was indeed downloaded. - target_path = os.path.join(self.repository_directory, 'targets', 'file1.txt') - client_target_path = os.path.join(self.client_directory, 'file1.txt') - self.assertFalse(os.path.exists(client_target_path)) - length, hashes = securesystemslib.util.get_file_details(target_path) - fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'targets', 'file1.txt') - - # On Windows, the URL portion should not contain back slashes. - request.urlretrieve(url_file.replace('\\', '/'), client_target_path) - - self.assertTrue(os.path.exists(client_target_path)) - length, hashes = securesystemslib.util.get_file_details(client_target_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - self.assertEqual(fileinfo, download_fileinfo) - - # Test: Download a target file that has been modified by an attacker. - with open(target_path, 'wt') as file_object: - file_object.write('add malicious content.') - length, hashes = securesystemslib.util.get_file_details(target_path) - malicious_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - # On Windows, the URL portion should not contain back slashes. - request.urlretrieve(url_file.replace('\\', '/'), client_target_path) - - length, hashes = securesystemslib.util.get_file_details(client_target_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - # Verify 'download_fileinfo' is unequal to the original trusted version. - self.assertNotEqual(download_fileinfo, fileinfo) - - # Verify 'download_fileinfo' is equal to the malicious version. - self.assertEqual(download_fileinfo, malicious_fileinfo) - - - - def test_with_tuf(self): - # Verify that a target file (on the remote repository) modified by an - # attacker is not downloaded by the TUF client. - # First test that the valid target file is successfully downloaded. 
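
For orientation while reading these deletions: the legacy tuf.client.updater flow that this commit removes ran roughly as follows. This is a hedged sketch, with mirror settings and paths modeled as placeholders on the deleted setUp above:

# Legacy (pre-1.0) client flow, removed by this commit; values are placeholders.
import tuf.client.updater as updater
import tuf.settings

tuf.settings.repositories_directory = "client"    # local metadata cache directory
mirrors = {"mirror1": {"url_prefix": "http://localhost:8001",
                       "metadata_path": "metadata",
                       "targets_path": "targets"}}
repository_updater = updater.Updater("test_repository1", mirrors)

repository_updater.refresh()                      # update top-level metadata
info = repository_updater.get_one_valid_targetinfo("file1.txt")
repository_updater.download_target(info, ".")     # hash/length-verified download
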
- file1_fileinfo = self.repository_updater.get_one_valid_targetinfo('file1.txt') - destination = os.path.join(self.client_directory) - self.repository_updater.download_target(file1_fileinfo, destination) - client_target_path = os.path.join(destination, 'file1.txt') - self.assertTrue(os.path.exists(client_target_path)) - - # Modify 'file1.txt' and confirm that the TUF client rejects it. - target_path = os.path.join(self.repository_directory, 'targets', 'file1.txt') - with open(target_path, 'wt') as file_object: - file_object.write('malicious content, size 33 bytes.') - - try: - self.repository_updater.download_target(file1_fileinfo, destination) - - except tuf.exceptions.NoWorkingMirrorError as exception: - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'targets', 'file1.txt') - - # Verify that only one exception is raised for 'url_file'. - self.assertTrue(len(exception.mirror_errors), 1) - - # Verify that the expected 'tuf.exceptions.DownloadLengthMismatchError' exception - # is raised for 'url_file'. - self.assertTrue(url_file.replace('\\', '/') in exception.mirror_errors) - self.assertTrue( - isinstance(exception.mirror_errors[url_file.replace('\\', '/')], - securesystemslib.exceptions.BadHashError)) - - else: - self.fail('TUF did not prevent an arbitrary package attack.') - - - def test_with_tuf_and_metadata_tampering(self): - # Test that a TUF client does not download a malicious target file, and a - # 'targets.json' metadata file that has also been modified by the attacker. - # The attacker does not attach a valid signature to 'targets.json' - - # An attacker modifies 'file1.txt'. - target_path = os.path.join(self.repository_directory, 'targets', 'file1.txt') - with open(target_path, 'wt') as file_object: - file_object.write('malicious content, size 33 bytes.') - - # An attacker also tries to add the malicious target's length and digest - # to its metadata file. - length, hashes = securesystemslib.util.get_file_details(target_path) - - metadata_path = \ - os.path.join(self.repository_directory, 'metadata', 'targets.json') - - metadata = securesystemslib.util.load_json_file(metadata_path) - metadata['signed']['targets']['file1.txt']['hashes'] = hashes - metadata['signed']['targets']['file1.txt']['length'] = length - - tuf.formats.check_signable_object_format(metadata) - - with open(metadata_path, 'wb') as file_object: - file_object.write(json.dumps(metadata, indent=1, - separators=(',', ': '), sort_keys=True).encode('utf-8')) - - # Verify that the malicious 'targets.json' is not downloaded. Perform - # a refresh of top-level metadata to demonstrate that the malicious - # 'targets.json' is not downloaded. - try: - self.repository_updater.refresh() - file1_fileinfo = self.repository_updater.get_one_valid_targetinfo('file1.txt') - destination = os.path.join(self.client_directory) - self.repository_updater.download_target(file1_fileinfo, destination) - - except tuf.exceptions.NoWorkingMirrorError as exception: - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'targets', 'file1.txt') - - # Verify that an exception raised for only the malicious 'url_file'. - self.assertTrue(len(exception.mirror_errors), 1) - - # Verify that the specific and expected mirror exception is raised. 
- self.assertTrue(url_file.replace('\\', '/') in exception.mirror_errors) - self.assertTrue( - isinstance(exception.mirror_errors[url_file.replace('\\', '/')], - securesystemslib.exceptions.BadHashError)) - - else: - self.fail('TUF did not prevent an arbitrary package attack.') - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_developer_tool.py b/tests/test_developer_tool.py deleted file mode 100755 index d066964437..0000000000 --- a/tests/test_developer_tool.py +++ /dev/null @@ -1,428 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_developer_tool.py. - - - Santiago Torres Arias - Zane Fisher - - - January 22, 2014. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for the 'developer_tool.py' module. -""" - -import os -import unittest -import logging -import tempfile -import shutil -import sys - -import tuf -import tuf.log -import tuf.roledb -import tuf.keydb -import tuf.developer_tool as developer_tool -import tuf.exceptions - -import securesystemslib -import securesystemslib.exceptions - -from tuf.developer_tool import METADATA_DIRECTORY_NAME -from tuf.developer_tool import TARGETS_DIRECTORY_NAME - -from tests import utils - -logger = logging.getLogger(__name__) - -developer_tool.disable_console_log_messages() - -class TestProject(unittest.TestCase): - - tmp_dir = None - - @classmethod - def setUpClass(cls): - cls.tmp_dir = tempfile.mkdtemp(dir = os.getcwd()) - - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.tmp_dir) - - - def tearDown(self): - # called after every test case - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - - def test_create_new_project(self): - # Test cases for the create_new_project function. In this test we will - # check input, correct file creation and format. We also check - # that a proper object is generated. We will use the normal layout for this - # test suite. - - # Create a local subfolder for this test. - local_tmp = tempfile.mkdtemp(dir = self.tmp_dir) - - # These are the usual values we will be throwing to the function, however - # we will swap these for nulls or malformed values every now and then to - # test input. - project_name = 'test_suite' - metadata_directory = local_tmp - location_in_repository = '/prefix' - targets_directory = None - key = None - - # Create a blank project. - project = developer_tool.create_new_project(project_name, metadata_directory, - location_in_repository) - - self.assertTrue(isinstance(project, developer_tool.Project)) - self.assertTrue(project.layout_type == 'repo-like') - self.assertTrue(project.prefix == location_in_repository) - self.assertTrue(project.project_name == project_name) - self.assertTrue(project.metadata_directory == - os.path.join(metadata_directory,METADATA_DIRECTORY_NAME)) - self.assertTrue(project.targets_directory == - os.path.join(metadata_directory,TARGETS_DIRECTORY_NAME)) - - # Create a blank project without a prefix. 
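Every arbitrary-package check in the attack tests above reduces to the same client-side comparison: the length and hashes recorded in trusted, signed metadata must match the bytes that were actually fetched. A minimal sketch of that comparison (hypothetical helper, not the updater's actual code path):

import hashlib

def verify_download(data, trusted_fileinfo):
    # Compare the byte count first (cheap, catches endless-data padding)...
    if len(data) != trusted_fileinfo['length']:
        raise ValueError('length mismatch')
    # ...then every digest recorded in the trusted metadata must match.
    for algo, expected in trusted_fileinfo['hashes'].items():
        if hashlib.new(algo, data).hexdigest() != expected:
            raise ValueError(algo + ' hash mismatch')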
- project = developer_tool.create_new_project(project_name, metadata_directory) - self.assertTrue(isinstance(project, developer_tool.Project)) - self.assertTrue(project.layout_type == 'repo-like') - self.assertTrue(project.prefix == '') - self.assertTrue(project.project_name == project_name) - self.assertTrue(project.metadata_directory == - os.path.join(metadata_directory,METADATA_DIRECTORY_NAME)) - self.assertTrue(project.targets_directory == - os.path.join(metadata_directory,TARGETS_DIRECTORY_NAME)) - - # Create a blank project without a valid metadata directory. - self.assertRaises(securesystemslib.exceptions.FormatError, developer_tool.create_new_project, - 0, metadata_directory, location_in_repository) - self.assertRaises(securesystemslib.exceptions.FormatError, developer_tool.create_new_project, - project_name, 0, location_in_repository) - self.assertRaises(securesystemslib.exceptions.FormatError, developer_tool.create_new_project, - project_name, metadata_directory, 0) - - - # Create a new project with a flat layout. - targets_directory = tempfile.mkdtemp(dir = local_tmp) - metadata_directory = tempfile.mkdtemp(dir = local_tmp) - project = developer_tool.create_new_project(project_name, metadata_directory, - location_in_repository, targets_directory) - self.assertTrue(isinstance(project, developer_tool.Project)) - self.assertTrue(project.layout_type == 'flat') - self.assertTrue(project.prefix == location_in_repository) - self.assertTrue(project.project_name == project_name) - self.assertTrue(project.metadata_directory == metadata_directory) - self.assertTrue(project.targets_directory == targets_directory) - - # Finally, check that if targets_directory is set, it is valid. - self.assertRaises(securesystemslib.exceptions.FormatError, developer_tool.create_new_project, - project_name, metadata_directory, location_in_repository, 0) - - # Copy a key to our workspace and create a new project with it. - keystore_path = os.path.join('repository_data','keystore') - - # I will use the same key as the one provided in the repository - # tool tests for the root role, but this is not a root role... - root_key_path = os.path.join(keystore_path,'root_key.pub') - project_key = developer_tool.import_rsa_publickey_from_file(root_key_path) - - # Test create new project with a key added by default. - project = developer_tool.create_new_project(project_name, metadata_directory, - location_in_repository, targets_directory, project_key) - - self.assertTrue(isinstance(project, developer_tool.Project)) - self.assertTrue(project.layout_type == 'flat') - self.assertTrue(project.prefix == location_in_repository) - self.assertTrue(project.project_name == project_name) - self.assertTrue(project.metadata_directory == metadata_directory) - self.assertTrue(project.targets_directory == targets_directory) - self.assertTrue(len(project.keys) == 1) - self.assertTrue(project.keys[0] == project_key['keyid']) - - # Try to write to an invalid location. The OSError should be re-raised by - # create_new_project(). 
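The 'repo-like' and 'flat' assertions above follow one simple rule: when no separate targets directory is supplied, both metadata and targets are placed under the single given directory. A sketch of that resolution logic, with constant values assumed for the sake of self-containment:

import os

# These mirror the names imported from tuf.developer_tool above; the values
# here are assumptions for a self-contained sketch.
METADATA_DIRECTORY_NAME = 'metadata'
TARGETS_DIRECTORY_NAME = 'targets'

def resolve_layout(metadata_directory, targets_directory=None):
    # 'repo-like': one root directory holding both subdirectories.
    if targets_directory is None:
        return ('repo-like',
                os.path.join(metadata_directory, METADATA_DIRECTORY_NAME),
                os.path.join(metadata_directory, TARGETS_DIRECTORY_NAME))
    # 'flat': caller supplies independent metadata and targets directories.
    return ('flat', metadata_directory, targets_directory)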
- shutil.rmtree(targets_directory) - tuf.roledb.clear_roledb() - tuf.keydb.clear_keydb() - - metadata_directory = '/' - valid_metadata_directory_name = developer_tool.METADATA_DIRECTORY_NAME - developer_tool.METADATA_DIRECTORY_NAME = '/' - - try: - developer_tool.create_new_project(project_name, metadata_directory, - location_in_repository, targets_directory, project_key) - - except (OSError, tuf.exceptions.RepositoryError): - pass - - developer_tool.METADATA_DIRECTORY_NAME = valid_metadata_directory_name - - - - def test_load_project(self): - # This test case will first load an existing project and verify the loaded - # object. It will next try to load a nonexistent project and expect the - # correct error to be raised. Finally, it will try to overwrite the - # existing prefix of the loaded project. - - # Create a local subfolder for this test. - local_tmp = tempfile.mkdtemp(dir = self.tmp_dir) - - # Test non-existent project filepath. - nonexistent_path = os.path.join(local_tmp, 'nonexistent') - self.assertRaises(securesystemslib.exceptions.StorageError, - developer_tool.load_project, nonexistent_path) - - # Copy the pregenerated metadata. - project_data_filepath = os.path.join('repository_data', 'project') - target_project_data_filepath = os.path.join(local_tmp, 'project') - shutil.copytree('repository_data/project', target_project_data_filepath) - - # Properly load a project. - repo_filepath = os.path.join(local_tmp, 'project', 'test-flat') - new_targets_path = os.path.join(local_tmp, 'project', 'targets') - project = developer_tool.load_project(repo_filepath, - new_targets_location = new_targets_path) - self.assertTrue(project._targets_directory == new_targets_path) - self.assertTrue(project.layout_type == 'flat') - - # Load a project overwriting the prefix. - project = developer_tool.load_project(repo_filepath, prefix='new') - self.assertTrue(project.prefix == 'new') - - # Load a project with a corrupted metadata file. - file_to_corrupt = os.path.join(repo_filepath, 'test-flat.json') - with open(file_to_corrupt, 'wt') as fp: - fp.write('this is not a json file') - - self.assertRaises(securesystemslib.exceptions.Error, developer_tool.load_project, repo_filepath) - - - - - def test_add_verification_keys(self): - # Create a new project instance. - project = developer_tool.Project('test_verification_keys', 'somepath', - 'someotherpath', 'prefix') - - # Add an invalid verification key. - self.assertRaises(securesystemslib.exceptions.FormatError, project.add_verification_key, 'invalid') - - # Add a verification key. - # - load it first - keystore_path = os.path.join('repository_data', 'keystore') - first_verification_key_path = os.path.join(keystore_path,'root_key.pub') - first_verification_key = \ - developer_tool.import_rsa_publickey_from_file(first_verification_key_path) - - project.add_verification_key(first_verification_key) - - - # Add another verification key (an exception is expected.) - second_verification_key_path = os.path.join(keystore_path, 'snapshot_key.pub') - second_verification_key = \ - developer_tool.import_ed25519_publickey_from_file(second_verification_key_path) - - self.assertRaises(securesystemslib.exceptions.Error, - project.add_verification_key,(second_verification_key)) - - - - # Add a verification key for the delegation. - project.delegate('somedelegation', [], []) - project('somedelegation').add_verification_key(first_verification_key) - project('somedelegation').add_verification_key(second_verification_key) - - - # Add another delegation of the delegation.
- project('somedelegation').delegate('somesubdelegation', [], []) - project('somesubdelegation').add_verification_key(first_verification_key) - project('somesubdelegation').add_verification_key(second_verification_key) - - - def test_write(self): - - # Create tmp directory. - local_tmp = tempfile.mkdtemp(dir=self.tmp_dir) - - # Create new project inside tmp directory. - project = developer_tool.create_new_project('new_project', local_tmp, - 'prefix') - - # Create a target file inside the tmp directory. - target_filepath = os.path.join(local_tmp, 'targets', 'test_target') - with open(target_filepath, 'wt') as fp: - fp.write('testing file') - - - # Add the target. - project.add_target(os.path.basename(target_filepath)) - - # Add verification keys. - keystore_path = os.path.join('repository_data', 'keystore') - project_key_path = os.path.join(keystore_path, 'root_key.pub') - project_key = \ - developer_tool.import_rsa_publickey_from_file(project_key_path) - - - # Call status (for the sake of doing it and to improve test coverage by - # executing its statements.) - project.status() - - project.add_verification_key(project_key) - - - # Load a verification key for the delegation. - delegation_key_path = os.path.join(keystore_path, 'snapshot_key.pub') - delegation_key = \ - developer_tool.import_ed25519_publickey_from_file(delegation_key_path) - - # Load a verification key for the subdelegation. - subdelegation_key_path = os.path.join(keystore_path, 'timestamp_key.pub') - subdelegation_key = \ - developer_tool.import_ed25519_publickey_from_file(subdelegation_key_path) - - # Add a delegation and a subdelegation. - project.delegate('delegation', [delegation_key], []) - project('delegation').delegate('subdelegation', [subdelegation_key], []) - - # Call write before any signing keys are loaded (an exception is expected). - self.assertRaises(securesystemslib.exceptions.Error, project.write, ()) - - # Call status (for the sake of doing it and executing its statements.) - project.status() - - # Load private keys. - project_private_key_path = os.path.join(keystore_path, 'root_key') - project_private_key = \ - developer_tool.import_rsa_privatekey_from_file(project_private_key_path, - 'password') - - delegation_private_key_path = os.path.join(keystore_path, 'snapshot_key') - delegation_private_key = \ - developer_tool.import_ed25519_privatekey_from_file(delegation_private_key_path, - 'password') - - subdelegation_private_key_path = \ - os.path.join(keystore_path, 'timestamp_key') - subdelegation_private_key = \ - developer_tool.import_ed25519_privatekey_from_file(subdelegation_private_key_path, - 'password') - - # Test partial write. - # backup everything (again) - # + backup targets. - targets_backup = project.target_files - - # + backup delegations. - delegations_backup = \ - tuf.roledb.get_delegated_rolenames(project.project_name) - - # + backup layout type. - layout_type_backup = project.layout_type - - # + backup keyids. - keys_backup = project.keys - delegation_keys_backup = project('delegation').keys - - # + backup the prefix. - prefix_backup = project.prefix - - # + backup the name. - name_backup = project.project_name - - # Write and reload. - self.assertRaises(securesystemslib.exceptions.Error, project.write) - project.write(write_partial=True) - - project = developer_tool.load_project(local_tmp) - - # Check against backup.
- self.assertEqual(list(project.target_files.keys()), list(targets_backup.keys())) - new_delegations = tuf.roledb.get_delegated_rolenames(project.project_name) - self.assertEqual(new_delegations, delegations_backup) - self.assertEqual(project.layout_type, layout_type_backup) - self.assertEqual(project.keys, keys_backup) - - self.assertEqual(project('delegation').keys, delegation_keys_backup) - - self.assertEqual(project.prefix, prefix_backup) - self.assertEqual(project.project_name, name_backup) - - roleinfo = tuf.roledb.get_roleinfo(project.project_name) - - self.assertEqual(roleinfo['partial_loaded'], True) - - - - # Load_signing_keys. - project('delegation').load_signing_key(delegation_private_key) - - project.status() - - project.load_signing_key(project_private_key) - - # Backup everything. - # + backup targets. - targets_backup = project.target_files - - # + backup delegations. - delegations_backup = \ - tuf.roledb.get_delegated_rolenames(project.project_name) - - # + backup layout type. - layout_type_backup = project.layout_type - - # + backup keyids - keys_backup = project.keys - delegation_keys_backup = project('delegation').keys - - # + backup the prefix. - prefix_backup = project.prefix - - # + backup the name. - name_backup = project.project_name - - # Call status (for the sake of doing it.) - project.status() - - # Call write. - project.write() - - # Call load. - project = developer_tool.load_project(local_tmp) - - - # Check against backup. - self.assertEqual(list(project.target_files.keys()), list(targets_backup.keys())) - - new_delegations = tuf.roledb.get_delegated_rolenames(project.project_name) - self.assertEqual(new_delegations, delegations_backup) - self.assertEqual(project.layout_type, layout_type_backup) - self.assertEqual(project.keys, keys_backup) - self.assertEqual(project('delegation').keys, delegation_keys_backup) - self.assertEqual(project.prefix, prefix_backup) - self.assertEqual(project.project_name, name_backup) - - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_download.py b/tests/test_download.py deleted file mode 100755 index 3b4572ccb0..0000000000 --- a/tests/test_download.py +++ /dev/null @@ -1,392 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_download.py - - - Konstantin Andrianov. - - - March 26, 2012. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'download.py'. - - NOTE: Make sure test_download.py is ran in 'tuf/tests/' directory. - Otherwise, module that launches simple server would not be found. - - TODO: Adopt the environment variable management from test_proxy_use.py here. -""" - -import hashlib -import logging -import os -import sys -import unittest -import urllib3 -import warnings - -import tuf -import tuf.download as download -import tuf.requests_fetcher -import tuf.log -import tuf.unittest_toolbox as unittest_toolbox -import tuf.exceptions - -from tests import utils - -import requests.exceptions - -import securesystemslib - -logger = logging.getLogger(__name__) - - -class TestDownload(unittest_toolbox.Modified_TestCase): - def setUp(self): - """ - Create a temporary file and launch a simple server in the - current working directory. - """ - - unittest_toolbox.Modified_TestCase.setUp(self) - - # Making a temporary file. 
- current_dir = os.getcwd() - target_filepath = self.make_temp_data_file(directory=current_dir) - self.target_fileobj = open(target_filepath, 'r') - self.target_data = self.target_fileobj.read() - self.target_data_length = len(self.target_data) - - # Launch a SimpleHTTPServer (serves files in the current dir). - self.server_process_handler = utils.TestServerProcess(log=logger) - - rel_target_filepath = os.path.basename(target_filepath) - self.url = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + '/' + rel_target_filepath - - # Computing hash of target file data. - m = hashlib.md5() - m.update(self.target_data.encode('utf-8')) - digest = m.hexdigest() - self.target_hash = {'md5':digest} - - # Initialize the default fetcher for the download. - self.fetcher = tuf.requests_fetcher.RequestsFetcher() - - - - # Stop server process and perform clean up. - def tearDown(self): - # Cleans the resources and flushes the logged lines (if any). - self.server_process_handler.clean() - - self.target_fileobj.close() - - # Remove temp directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - # Test: Normal case. - def test_download_url_to_tempfileobj(self): - - download_file = download.safe_download - with download_file(self.url, self.target_data_length, self.fetcher) as temp_fileobj: - temp_fileobj.seek(0) - temp_file_data = temp_fileobj.read().decode('utf-8') - self.assertEqual(self.target_data, temp_file_data) - self.assertEqual(self.target_data_length, len(temp_file_data)) - - - # Test: Download a URL in more than one chunk. - def test_download_url_in_chunks(self): - - # Set a smaller chunk size to ensure that the file will be downloaded - # in more than one chunk. - default_chunk_size = tuf.settings.CHUNK_SIZE - tuf.settings.CHUNK_SIZE = 4 - # We don't have access to the chunks from download_file(), - # so we just confirm that the expectation of more than one chunk is - # correct and verify that no errors are raised during the download. - chunks_count = self.target_data_length/tuf.settings.CHUNK_SIZE - self.assertGreater(chunks_count, 1) - - download_file = download.safe_download - with download_file(self.url, self.target_data_length, self.fetcher) as temp_fileobj: - temp_fileobj.seek(0) - temp_file_data = temp_fileobj.read().decode('utf-8') - self.assertEqual(self.target_data, temp_file_data) - self.assertEqual(self.target_data_length, len(temp_file_data)) - - # Restore default settings. - tuf.settings.CHUNK_SIZE = default_chunk_size - - - # Test: Incorrect lengths. - def test_download_url_to_tempfileobj_and_lengths(self): - # We do *not* catch - # 'securesystemslib.exceptions.DownloadLengthMismatchError' in the - # following two calls because the file at 'self.url' contains enough bytes - # to satisfy the smaller number of required bytes requested. - # safe_download() and unsafe_download() will only log a warning when the - # server-reported length of the file does not match the - # required_length. 'updater.py' *does* verify the hashes of downloaded - # content. - download.safe_download(self.url, self.target_data_length - 4, self.fetcher).close() - download.unsafe_download(self.url, self.target_data_length - 4, self.fetcher).close() - - # We catch 'tuf.exceptions.DownloadLengthMismatchError' for safe_download() - # because it will not download more bytes than requested (in this case, a - # length greater than the size of the target file).
- self.assertRaises(tuf.exceptions.DownloadLengthMismatchError, - download.safe_download, self.url, self.target_data_length + 1, self.fetcher) - - # Calling unsafe_download() with a mismatched length should not raise an - # exception. - download.unsafe_download(self.url, self.target_data_length + 1, self.fetcher).close() - - - - def test_download_url_to_tempfileobj_and_performance(self): - - """ - # Measuring performance of 'auto_flush = False' vs. 'auto_flush = True' - # in download._download_file() during write. No change was observed. - star_cpu = time.clock() - star_real = time.time() - - temp_fileobj = download_file(self.url, - self.target_data_length) - - end_cpu = time.clock() - end_real = time.time() - - self.assertEqual(self.target_data, temp_fileobj.read()) - self.assertEqual(self.target_data_length, len(temp_fileobj.read())) - temp_fileobj.close() - - print "Performance cpu time: "+str(end_cpu - star_cpu) - print "Performance real time: "+str(end_real - star_real) - - # TODO: [Not urgent] Show the difference by setting write(auto_flush=False) - """ - - - # Test: Incorrect/Unreachable URLs. - def test_download_url_to_tempfileobj_and_urls(self): - - download_file = download.safe_download - unsafe_download_file = download.unsafe_download - - with self.assertRaises(securesystemslib.exceptions.FormatError): - download_file(None, self.target_data_length, self.fetcher) - - url = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + '/' + self.random_string() - with self.assertRaises(tuf.exceptions.FetcherHTTPError) as cm: - download_file(url, self.target_data_length, self.fetcher) - self.assertEqual(cm.exception.status_code, 404) - - url1 = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port + 1) + '/' + self.random_string() - with self.assertRaises(requests.exceptions.ConnectionError): - download_file(url1, self.target_data_length, self.fetcher) - - # Specify an unsupported URI scheme. - url_with_unsupported_uri = self.url.replace('http', 'file') - self.assertRaises(requests.exceptions.InvalidSchema, download_file, url_with_unsupported_uri, - self.target_data_length, self.fetcher) - self.assertRaises(requests.exceptions.InvalidSchema, unsafe_download_file, - url_with_unsupported_uri, self.target_data_length, self.fetcher) - - - - - - ''' - # This test uses sites on the internet, requiring a net connection to succeed. - # Since this is the only such test in TUF, I'm not going to enable it... but - # it's here in case it's useful for diagnosis. - def test_https_validation(self): - """ - Use some known URLs on the net to ensure that TUF download checks SSL - certificates appropriately. - """ - # We should never get as far as the target file download itself, so the - # length we pass to safe_download and unsafe_download shouldn't matter. - irrelevant_length = 10 - - for bad_url in [ - 'https://expired.badssl.com/', # expired certificate - 'https://wrong.host.badssl.com/', ]: # hostname verification fail - - with self.assertRaises(requests.exceptions.SSLError): - download.safe_download(bad_url, irrelevant_length) - - with self.assertRaises(requests.exceptions.SSLError): - download.unsafe_download(bad_url, irrelevant_length) - ''' - - - - - def test_https_connection(self): - """ - Try various HTTPS downloads using trusted and untrusted certificates with - and without the correct hostname listed in the SSL certificate. - """ - # Make a temporary file to be served to the client. 
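The length handling exercised above is deliberately asymmetric: asking for fewer bytes than the server has only logs a warning (hash verification catches truncation later), while safe_download() refuses both to read past required_length and to accept fewer bytes than required. A sketch of such a strict bounded read (illustrative, not the module's actual implementation):

def strict_bounded_read(stream, required_length, chunk_size=8192):
    # Read exactly 'required_length' bytes: never more (endless-data
    # defense), and raise if the server delivers fewer.
    received = b''
    while len(received) < required_length:
        chunk = stream.read(min(chunk_size, required_length - len(received)))
        if not chunk:
            raise ValueError('download length mismatch')
        received += chunk
    return received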
- current_directory = os.getcwd() - target_filepath = self.make_temp_data_file(directory=current_directory) - - with open(target_filepath, 'r') as target_file_object: - target_data_length = len(target_file_object.read()) - - # These cert files provide various test cases: - # good: A valid cert from an older generation of test_download.py tests. - # good2: A valid cert made simultaneous to the bad certs below, with the - # same settings otherwise, tested here in case the difference - # between the way the new bad certs and the old good cert were - # generated turns out to matter at some point. - # bad: An otherwise-valid cert with the wrong hostname. The good certs - # list "localhost", but this lists "notmyhostname". - # expired: An otherwise-valid cert but which is expired (no valid dates - # exist, fwiw: startdate > enddate). - good_cert_fname = os.path.join('ssl_certs', 'ssl_cert.crt') - good2_cert_fname = os.path.join('ssl_certs', 'ssl_cert_2.crt') - bad_cert_fname = os.path.join('ssl_certs', 'ssl_cert_wronghost.crt') - expired_cert_fname = os.path.join('ssl_certs', 'ssl_cert_expired.crt') - - # Launch four HTTPS servers (serve files in the current dir). - # 1: we expect to operate correctly - # 2: also good; uses a slightly different cert (controls for the cert - # generation method used for the next two, in case it comes to matter) - # 3: run with an HTTPS certificate with an unexpected hostname - # 4: run with an HTTPS certificate that is expired - # Be sure to offset from the port used in setUp to avoid collision. - - - good_https_server_handler = utils.TestServerProcess(log=logger, - server='simple_https_server.py', - extra_cmd_args=[good_cert_fname]) - good2_https_server_handler = utils.TestServerProcess(log=logger, - server='simple_https_server.py', - extra_cmd_args=[good2_cert_fname]) - bad_https_server_handler = utils.TestServerProcess(log=logger, - server='simple_https_server.py', - extra_cmd_args=[bad_cert_fname]) - expd_https_server_handler = utils.TestServerProcess(log=logger, - server='simple_https_server.py', - extra_cmd_args=[expired_cert_fname]) - - suffix = '/' + os.path.basename(target_filepath) - good_https_url = 'https://localhost:' \ - + str(good_https_server_handler.port) + suffix - good2_https_url = 'https://localhost:' \ - + str(good2_https_server_handler.port) + suffix - bad_https_url = 'https://localhost:' \ - + str(bad_https_server_handler.port) + suffix - expired_https_url = 'https://localhost:' \ - + str(expd_https_server_handler.port) + suffix - - # Download the target file using an HTTPS connection. - - # Use try-finally solely to ensure that the server processes are killed. - try: - # Trust the certfile that happens to use a different hostname than we - # will expect. - os.environ['REQUESTS_CA_BUNDLE'] = bad_cert_fname - # Clear sessions to ensure that the certificate we just specified is used. - # TODO: Confirm necessity of this session clearing and lay out mechanics. - self.fetcher._sessions = {} - - # Try connecting to the server process with the bad cert while trusting - # the bad cert. Expect failure because even though we trust it, the - # hostname we're connecting to does not match the hostname in the cert. 
- logger.info('Trying HTTPS download of target file: ' + bad_https_url) - with warnings.catch_warnings(): - # We're ok with a slightly fishy localhost cert - warnings.filterwarnings('ignore', - category=urllib3.exceptions.SubjectAltNameWarning) - - with self.assertRaises(requests.exceptions.SSLError): - download.safe_download(bad_https_url, target_data_length, self.fetcher) - with self.assertRaises(requests.exceptions.SSLError): - download.unsafe_download(bad_https_url, target_data_length, self.fetcher) - - # Try connecting to the server processes with the good certs while not - # trusting the good certs (trusting the bad cert instead). Expect failure - # because even though the server's cert file is otherwise OK, we don't - # trust it. - logger.info('Trying HTTPS download of target file: ' + good_https_url) - with self.assertRaises(requests.exceptions.SSLError): - download.safe_download(good_https_url, target_data_length, self.fetcher) - with self.assertRaises(requests.exceptions.SSLError): - download.unsafe_download(good_https_url, target_data_length, self.fetcher) - - logger.info('Trying HTTPS download of target file: ' + good2_https_url) - with self.assertRaises(requests.exceptions.SSLError): - download.safe_download(good2_https_url, target_data_length, self.fetcher) - with self.assertRaises(requests.exceptions.SSLError): - download.unsafe_download(good2_https_url, target_data_length, self.fetcher) - - - # Configure environment to now trust the certfile that is expired. - os.environ['REQUESTS_CA_BUNDLE'] = expired_cert_fname - # Clear sessions to ensure that the certificate we just specified is used. - # TODO: Confirm necessity of this session clearing and lay out mechanics. - self.fetcher._sessions = {} - - # Try connecting to the server process with the expired cert while - # trusting the expired cert. Expect failure because even though we trust - # it, it is expired. - logger.info('Trying HTTPS download of target file: ' + expired_https_url) - with self.assertRaises(requests.exceptions.SSLError): - download.safe_download(expired_https_url, target_data_length, self.fetcher) - with self.assertRaises(requests.exceptions.SSLError): - download.unsafe_download(expired_https_url, target_data_length, self.fetcher) - - - # Try connecting to the server processes with the good certs while - # trusting the appropriate good certs. Expect success. - # TODO: expand testing to switch expected certificates back and forth a - # bit more while clearing / not clearing sessions. - os.environ['REQUESTS_CA_BUNDLE'] = good_cert_fname - # Clear sessions to ensure that the certificate we just specified is used. - # TODO: Confirm necessity of this session clearing and lay out mechanics. - self.fetcher._sessions = {} - logger.info('Trying HTTPS download of target file: ' + good_https_url) - download.safe_download(good_https_url, target_data_length, self.fetcher).close() - download.unsafe_download(good_https_url, target_data_length,self.fetcher).close() - - os.environ['REQUESTS_CA_BUNDLE'] = good2_cert_fname - # Clear sessions to ensure that the certificate we just specified is used. - # TODO: Confirm necessity of this session clearing and lay out mechanics. 
- self.fetcher._sessions = {} - logger.info('Trying HTTPS download of target file: ' + good2_https_url) - download.safe_download(good2_https_url, target_data_length, self.fetcher).close() - download.unsafe_download(good2_https_url, target_data_length, self.fetcher).close() - - finally: - for proc_handler in [ - good_https_server_handler, - good2_https_server_handler, - bad_https_server_handler, - expd_https_server_handler]: - - # Cleans the resources and flush the logged lines (if any). - proc_handler.clean() - - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_endless_data_attack.py b/tests/test_endless_data_attack.py deleted file mode 100755 index 251379bbee..0000000000 --- a/tests/test_endless_data_attack.py +++ /dev/null @@ -1,272 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_endless_data_attack.py - - - Konstantin Andrianov. - - - March 13, 2012. - - April 3, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. Minor edits to the test cases. - -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate an endless data attack, where an updater client tries to download a - target file modified by an attacker to contain a large amount of data (a TUF - client should only download up to the file's expected length). TUF and - non-TUF client scenarios are tested. - - There is no difference between 'updates' and 'target' files. -""" - -import os -import tempfile -import shutil -import json -import logging -import unittest -import sys -from urllib import request - -import tuf -import tuf.formats -import tuf.log -import tuf.client.updater as updater -import tuf.unittest_toolbox as unittest_toolbox -import tuf.roledb - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) - - -class TestEndlessDataAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. 
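The HTTPS scenarios above select trust roots purely through the REQUESTS_CA_BUNDLE environment variable, which requests consults when verifying a server certificate; cached sessions must be dropped so a newly set bundle takes effect. For a single call, requests offers the same control through its verify parameter (port and paths below are illustrative):

import os
import requests

# Process-wide: trust only this CA bundle for subsequent requests.
os.environ['REQUESTS_CA_BUNDLE'] = 'ssl_certs/ssl_cert.crt'
requests.get('https://localhost:8443/file1.txt')

# Per-request: override trust for one call without touching the environment.
requests.get('https://localhost:8443/file1.txt',
             verify='ssl_certs/ssl_cert.crt')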
- unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.client_directory = os.path.join(temporary_repository_root, 'client') - - # Copy the original 'repository' and 'client' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - - # Set the URL prefix required by the 'tuf/client/updater.py' updater. - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repositories_directory' with the temporary client - # directory copied from the original repository files. - tuf.settings.repositories_directory = self.client_directory - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Create the repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Logs stdout and stderr from the server subprocess. - self.server_process_handler.flush_log() - - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - def test_without_tuf(self): - # Verify that a target file replaced with a larger malicious version (to - # simulate an endless data attack) is downloaded by a non-TUF client (i.e., - # a non-TUF client that does not verify hashes, detect mix-and-match attacks, - # etc.) A TUF client, on the other hand, should only download target files - # up to their expected lengths, as explicitly specified in metadata, or - # in 'tuf.settings.py' (when retrieving 'timestamp.json' and 'root.json' unsafely.) - - # Test: Download a valid target file from the repository. - # Ensure the target file to be downloaded has not already been downloaded, - # and generate its file size and digest. The file size and digest are needed - # to verify that the malicious file was indeed downloaded.
- target_path = os.path.join(self.repository_directory, 'targets', 'file1.txt') - client_target_path = os.path.join(self.client_directory, 'file1.txt') - self.assertFalse(os.path.exists(client_target_path)) - length, hashes = securesystemslib.util.get_file_details(target_path) - fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'targets', 'file1.txt') - - # On Windows, the URL portion should not contain backslashes. - request.urlretrieve(url_file.replace('\\', '/'), client_target_path) - - self.assertTrue(os.path.exists(client_target_path)) - length, hashes = securesystemslib.util.get_file_details(client_target_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - self.assertEqual(fileinfo, download_fileinfo) - - # Test: Download a target file that has been modified by an attacker with - # extra data. - with open(target_path, 'a') as file_object: - file_object.write('append large amount of data' * 100000) - large_length, hashes = securesystemslib.util.get_file_details(target_path) - malicious_fileinfo = tuf.formats.make_targets_fileinfo(large_length, hashes) - - # Is the modified file actually larger? - self.assertTrue(large_length > length) - - # On Windows, the URL portion should not contain backslashes. - request.urlretrieve(url_file.replace('\\', '/'), client_target_path) - - length, hashes = securesystemslib.util.get_file_details(client_target_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - # Verify 'download_fileinfo' is unequal to the original trusted version. - self.assertNotEqual(download_fileinfo, fileinfo) - - # Verify 'download_fileinfo' is equal to the malicious version. - self.assertEqual(download_fileinfo, malicious_fileinfo) - - - - def test_with_tuf(self): - # Verify that a target file (on the remote repository) modified by an - # attacker, to contain a large amount of extra data, is not downloaded by - # the TUF client. First test that the valid target file is successfully - # downloaded. - file1_fileinfo = self.repository_updater.get_one_valid_targetinfo('file1.txt') - destination = os.path.join(self.client_directory) - self.repository_updater.download_target(file1_fileinfo, destination) - client_target_path = os.path.join(destination, 'file1.txt') - self.assertTrue(os.path.exists(client_target_path)) - - # Verify the client's downloaded file matches the repository's. - target_path = os.path.join(self.repository_directory, 'targets', 'file1.txt') - length, hashes = securesystemslib.util.get_file_details(client_target_path) - fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - length, hashes = securesystemslib.util.get_file_details(client_target_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - self.assertEqual(fileinfo, download_fileinfo) - - # Modify 'file1.txt' and confirm that the TUF client only downloads up to - # the expected file length. - with open(target_path, 'a') as file_object: - file_object.write('append large amount of data' * 10000) - - # Is the modified file actually larger? - large_length, hashes = securesystemslib.util.get_file_details(target_path) - self.assertTrue(large_length > length) - - os.remove(client_target_path) - self.repository_updater.download_target(file1_fileinfo, destination) - - # A large amount of data has been appended to the original content. 
The - # extra data appended should be discarded by the client, so the downloaded - # file size and hash should not have changed. - length, hashes = securesystemslib.util.get_file_details(client_target_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - self.assertEqual(fileinfo, download_fileinfo) - - # Test that the TUF client does not download large metadata files, as well. - timestamp_path = os.path.join(self.repository_directory, 'metadata', - 'timestamp.json') - - original_length, hashes = securesystemslib.util.get_file_details(timestamp_path) - - with open(timestamp_path, 'r+') as file_object: - timestamp_content = securesystemslib.util.load_json_file(timestamp_path) - large_data = 'LargeTimestamp' * 10000 - timestamp_content['signed']['_type'] = large_data - json.dump(timestamp_content, file_object, indent=1, sort_keys=True) - - - modified_length, hashes = securesystemslib.util.get_file_details(timestamp_path) - self.assertTrue(modified_length > original_length) - - # Does the TUF client download the upper limit of an unsafely fetched - # 'timestamp.json'? 'timestamp.json' must not be greater than - # 'tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH'. - try: - self.repository_updater.refresh() - - except tuf.exceptions.NoWorkingMirrorError as exception: - for mirror_url, mirror_error in exception.mirror_errors.items(): - self.assertTrue(isinstance(mirror_error, securesystemslib.exceptions.Error)) - - else: - self.fail('TUF did not prevent an endless data attack.') - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_examples.py b/tests/test_examples.py new file mode 100644 index 0000000000..6014334fca --- /dev/null +++ b/tests/test_examples.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python +# Copyright 2020, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 +""" Unit tests for 'examples' scripts. + +""" +import glob +import os +import shutil +import sys +import tempfile +import unittest +from pathlib import Path +from typing import ClassVar, List + +from tests import utils + + +class TestRepoExamples(unittest.TestCase): + """Unit test class for 'repo_example' scripts. + + Provides a '_run_example_script' method to run (exec) a script located in + the 'repo_example' directory. + + """ + + repo_examples_dir: ClassVar[Path] + + @classmethod + def setUpClass(cls) -> None: + """Locate and cache 'repo_example' dir.""" + base = Path(__file__).resolve().parents[1] + cls.repo_examples_dir = base / "examples" / "repo_example" + + def setUp(self) -> None: + """Create and change into test dir. 
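The timestamp check above relies on a static ceiling: metadata whose authentic length is not yet known is fetched with a hard upper bound, and anything longer is treated as an attack. Roughly (the settings constant is real; the surrounding code and callable are a sketch):

import tuf.settings

def fetch_timestamp(fetch_up_to):
    # No trusted length exists for 'timestamp.json' yet, so enforce the
    # fixed cap from tuf.settings instead.
    limit = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH
    data = fetch_up_to('timestamp.json', limit + 1)  # illustrative callable
    if len(data) > limit:
        raise ValueError('unsafely fetched timestamp.json exceeds its cap')
    return data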
+ NOTE: Test scripts are expected to create dirs/files in new CWD.""" + self.original_cwd = os.getcwd() + self.base_test_dir = os.path.realpath(tempfile.mkdtemp()) + os.chdir(self.base_test_dir) + + def tearDown(self) -> None: + """Change back to original dir and remove test dir, which may contain + dirs/files the test created at test-time CWD.""" + os.chdir(self.original_cwd) + shutil.rmtree(self.base_test_dir) + + def _run_script_and_assert_files( + self, script_name: str, filenames_created: List[str] + ) -> None: + """Run script in 'repo_example' dir and assert that it created the + files corresponding to the passed filenames inside a 'tmp*' test dir at + CWD.""" + script_path = str(self.repo_examples_dir / script_name) + with open(script_path, "rb") as f: + # pylint: disable=exec-used + exec( + compile(f.read(), script_path, "exec"), + {"__file__": script_path}, + ) + + test_dirs = glob.glob("tmp*") + self.assertTrue( + len(test_dirs) == 1, f"expected 1 'tmp*' test dir, got {test_dirs}" + ) + + test_dir = test_dirs.pop() + for name in filenames_created: + metadata_path = Path(test_dir) / f"{name}" + self.assertTrue( + metadata_path.exists(), f"missing '{metadata_path}' file" + ) + + def test_basic_repo(self) -> None: + """Run 'basic_repo.py' and assert creation of metadata files.""" + self._run_script_and_assert_files( + "basic_repo.py", + [ + "1.python-scripts.json", + "1.root.json", + "1.snapshot.json", + "1.targets.json", + "2.root.json", + "2.snapshot.json", + "2.targets.json", + "timestamp.json", + ], + ) + + def test_hashed_bin_delegation(self) -> None: + """Run 'hashed_bin_delegation.py' and assert creation of metadata files.""" + self._run_script_and_assert_files( + "hashed_bin_delegation.py", + [ + "bins.json", + "00-07.json", + "08-0f.json", + "10-17.json", + "18-1f.json", + "20-27.json", + "28-2f.json", + "30-37.json", + "38-3f.json", + "40-47.json", + "48-4f.json", + "50-57.json", + "58-5f.json", + "60-67.json", + "68-6f.json", + "70-77.json", + "78-7f.json", + "80-87.json", + "88-8f.json", + "90-97.json", + "98-9f.json", + "a0-a7.json", + "a8-af.json", + "b0-b7.json", + "b8-bf.json", + "c0-c7.json", + "c8-cf.json", + "d0-d7.json", + "d8-df.json", + "e0-e7.json", + "e8-ef.json", + "f0-f7.json", + "f8-ff.json", + ], + ) + + +if __name__ == "__main__": + utils.configure_test_logging(sys.argv) + unittest.main() diff --git a/tests/test_extraneous_dependencies_attack.py b/tests/test_extraneous_dependencies_attack.py deleted file mode 100755 index 7e2bff7386..0000000000 --- a/tests/test_extraneous_dependencies_attack.py +++ /dev/null @@ -1,214 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2013 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_extraneous_dependencies_attack.py - - - Zane Fisher. - - - August 19, 2013. - - April 6, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. Modify the previous scenario - simulated for the mix-and-match attack. The metadata that specified the - dependencies of a project modified (previously a text file.) - -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate an extraneous dependencies attack. The client attempts to download - a file, which lists all the target dependencies, with one legitimate - dependency, and one extraneous dependency. 
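The 32 bin names asserted in test_hashed_bin_delegation above ('00-07' through 'f8-ff') partition the 256 possible first-byte hash prefixes into bins of eight. Which bin serves a given target can be computed from the hash of the target's path; a sketch of the arithmetic (helper name is illustrative):

import hashlib

def bin_for_target(target_path, bin_size=8):
    # Hash the target *path* (not its content) and bucket the first byte.
    digest = hashlib.sha256(target_path.encode()).hexdigest()
    prefix = int(digest[:2], 16)        # 0..255
    low = prefix - (prefix % bin_size)  # start of the eight-wide bin
    return f'{low:02x}-{low + bin_size - 1:02x}'

# e.g. bin_for_target('targets/file1.txt') yields one of '00-07' ... 'f8-ff'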
A client should not download a - target dependency even if it is found on the repository. Valid targets are - listed and verified by TUF metadata, such as 'targets.txt'. - - There is no difference between 'updates' and 'target' files. -""" - -import os -import tempfile -import shutil -import json -import logging -import unittest -import sys - -import tuf.formats -import tuf.log -import tuf.client.updater as updater -import tuf.roledb -import tuf.keydb -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) - - - -class TestExtraneousDependenciesAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flushes the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated by all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from a custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client') - original_keystore = os.path.join(original_repository_files, 'keystore') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.client_directory = os.path.join(temporary_repository_root, 'client') - self.keystore_directory = os.path.join(temporary_repository_root, 'keystore') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_keystore, self.keystore_directory) - - # Set the URL prefix required by the 'tuf/client/updater.py' updater.
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repositories_directory' with the temporary client - # directory copied from the original repository files. - tuf.settings.repositories_directory = self.client_directory - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Create the repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Logs stdout and stderr from the server subprocess. - self.server_process_handler.flush_log() - - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - def test_with_tuf(self): - # An attacker tries to trick a client into installing an extraneous target - # file (a valid file on the repository, in this case) by listing it in the - # project's metadata file. For the purposes of test_with_tuf(), - # 'role1.json' is treated as the metadata file that indicates all - # the files needed to install/update the 'role1' project. The attacker - # simply adds the extraneous target file to 'role1.json', which the TUF - # client should reject as improperly signed. - role1_filepath = os.path.join(self.repository_directory, 'metadata', - 'role1.json') - file1_filepath = os.path.join(self.repository_directory, 'targets', - 'file1.txt') - length, hashes = securesystemslib.util.get_file_details(file1_filepath) - - role1_metadata = securesystemslib.util.load_json_file(role1_filepath) - role1_metadata['signed']['targets']['/file2.txt'] = {} - role1_metadata['signed']['targets']['/file2.txt']['hashes'] = hashes - role1_metadata['signed']['targets']['/file2.txt']['length'] = length - - tuf.formats.check_signable_object_format(role1_metadata) - - with open(role1_filepath, 'wt') as file_object: - json.dump(role1_metadata, file_object, indent=1, sort_keys=True) - - # Uninstall the metadata of the top-level roles so that the client can - # download and detect the invalid 'role1.json'. - os.remove(os.path.join(self.client_directory, self.repository_name, - 'metadata', 'current', 'snapshot.json')) - os.remove(os.path.join(self.client_directory, self.repository_name, - 'metadata', 'current', 'targets.json')) - os.remove(os.path.join(self.client_directory, self.repository_name, - 'metadata', 'current', 'timestamp.json')) - os.remove(os.path.join(self.client_directory, self.repository_name, - 'metadata', 'current', 'role1.json')) - - # Verify that the TUF client rejects the invalid metadata and refuses to - # continue the update process. - self.repository_updater.refresh() - - try: - with utils.ignore_deprecation_warnings('tuf.client.updater'): - self.repository_updater.targets_of_role('role1') - - # Verify that the specific 'securesystemslib.exceptions.BadSignatureError' - # exception is raised by each mirror.
- except tuf.exceptions.NoWorkingMirrorError as exception: - for mirror_url, mirror_error in exception.mirror_errors.items(): - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'metadata', 'role1.json') - - # Verify that 'role1.json' is the culprit. - self.assertEqual(url_file.replace('\\', '/'), mirror_url) - self.assertTrue(isinstance(mirror_error, securesystemslib.exceptions.BadSignatureError)) - - else: - self.fail('TUF did not prevent an extraneous dependencies attack.') - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_fetcher.py b/tests/test_fetcher.py deleted file mode 100644 index bf94f252d8..0000000000 --- a/tests/test_fetcher.py +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2021, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -"""Unit test for RequestsFetcher. -""" - -import logging -import os -import io -import sys -import unittest -import tempfile -import math - -import tuf -import tuf.exceptions -import tuf.requests_fetcher -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -logger = logging.getLogger(__name__) - - -class TestFetcher(unittest_toolbox.Modified_TestCase): - def setUp(self): - """ - Create a temporary file and launch a simple server in the - current working directory. - """ - - unittest_toolbox.Modified_TestCase.setUp(self) - - # Making a temporary file. - current_dir = os.getcwd() - target_filepath = self.make_temp_data_file(directory=current_dir) - self.target_fileobj = open(target_filepath, 'r') - self.file_contents = self.target_fileobj.read() - self.file_length = len(self.file_contents) - - # Launch a SimpleHTTPServer (serves files in the current dir). - self.server_process_handler = utils.TestServerProcess(log=logger) - - rel_target_filepath = os.path.basename(target_filepath) - self.url = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + '/' + rel_target_filepath - - # Create a temporary file where the target file chunks are written - # during fetching - self.temp_file = tempfile.TemporaryFile() - self.fetcher = tuf.requests_fetcher.RequestsFetcher() - - - # Stop server process and perform clean up. - def tearDown(self): - # Cleans the resources and flush the logged lines (if any). - self.server_process_handler.clean() - - self.target_fileobj.close() - self.temp_file.close() - - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - # Test: Normal case. 
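The rejection exercised above needs no knowledge of 'file2.txt' itself: editing the signed portion of 'role1.json' changes its canonical bytes, so no existing signature verifies any more. With securesystemslib the check looks roughly like this (key and metadata dicts assumed to be loaded already):

import securesystemslib.formats
import securesystemslib.keys

def signed_portion_is_intact(metadata, public_key):
    # Any edit to metadata['signed'] -- such as an injected target entry --
    # changes its canonical bytes and invalidates every existing signature.
    signed_bytes = securesystemslib.formats.encode_canonical(
        metadata['signed']).encode('utf-8')
    return any(
        securesystemslib.keys.verify_signature(public_key, sig, signed_bytes)
        for sig in metadata['signatures'])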
- def test_fetch(self): - for chunk in self.fetcher.fetch(self.url, self.file_length): - self.temp_file.write(chunk) - - self.temp_file.seek(0) - temp_file_data = self.temp_file.read().decode('utf-8') - self.assertEqual(self.file_contents, temp_file_data) - - # Test if fetcher downloads file up to a required length - def test_fetch_restricted_length(self): - for chunk in self.fetcher.fetch(self.url, self.file_length-4): - self.temp_file.write(chunk) - - self.temp_file.seek(0, io.SEEK_END) - self.assertEqual(self.temp_file.tell(), self.file_length-4) - - - # Test that fetcher does not download more than actual file length - def test_fetch_upper_length(self): - for chunk in self.fetcher.fetch(self.url, self.file_length+4): - self.temp_file.write(chunk) - - self.temp_file.seek(0, io.SEEK_END) - self.assertEqual(self.temp_file.tell(), self.file_length) - - - # Test incorrect URL parsing - def test_url_parsing(self): - with self.assertRaises(tuf.exceptions.URLParsingError): - self.fetcher.fetch(self.random_string(), self.file_length) - - - # Test: Normal case with url data downloaded in more than one chunk - def test_fetch_in_chunks(self): - # Set smaller chunk size to ensure that the file will be downloaded - # in more than one chunk - default_chunk_size = tuf.settings.CHUNK_SIZE - tuf.settings.CHUNK_SIZE = 4 - - # expected_chunks_count: 3 - expected_chunks_count = math.ceil(self.file_length/tuf.settings.CHUNK_SIZE) - self.assertEqual(expected_chunks_count, 3) - - chunks_count = 0 - for chunk in self.fetcher.fetch(self.url, self.file_length): - self.temp_file.write(chunk) - chunks_count+=1 - - self.temp_file.seek(0) - temp_file_data = self.temp_file.read().decode('utf-8') - self.assertEqual(self.file_contents, temp_file_data) - # Check that we calculate chunks as expected - self.assertEqual(chunks_count, expected_chunks_count) - - # Restore default settings - tuf.settings.CHUNK_SIZE = default_chunk_size - - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_fetcher_ng.py b/tests/test_fetcher_ng.py index b5714452af..4c87ed2b00 100644 --- a/tests/test_fetcher_ng.py +++ b/tests/test_fetcher_ng.py @@ -8,75 +8,76 @@ import io import logging +import math import os import sys -import unittest import tempfile -import math +import unittest +from typing import Any, ClassVar, Iterator +from unittest.mock import Mock, patch + +import requests from tests import utils -from tuf import exceptions, unittest_toolbox +from tuf.api import exceptions from tuf.ngclient._internal.requests_fetcher import RequestsFetcher logger = logging.getLogger(__name__) -class TestFetcher(unittest_toolbox.Modified_TestCase): +class TestFetcher(unittest.TestCase): + """Test RequestsFetcher class.""" - @classmethod - def setUpClass(cls): - # Launch a SimpleHTTPServer (serves files in the current dir). - cls.server_process_handler = utils.TestServerProcess(log=logger) + server_process_handler: ClassVar[utils.TestServerProcess] @classmethod - def tearDownClass(cls): - # Stop server process and perform clean up. - cls.server_process_handler.clean() - - def setUp(self): + def setUpClass(cls) -> None: """ Create a temporary file and launch a simple server in the current working directory. 
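The chunk arithmetic used in these fetcher tests generalizes: for a payload of n bytes and chunk size c, a fetcher yields ceil(n / c) chunks, the last one possibly short. A self-contained illustration:

import math

def chunked(data, chunk_size):
    # Yield successive fixed-size slices; the final slice may be shorter.
    for start in range(0, len(data), chunk_size):
        yield data[start:start + chunk_size]

data = b'0123456789'                    # n = 10 bytes
chunks = list(chunked(data, 4))         # [b'0123', b'4567', b'89']
assert len(chunks) == math.ceil(len(data) / 4) == 3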
""" + cls.server_process_handler = utils.TestServerProcess(log=logger) - unittest_toolbox.Modified_TestCase.setUp(self) + cls.file_contents = b"junk data" + cls.file_length = len(cls.file_contents) + with tempfile.NamedTemporaryFile( + dir=os.getcwd(), delete=False + ) as cls.target_file: + cls.target_file.write(cls.file_contents) - # Making a temporary data file. - current_dir = os.getcwd() - target_filepath = self.make_temp_data_file(directory=current_dir) + cls.url_prefix = ( + f"http://{utils.TEST_HOST_ADDRESS}:" + f"{str(cls.server_process_handler.port)}" + ) + target_filename = os.path.basename(cls.target_file.name) + cls.url = f"{cls.url_prefix}/{target_filename}" - self.target_fileobj = open(target_filepath, "r") - self.file_contents = self.target_fileobj.read() - self.file_length = len(self.file_contents) - self.rel_target_filepath = os.path.basename(target_filepath) - self.url = f"http://{utils.TEST_HOST_ADDRESS}:{str(self.server_process_handler.port)}/{self.rel_target_filepath}" + @classmethod + def tearDownClass(cls) -> None: + # Stop server process and perform clean up. + cls.server_process_handler.clean() + os.remove(cls.target_file.name) + def setUp(self) -> None: # Instantiate a concrete instance of FetcherInterface self.fetcher = RequestsFetcher() - def tearDown(self): - self.target_fileobj.close() - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - # Simple fetch. - def test_fetch(self): + def test_fetch(self) -> None: with tempfile.TemporaryFile() as temp_file: for chunk in self.fetcher.fetch(self.url): temp_file.write(chunk) temp_file.seek(0) - self.assertEqual( - self.file_contents, temp_file.read().decode("utf-8") - ) + self.assertEqual(self.file_contents, temp_file.read()) # URL data downloaded in more than one chunk - def test_fetch_in_chunks(self): + def test_fetch_in_chunks(self) -> None: # Set a smaller chunk size to ensure that the file will be downloaded # in more than one chunk self.fetcher.chunk_size = 4 - # expected_chunks_count: 3 + # expected_chunks_count: 3 (depends on length of self.file_length) expected_chunks_count = math.ceil( self.file_length / self.fetcher.chunk_size ) @@ -89,41 +90,66 @@ def test_fetch_in_chunks(self): chunks_count += 1 temp_file.seek(0) - self.assertEqual( - self.file_contents, temp_file.read().decode("utf-8") - ) + self.assertEqual(self.file_contents, temp_file.read()) # Check that we calculate chunks as expected self.assertEqual(chunks_count, expected_chunks_count) # Incorrect URL parsing - def test_url_parsing(self): - with self.assertRaises(exceptions.URLParsingError): - self.fetcher.fetch(self.random_string()) + def test_url_parsing(self) -> None: + with self.assertRaises(exceptions.DownloadError): + self.fetcher.fetch("missing-scheme-and-hostname-in-url") # File not found error - def test_http_error(self): - with self.assertRaises(exceptions.FetcherHTTPError) as cm: - self.url = f"http://{utils.TEST_HOST_ADDRESS}:{str(self.server_process_handler.port)}/non-existing-path" + def test_http_error(self) -> None: + with self.assertRaises(exceptions.DownloadHTTPError) as cm: + self.url = f"{self.url_prefix}/non-existing-path" self.fetcher.fetch(self.url) self.assertEqual(cm.exception.status_code, 404) + # Response read timeout error + @patch.object(requests.Session, "get") + def test_response_read_timeout(self, mock_session_get: Any) -> None: + mock_response = Mock() + attr = { + "iter_content.side_effect": requests.exceptions.ConnectionError( + "Simulated timeout" + ) + } + 
mock_response.configure_mock(**attr) + mock_session_get.return_value = mock_response + + with self.assertRaises(exceptions.SlowRetrievalError): + next(self.fetcher.fetch(self.url)) + mock_response.iter_content.assert_called_once() + + # Read/connect session timeout error + @patch.object( + requests.Session, + "get", + side_effect=requests.exceptions.Timeout("Simulated timeout"), + ) + def test_session_get_timeout(self, mock_session_get: Any) -> None: + with self.assertRaises(exceptions.SlowRetrievalError): + self.fetcher.fetch(self.url) + mock_session_get.assert_called_once() + # Simple bytes download - def test_download_bytes(self): + def test_download_bytes(self) -> None: data = self.fetcher.download_bytes(self.url, self.file_length) - self.assertEqual(self.file_contents, data.decode("utf-8")) + self.assertEqual(self.file_contents, data) # Download file smaller than required max_length - def test_download_bytes_upper_length(self): + def test_download_bytes_upper_length(self) -> None: data = self.fetcher.download_bytes(self.url, self.file_length + 4) - self.assertEqual(self.file_contents, data.decode("utf-8")) + self.assertEqual(self.file_contents, data) # Download a file bigger than expected - def test_download_bytes_length_mismatch(self): + def test_download_bytes_length_mismatch(self) -> None: with self.assertRaises(exceptions.DownloadLengthMismatchError): self.fetcher.download_bytes(self.url, self.file_length - 4) # Simple file download - def test_download_file(self): + def test_download_file(self) -> None: with self.fetcher.download_file( self.url, self.file_length ) as temp_file: @@ -131,7 +157,7 @@ def test_download_file(self): self.assertEqual(self.file_length, temp_file.tell()) # Download file smaller than required max_length - def test_download_file_upper_length(self): + def test_download_file_upper_length(self) -> None: with self.fetcher.download_file( self.url, self.file_length + 4 ) as temp_file: @@ -139,8 +165,10 @@ def test_download_file_upper_length(self): self.assertEqual(self.file_length, temp_file.tell()) # Download a file bigger than expected - def test_download_file_length_mismatch(self): + def test_download_file_length_mismatch(self) -> Iterator[Any]: with self.assertRaises(exceptions.DownloadLengthMismatchError): + # Force download_file to execute and raise the error since it is a + # context manager and returns Iterator[IO] yield self.fetcher.download_file(self.url, self.file_length - 4) diff --git a/tests/test_formats.py b/tests/test_formats.py deleted file mode 100755 index 96da912b12..0000000000 --- a/tests/test_formats.py +++ /dev/null @@ -1,971 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_formats.py - - - Vladimir Diaz - - - October 2012. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'formats.py' -""" - -import unittest -import datetime -import sys -import os - -import tuf -import tuf.formats - -from tests import utils - -import securesystemslib -import securesystemslib.util - - -class TestFormats(unittest.TestCase): - def setUp(self): - pass - - - - def tearDown(self): - pass - - - - def test_schemas(self): - # Test conditions for valid schemas. 
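The two timeout tests above rely on mocking requests at the session level rather than on a real slow server. A self-contained sketch of that pattern (no network involved; per the tests, RequestsFetcher maps these requests exceptions to SlowRetrievalError):

    from unittest.mock import patch

    import requests

    with patch.object(
        requests.Session,
        "get",
        side_effect=requests.exceptions.Timeout("Simulated timeout"),
    ) as mock_get:
        try:
            requests.Session().get("http://localhost:8001/testfile.txt")
        except requests.exceptions.Timeout:
            pass  # RequestsFetcher would raise SlowRetrievalError here.
        mock_get.assert_called_once()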
- valid_schemas = { - 'ISO8601_DATETIME_SCHEMA': (securesystemslib.formats.ISO8601_DATETIME_SCHEMA, - '1985-10-21T13:20:00Z'), - - 'UNIX_TIMESTAMP_SCHEMA': (securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA, 499137720), - - 'HASH_SCHEMA': (securesystemslib.formats.HASH_SCHEMA, 'A4582BCF323BCEF'), - - 'HASHDICT_SCHEMA': (securesystemslib.formats.HASHDICT_SCHEMA, - {'sha256': 'A4582BCF323BCEF'}), - - 'HEX_SCHEMA': (securesystemslib.formats.HEX_SCHEMA, 'A4582BCF323BCEF'), - - 'KEYID_SCHEMA': (securesystemslib.formats.KEYID_SCHEMA, '123456789abcdef'), - - 'KEYIDS_SCHEMA': (securesystemslib.formats.KEYIDS_SCHEMA, - ['123456789abcdef', '123456789abcdef']), - - 'SCHEME_SCHEMA': (securesystemslib.formats.SCHEME_SCHEMA, 'rsassa-pss-sha256'), - - 'RELPATH_SCHEMA': (tuf.formats.RELPATH_SCHEMA, 'metadata/root/'), - - 'RELPATHS_SCHEMA': (tuf.formats.RELPATHS_SCHEMA, - ['targets/role1/', 'targets/role2/']), - - 'PATH_SCHEMA': (securesystemslib.formats.PATH_SCHEMA, '/home/someuser/'), - - 'PATHS_SCHEMA': (securesystemslib.formats.PATHS_SCHEMA, - ['/home/McFly/', '/home/Tannen/']), - - 'URL_SCHEMA': (securesystemslib.formats.URL_SCHEMA, - 'https://www.updateframework.com/'), - - 'VERSION_SCHEMA': (tuf.formats.VERSION_SCHEMA, - {'major': 1, 'minor': 0, 'fix': 8}), - - 'LENGTH_SCHEMA': (tuf.formats.LENGTH_SCHEMA, 8), - - 'NAME_SCHEMA': (securesystemslib.formats.NAME_SCHEMA, 'Marty McFly'), - - 'BOOLEAN_SCHEMA': (securesystemslib.formats.BOOLEAN_SCHEMA, True), - - 'THRESHOLD_SCHEMA': (tuf.formats.THRESHOLD_SCHEMA, 1), - - 'ROLENAME_SCHEMA': (tuf.formats.ROLENAME_SCHEMA, 'Root'), - - 'RSAKEYBITS_SCHEMA': (securesystemslib.formats.RSAKEYBITS_SCHEMA, 4096), - - 'PASSWORD_SCHEMA': (securesystemslib.formats.PASSWORD_SCHEMA, 'secret'), - - 'PASSWORDS_SCHEMA': (securesystemslib.formats.PASSWORDS_SCHEMA, ['pass1', 'pass2']), - - 'KEYVAL_SCHEMA': (securesystemslib.formats.KEYVAL_SCHEMA, - {'public': 'pubkey', 'private': 'privkey'}), - - 'KEY_SCHEMA': (securesystemslib.formats.KEY_SCHEMA, - {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}), - - 'RSAKEY_SCHEMA': (securesystemslib.formats.RSAKEY_SCHEMA, - {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyid': '123456789abcdef', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}), - - 'TARGETS_FILEINFO_SCHEMA': (tuf.formats.TARGETS_FILEINFO_SCHEMA, - {'length': 1024, - 'hashes': {'sha256': 'A4582BCF323BCEF'}, - 'custom': {'type': 'paintjob'}}), - - 'METADATA_FILEINFO_SCHEMA': (tuf.formats.METADATA_FILEINFO_SCHEMA, - {'length': 1024, - 'hashes': {'sha256': 'A4582BCF323BCEF'}, - 'version': 1}), - - 'FILEDICT_SCHEMA': (tuf.formats.FILEDICT_SCHEMA, - {'metadata/root.json': {'length': 1024, - 'hashes': {'sha256': 'ABCD123'}, - 'custom': {'type': 'metadata'}}}), - - 'TARGETINFO_SCHEMA': (tuf.formats.TARGETINFO_SCHEMA, - {'filepath': 'targets/target1.gif', - 'fileinfo': {'length': 1024, - 'hashes': {'sha256': 'ABCD123'}, - 'custom': {'type': 'target'}}}), - - 'TARGETINFOS_SCHEMA': (tuf.formats.TARGETINFOS_SCHEMA, - [{'filepath': 'targets/target1.gif', - 'fileinfo': {'length': 1024, - 'hashes': {'sha256': 'ABCD123'}, - 'custom': {'type': 'target'}}}]), - - 'SIGNATURE_SCHEMA': (securesystemslib.formats.SIGNATURE_SCHEMA, - {'keyid': '123abc', - 'sig': 'A4582BCF323BCEF'}), - - 'SIGNATURESTATUS_SCHEMA': (tuf.formats.SIGNATURESTATUS_SCHEMA, - {'threshold': 1, - 'good_sigs': ['123abc'], - 'bad_sigs': ['123abc'], - 'unknown_sigs': ['123abc'], - 'untrusted_sigs': ['123abc'], - 'unknown_signing_schemes': 
['123abc']}), - - 'SIGNABLE_SCHEMA': (tuf.formats.SIGNABLE_SCHEMA, - {'signed': 'signer', - 'signatures': [{'keyid': '123abc', - 'sig': 'A4582BCF323BCEF'}]}), - - 'KEYDICT_SCHEMA': (securesystemslib.formats.KEYDICT_SCHEMA, - {'123abc': {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}}), - - 'KEYDB_SCHEMA': (tuf.formats.KEYDB_SCHEMA, - {'123abc': {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyid': '123456789abcdef', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}}), - - 'SCPCONFIG_SCHEMA': (tuf.formats.SCPCONFIG_SCHEMA, - {'general': {'transfer_module': 'scp', - 'metadata_path': '/path/meta.json', - 'targets_directory': '/targets'}, - 'scp': {'host': 'http://localhost:8001', - 'user': 'McFly', - 'identity_file': '/home/.ssh/file', - 'remote_directory': '/home/McFly'}}), - - 'RECEIVECONFIG_SCHEMA': (tuf.formats.RECEIVECONFIG_SCHEMA, - {'general': {'transfer_module': 'scp', - 'pushroots': ['/pushes'], - 'repository_directory': '/repo', - 'metadata_directory': '/repo/meta', - 'targets_directory': '/repo/targets', - 'backup_directory': '/repo/backup'}}), - - 'ROLE_SCHEMA': (tuf.formats.ROLE_SCHEMA, - {'keyids': ['123abc'], - 'threshold': 1, - 'paths': ['path1/', 'path2']}), - - 'ROLEDICT_SCHEMA': (tuf.formats.ROLEDICT_SCHEMA, - {'root': {'keyids': ['123abc'], - 'threshold': 1, - 'paths': ['path1/', 'path2']}}), - - 'ROOT_SCHEMA': (tuf.formats.ROOT_SCHEMA, - {'_type': 'root', - 'spec_version': '1.0.0', - 'version': 8, - 'consistent_snapshot': False, - 'expires': '1985-10-21T13:20:00Z', - 'keys': {'123abc': {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}}, - 'roles': {'root': {'keyids': ['123abc'], - 'threshold': 1, - 'paths': ['path1/', 'path2']}}}), - - 'TARGETS_SCHEMA': (tuf.formats.TARGETS_SCHEMA, - {'_type': 'targets', - 'spec_version': '1.0.0', - 'version': 8, - 'expires': '1985-10-21T13:20:00Z', - 'targets': {'metadata/targets.json': {'length': 1024, - 'hashes': {'sha256': 'ABCD123'}, - 'custom': {'type': 'metadata'}}}, - 'delegations': {'keys': {'123abc': {'keytype':'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}}, - 'roles': [{'name': 'root', 'keyids': ['123abc'], - 'threshold': 1, - 'paths': ['path1/', 'path2']}]}}), - - 'SNAPSHOT_SCHEMA': (tuf.formats.SNAPSHOT_SCHEMA, - {'_type': 'snapshot', - 'spec_version': '1.0.0', - 'version': 8, - 'expires': '1985-10-21T13:20:00Z', - 'meta': {'snapshot.json': {'version': 1024}}}), - - 'TIMESTAMP_SCHEMA': (tuf.formats.TIMESTAMP_SCHEMA, - {'_type': 'timestamp', - 'spec_version': '1.0.0', - 'version': 8, - 'expires': '1985-10-21T13:20:00Z', - 'meta': {'metadattimestamp.json': {'length': 1024, - 'hashes': {'sha256': 'AB1245'}, - 'version': 1}}}), - - 'MIRROR_SCHEMA': (tuf.formats.MIRROR_SCHEMA, - {'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata/', - 'targets_path': 'targets/', - 'confined_target_dirs': ['path1/', 'path2/'], - 'custom': {'type': 'mirror'}}), - - 'MIRROR_SCHEMA_NO_CONFINED_TARGETS': (tuf.formats.MIRROR_SCHEMA, - {'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata/', - 'targets_path': 'targets/', - 'custom': {'type': 'mirror'}}), - - 'MIRRORDICT_SCHEMA': (tuf.formats.MIRRORDICT_SCHEMA, - {'mirror1': {'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata/', - 'targets_path': 'targets/', - 'confined_target_dirs': ['path1/', 'path2/'], - 'custom': {'type': 'mirror'}}}), - - 'MIRRORLIST_SCHEMA': 
(tuf.formats.MIRRORLIST_SCHEMA, - {'_type': 'mirrors', - 'version': 8, - 'spec_version': '1.0.0', - 'expires': '1985-10-21T13:20:00Z', - 'mirrors': [{'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata/', - 'targets_path': 'targets/', - 'confined_target_dirs': ['path1/', 'path2/'], - 'custom': {'type': 'mirror'}}]})} - - # Iterate 'valid_schemas', ensuring each 'valid_schema' correctly matches - # its respective 'schema_type'. - for schema_name, (schema_type, valid_schema) in valid_schemas.items(): - if not schema_type.matches(valid_schema): - print('bad schema: ' + repr(valid_schema)) - self.assertEqual(True, schema_type.matches(valid_schema)) - - # Test conditions for invalid schemas. - # Set the 'valid_schema' of 'valid_schemas' to an invalid - # value and test that it does not match 'schema_type'. - for schema_name, (schema_type, valid_schema) in valid_schemas.items(): - invalid_schema = 0xBAD - if isinstance(schema_type, securesystemslib.schema.Integer): - invalid_schema = 'BAD' - self.assertEqual(False, schema_type.matches(invalid_schema)) - - - def test_specfication_version_schema(self): - """Test valid and invalid SPECIFICATION_VERSION_SCHEMAs, using examples - from 'regex101.com/r/Ly7O1x/3/', referenced by - 'semver.org/spec/v2.0.0.html'. """ - valid_schemas = [ - "0.0.4", - "1.2.3", - "10.20.30", - "1.1.2-prerelease+meta", - "1.1.2+meta", - "1.1.2+meta-valid", - "1.0.0-alpha", - "1.0.0-beta", - "1.0.0-alpha.beta", - "1.0.0-alpha.beta.1", - "1.0.0-alpha.1", - "1.0.0-alpha0.valid", - "1.0.0-alpha.0valid", - "1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay", - "1.0.0-rc.1+build.1", - "2.0.0-rc.1+build.123", - "1.2.3-beta", - "10.2.3-DEV-SNAPSHOT", - "1.2.3-SNAPSHOT-123", - "1.0.0", - "2.0.0", - "1.1.7", - "2.0.0+build.1848", - "2.0.1-alpha.1227", - "1.0.0-alpha+beta", - "1.2.3----RC-SNAPSHOT.12.9.1--.12+788", - "1.2.3----R-S.12.9.1--.12+meta", - "1.2.3----RC-SNAPSHOT.12.9.1--.12", - "1.0.0+0.build.1-rc.10000aaa-kk-0.1", - "99999999999999999999999.999999999999999999.99999999999999999", - "1.0.0-0A.is.legal"] - - for valid_schema in valid_schemas: - self.assertTrue( - tuf.formats.SPECIFICATION_VERSION_SCHEMA.matches(valid_schema), - "'{}' should match 'SPECIFICATION_VERSION_SCHEMA'.".format( - valid_schema)) - - invalid_schemas = [ - "1", - "1.2", - "1.2.3-0123", - "1.2.3-0123.0123", - "1.1.2+.123", - "+invalid", - "-invalid", - "-invalid+invalid", - "-invalid.01", - "alpha", - "alpha.beta", - "alpha.beta.1", - "alpha.1", - "alpha+beta", - "alpha_beta", - "alpha.", - "alpha..", - "beta", - "1.0.0-alpha_beta", - "-alpha.", - "1.0.0-alpha..", - "1.0.0-alpha..1", - "1.0.0-alpha...1", - "1.0.0-alpha....1", - "1.0.0-alpha.....1", - "1.0.0-alpha......1", - "1.0.0-alpha.......1", - "01.1.1", - "1.01.1", - "1.1.01", - "1.2", - "1.2.3.DEV", - "1.2-SNAPSHOT", - "1.2.31.2.3----RC-SNAPSHOT.12.09.1--..12+788", - "1.2-RC-SNAPSHOT", - "-1.0.3-gamma+b7718", - "+justmeta", - "9.8.7+meta+meta", - "9.8.7-whatever+meta+meta", - "99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12"] - - for invalid_schema in invalid_schemas: - self.assertFalse( - tuf.formats.SPECIFICATION_VERSION_SCHEMA.matches(invalid_schema), - "'{}' should not match 'SPECIFICATION_VERSION_SCHEMA'.".format( - invalid_schema)) - - - def test_build_dict_conforming_to_schema(self): - # Test construction of a few metadata formats using - # build_dict_conforming_to_schema(). - - # Try the wrong type of schema object. 
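The semver examples in the deleted test above come from the regex101 link cited in its docstring. For reference, the suggested expression from semver.org reproduces the same accept/reject behaviour:

    import re

    # The suggested semantic-versioning regular expression from semver.org
    # (the regex101 link cited above).
    SEMVER_RE = re.compile(
        r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)"
        r"(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
        r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
        r"(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
    )

    assert SEMVER_RE.match("1.0.0-alpha+beta")  # valid
    assert not SEMVER_RE.match("01.1.1")        # leading zero: invalid
    assert not SEMVER_RE.match("1.2")           # patch component missing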
- STRING_SCHEMA = securesystemslib.schema.AnyString() - - with self.assertRaises(ValueError): - tuf.formats.build_dict_conforming_to_schema( - STRING_SCHEMA, string='some string') - - # Try building Timestamp metadata. - spec_version = tuf.SPECIFICATION_VERSION - version = 8 - length = 88 - hashes = {'sha256': '3c7fe3eeded4a34'} - expires = '1985-10-21T13:20:00Z' - filedict = {'snapshot.json': {'length': length, 'hashes': hashes, 'version': 1}} - - - # Try with and without _type and spec_version, both of which are - # automatically populated if they are not included. - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches( # both - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - spec_version=spec_version, - version=version, - expires=expires, - meta=filedict))) - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches( # neither - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - version=version, - expires=expires, - meta=filedict))) - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches( # one - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - spec_version=spec_version, - version=version, - expires=expires, - meta=filedict))) - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches( # the other - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - version=version, - expires=expires, - meta=filedict))) - - - # Try test arguments for invalid Timestamp creation. - bad_spec_version = 123 - bad_version = 'eight' - bad_expires = '2000' - bad_filedict = 123 - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - spec_version=bad_spec_version, - version=version, - expires=expires, - meta=filedict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - spec_version=spec_version, - version=bad_version, - expires=expires, - meta=filedict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - spec_version=spec_version, - version=version, - expires=bad_expires, - meta=filedict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - spec_version=spec_version, - version=version, - expires=expires, - meta=bad_filedict) - - with self.assertRaises(ValueError): - tuf.formats.build_dict_conforming_to_schema(123) - - - # Try building Root metadata. - consistent_snapshot = False - - keydict = {'123abc': {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}} - - roledict = {'root': {'keyids': ['123abc'], - 'threshold': 1, - 'paths': ['path1/', 'path2']}} - - - self.assertTrue(tuf.formats.ROOT_SCHEMA.matches( - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=spec_version, - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot))) - - - # Additional test arguments for invalid Root creation. 
- bad_keydict = 123 - bad_roledict = 123 - - # TODO: Later on, write a test looper that takes pairs of key-value args - # to substitute in on each run to shorten this.... There's a lot of - # test code that looks like this, and it'd be easier to use a looper. - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=bad_spec_version, - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=spec_version, - version=bad_version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=spec_version, - version=version, - expires=bad_expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=spec_version, - version=version, - expires=expires, - keys=bad_keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=spec_version, - version=version, - expires=expires, - keys=keydict, - roles=bad_roledict, - consistent_snapshot=consistent_snapshot) - - with self.assertRaises(TypeError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, 'bad') - - with self.assertRaises(ValueError): - tuf.formats.build_dict_conforming_to_schema( - 'bad', - _type='root', - spec_version=spec_version, - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - - - # Try building Snapshot metadata. - versiondict = {'targets.json' : {'version': version}} - - self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches( - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, - _type='snapshot', - spec_version=spec_version, - version=version, - expires=expires, - meta=versiondict))) - - # Additional test arguments for invalid Snapshot creation. 
- bad_versiondict = 123 - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, - _type='snapshot', - spec_version=bad_spec_version, - version=version, - expires=expires, - meta=versiondict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, - _type='snapshot', - spec_version=spec_version, - version=bad_version, - expires=expires, - meta=versiondict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, - _type='snapshot', - spec_version=spec_version, - version=version, - expires=bad_expires, - meta=versiondict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, - _type='snapshot', - spec_version=spec_version, - version=version, - expires=expires, - meta=bad_versiondict) - - - - # Try building Targets metadata. - filedict = {'metadata/targets.json': {'length': 1024, - 'hashes': {'sha256': 'ABCD123'}, - 'custom': {'type': 'metadata'}}} - - delegations = {'keys': {'123abc': {'keytype':'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}}, - 'roles': [{'name': 'root', 'keyids': ['123abc'], - 'threshold': 1, 'paths': ['path1/', 'path2']}]} - - - self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches( - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=version, - expires=expires, - targets=filedict, - delegations=delegations))) - - # Try with no delegations included (should work, since they're optional). - self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches( - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=version, - expires=expires, - targets=filedict))) - - - # Additional test arguments for invalid Targets creation. 
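A condensed usage sketch of the builder exercised above, based directly on the calls in the deleted test (the pre-removal tuf.formats module is assumed): omitted _type and spec_version fields are auto-populated, and malformed arguments raise a securesystemslib FormatError.

    import tuf
    import tuf.formats

    timestamp = tuf.formats.build_dict_conforming_to_schema(
        tuf.formats.TIMESTAMP_SCHEMA,
        version=8,
        expires="1985-10-21T13:20:00Z",
        meta={"snapshot.json": {"length": 88,
                                "hashes": {"sha256": "3c7fe3eeded4a34"},
                                "version": 1}},
    )
    assert timestamp["_type"] == "timestamp"
    assert timestamp["spec_version"] == tuf.SPECIFICATION_VERSION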
- bad_filedict = 123 - bad_delegations = 123 - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=bad_version, - expires=expires, - targets=filedict, - delegations=delegations) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=version, - expires=bad_expires, - targets=filedict, - delegations=delegations) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=version, - expires=expires, - targets=bad_filedict, - delegations=delegations) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=version, - expires=expires, - targets=filedict, - delegations=bad_delegations) - - - - def test_expiry_string_to_datetime(self): - dt = tuf.formats.expiry_string_to_datetime('1985-10-21T13:20:00Z') - self.assertEqual(dt, datetime.datetime(1985, 10, 21, 13, 20, 0)) - dt = tuf.formats.expiry_string_to_datetime('2038-01-19T03:14:08Z') - self.assertEqual(dt, datetime.datetime(2038, 1, 19, 3, 14, 8)) - - # First 3 fail via securesystemslib schema, last one because of strptime() - invalid_inputs = [ - '2038-1-19T03:14:08Z', # leading zeros not optional - '2038-01-19T031408Z', # strict time parsing - '2038-01-19T03:14:08Z-06:00', # timezone not allowed - '2038-13-19T03:14:08Z', # too many months - ] - for invalid_input in invalid_inputs: - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.expiry_string_to_datetime(invalid_input) - - - - def test_unix_timestamp_to_datetime(self): - # Test conditions for valid arguments. - UNIX_TIMESTAMP_SCHEMA = securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA - self.assertTrue(datetime.datetime, tuf.formats.unix_timestamp_to_datetime(499137720)) - datetime_object = datetime.datetime(1985, 10, 26, 1, 22) - self.assertEqual(datetime_object, tuf.formats.unix_timestamp_to_datetime(499137720)) - - # Test conditions for invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.unix_timestamp_to_datetime, 'bad') - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.unix_timestamp_to_datetime, 1000000000000000000000) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.unix_timestamp_to_datetime, -1) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.unix_timestamp_to_datetime, ['5']) - - - - def test_datetime_to_unix_timestamp(self): - # Test conditions for valid arguments. - datetime_object = datetime.datetime(2015, 10, 21, 19, 28) - self.assertEqual(1445455680, tuf.formats.datetime_to_unix_timestamp(datetime_object)) - - # Test conditions for invalid arguments. 
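The datetime helpers tested above implement plain UTC conversions; standard-library equivalents reproduce the same figures used as test data:

    import calendar
    import datetime

    expires = datetime.datetime.strptime("1985-10-21T13:20:00Z",
                                         "%Y-%m-%dT%H:%M:%SZ")
    assert expires == datetime.datetime(1985, 10, 21, 13, 20, 0)

    dt = datetime.datetime(2015, 10, 21, 19, 28)
    assert calendar.timegm(dt.timetuple()) == 1445455680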
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.datetime_to_unix_timestamp, 'bad') - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.datetime_to_unix_timestamp, 1000000000000000000000) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.datetime_to_unix_timestamp, ['1']) - - - - def test_format_base64(self): - # Test conditions for valid arguments. - data = 'updateframework'.encode('utf-8') - self.assertEqual('dXBkYXRlZnJhbWV3b3Jr', tuf.formats.format_base64(data)) - self.assertTrue(isinstance(tuf.formats.format_base64(data), str)) - - # Test conditions for invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.format_base64, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.format_base64, True) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.format_base64, ['123']) - - - def test_parse_base64(self): - # Test conditions for valid arguments. - base64 = 'dXBkYXRlZnJhbWV3b3Jr' - self.assertEqual(b'updateframework', tuf.formats.parse_base64(base64)) - self.assertTrue(isinstance(tuf.formats.parse_base64(base64), bytes)) - - # Test conditions for invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.parse_base64, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.parse_base64, True) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.parse_base64, ['123']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.parse_base64, '/') - - - - def test_make_signable(self): - # Test conditions for expected make_signable() behavior. - SIGNABLE_SCHEMA = tuf.formats.SIGNABLE_SCHEMA - root_file = os.path.join('repository_data', 'repository', 'metadata', - 'root.json') - root = securesystemslib.util.load_json_file(root_file) - self.assertTrue(SIGNABLE_SCHEMA.matches(tuf.formats.make_signable(root))) - signable = tuf.formats.make_signable(root) - self.assertEqual('root', tuf.formats.check_signable_object_format(signable)) - - self.assertEqual(signable, tuf.formats.make_signable(signable)) - - # Test conditions for miscellaneous arguments. - self.assertTrue(SIGNABLE_SCHEMA.matches(tuf.formats.make_signable('123'))) - self.assertTrue(SIGNABLE_SCHEMA.matches(tuf.formats.make_signable(123))) - - - - - - def test_make_targets_fileinfo(self): - # Test conditions for valid arguments. - length = 1024 - hashes = {'sha256': 'A4582BCF323BCEF', 'sha512': 'A4582BCF323BFEF'} - custom = {'type': 'paintjob'} - - TARGETS_FILEINFO_SCHEMA = tuf.formats.TARGETS_FILEINFO_SCHEMA - make_targets_fileinfo = tuf.formats.make_targets_fileinfo - self.assertTrue(TARGETS_FILEINFO_SCHEMA.matches(make_targets_fileinfo(length, hashes, custom))) - self.assertTrue(TARGETS_FILEINFO_SCHEMA.matches(make_targets_fileinfo(length, hashes))) - - # Test conditions for invalid arguments. 
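The base64 helpers tested above wrap a standard encoding round-trip over UTF-8 bytes, which the standard library shows directly:

    import base64

    encoded = base64.standard_b64encode(b"updateframework").decode("utf-8")
    assert encoded == "dXBkYXRlZnJhbWV3b3Jr"
    assert base64.standard_b64decode(encoded) == b"updateframework"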
- bad_length = 'bad' - bad_hashes = 'bad' - bad_custom = 'bad' - - self.assertRaises(securesystemslib.exceptions.FormatError, make_targets_fileinfo, - bad_length, hashes, custom) - self.assertRaises(securesystemslib.exceptions.FormatError, make_targets_fileinfo, - length, bad_hashes, custom) - self.assertRaises(securesystemslib.exceptions.FormatError, make_targets_fileinfo, - length, hashes, bad_custom) - self.assertRaises(securesystemslib.exceptions.FormatError, make_targets_fileinfo, - bad_length, hashes) - self.assertRaises(securesystemslib.exceptions.FormatError, make_targets_fileinfo, - length, bad_hashes) - - - - def test_make_metadata_fileinfo(self): - # Test conditions for valid arguments. - length = 1024 - hashes = {'sha256': 'A4582BCF323BCEF', 'sha512': 'A4582BCF323BFEF'} - version = 8 - - METADATA_FILEINFO_SCHEMA = tuf.formats.METADATA_FILEINFO_SCHEMA - make_metadata_fileinfo = tuf.formats.make_metadata_fileinfo - self.assertTrue(METADATA_FILEINFO_SCHEMA.matches(make_metadata_fileinfo( - version, length, hashes))) - self.assertTrue(METADATA_FILEINFO_SCHEMA.matches(make_metadata_fileinfo(version))) - - # Test conditions for invalid arguments. - bad_version = 'bad' - bad_length = 'bad' - bad_hashes = 'bad' - - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata_fileinfo, - bad_version, length, hashes) - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata_fileinfo, - version, bad_length, hashes) - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata_fileinfo, - version, length, bad_hashes) - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata_fileinfo, - bad_version) - - - - def test_make_versioninfo(self): - # Test conditions for valid arguments. - version_number = 8 - versioninfo = {'version': version_number} - - VERSIONINFO_SCHEMA = tuf.formats.VERSIONINFO_SCHEMA - make_versioninfo = tuf.formats.make_versioninfo - self.assertTrue(VERSIONINFO_SCHEMA.matches(make_versioninfo(version_number))) - - # Test conditions for invalid arguments. - bad_version_number = '8' - - self.assertRaises(securesystemslib.exceptions.FormatError, make_versioninfo, bad_version_number) - - - - - - def test_expected_meta_rolename(self): - # Test conditions for valid arguments. - expected_rolename = tuf.formats.expected_meta_rolename - - self.assertEqual('root', expected_rolename('Root')) - self.assertEqual('targets', expected_rolename('Targets')) - self.assertEqual('snapshot', expected_rolename('Snapshot')) - self.assertEqual('timestamp', expected_rolename('Timestamp')) - self.assertEqual('mirrors', expected_rolename('Mirrors')) - self.assertEqual('targets role', expected_rolename('Targets Role')) - self.assertEqual('root', expected_rolename('Root')) - - # Test conditions for invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, expected_rolename, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, expected_rolename, tuf.formats.ROOT_SCHEMA) - self.assertRaises(securesystemslib.exceptions.FormatError, expected_rolename, True) - - - - def test_check_signable_object_format(self): - # Test condition for a valid argument. - root_file = os.path.join('repository_data', 'repository', 'metadata', - 'root.json') - root = securesystemslib.util.load_json_file(root_file) - root = tuf.formats.make_signable(root) - self.assertEqual('root', tuf.formats.check_signable_object_format(root)) - - # Test conditions for invalid arguments. 
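The signable envelope checked above has a simple shape. A simplified sketch of the wrapping behaviour the test relies on (the real make_signable() additionally validates against SIGNABLE_SCHEMA):

    from typing import Any, Dict

    def make_signable_sketch(obj: Any) -> Dict[str, Any]:
        # Wrap under 'signed' with an empty signature list, unless the
        # object is already an envelope (make_signable is idempotent).
        if isinstance(obj, dict) and set(obj) == {"signed", "signatures"}:
            return obj
        return {"signed": obj, "signatures": []}

    wrapped = make_signable_sketch({"_type": "root"})
    assert make_signable_sketch(wrapped) == wrapped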
- check_signable = tuf.formats.check_signable_object_format - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, 'root') - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, tuf.formats.ROOT_SCHEMA) - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, True) - - saved_type = root['signed']['_type'] - del root['signed']['_type'] - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, root) - root['signed']['_type'] = saved_type - - root['signed']['_type'] = 'Root' - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, root) - root['signed']['_type'] = 'root' - - del root['signed']['expires'] - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, root) - - - - def test_encode_canonical(self): - # Test conditions for valid arguments. - encode = securesystemslib.formats.encode_canonical - result = [] - output = result.append - bad_output = 123 - - self.assertEqual('""', encode("")) - self.assertEqual('[1,2,3]', encode([1, 2, 3])) - self.assertEqual('[1,2,3]', encode([1,2,3])) - self.assertEqual('[]', encode([])) - self.assertEqual('{"A":[99]}', encode({"A": [99]})) - self.assertEqual('{"x":3,"y":2}', encode({"x": 3, "y": 2})) - - self.assertEqual('{"x":3,"y":null}', encode({"x": 3, "y": None})) - - # Condition where 'encode()' sends the result to the callable - # 'output'. - self.assertEqual(None, encode([1, 2, 3], output)) - self.assertEqual('[1,2,3]', ''.join(result)) - - # Test conditions for invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, encode, tuf.formats.ROOT_SCHEMA) - self.assertRaises(securesystemslib.exceptions.FormatError, encode, 8.0) - self.assertRaises(securesystemslib.exceptions.FormatError, encode, {"x": 8.0}) - self.assertRaises(securesystemslib.exceptions.FormatError, encode, 8.0, output) - - self.assertRaises(securesystemslib.exceptions.FormatError, encode, {"x": securesystemslib.exceptions.FormatError}) - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_indefinite_freeze_attack.py b/tests/test_indefinite_freeze_attack.py deleted file mode 100755 index 67b12c4372..0000000000 --- a/tests/test_indefinite_freeze_attack.py +++ /dev/null @@ -1,466 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_indefinite_freeze_attack.py - - - Konstantin Andrianov. - - - March 10, 2012. - - April 1, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. -vladimir.v.diaz - - March 9, 2016. - Additional test added relating to issue: - https://github.com/theupdateframework/python-tuf/issues/322 - If a metadata file is not updated (no indication of a new version - available), the expiration of the pre-existing, locally trusted metadata - must still be detected. This additional test complains if such does not - occur, and accompanies code in tuf.client.updater:refresh() to detect it. - -sebastien.awwad - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate an indefinite freeze attack. 
In an indefinite freeze attack, - attacker is able to respond to client's requests with the same, outdated - metadata without the client being aware. -""" - -import datetime -import os -import time -import tempfile -import shutil -import json -import logging -import unittest -import sys -from urllib import request - -if sys.version_info >= (3, 3): - import unittest.mock as mock -else: - import mock - -import tuf.formats -import tuf.log -import tuf.client.updater as updater -import tuf.repository_tool as repo_tool -import tuf.unittest_toolbox as unittest_toolbox -import tuf.roledb -import tuf.keydb -import tuf.exceptions - -from tests import utils - -import securesystemslib - -# The repository tool is imported and logs console messages by default. Disable -# console log messages generated by this unit test. -repo_tool.disable_console_log_messages() - -logger = logging.getLogger(__name__) - - -class TestIndefiniteFreezeAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client') - original_keystore = os.path.join(original_repository_files, 'keystore') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. 
- self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.client_directory = os.path.join(temporary_repository_root, 'client') - self.keystore_directory = os.path.join(temporary_repository_root, 'keystore') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_keystore, self.keystore_directory) - - # Set the url prefix required by the 'tuf/client/updater.py' updater. - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repository_directory' with the temporary client - # directory copied from the original repository files. - tuf.settings.repositories_directory = self.client_directory - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Create the repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Logs stdout and stderr from the sever subprocess. - self.server_process_handler.flush_log() - - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - def test_without_tuf(self): - # Without TUF, Test 1 and Test 2 are functionally equivalent, so we skip - # Test 1 and only perform Test 2. - # - # Test 1: If we find that the timestamp acquired from a mirror indicates - # that there is no new snapshot file, and our current snapshot - # file is expired, is it recognized as such? - # Test 2: If an expired timestamp is downloaded, is it recognized as such? - - - # Test 2 Begin: - # - # 'timestamp.json' specifies the latest version of the repository files. A - # client should only accept the same version of this file up to a certain - # point, or else it cannot detect that new files are available for - # download. Modify the repository's timestamp.json' so that it expires - # soon, copy it over to the client, and attempt to re-fetch the same - # expired version. - # - # A non-TUF client (without a way to detect when metadata has expired) is - # expected to download the same version, and thus the same outdated files. - # Verify that the downloaded 'timestamp.json' contains the same file size - # and hash as the one available locally. - - timestamp_path = os.path.join(self.repository_directory, 'metadata', - 'timestamp.json') - - timestamp_metadata = securesystemslib.util.load_json_file(timestamp_path) - expiry_time = time.time() - 10 - expires = tuf.formats.unix_timestamp_to_datetime(int(expiry_time)) - expires = expires.isoformat() + 'Z' - timestamp_metadata['signed']['expires'] = expires - tuf.formats.check_signable_object_format(timestamp_metadata) - - with open(timestamp_path, 'wb') as file_object: - # Explicitly specify the JSON separators for Python 2 + 3 consistency. 
- timestamp_content = \ - json.dumps(timestamp_metadata, indent=1, separators=(',', ': '), - sort_keys=True).encode('utf-8') - file_object.write(timestamp_content) - - client_timestamp_path = os.path.join(self.client_directory, 'timestamp.json') - shutil.copy(timestamp_path, client_timestamp_path) - - length, hashes = securesystemslib.util.get_file_details(timestamp_path) - fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json') - - request.urlretrieve(url_file.replace('\\', '/'), client_timestamp_path) - - length, hashes = securesystemslib.util.get_file_details(client_timestamp_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - # Verify 'download_fileinfo' is equal to the current local file. - self.assertEqual(download_fileinfo, fileinfo) - - - def test_with_tuf(self): - # Three tests are conducted here. - # - # Test 1: If we find that the timestamp acquired from a mirror indicates - # that there is no new snapshot file, and our current snapshot - # file is expired, is it recognized as such? - # Test 2: If an expired timestamp is downloaded, is it recognized as such? - # Test 3: If an expired Snapshot is downloaded, is it (1) rejected? (2) the - # local Snapshot file deleted? (3) and is the client able to recover when - # given a new, valid Snapshot? - - - # Test 1 Begin: - # - # Addresses this issue: https://github.com/theupdateframework/python-tuf/issues/322 - # - # If time has passed and our snapshot or targets role is expired, and - # the mirror whose timestamp we fetched doesn't indicate the existence of a - # new snapshot version, we still need to check that it's expired and notify - # the software update system / application / user. This test creates that - # scenario. The correct behavior is to raise an exception. - # - # Background: Expiration checks (updater._ensure_not_expired) were - # previously conducted when the metadata file was downloaded. If no new - # metadata file was downloaded, no expiry check would occur. In particular, - # while root was checked for expiration at the beginning of each - # updater.refresh() cycle, and timestamp was always checked because it was - # always fetched, snapshot and targets were never checked if the user did - # not receive evidence that they had changed. This bug allowed a class of - # freeze attacks. - # That bug was fixed and this test tests that fix going forward. - - # Modify the timestamp file on the remote repository. 'timestamp.json' - # must be properly updated and signed with 'repository_tool.py', otherwise - # the client will reject it as invalid metadata. - - # Load the repository - repository = repo_tool.load_repository(self.repository_directory) - - # Load the snapshot and timestamp keys - key_file = os.path.join(self.keystore_directory, 'timestamp_key') - timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file, - 'password') - repository.timestamp.load_signing_key(timestamp_private) - key_file = os.path.join(self.keystore_directory, 'snapshot_key') - snapshot_private = repo_tool.import_ed25519_privatekey_from_file(key_file, - 'password') - repository.snapshot.load_signing_key(snapshot_private) - - # sign snapshot with expiry in near future (earlier than e.g. 
timestamp) - expiry = int(time.time() + 60*60) - repository.snapshot.expiration = tuf.formats.unix_timestamp_to_datetime( - expiry) - repository.mark_dirty(['snapshot', 'timestamp']) - repository.writeall() - - # And move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Refresh metadata on the client. For this refresh, all data is not expired. - logger.info('Test: Refreshing #1 - Initial metadata refresh occurring.') - self.repository_updater.refresh() - - logger.info('Test: Refreshing #2 - refresh after local snapshot expiry.') - - # mock current time to one second after snapshot expiry - mock_time = mock.Mock() - mock_time.return_value = expiry + 1 - with mock.patch('time.time', mock_time): - try: - self.repository_updater.refresh() # We expect this to fail! - - except tuf.exceptions.ExpiredMetadataError: - logger.info('Test: Refresh #2 - failed as expected. Expired local' - ' snapshot case generated a tuf.exceptions.ExpiredMetadataError' - ' exception as expected. Test pass.') - - else: - self.fail('TUF failed to detect expired stale snapshot metadata. Freeze' - ' attack successful.') - - - - - # Test 2 Begin: - # - # 'timestamp.json' specifies the latest version of the repository files. - # A client should only accept the same version of this file up to a certain - # point, or else it cannot detect that new files are available for download. - # Modify the repository's 'timestamp.json' so that it is about to expire, - # copy it over the to client, wait a moment until it expires, and attempt to - # re-fetch the same expired version. - - # The same scenario as in test_without_tuf() is followed here, except with - # a TUF client. The TUF client performs a refresh of top-level metadata, - # which includes 'timestamp.json', and should detect a freeze attack if - # the repository serves an outdated 'timestamp.json'. - - # Modify the timestamp file on the remote repository. 'timestamp.json' - # must be properly updated and signed with 'repository_tool.py', otherwise - # the client will reject it as invalid metadata. The resulting - # 'timestamp.json' should be valid metadata, but expired (as intended). - repository = repo_tool.load_repository(self.repository_directory) - - key_file = os.path.join(self.keystore_directory, 'timestamp_key') - timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file, - 'password') - - repository.timestamp.load_signing_key(timestamp_private) - - # Set timestamp metadata to expire soon. - # We cannot set the timestamp expiration with - # 'repository.timestamp.expiration = ...' with already-expired timestamp - # metadata because of consistency checks that occur during that assignment. - expiry_time = time.time() + 60*60 - datetime_object = tuf.formats.unix_timestamp_to_datetime(int(expiry_time)) - repository.timestamp.expiration = datetime_object - repository.writeall() - - # Move the staged metadata to the "live" metadata. 
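The refresh tests above all use the same trick to move the clock past a metadata expiry without sleeping: patching time.time(). A self-contained illustration:

    import time
    from unittest import mock

    expiry = int(time.time()) + 60 * 60  # metadata expires in one hour

    mock_time = mock.Mock(return_value=expiry + 1)
    with mock.patch("time.time", mock_time):
        # Any expiry check that consults time.time() now sees the
        # metadata as one second past its expiration.
        assert time.time() == expiry + 1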
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # mock current time to one second after timestamp expiry - mock_time = mock.Mock() - mock_time.return_value = expiry_time + 1 - with mock.patch('time.time', mock_time): - try: - self.repository_updater.refresh() # We expect NoWorkingMirrorError. - - except tuf.exceptions.NoWorkingMirrorError as e: - # Make sure the contained error is ExpiredMetadataError - for mirror_url, mirror_error in e.mirror_errors.items(): - self.assertTrue(isinstance(mirror_error, tuf.exceptions.ExpiredMetadataError)) - - else: - self.fail('TUF failed to detect expired, stale timestamp metadata.' - ' Freeze attack successful.') - - - - - # Test 3 Begin: - # - # Serve the client expired Snapshot. The client should reject the given, - # expired Snapshot and the locally trusted one, which should now be out of - # date. - # After the attack, attempt to re-issue a valid Snapshot to verify that - # the client is still able to update. A bug previously caused snapshot - # expiration or replay to result in an indefinite freeze; see - # github.com/theupdateframework/python-tuf/issues/736 - repository = repo_tool.load_repository(self.repository_directory) - - ts_key_file = os.path.join(self.keystore_directory, 'timestamp_key') - snapshot_key_file = os.path.join(self.keystore_directory, 'snapshot_key') - timestamp_private = repo_tool.import_ed25519_privatekey_from_file( - ts_key_file, 'password') - snapshot_private = repo_tool.import_ed25519_privatekey_from_file( - snapshot_key_file, 'password') - - repository.timestamp.load_signing_key(timestamp_private) - repository.snapshot.load_signing_key(snapshot_private) - - # Set ts to expire in 1 month. - ts_expiry_time = time.time() + 2630000 - - # Set snapshot to expire in 1 hour. - snapshot_expiry_time = time.time() + 60*60 - - ts_datetime_object = tuf.formats.unix_timestamp_to_datetime( - int(ts_expiry_time)) - snapshot_datetime_object = tuf.formats.unix_timestamp_to_datetime( - int(snapshot_expiry_time)) - repository.timestamp.expiration = ts_datetime_object - repository.snapshot.expiration = snapshot_datetime_object - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # mock current time to one second after snapshot expiry - mock_time = mock.Mock() - mock_time.return_value = snapshot_expiry_time + 1 - with mock.patch('time.time', mock_time): - try: - # We expect the following refresh() to raise a NoWorkingMirrorError. - self.repository_updater.refresh() - - except tuf.exceptions.NoWorkingMirrorError as e: - # Make sure the contained error is ExpiredMetadataError - for mirror_url, mirror_error in e.mirror_errors.items(): - self.assertTrue(isinstance(mirror_error, tuf.exceptions.ExpiredMetadataError)) - self.assertTrue(mirror_url.endswith('snapshot.json')) - - else: - self.fail('TUF failed to detect expired, stale Snapshot metadata.' - ' Freeze attack successful.') - - # The client should have rejected the malicious Snapshot metadata, and - # distrusted the local snapshot file that is no longer valid. 
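As in the tests above, the legacy client reported per-mirror failures by aggregating them into a NoWorkingMirrorError. A sketch of unpacking one, constructed here with a dummy entry (the pre-removal tuf.exceptions API and its one-dict constructor are assumptions based on the usage quoted above):

    import tuf.exceptions

    err = tuf.exceptions.NoWorkingMirrorError({
        "http://localhost:8001/metadata/snapshot.json":
            tuf.exceptions.ExpiredMetadataError("snapshot expired"),
    })
    for mirror_url, mirror_error in err.mirror_errors.items():
        assert mirror_url.endswith("snapshot.json")
        assert isinstance(mirror_error, tuf.exceptions.ExpiredMetadataError)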
- self.assertTrue('snapshot' not in self.repository_updater.metadata['current']) - self.assertEqual(sorted(['root', 'targets', 'timestamp']), - sorted(self.repository_updater.metadata['current'])) - - # Verify that the client is able to recover from the malicious Snapshot. - # Re-sign a valid Snapshot file that the client should accept. - repository = repo_tool.load_repository(self.repository_directory) - - repository.timestamp.load_signing_key(timestamp_private) - repository.snapshot.load_signing_key(snapshot_private) - - # Set snapshot to expire in 1 month. - snapshot_expiry_time = time.time() + 2630000 - - snapshot_datetime_object = tuf.formats.unix_timestamp_to_datetime( - int(snapshot_expiry_time)) - repository.snapshot.expiration = snapshot_datetime_object - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Verify that the client accepts the valid metadata file. - self.repository_updater.refresh() - self.assertTrue('snapshot' in self.repository_updater.metadata['current']) - self.assertEqual(sorted(['root', 'targets', 'timestamp', 'snapshot']), - sorted(self.repository_updater.metadata['current'])) - - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_key_revocation_integration.py b/tests/test_key_revocation_integration.py deleted file mode 100755 index 4c00713445..0000000000 --- a/tests/test_key_revocation_integration.py +++ /dev/null @@ -1,495 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_key_revocation_integration.py - - - Vladimir Diaz. - - - April 28, 2016. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Integration test that verifies top-level roles are updated after all of their - keys have been revoked. There are unit tests in 'test_repository_tool.py' - that verify key and role revocation of specific roles, but these should be - expanded to verify key revocations over the span of multiple snapshots of the - repository. - - The 'unittest_toolbox.py' module was created to provide additional testing - tools, such as automatically deleting temporary files created in test cases. - For more information on the additional testing tools, see - 'tests/unittest_toolbox.py'. -""" - -import os -import shutil -import tempfile -import logging -import unittest -import sys - -import tuf -import tuf.log -import tuf.roledb -import tuf.keydb -import tuf.repository_tool as repo_tool -import tuf.unittest_toolbox as unittest_toolbox -import tuf.client.updater as updater - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) -repo_tool.disable_console_log_messages() - - -class TestKeyRevocation(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). 
Test - # cases will request metadata and target files that have been pre-generated - # in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of - # 'test_key_revocation.py' assume the pre-generated metadata files have a - # specific structure, such as a delegated role, three target files, five - # key files, etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Clean up resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from a custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_keystore = os.path.join(original_repository_files, 'keystore') - original_client = os.path.join(original_repository_files, 'client') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.keystore_directory = \ - os.path.join(temporary_repository_root, 'keystore') - self.client_directory = os.path.join(temporary_repository_root, 'client') - self.client_metadata = os.path.join(self.client_directory, - self.repository_name, 'metadata') - self.client_metadata_current = os.path.join(self.client_metadata, 'current') - self.client_metadata_previous = os.path.join(self.client_metadata, 'previous') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository so that the test cases can use them. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_keystore, self.keystore_directory) - - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Set 'tuf.settings.repositories_directory' to the temporary client - # directory copied from the original repository files. - tuf.settings.repositories_directory = self.client_directory - - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Create the client updater instance. The test cases will use it to - # refresh metadata, fetch target files, etc.
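- # Note: Updater() reads the client's trusted metadata from 'metadata/current' - # under tuf.settings.repositories_directory, which was set above.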
- self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - - # Metadata role keys are needed by the test cases to make changes to the - # repository (e.g., adding a new target file to 'targets.json' and then - # requesting a refresh()). - self.role_keys = _load_role_keys(self.keystore_directory) - - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Logs stdout and stderr from the server subprocess. - self.server_process_handler.flush_log() - - # Remove temporary directory. - unittest_toolbox.Modified_TestCase.tearDown(self) - - - # UNIT TESTS. - def test_timestamp_key_revocation(self): - # First verify that the Timestamp role is properly signed. Calling - # refresh() should not raise an exception. - self.repository_updater.refresh() - - # There should only be one key for Timestamp. Store the keyid to later - # verify that it has been revoked. - timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', self.repository_name) - timestamp_keyid = timestamp_roleinfo['keyids'] - self.assertEqual(len(timestamp_keyid), 1) - - # Remove 'timestamp_keyid' and add a new key. Verify that the client - # detects the removal and addition of keys to the Timestamp role. - repository = repo_tool.load_repository(self.repository_directory) - repository.timestamp.remove_verification_key(self.role_keys['timestamp']['public']) - repository.timestamp.add_verification_key(self.role_keys['snapshot']['public']) - - # Root, Snapshot, and Timestamp must be rewritten. Root must be written - # because the timestamp key has changed; Snapshot, because Root has - # changed; and Timestamp, because it must sign its metadata with a new key. - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['snapshot']['private']) - repository.writeall() - - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # The client performs a refresh of top-level metadata to get the latest - # changes. - self.repository_updater.refresh() - - # Verify that the client is able to recognize that a new set of keys has - # been added to the Timestamp role. - # First, has 'timestamp_keyid' been removed? - timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', self.repository_name) - self.assertTrue(timestamp_keyid not in timestamp_roleinfo['keyids']) - - # Second, is Timestamp's new key correct? The new key should be Snapshot's. - - self.assertEqual(len(timestamp_roleinfo['keyids']), 1) - snapshot_roleinfo = tuf.roledb.get_roleinfo('snapshot', self.repository_name) - self.assertEqual(timestamp_roleinfo['keyids'], snapshot_roleinfo['keyids']) - - - - def test_snapshot_key_revocation(self): - # First verify that the Snapshot role is properly signed. Calling - # refresh() should not raise an exception. - self.repository_updater.refresh() - - # There should only be one key for Snapshot. Store the keyid to later - # verify that it has been revoked. - snapshot_roleinfo = tuf.roledb.get_roleinfo('snapshot', self.repository_name) - snapshot_keyid = snapshot_roleinfo['keyids'] - self.assertEqual(len(snapshot_keyid), 1) - - - # Remove 'snapshot_keyid' and add a new key. Verify that the client - # detects the removal and addition of keys to the Snapshot role.
- repository = repo_tool.load_repository(self.repository_directory) - repository.snapshot.remove_verification_key(self.role_keys['snapshot']['public']) - repository.snapshot.add_verification_key(self.role_keys['timestamp']['public']) - - # Root, Snapshot, and Timestamp must be rewritten. Root must be written - # because the snapshot key has changed; Snapshot, because Root has - # changed and it must sign with a new key; and Timestamp, because Snapshot - # has changed. - repository.root.load_signing_key(self.role_keys['root']['private']) - # Note: we added Timestamp's key to the Snapshot role. - repository.snapshot.load_signing_key(self.role_keys['timestamp']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.writeall() - - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # The client performs a refresh of top-level metadata to get the latest - # changes. - self.repository_updater.refresh() - - # Verify that the client is able to recognize that a new set of keys has - # been added to the Snapshot role. - # First, has 'snapshot_keyid' been removed? - snapshot_roleinfo = tuf.roledb.get_roleinfo('snapshot', self.repository_name) - self.assertTrue(snapshot_keyid not in snapshot_roleinfo['keyids']) - - # Second, is Snapshot's new key correct? The new key should be - # Timestamp's. - self.assertEqual(len(snapshot_roleinfo['keyids']), 1) - timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', self.repository_name) - self.assertEqual(snapshot_roleinfo['keyids'], timestamp_roleinfo['keyids']) - - - - - - def test_targets_key_revocation(self): - # First verify that the Targets role is properly signed. Calling - # refresh() should not raise an exception. - self.repository_updater.refresh() - - # There should only be one key for Targets. Store the keyid to later - # verify that it has been revoked. - targets_roleinfo = tuf.roledb.get_roleinfo('targets', self.repository_name) - targets_keyid = targets_roleinfo['keyids'] - self.assertEqual(len(targets_keyid), 1) - - # Remove 'targets_keyid' and add a new key. Verify that the client - # detects the removal and addition of keys to the Targets role. - repository = repo_tool.load_repository(self.repository_directory) - repository.targets.remove_verification_key(self.role_keys['targets']['public']) - repository.targets.add_verification_key(self.role_keys['timestamp']['public']) - - # Root, Snapshot, and Timestamp must be rewritten. Root must be written - # because the targets key has changed; Snapshot, because Root and Targets - # have changed; and Timestamp, because Snapshot has changed. - repository.root.load_signing_key(self.role_keys['root']['private']) - # Note: we added Timestamp's key to the Targets role. - repository.targets.load_signing_key(self.role_keys['timestamp']['private']) - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.writeall() - - - # Move the staged metadata to the "live" metadata.
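- # (writeall() only updates 'metadata.staged'; replacing the served - # 'metadata' directory with the staged copy is what actually publishes - # the new metadata to the client.)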
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # The client performs a refresh of top-level metadata to get the latest - # changes. - self.repository_updater.refresh() - - # Verify that the client is able to recognize that a new set of keys has - # been added to the Targets role. - # First, has 'targets_keyid' been removed? - targets_roleinfo = tuf.roledb.get_roleinfo('targets', self.repository_name) - self.assertTrue(targets_keyid not in targets_roleinfo['keyids']) - - # Second, is Targets' new key correct? The new key should be - # Timestamp's. - self.assertEqual(len(targets_roleinfo['keyids']), 1) - timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', self.repository_name) - self.assertEqual(targets_roleinfo['keyids'], timestamp_roleinfo['keyids']) - - - - def test_root_key_revocation(self): - # First verify that the Root role is properly signed. Calling - # refresh() should not raise an exception. - self.repository_updater.refresh() - - # There should only be one key for Root. Store the keyid to later verify - # that it has been revoked. - root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) - root_keyid = root_roleinfo['keyids'] - self.assertEqual(len(root_keyid), 1) - - # Revoke 'root_keyid' and add new keys. Verify that the client detects - # the removal and addition of keys to the Root file. - repository = repo_tool.load_repository(self.repository_directory) - - repository.root.add_verification_key(self.role_keys['snapshot']['public']) - repository.root.add_verification_key(self.role_keys['targets']['public']) - repository.root.add_verification_key(self.role_keys['timestamp']['public']) - - # Root, Snapshot, and Timestamp must be rewritten. Root must be written - # because its own keys have changed; Snapshot, because Root has - # changed; and Timestamp, because Snapshot has changed. - repository.root.load_signing_key(self.role_keys['snapshot']['private']) - repository.root.load_signing_key(self.role_keys['targets']['private']) - repository.root.load_signing_key(self.role_keys['timestamp']['private']) - - # Note: We added the Snapshot, Targets, and Timestamp keys to the Root role. - # Root's expected private key has not been loaded yet, so that we can - # verify that refresh() correctly raises a - # securesystemslib.exceptions.BadSignatureError exception. - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - - # Root's version number = 2 after the following writeall(). - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Note well: The client should reject the new Root file because the - # repository has revoked the only Root key that the client trusts.
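- # (Root v2 is signed only with the newly added Snapshot, Targets, and - # Timestamp keys, so the client cannot verify it with its trusted Root - # key and the refresh below must fail with BadSignatureError.)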
- try: - self.repository_updater.refresh() - - except tuf.exceptions.NoWorkingMirrorError as exception: - for mirror_exception in exception.mirror_errors.values(): - self.assertTrue(isinstance(mirror_exception, - securesystemslib.exceptions.BadSignatureError)) - - repository.root.add_verification_key(self.role_keys['root']['public']) - repository.root.load_signing_key(self.role_keys['root']['private']) - - # root, snapshot, and timestamp should be dirty. - repository.dirty_roles() - repository.write('root', increment_version_number=False) - repository.write('snapshot') - repository.write('timestamp') - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Root's version number = 2... - # The client successfully performs a refresh of top-level metadata to get - # the latest changes. - self.repository_updater.refresh() - self.assertEqual(self.repository_updater.metadata['current']['root']['version'], 2) - - # Revoke the snapshot and targets keys (added to root) so that multiple - # snapshots are created. Discontinue signing with the old root key now - # that the client has successfully updated (note: the old Root key - # was revoked, but the repository continued signing with it to allow - # the client to update). - repository.root.remove_verification_key(self.role_keys['root']['public']) - repository.root.unload_signing_key(self.role_keys['root']['private']) - repository.root.remove_verification_key(self.role_keys['snapshot']['public']) - repository.root.unload_signing_key(self.role_keys['snapshot']['private']) - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Root's version number = 3... - self.repository_updater.refresh() - - repository.root.remove_verification_key(self.role_keys['targets']['public']) - repository.root.unload_signing_key(self.role_keys['targets']['private']) - - # The following should fail because root rotation requires the new Root - # to be signed with the previous self.role_keys['targets'] key. - self.assertRaises(tuf.exceptions.UnsignedMetadataError, - repository.writeall) - - repository.root.load_signing_key(self.role_keys['targets']['private']) - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Root's version number = 4... - self.repository_updater.refresh() - self.assertEqual(self.repository_updater.metadata['current']['root']['version'], 4) - - # Verify that the client is able to recognize that a new set of keys has - # been added to the Root role. - # First, has 'root_keyid' been removed? - root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) - self.assertTrue(root_keyid not in root_roleinfo['keyids']) - - # Second, is Root's new key correct? The new key should be - # Timestamp's.
- self.assertEqual(len(root_roleinfo['keyids']), 1) - timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', self.repository_name) - self.assertEqual(root_roleinfo['keyids'], timestamp_roleinfo['keyids']) - - - -def _load_role_keys(keystore_directory): - - # Populate 'role_keys' by importing the required public and private - # keys of 'tuf/tests/repository_data/'. The role keys are needed when - # modifying the remote repository used by the test cases in this unit test. - # The pre-generated key files in 'repository_data/keystore' are all encrypted with - # a 'password' passphrase. - EXPECTED_KEYFILE_PASSWORD = 'password' - - # Store and return the cryptography keys of the top-level roles, including one - # delegated role. - role_keys = {} - - root_key_file = os.path.join(keystore_directory, 'root_key') - targets_key_file = os.path.join(keystore_directory, 'targets_key') - snapshot_key_file = os.path.join(keystore_directory, 'snapshot_key') - timestamp_key_file = os.path.join(keystore_directory, 'timestamp_key') - delegation_key_file = os.path.join(keystore_directory, 'delegation_key') - - role_keys = {'root': {}, 'targets': {}, 'snapshot': {}, 'timestamp': {}, - 'role1': {}} - - # Import the top-level and delegated role public keys. - role_keys['root']['public'] = \ - repo_tool.import_rsa_publickey_from_file(root_key_file+'.pub') - role_keys['targets']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(targets_key_file + '.pub') - role_keys['snapshot']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(snapshot_key_file + '.pub') - role_keys['timestamp']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(timestamp_key_file + '.pub') - role_keys['role1']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(delegation_key_file + '.pub') - - # Import the private keys of the top-level and delegated roles. - role_keys['root']['private'] = \ - repo_tool.import_rsa_privatekey_from_file(root_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['targets']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(targets_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['snapshot']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['timestamp']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['role1']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(delegation_key_file, - EXPECTED_KEYFILE_PASSWORD) - - return role_keys - - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_keydb.py b/tests/test_keydb.py deleted file mode 100755 index b075dd4acb..0000000000 --- a/tests/test_keydb.py +++ /dev/null @@ -1,407 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - <Program Name> - test_keydb.py - - <Author> - Vladimir Diaz - - <Started> - October 2012. - - <Copyright> - See LICENSE-MIT OR LICENSE for licensing information. - - <Purpose> - Unit test for 'keydb.py'. -""" - -import unittest -import logging -import sys - -import tuf -import tuf.formats -import securesystemslib.keys -import securesystemslib.settings -import tuf.keydb -import tuf.log - -from tests import utils - -logger = logging.getLogger(__name__) - - -# Generate the three keys to use in our test cases.
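-# (The RSA keys are generated once at import time and reused across the test -# cases below, since key generation is comparatively expensive.)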
-KEYS = [] -for junk in range(3): - rsa_key = securesystemslib.keys.generate_rsa_key(2048) - rsa_key['keyid_hash_algorithms'] = securesystemslib.settings.HASH_ALGORITHMS - KEYS.append(rsa_key) - - - -class TestKeydb(unittest.TestCase): - def setUp(self): - tuf.keydb.clear_keydb(clear_all=True) - - - - def tearDown(self): - tuf.keydb.clear_keydb(clear_all=True) - - - - def test_create_keydb(self): - # Test condition for normal behaviour. - repository_name = 'example_repository' - - # The keydb dictionary should contain only the 'default' repository entry. - self.assertTrue('default' in tuf.keydb._keydb_dict) - self.assertEqual(1, len(tuf.keydb._keydb_dict)) - - - tuf.keydb.create_keydb(repository_name) - self.assertEqual(2, len(tuf.keydb._keydb_dict)) - - # Verify that a keydb cannot be created for a name that already exists. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.create_keydb, repository_name) - - # Ensure that the key database for 'example_repository' is deleted so that - # the key database is returned to its original, default state. - tuf.keydb.remove_keydb(repository_name) - - - - def test_remove_keydb(self): - # Test condition for expected behaviour. - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.remove_keydb, 'default') - - tuf.keydb.create_keydb(repository_name) - tuf.keydb.remove_keydb(repository_name) - - # tuf.keydb.remove_keydb() logs a warning if a keydb for a non-existent - # repository is specified. - tuf.keydb.remove_keydb(repository_name) - - # Test condition for improperly formatted argument, and unexpected argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_keydb, 123) - self.assertRaises(TypeError, tuf.keydb.remove_keydb, rsakey, 123) - - - - def test_clear_keydb(self): - # Test condition ensuring 'clear_keydb()' clears the keydb database. - # Test the length of the keydb before and after adding a key. - self.assertEqual(0, len(tuf.keydb._keydb_dict['default'])) - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - tuf.keydb._keydb_dict['default'][keyid] = rsakey - self.assertEqual(1, len(tuf.keydb._keydb_dict['default'])) - tuf.keydb.clear_keydb() - self.assertEqual(0, len(tuf.keydb._keydb_dict['default'])) - - # Test condition for unexpected argument. - self.assertRaises(TypeError, tuf.keydb.clear_keydb, 'default', False, 'unexpected_argument') - - # Test condition for improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.clear_keydb, 0) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.clear_keydb, 'default', 0) - - # Test condition for non-existent repository name. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.clear_keydb, 'non-existent') - - # Test condition for keys added to a non-default key database. Unlike the - # test conditions above, this test makes use of the public functions - # add_key(), create_keydb(), and get_key() to more easily verify - # clear_keydb()'s behaviour. 
- rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - repository_name = 'example_repository' - tuf.keydb.create_keydb(repository_name) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid, repository_name) - tuf.keydb.add_key(rsakey, keyid, repository_name) - self.assertEqual(rsakey, tuf.keydb.get_key(keyid, repository_name)) - - tuf.keydb.clear_keydb(repository_name) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid, repository_name) - - # Remove 'repository_name' from the key database to revert it back to its - # original, default state (i.e., only the 'default' repository exists). - tuf.keydb.remove_keydb(repository_name) - - - - def test_get_key(self): - # Test conditions using valid 'keyid' arguments. - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - tuf.keydb._keydb_dict['default'][keyid] = rsakey - rsakey2 = KEYS[1] - keyid2 = KEYS[1]['keyid'] - tuf.keydb._keydb_dict['default'][keyid2] = rsakey2 - - self.assertEqual(rsakey, tuf.keydb.get_key(keyid)) - self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2)) - self.assertNotEqual(rsakey2, tuf.keydb.get_key(keyid)) - self.assertNotEqual(rsakey, tuf.keydb.get_key(keyid2)) - - # Test conditions using invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, None) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, ['123']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, {'keyid': '123'}) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, '') - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, keyid, 123) - - # Test condition using a 'keyid' that has not been added yet. - keyid3 = KEYS[2]['keyid'] - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid3) - - # Test condition for a key added to a non-default repository. - repository_name = 'example_repository' - rsakey3 = KEYS[2] - tuf.keydb.create_keydb(repository_name) - tuf.keydb.add_key(rsakey3, keyid3, repository_name) - - # Test condition for a key added to a non-existent repository. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.get_key, - keyid, 'non-existent') - - # Verify that 'rsakey3' is added to the expected repository name. - # If not supplied, the 'default' repository name is searched. - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid3) - self.assertEqual(rsakey3, tuf.keydb.get_key(keyid3, repository_name)) - - # Remove the 'example_repository' so that other test functions have access - # to a default state of the keydb. - tuf.keydb.remove_keydb(repository_name) - - - - def test_add_key(self): - # Test conditions using valid 'keyid' arguments. - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - rsakey2 = KEYS[1] - keyid2 = KEYS[1]['keyid'] - rsakey3 = KEYS[2] - keyid3 = KEYS[2]['keyid'] - self.assertEqual(None, tuf.keydb.add_key(rsakey, keyid)) - self.assertEqual(None, tuf.keydb.add_key(rsakey2, keyid2)) - self.assertEqual(None, tuf.keydb.add_key(rsakey3)) - - self.assertEqual(rsakey, tuf.keydb.get_key(keyid)) - self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2)) - self.assertEqual(rsakey3, tuf.keydb.get_key(keyid3)) - - # Test conditions using arguments with invalid formats. 
- tuf.keydb.clear_keydb() - rsakey3['keytype'] = 'bad_keytype' - - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, None, keyid) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, '', keyid) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, ['123'], keyid) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, {'a': 'b'}, keyid) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey, {'keyid': ''}) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey, False) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey, ['keyid']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey3, keyid3) - rsakey3['keytype'] = 'rsa' - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey3, keyid3, 123) - - # Test conditions where keyid does not match the rsakey. - self.assertRaises(securesystemslib.exceptions.Error, tuf.keydb.add_key, rsakey, keyid2) - self.assertRaises(securesystemslib.exceptions.Error, tuf.keydb.add_key, rsakey2, keyid) - - # Test conditions using keyids that have already been added. - tuf.keydb.add_key(rsakey, keyid) - tuf.keydb.add_key(rsakey2, keyid2) - self.assertRaises(tuf.exceptions.KeyAlreadyExistsError, tuf.keydb.add_key, rsakey) - self.assertRaises(tuf.exceptions.KeyAlreadyExistsError, tuf.keydb.add_key, rsakey2) - - # Test condition for key added to the keydb of a non-default repository. - repository_name = 'example_repository' - tuf.keydb.create_keydb(repository_name) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid3, repository_name) - tuf.keydb.add_key(rsakey3, keyid3, repository_name) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid3) - self.assertEqual(rsakey3, tuf.keydb.get_key(keyid3, repository_name)) - - # Test condition for key added to the keydb of a non-existent repository. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.add_key, - rsakey3, keyid3, 'non-existent') - - # Reset the keydb to its original, default state. Other test functions - # expect only the 'default' repository to exist. - tuf.keydb.remove_keydb(repository_name) - - - - def test_remove_key(self): - # Test conditions using valid keyids. - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - rsakey2 = KEYS[1] - keyid2 = KEYS[1]['keyid'] - rsakey3 = KEYS[2] - keyid3 = KEYS[2]['keyid'] - tuf.keydb.add_key(rsakey, keyid) - tuf.keydb.add_key(rsakey2, keyid2) - tuf.keydb.add_key(rsakey3, keyid3) - - self.assertEqual(None, tuf.keydb.remove_key(keyid)) - self.assertEqual(None, tuf.keydb.remove_key(keyid2)) - - # Ensure the keys were actually removed. - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid2) - - # Test for 'keyid' not in keydb. - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.remove_key, keyid) - - # Test condition for unknown key argument. - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.remove_key, '1') - - # Test condition for removal of keys from a non-default repository. 
- repository_name = 'example_repository' - tuf.keydb.create_keydb(repository_name) - tuf.keydb.add_key(rsakey, keyid, repository_name) - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.remove_key, keyid, 'non-existent') - tuf.keydb.remove_key(keyid, repository_name) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.remove_key, keyid, repository_name) - - # Reset the keydb so that subsequent tests have access to the original, - # default keydb. - tuf.keydb.remove_keydb(repository_name) - - # Test conditions for arguments with invalid formats. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, None) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, '') - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, ['123']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, keyid, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, {'bad': '123'}) - self.assertRaises(securesystemslib.exceptions.Error, tuf.keydb.remove_key, rsakey3) - - - - def test_create_keydb_from_root_metadata(self): - # Test condition using a valid 'root_metadata' argument. - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - rsakey2 = KEYS[1] - keyid2 = KEYS[1]['keyid'] - - keydict = {keyid: rsakey, keyid2: rsakey2} - - roledict = {'Root': {'keyids': [keyid], 'threshold': 1}, - 'Targets': {'keyids': [keyid2, keyid], 'threshold': 1}} - version = 8 - consistent_snapshot = False - expires = '1985-10-21T01:21:00Z' - - root_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version='1.0.0', - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - self.assertEqual(None, tuf.keydb.create_keydb_from_root_metadata(root_metadata)) - tuf.keydb.create_keydb_from_root_metadata(root_metadata) - - # Ensure 'keyid' and 'keyid2' were added to the keydb database. - self.assertEqual(rsakey, tuf.keydb.get_key(keyid)) - self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2)) - - # Verify that the keydb is populated for a non-default repository. - repository_name = 'example_repository' - tuf.keydb.create_keydb_from_root_metadata(root_metadata, repository_name) - - # Test conditions for arguments with invalid formats. - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, None) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, '') - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, ['123']) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, {'bad': '123'}) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, root_metadata, 123) - - # Verify that a keydb is created for a previously non-existent repository - # name. - tuf.keydb.create_keydb_from_root_metadata(root_metadata, 'non-existent') - - # Remove the 'non-existent' and 'example_repository' key databases so that - # subsequent test functions have access to a default keydb.
- tuf.keydb.remove_keydb(repository_name) - tuf.keydb.remove_keydb('non-existent') - - - # Test conditions for correctly formatted 'root_metadata' arguments but - # containing incorrect keyids or key types. In these conditions, the keys - # should not be added to the keydb database and a warning should be logged. - tuf.keydb.clear_keydb() - - # 'keyid' does not match 'rsakey2'. - # In this case, the key will be added to the keydb. - keydict[keyid] = rsakey2 - - # Key with invalid keytype. - rsakey3 = KEYS[2] - keyid3 = KEYS[2]['keyid'] - rsakey3['keytype'] = 'bad_keytype' - keydict[keyid3] = rsakey3 - - version = 8 - expires = '1985-10-21T01:21:00Z' - - root_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version='1.0.0', - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - self.assertEqual(None, tuf.keydb.create_keydb_from_root_metadata(root_metadata)) - - # Ensure only 'keyid2' and 'keyid' were added to the keydb database. - # 'keyid3' should not be stored. - self.maxDiff = None - self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2)) - - test_key = rsakey2 - test_key['keyid'] = keyid - self.assertEqual(test_key, tuf.keydb.get_key(keyid)) - - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid3) - - # Reset values. - rsakey3['keytype'] = 'rsa' - rsakey2['keyid'] = keyid2 - - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_log.py b/tests/test_log.py deleted file mode 100755 index 82637f50f5..0000000000 --- a/tests/test_log.py +++ /dev/null @@ -1,210 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - <Program Name> - test_log.py - - <Author> - Vladimir Diaz - - <Started> - May 1, 2014. - - <Copyright> - See LICENSE-MIT OR LICENSE for licensing information. - - <Purpose> - Unit test for 'log.py'. -""" - -import logging -import unittest -import os -import shutil -import sys -import importlib - -import tuf -import tuf.log -import tuf.settings - -import securesystemslib -import securesystemslib.util - -from tests import utils - - -# We explicitly create a logger which is a child of the tuf hierarchy, -# instead of using the standard getLogger(__name__) pattern, because the -# tests are not part of the tuf hierarchy and we are testing functionality -# of the tuf package explicitly enabled on the tuf hierarchy. -logger = logging.getLogger('tuf.test_log') - -log_levels = [logging.CRITICAL, logging.ERROR, logging.WARNING, - logging.INFO, logging.DEBUG] - - -class TestLog(unittest.TestCase): - - def setUp(self): - # Store the current log level so it can be restored after the test. - self._initial_level = logging.getLogger('tuf').level - - def tearDown(self): - tuf.log.remove_console_handler() - tuf.log.disable_file_logging() - logging.getLogger('tuf').level = self._initial_level - - - - - def test_set_log_level(self): - # Test normal case. - global log_levels - global logger - - tuf.log.set_log_level() - self.assertTrue(logger.isEnabledFor(logging.DEBUG)) - - for level in log_levels: - tuf.log.set_log_level(level) - self.assertTrue(logger.isEnabledFor(level)) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_log_level, '123') - - # Test for invalid argument.
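- # (51 is not one of the standard logging levels, e.g. logging.DEBUG (10) - # through logging.CRITICAL (50), so it fails format validation.)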
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_log_level, 51) - - - - def test_set_filehandler_log_level(self): - # Normal case. Default log level. - # A file handler is not set by default. Add one now before attempting to - # set the log level. - self.assertRaises(tuf.exceptions.Error, tuf.log.set_filehandler_log_level) - tuf.log.enable_file_logging() - tuf.log.set_filehandler_log_level() - - # Expected log levels. - for level in log_levels: - tuf.log.set_log_level(level) - - # Test that the log level of the file handler cannot be set because - # file logging is disabled (via tuf.settings.ENABLE_FILE_LOGGING). - tuf.settings.ENABLE_FILE_LOGGING = False - importlib.reload(tuf.log) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_filehandler_log_level, '123') - - # Test for invalid argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_filehandler_log_level, 51) - - - def test_set_console_log_level(self): - # Test setting a console log level without first adding one. - self.assertRaises(securesystemslib.exceptions.Error, tuf.log.set_console_log_level) - - # Normal case. Default log level. Setting the console log level first - # requires adding a console logger. - tuf.log.add_console_handler() - tuf.log.set_console_log_level() - - # Expected log levels. - for level in log_levels: - tuf.log.set_console_log_level(level) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_console_log_level, '123') - - # Test for invalid argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_console_log_level, 51) - - - - - - def test_add_console_handler(self): - # Normal case. Default log level. - tuf.log.add_console_handler() - - # Adding a console handler when one has already been added. - tuf.log.add_console_handler() - - # Expected log levels. - for level in log_levels: - tuf.log.set_console_log_level(level) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.add_console_handler, '123') - - # Test for invalid argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.add_console_handler, 51) - - # Test that an exception is printed to the console. Note: A stack trace - # is not included in the exception output because 'log.py' applies a filter - # to minimize the amount of output to the console. - try: - raise TypeError('Test exception output in the console.') - - except TypeError as e: - logger.exception(e) - - - def test_remove_console_handler(self): - # Normal case. - tuf.log.remove_console_handler() - - # Removing a console handler that has not been added. Logs a warning. - tuf.log.remove_console_handler() - - - def test_enable_file_logging(self): - # Normal case. - if os.path.exists(tuf.settings.LOG_FILENAME): - shutil.move( - tuf.settings.LOG_FILENAME, tuf.settings.LOG_FILENAME + '.backup') - - tuf.log.enable_file_logging() - self.assertTrue(os.path.exists(tuf.settings.LOG_FILENAME)) - if os.path.exists(tuf.settings.LOG_FILENAME + '.backup'): - shutil.move( - tuf.settings.LOG_FILENAME + '.backup', tuf.settings.LOG_FILENAME) - - # The file logger must first be unset before attempting to re-add it. 
- self.assertRaises(tuf.exceptions.Error, tuf.log.enable_file_logging) - - tuf.log.disable_file_logging() - tuf.log.enable_file_logging('my_log_file.log') - logger.debug('testing file logging') - self.assertTrue(os.path.exists('my_log_file.log')) - - # Test for an improperly formatted argument. - tuf.log.disable_file_logging() - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.log.enable_file_logging, 1) - - - def test_disable_file_logging(self): - # Normal case. - tuf.log.enable_file_logging('my.log') - logger.debug('debug message') - junk, hashes = securesystemslib.util.get_file_details('my.log') - tuf.log.disable_file_logging() - logger.debug('new debug message') - junk, hashes2 = securesystemslib.util.get_file_details('my.log') - self.assertEqual(hashes, hashes2) - - # An exception should not be raised if an attempt is made to disable - # the file logger if it has already been disabled. - tuf.log.disable_file_logging() - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_metadata_eq_.py b/tests/test_metadata_eq_.py new file mode 100644 index 0000000000..4f21e3976e --- /dev/null +++ b/tests/test_metadata_eq_.py @@ -0,0 +1,322 @@ +#!/usr/bin/env python + +# Copyright New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +"""Test __eq__ implementations of classes inside tuf/api/metadata.py.""" + + +import copy +import os +import sys +import unittest +from typing import Any, ClassVar, Dict + +from securesystemslib.signer import Signature + +from tests import utils +from tuf.api.metadata import ( + TOP_LEVEL_ROLE_NAMES, + DelegatedRole, + Delegations, + Key, + Metadata, + MetaFile, + Role, + Root, + Snapshot, + TargetFile, + Targets, + Timestamp, +) + + +class TestMetadataComparisions(unittest.TestCase): + """Test __eq__ for all classes inside tuf/api/metadata.py.""" + + metadata: ClassVar[Dict[str, bytes]] + + @classmethod + def setUpClass(cls) -> None: + cls.repo_dir = os.path.join( + utils.TESTS_DIR, "repository_data", "repository", "metadata" + ) + cls.metadata = {} + for md in TOP_LEVEL_ROLE_NAMES: + with open(os.path.join(cls.repo_dir, f"{md}.json"), "rb") as f: + cls.metadata[md] = f.read() + + def copy_and_simple_assert(self, obj: Any) -> Any: + # Assert that obj is not equal to an object from another type + self.assertNotEqual(obj, "") + result_obj = copy.deepcopy(obj) + # Assert that __eq__ works for equal objects. + self.assertEqual(obj, result_obj) + return result_obj + + def test_metadata_eq_(self) -> None: + md = Metadata.from_bytes(self.metadata["snapshot"]) + md_2: Metadata = self.copy_and_simple_assert(md) + + for attr, value in [("signed", None), ("signatures", None)]: + setattr(md_2, attr, value) + self.assertNotEqual(md, md_2, f"Failed case: {attr}") + # Restore the old value of the attribute. + setattr(md_2, attr, getattr(md, attr)) + + def test_md_eq_signatures_reversed_order(self) -> None: + # Test comparing objects with same signatures but different order. + + # Remove all signatures and create new ones. + md = Metadata.from_bytes(self.metadata["snapshot"]) + md.signatures = {"a": Signature("a", "a"), "b": Signature("b", "b")} + md_2 = copy.deepcopy(md) + # Reverse signatures order in md_2. + # In python3.7 we need to cast to a list and then reverse. + md_2.signatures = dict(reversed(list(md_2.signatures.items()))) + # Assert that both objects are not the same because of signatures order. 
+ self.assertNotEqual(md, md_2) + + # But if we fix the signatures order, they will be equal. + md_2.signatures = {"a": Signature("a", "a"), "b": Signature("b", "b")} + self.assertEqual(md, md_2) + + def test_md_eq_special_signatures_tests(self) -> None: + # Test that metadata objects with different signatures are not equal. + md = Metadata.from_bytes(self.metadata["snapshot"]) + md_2 = copy.deepcopy(md) + md_2.signatures = {} + self.assertNotEqual(md, md_2) + + # Test that metadata objects with empty signatures are equal. + md.signatures = {} + self.assertEqual(md, md_2) + + # Metadata objects with different signature types are not equal. + md_2.signatures = "" # type: ignore + self.assertNotEqual(md, md_2) + + def test_signed_eq_(self) -> None: + md = Metadata.from_bytes(self.metadata["snapshot"]) + md_2: Metadata = self.copy_and_simple_assert(md) + + # We don't need to make "signed" = None as that was done when testing + # metadata attribute modifications. + for attr, value in [("version", -1), ("spec_version", "0.0.0")]: + setattr(md_2.signed, attr, value) + self.assertNotEqual(md.signed, md_2.signed, f"Failed case: {attr}") + # Restore the old value of the attribute. + setattr(md_2.signed, attr, getattr(md.signed, attr)) + + def test_key_eq_(self) -> None: + key_dict = { + "keytype": "rsa", + "scheme": "rsassa-pss-sha256", + "keyval": {"public": "foo"}, + } + key = Key.from_dict("12sa12", key_dict) + key_2: Key = self.copy_and_simple_assert(key) + for attr, value in [ + ("keyid", "a"), + ("keytype", "foo"), + ("scheme", "b"), + ("keytype", "b"), + ]: + setattr(key_2, attr, value) + self.assertNotEqual(key, key_2, f"Failed case: {attr}") + # Restore the old value of the attribute. + setattr(key_2, attr, getattr(key, attr)) + + def test_role_eq_(self) -> None: + role_dict = { + "keyids": ["keyid1", "keyid2"], + "threshold": 3, + } + role = Role.from_dict(role_dict) + role_2: Role = self.copy_and_simple_assert(role) + + for attr, value in [("keyids", []), ("threshold", 10)]: + setattr(role_2, attr, value) + self.assertNotEqual(role, role_2, f"Failed case: {attr}") + # Restore the old value of the attribute. + setattr(role_2, attr, getattr(role, attr)) + + def test_root_eq_(self) -> None: + md = Metadata.from_bytes(self.metadata["root"]) + signed_copy: Root = self.copy_and_simple_assert(md.signed) + + # Common attributes between Signed and Root don't need testing. + # Ignore mypy request for type annotations on attr and value + for attr, value in [ # type: ignore + ("consistent_snapshot", None), + ("keys", {}), + ("roles", {}), + ]: + + setattr(signed_copy, attr, value) + self.assertNotEqual(md.signed, signed_copy, f"Failed case: {attr}") + # Restore the old value of the attribute. + setattr(signed_copy, attr, getattr(md.signed, attr)) + + def test_metafile_eq_(self) -> None: + metafile_dict = { + "version": 1, + "length": 12, + "hashes": {"sha256": "abc"}, + } + metafile = MetaFile.from_dict(metafile_dict) + metafile_2: MetaFile = self.copy_and_simple_assert(metafile) + + # Ignore mypy request for type annotations on attr and value + for attr, value in [ # type: ignore + ("version", None), + ("length", None), + ("hashes", {}), + ]: + setattr(metafile_2, attr, value) + self.assertNotEqual(metafile, metafile_2, f"Failed case: {attr}") + # Restore the old value of the attribute.
+ setattr(metafile_2, attr, getattr(metafile, attr)) + + def test_timestamp_eq_(self) -> None: + md = Metadata.from_bytes(self.metadata["timestamp"]) + signed_copy: Timestamp = self.copy_and_simple_assert(md.signed) + + # Common attributes between Signed and Timestamp don't need testing. + setattr(signed_copy, "snapshot_meta", None) + self.assertNotEqual(md.signed, signed_copy) + + def test_snapshot_eq_(self) -> None: + md = Metadata.from_bytes(self.metadata["snapshot"]) + signed_copy: Snapshot = self.copy_and_simple_assert(md.signed) + + # Common attributes between Signed and Snapshot don't need testing. + setattr(signed_copy, "meta", None) + self.assertNotEqual(md.signed, signed_copy) + + def test_delegated_role_eq_(self) -> None: + delegated_role_dict = { + "keyids": ["keyid"], + "name": "a", + "terminating": False, + "threshold": 1, + "paths": ["fn1", "fn2"], + } + delegated_role = DelegatedRole.from_dict(delegated_role_dict) + delegated_role_2: DelegatedRole = self.copy_and_simple_assert( + delegated_role + ) + + # Common attributes between DelegatedRole and Role don't need testing. + for attr, value in [ + ("name", ""), + ("terminating", None), + ("paths", [""]), + ("path_hash_prefixes", [""]), + ]: + setattr(delegated_role_2, attr, value) + msg = f"Failed case: {attr}" + self.assertNotEqual(delegated_role, delegated_role_2, msg) + # Restore the old value of the attribute. + setattr(delegated_role_2, attr, getattr(delegated_role, attr)) + + def test_delegations_eq_(self) -> None: + delegations_dict = { + "keys": { + "keyid2": { + "keytype": "ed25519", + "scheme": "ed25519", + "keyval": {"public": "bar"}, + } + }, + "roles": [ + { + "keyids": ["keyid2"], + "name": "b", + "terminating": True, + "paths": ["fn2"], + "threshold": 4, + } + ], + } + delegations = Delegations.from_dict(delegations_dict) + delegations_2: Delegations = self.copy_and_simple_assert(delegations) + # Ignore mypy request for type annotations on attr and value + for attr, value in [("keys", {}), ("roles", {})]: # type: ignore + setattr(delegations_2, attr, value) + msg = f"Failed case: {attr}" + self.assertNotEqual(delegations, delegations_2, msg) + # Restore the old value of the attribute. + setattr(delegations_2, attr, getattr(delegations, attr)) + + def test_targetfile_eq_(self) -> None: + targetfile_dict = { + "length": 12, + "hashes": {"sha256": "abc"}, + } + targetfile = TargetFile.from_dict(targetfile_dict, "file1.txt") + targetfile_2: TargetFile = self.copy_and_simple_assert(targetfile) + + # Common attributes between TargetFile and MetaFile don't need testing. + setattr(targetfile_2, "path", "") + self.assertNotEqual(targetfile, targetfile_2) + + def test_delegations_eq_roles_reversed_order(self) -> None: + # Test comparing objects with same delegated roles but different order. + role_one_dict = { + "keyids": ["keyid1"], + "name": "a", + "terminating": False, + "paths": ["fn1"], + "threshold": 1, + } + role_two_dict = { + "keyids": ["keyid2"], + "name": "b", + "terminating": True, + "paths": ["fn2"], + "threshold": 4, + } + + delegations_dict = { + "keys": { + "keyid2": { + "keytype": "ed25519", + "scheme": "ed25519", + "keyval": {"public": "bar"}, + } + }, + "roles": [role_one_dict, role_two_dict], + } + delegations = Delegations.from_dict(copy.deepcopy(delegations_dict)) + + # Create a second delegations obj with reversed roles order. + delegations_2 = copy.deepcopy(delegations) + # In python3.7 we need to cast to a list and then reverse.
+ delegations_2.roles = dict(reversed(list(delegations.roles.items()))) + + # The two objects are not equal because of the delegated roles order. + self.assertNotEqual(delegations, delegations_2) + + # But if we fix the delegated roles order, they will be equal. + delegations_2.roles = delegations.roles + + self.assertEqual(delegations, delegations_2) + + def test_targets_eq_(self) -> None: + md = Metadata.from_bytes(self.metadata["targets"]) + signed_copy: Targets = self.copy_and_simple_assert(md.signed) + + # Common attributes between Targets and Signed don't need testing. + # Ignore mypy request for type annotations on attr and value + for attr, value in [("targets", {}), ("delegations", [])]: # type: ignore + setattr(signed_copy, attr, value) + self.assertNotEqual(md.signed, signed_copy, f"Failed case: {attr}") + # Restore the old value of the attribute. + setattr(signed_copy, attr, getattr(md.signed, attr)) + + +# Run unit test. +if __name__ == "__main__": + utils.configure_test_logging(sys.argv) + unittest.main() diff --git a/tests/test_metadata_generation.py b/tests/test_metadata_generation.py new file mode 100644 index 0000000000..b514748b8f --- /dev/null +++ b/tests/test_metadata_generation.py @@ -0,0 +1,26 @@ +"""Unit tests for 'tests/generated_data/generate_md.py'.""" + +# Copyright New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + + +import sys +import unittest + +from tests import utils +from tests.generated_data.generate_md import generate_all_files + + +class TestMetadataGeneration(unittest.TestCase): + """Test metadata file generation.""" + + @staticmethod + def test_compare_static_md_to_generated() -> None: + # md_generator = MetadataGenerator("generated_data/ed25519_metadata") + generate_all_files(dump=False, verify=True) + + +# Run unit test.
+if __name__ == "__main__": + utils.configure_test_logging(sys.argv) + unittest.main() diff --git a/tests/test_metadata_serialization.py b/tests/test_metadata_serialization.py index 1f5867d2ce..a856ab5454 100644 --- a/tests/test_metadata_serialization.py +++ b/tests/test_metadata_serialization.py @@ -6,91 +6,139 @@ """ +import copy import json -import sys import logging +import sys import unittest -import copy -from typing import Dict, Callable, Optional, Mapping, Any -from datetime import datetime +from securesystemslib.signer import Signature from tests import utils - from tuf.api.metadata import ( - Signed, - Root, - Snapshot, - Timestamp, - Targets, + DelegatedRole, + Delegations, Key, - Role, + Metadata, MetaFile, + Role, + Root, + Snapshot, TargetFile, - Delegations, - DelegatedRole, + Targets, + Timestamp, ) +from tuf.api.serialization import DeserializationError logger = logging.getLogger(__name__) -# DataSet is only here so type hints can be used: -# It is a dict of name to test dict -DataSet = Dict[str, str] -# Test runner decorator: Runs the test as a set of N SubTests, -# (where N is number of items in dataset), feeding the actual test -# function one test case at a time -def run_sub_tests_with_dataset(dataset: DataSet): - def real_decorator(function: Callable[["TestSerialization", str], None]): - def wrapper(test_cls: "TestSerialization"): - for case, data in dataset.items(): - with test_cls.subTest(case=case): - function(test_cls, data) - return wrapper - return real_decorator +# pylint: disable=too-many-public-methods +class TestSerialization(unittest.TestCase): + """Test serialization for all classes in 'tuf/api/metadata.py'.""" + + invalid_metadata: utils.DataSet = { + "no signatures field": b'{"signed": \ + { "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ + "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}} \ + }', + "non unique duplicating signatures": b'{"signed": \ + { "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ + "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}, \ + "signatures": [{"keyid": "id", "sig": "b"}, {"keyid": "id", "sig": "b"}] \ + }', + } + @utils.run_sub_tests_with_dataset(invalid_metadata) + def test_invalid_metadata_serialization(self, test_data: bytes) -> None: + # We expect a DeserializationError reraised from ValueError or KeyError. 
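+ # (Metadata.from_bytes() uses the default JSONDeserializer, which wraps + # the underlying KeyError/ValueError in DeserializationError.)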
+ with self.assertRaises(DeserializationError): + Metadata.from_bytes(test_data) + + valid_metadata: utils.DataSet = { + "multiple signatures": b'{ \ + "signed": \ + { "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ + "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}, \ + "signatures": [{ "keyid": "id", "sig": "b"}, {"keyid": "id2", "sig": "d" }] \ + }', + "no signatures": b'{ \ + "signed": \ + { "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ + "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}, \ + "signatures": [] \ + }', + "unrecognized fields": b'{ \ + "signed": \ + { "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ + "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}, \ + "signatures": [{"keyid": "id", "sig": "b"}], \ + "foo": "bar" \ + }', + } -class TestSerialization(unittest.TestCase): + @utils.run_sub_tests_with_dataset(valid_metadata) + def test_valid_metadata_serialization(self, test_case_data: bytes) -> None: + md = Metadata.from_bytes(test_case_data) + + # Convert to a JSON and sort the keys the way we do in JSONSerializer. + separators = (",", ":") + test_json = json.loads(test_case_data) + test_bytes = json.dumps( + test_json, separators=separators, sort_keys=True + ).encode("utf-8") + + self.assertEqual(test_bytes, md.to_bytes()) + + invalid_signatures: utils.DataSet = { + "missing keyid attribute in a signature": '{ "sig": "abc" }', + "missing sig attribute in a signature": '{ "keyid": "id" }', + } + + @utils.run_sub_tests_with_dataset(invalid_signatures) + def test_invalid_signature_serialization(self, test_data: str) -> None: + case_dict = json.loads(test_data) + with self.assertRaises(KeyError): + Signature.from_dict(case_dict) + + valid_signatures: utils.DataSet = { + "all": '{ "keyid": "id", "sig": "b"}', + "unrecognized fields": '{ "keyid": "id", "sig": "b", "foo": "bar"}', + } + + @utils.run_sub_tests_with_dataset(valid_signatures) + def test_signature_serialization(self, test_case_data: str) -> None: + case_dict = json.loads(test_case_data) + signature = Signature.from_dict(copy.copy(case_dict)) + self.assertEqual(case_dict, signature.to_dict()) # Snapshot instances with meta = {} are valid, but for a full valid # repository it's required that meta has at least one element inside it. 
- invalid_signed: DataSet = { + invalid_signed: utils.DataSet = { "no _type": '{"spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "no spec_version": '{"_type": "signed", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "no version": '{"_type": "signed", "spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "no expires": '{"_type": "signed", "spec_version": "1.0.0", "version": 1, "meta": {}}', - "empty str _type": - '{"_type": "", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "empty str spec_version": - '{"_type": "signed", "spec_version": "", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "_type wrong type": - '{"_type": "foo", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "version wrong type": - '{"_type": "signed", "spec_version": "1.0.0", "version": "a", "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "invalid spec_version str": - '{"_type": "signed", "spec_version": "abc", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "two digit spec_version": - '{"_type": "signed", "spec_version": "1.2.a", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "no digit spec_version": - '{"_type": "signed", "spec_version": "a.b.c", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "different major spec_version": - '{"_type": "signed", "spec_version": "0.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "version 0": - '{"_type": "signed", "spec_version": "1.0.0", "version": 0, "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "version below 0": - '{"_type": "signed", "spec_version": "1.0.0", "version": -1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', - "wrong datetime string": - '{"_type": "signed", "spec_version": "1.0.0", "version": 1, "expires": "abc", "meta": {}}', + "no spec_version": '{"_type": "snapshot", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "no version": '{"_type": "snapshot", "spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "no expires": '{"_type": "snapshot", "spec_version": "1.0.0", "version": 1, "meta": {}}', + "empty str _type": '{"_type": "", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "empty str spec_version": '{"_type": "snapshot", "spec_version": "", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "_type wrong type": '{"_type": "foo", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "version wrong type": '{"_type": "snapshot", "spec_version": "1.0.0", "version": "a", "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "invalid spec_version str": '{"_type": "snapshot", "spec_version": "abc", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "non-number spec_version": '{"_type": "snapshot", "spec_version": "1.2.a", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "one part spec_version": '{"_type": "snapshot", "spec_version": "1", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "different major spec_version": '{"_type": "snapshot", "spec_version": "0.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "version 0": '{"_type": "snapshot", "spec_version": "1.0.0", "version": 0, "expires": "2030-01-01T00:00:00Z", "meta": {}}', + "version below 0": '{"_type": "snapshot", "spec_version": "1.0.0", "version": -1, "expires": 
"2030-01-01T00:00:00Z", "meta": {}}', + "wrong datetime string": '{"_type": "snapshot", "spec_version": "1.0.0", "version": 1, "expires": "abc", "meta": {}}', } - @run_sub_tests_with_dataset(invalid_signed) - def test_invalid_signed_serialization(self, test_case_data: Dict[str, str]): + @utils.run_sub_tests_with_dataset(invalid_signed) + def test_invalid_signed_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((KeyError, ValueError, TypeError)): - Snapshot.from_dict(copy.deepcopy(case_dict)) + Snapshot.from_dict(case_dict) - - valid_keys: DataSet = { + valid_keys: utils.DataSet = { "all": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", \ "keyval": {"public": "foo"}}', "unrecognized field": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", \ @@ -99,14 +147,13 @@ def test_invalid_signed_serialization(self, test_case_data: Dict[str, str]): "keyval": {"public": "foo", "foo": "bar"}}', } - @run_sub_tests_with_dataset(valid_keys) - def test_valid_key_serialization(self, test_case_data: str): + @utils.run_sub_tests_with_dataset(valid_keys) + def test_valid_key_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) key = Key.from_dict("id", copy.copy(case_dict)) self.assertDictEqual(case_dict, key.to_dict()) - - invalid_keys: DataSet = { + invalid_keys: utils.DataSet = { "no keyid": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}', "no keytype": '{"keyid": "id", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}', "no scheme": '{"keyid": "id", "keytype": "rsa", "keyval": {"public": "foo"}}', @@ -117,14 +164,14 @@ def test_valid_key_serialization(self, test_case_data: str): "keyval wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": 1}', } - @run_sub_tests_with_dataset(invalid_keys) - def test_invalid_key_serialization(self, test_case_data: Dict[str, str]): + @utils.run_sub_tests_with_dataset(invalid_keys) + def test_invalid_key_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((TypeError, KeyError)): keyid = case_dict.pop("keyid") - Key.from_dict(keyid, copy.copy(case_dict)) + Key.from_dict(keyid, case_dict) - invalid_roles: DataSet = { + invalid_roles: utils.DataSet = { "no threshold": '{"keyids": ["keyid"]}', "no keyids": '{"threshold": 3}', "wrong threshold type": '{"keyids": ["keyid"], "threshold": "a"}', @@ -132,63 +179,122 @@ def test_invalid_key_serialization(self, test_case_data: Dict[str, str]): "duplicate keyids": '{"keyids": ["keyid", "keyid"], "threshold": 3}', } - @run_sub_tests_with_dataset(invalid_roles) - def test_invalid_role_serialization(self, test_case_data: Dict[str, str]): + @utils.run_sub_tests_with_dataset(invalid_roles) + def test_invalid_role_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((KeyError, TypeError, ValueError)): - Role.from_dict(copy.deepcopy(case_dict)) - + Role.from_dict(case_dict) - valid_roles: DataSet = { + valid_roles: utils.DataSet = { "all": '{"keyids": ["keyid"], "threshold": 3}', "many keyids": '{"keyids": ["a", "b", "c", "d", "e"], "threshold": 1}', + "ordered keyids": '{"keyids": ["c", "b", "a"], "threshold": 1}', "empty keyids": '{"keyids": [], "threshold": 1}', "unrecognized field": '{"keyids": ["keyid"], "threshold": 3, "foo": "bar"}', } - @run_sub_tests_with_dataset(valid_roles) - def test_role_serialization(self, test_case_data: str): + 
@utils.run_sub_tests_with_dataset(valid_roles) + def test_role_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) role = Role.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, role.to_dict()) - - valid_roots: DataSet = { + valid_roots: utils.DataSet = { "all": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ "keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "roles": { \ + "root": {"keyids": ["keyid1"], "threshold": 1}, \ + "timestamp": {"keyids": ["keyid2"], "threshold": 1}, \ "targets": {"keyids": ["keyid1"], "threshold": 1}, \ "snapshot": {"keyids": ["keyid2"], "threshold": 1}} \ }', "no consistent_snapshot": '{ "_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", \ "keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"} }}, \ - "roles": { "targets": {"keyids": ["keyid"], "threshold": 3} } \ + "roles": { \ + "root": {"keyids": ["keyid"], "threshold": 1}, \ + "timestamp": {"keyids": ["keyid"], "threshold": 1}, \ + "targets": {"keyids": ["keyid"], "threshold": 1}, \ + "snapshot": {"keyids": ["keyid"], "threshold": 1}} \ }', - "empty keys and roles": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ + "empty keys": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ "keys": {}, \ - "roles": {} \ + "roles": { \ + "root": {"keyids": [], "threshold": 1}, \ + "timestamp": {"keyids": [], "threshold": 1}, \ + "targets": {"keyids": [], "threshold": 1}, \ + "snapshot": {"keyids": [], "threshold": 1}} \ }', "unrecognized field": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ "keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ - "roles": { "targets": {"keyids": ["keyid"], "threshold": 3}}, \ + "roles": { \ + "root": {"keyids": ["keyid"], "threshold": 1}, \ + "timestamp": {"keyids": ["keyid"], "threshold": 1}, \ + "targets": {"keyids": ["keyid"], "threshold": 1}, \ + "snapshot": {"keyids": ["keyid"], "threshold": 1} \ + }, \ "foo": "bar"}', } - @run_sub_tests_with_dataset(valid_roots) - def test_root_serialization(self, test_case_data: str): + @utils.run_sub_tests_with_dataset(valid_roots) + def test_root_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) root = Root.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, root.to_dict()) + invalid_roots: utils.DataSet = { + "invalid role name": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ + "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ + "keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ + "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ + "roles": { \ + "bar": {"keyids": ["keyid1"], "threshold": 1}, \ + "timestamp": {"keyids": ["keyid2"], "threshold": 1}, \ + "targets": {"keyids": ["keyid1"], "threshold": 1}, \ + "snapshot": {"keyids": ["keyid2"], "threshold": 1}} \ + }', + "missing root role": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ + "expires": "2030-01-01T00:00:00Z", 
"consistent_snapshot": false, \ + "keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ + "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ + "roles": { \ + "timestamp": {"keyids": ["keyid2"], "threshold": 1}, \ + "targets": {"keyids": ["keyid1"], "threshold": 1}, \ + "snapshot": {"keyids": ["keyid2"], "threshold": 1}} \ + }', + "one additional role": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ + "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ + "keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ + "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ + "roles": { \ + "root": {"keyids": ["keyid1"], "threshold": 1}, \ + "timestamp": {"keyids": ["keyid2"], "threshold": 1}, \ + "targets": {"keyids": ["keyid1"], "threshold": 1}, \ + "snapshot": {"keyids": ["keyid2"], "threshold": 1}, \ + "foo": {"keyids": ["keyid2"], "threshold": 1}} \ + }', + "invalid expiry with microseconds": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ + "expires": "2030-01-01T12:00:00.123456Z", "consistent_snapshot": false, \ + "keys": {}, "roles": {"root": {}, "timestamp": {}, "targets": {}, "snapshot": {}}}', + } + + @utils.run_sub_tests_with_dataset(invalid_roots) + def test_invalid_root_serialization(self, test_case_data: str) -> None: + case_dict = json.loads(test_case_data) + with self.assertRaises(ValueError): + Root.from_dict(case_dict) - invalid_metafiles: DataSet = { + invalid_metafiles: utils.DataSet = { "wrong length type": '{"version": 1, "length": "a", "hashes": {"sha256" : "abc"}}', + "version 0": '{"version": 0, "length": 1, "hashes": {"sha256" : "abc"}}', "length 0": '{"version": 1, "length": 0, "hashes": {"sha256" : "abc"}}', "length below 0": '{"version": 1, "length": -1, "hashes": {"sha256" : "abc"}}', "empty hashes dict": '{"version": 1, "length": 1, "hashes": {}}', @@ -196,14 +302,13 @@ def test_root_serialization(self, test_case_data: str): "hashes values wrong type": '{"version": 1, "length": 1, "hashes": {"sha256": 1}}', } - @run_sub_tests_with_dataset(invalid_metafiles) - def test_invalid_metafile_serialization(self, test_case_data: Dict[str, str]): + @utils.run_sub_tests_with_dataset(invalid_metafiles) + def test_invalid_metafile_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((TypeError, ValueError, AttributeError)): - MetaFile.from_dict(copy.deepcopy(case_dict)) - + MetaFile.from_dict(case_dict) - valid_metafiles: DataSet = { + valid_metafiles: utils.DataSet = { "all": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1}', "no length": '{"hashes": {"sha256" : "abc"}, "version": 1 }', "no hashes": '{"length": 12, "version": 1}', @@ -211,38 +316,38 @@ def test_invalid_metafile_serialization(self, test_case_data: Dict[str, str]): "many hashes": '{"hashes": {"sha256" : "abc", "sha512": "cde"}, "length": 12, "version": 1}', } - @run_sub_tests_with_dataset(valid_metafiles) - def test_metafile_serialization(self, test_case_data: str): + @utils.run_sub_tests_with_dataset(valid_metafiles) + def test_metafile_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) metafile = MetaFile.from_dict(copy.copy(case_dict)) self.assertDictEqual(case_dict, metafile.to_dict()) - invalid_timestamps: DataSet = { + invalid_timestamps: utils.DataSet = { "no metafile": '{ "_type": 
"timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z"}', } - @run_sub_tests_with_dataset(invalid_timestamps) - def test_invalid_timestamp_serialization(self, test_case_data: Dict[str, str]): + @utils.run_sub_tests_with_dataset(invalid_timestamps) + def test_invalid_timestamp_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((ValueError, KeyError)): - Timestamp.from_dict(copy.deepcopy(case_dict)) + Timestamp.from_dict(case_dict) - - valid_timestamps: DataSet = { + valid_timestamps: utils.DataSet = { "all": '{ "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}', + "legacy spec_version": '{ "_type": "timestamp", "spec_version": "1.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ + "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}', "unrecognized field": '{ "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}, "foo": "bar"}', } - @run_sub_tests_with_dataset(valid_timestamps) - def test_timestamp_serialization(self, test_case_data: str): + @utils.run_sub_tests_with_dataset(valid_timestamps) + def test_timestamp_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) timestamp = Timestamp.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, timestamp.to_dict()) - - valid_snapshots: DataSet = { + valid_snapshots: utils.DataSet = { "all": '{ "_type": "snapshot", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": { \ "file1.txt": {"hashes": {"sha256" : "abc"}, "version": 1}, \ @@ -255,66 +360,124 @@ def test_timestamp_serialization(self, test_case_data: str): "meta": { "file.txt": { "hashes": {"sha256" : "abc"}, "version": 1 }}, "foo": "bar"}', } - @run_sub_tests_with_dataset(valid_snapshots) - def test_snapshot_serialization(self, test_case_data: str): + @utils.run_sub_tests_with_dataset(valid_snapshots) + def test_snapshot_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) snapshot = Snapshot.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, snapshot.to_dict()) - - valid_delegated_roles: DataSet = { + valid_delegated_roles: utils.DataSet = { # DelegatedRole inherits Role and some use cases can be found in the valid_roles. 
- "no hash prefix attribute": - '{"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], \ + "no hash prefix attribute": '{"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], \ "terminating": false, "threshold": 1}', - "no path attribute": - '{"keyids": ["keyid"], "name": "a", "terminating": false, \ + "no path attribute": '{"keyids": ["keyid"], "name": "a", "terminating": false, \ "path_hash_prefixes": ["h1", "h2"], "threshold": 99}', "empty paths": '{"keyids": ["keyid"], "name": "a", "paths": [], \ "terminating": false, "threshold": 1}', "empty path_hash_prefixes": '{"keyids": ["keyid"], "name": "a", "terminating": false, \ "path_hash_prefixes": [], "threshold": 99}', - "unrecognized field": - '{"keyids": ["keyid"], "name": "a", "terminating": true, "paths": ["fn1"], "threshold": 3, "foo": "bar"}', - "many keyids": - '{"keyids": ["keyid1", "keyid2"], "name": "a", "paths": ["fn1", "fn2"], \ + "unrecognized field": '{"keyids": ["keyid"], "name": "a", "terminating": true, "paths": ["fn1"], "threshold": 3, "foo": "bar"}', + "many keyids": '{"keyids": ["keyid1", "keyid2"], "name": "a", "paths": ["fn1", "fn2"], \ + "terminating": false, "threshold": 1}', + "ordered keyids": '{"keyids": ["keyid2", "keyid1"], "name": "a", "paths": ["fn1", "fn2"], \ "terminating": false, "threshold": 1}', } - @run_sub_tests_with_dataset(valid_delegated_roles) - def test_delegated_role_serialization(self, test_case_data: str): + @utils.run_sub_tests_with_dataset(valid_delegated_roles) + def test_delegated_role_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) deserialized_role = DelegatedRole.from_dict(copy.copy(case_dict)) self.assertDictEqual(case_dict, deserialized_role.to_dict()) - - invalid_delegated_roles: DataSet = { + invalid_delegated_roles: utils.DataSet = { # DelegatedRole inherits Role and some use cases can be found in the invalid_roles. 
- "missing hash prefixes and paths": - '{"name": "a", "keyids": ["keyid"], "threshold": 1, "terminating": false}', - "both hash prefixes and paths": - '{"name": "a", "keyids": ["keyid"], "threshold": 1, "terminating": false, \ + "missing hash prefixes and paths": '{"name": "a", "keyids": ["keyid"], "threshold": 1, "terminating": false}', + "both hash prefixes and paths": '{"name": "a", "keyids": ["keyid"], "threshold": 1, "terminating": false, \ "paths": ["fn1", "fn2"], "path_hash_prefixes": ["h1", "h2"]}', + "invalid path type": '{"keyids": ["keyid"], "name": "a", "paths": [1,2,3], \ + "terminating": false, "threshold": 1}', + "invalid path_hash_prefixes type": '{"keyids": ["keyid"], "name": "a", "path_hash_prefixes": [1,2,3], \ + "terminating": false, "threshold": 1}', } - @run_sub_tests_with_dataset(invalid_delegated_roles) - def test_invalid_delegated_role_serialization(self, test_case_data: str): + @utils.run_sub_tests_with_dataset(invalid_delegated_roles) + def test_invalid_delegated_role_serialization( + self, test_case_data: str + ) -> None: case_dict = json.loads(test_case_data) with self.assertRaises(ValueError): - DelegatedRole.from_dict(copy.copy(case_dict)) + DelegatedRole.from_dict(case_dict) + invalid_delegations: utils.DataSet = { + "empty delegations": "{}", + "missing keys": '{ "roles": [ \ + {"keyids": ["keyid"], "name": "a", "terminating": true, "paths": ["fn1"], "threshold": 3}, \ + {"keyids": ["keyid2"], "name": "b", "terminating": true, "paths": ["fn2"], "threshold": 4} ] \ + }', + "missing roles": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ + "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}}', + "bad keys": '{"keys": "foo", \ + "roles": [{"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], "terminating": false, "threshold": 3}]}', + "bad roles": '{"keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": ["foo"]}', + "duplicate role names": '{"keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], "terminating": false, "threshold": 3}, \ + {"keyids": ["keyid2"], "name": "a", "paths": ["fn3"], "terminating": false, "threshold": 2} \ + ] \ + }', + "using empty string role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ + }', + "using root as delegate role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "root", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ + }', + "using snapshot as delegate role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "snapshot", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ + }', + "using targets as delegate role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "targets", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ + }', + "using timestamp as delegate role name": '{"keys": { \ + "keyid1" : {"keytype": 
"rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "timestamp", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ + }', + "using valid and top-level role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ + "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "b", "terminating": true, "paths": ["fn1"], "threshold": 3}, \ + {"keyids": ["keyid2"], "name": "root", "terminating": true, "paths": ["fn2"], "threshold": 4} ] \ + }', + } + + @utils.run_sub_tests_with_dataset(invalid_delegations) + def test_invalid_delegation_serialization( + self, test_case_data: str + ) -> None: + case_dict = json.loads(test_case_data) + with self.assertRaises((ValueError, KeyError, AttributeError)): + Delegations.from_dict(case_dict) - valid_delegations: DataSet = { - "all": - '{"keys": { \ + valid_delegations: utils.DataSet = { + "all": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "roles": [ \ {"keyids": ["keyid"], "name": "a", "terminating": true, "paths": ["fn1"], "threshold": 3}, \ {"keyids": ["keyid2"], "name": "b", "terminating": true, "paths": ["fn2"], "threshold": 4} ] \ }', - "unrecognized field": - '{"keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "unrecognized field": '{"keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ "roles": [ {"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], "terminating": true, "threshold": 3} ], \ "foo": "bar"}', "empty keys and roles": '{"keys": {}, \ @@ -322,28 +485,28 @@ def test_invalid_delegated_role_serialization(self, test_case_data: str): }', } - @run_sub_tests_with_dataset(valid_delegations) - def test_delegation_serialization(self, test_case_data: str): + @utils.run_sub_tests_with_dataset(valid_delegations) + def test_delegation_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) delegation = Delegations.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, delegation.to_dict()) - - invalid_targetfiles: DataSet = { + invalid_targetfiles: utils.DataSet = { "no hashes": '{"length": 1}', "no length": '{"hashes": {"sha256": "abc"}}' # The remaining cases are the same as for invalid_hashes and # invalid_length datasets. 
} - @run_sub_tests_with_dataset(invalid_targetfiles) - def test_invalid_targetfile_serialization(self, test_case_data: Dict[str, str]): + @utils.run_sub_tests_with_dataset(invalid_targetfiles) + def test_invalid_targetfile_serialization( + self, test_case_data: str + ) -> None: case_dict = json.loads(test_case_data) with self.assertRaises(KeyError): - TargetFile.from_dict(copy.deepcopy(case_dict), "file1.txt") - + TargetFile.from_dict(case_dict, "file1.txt") - valid_targetfiles: DataSet = { + valid_targetfiles: utils.DataSet = { "all": '{"length": 12, "hashes": {"sha256" : "abc"}, \ "custom" : {"foo": "bar"} }', "no custom": '{"length": 12, "hashes": {"sha256" : "abc"}}', @@ -351,14 +514,13 @@ def test_invalid_targetfile_serialization(self, test_case_data: Dict[str, str]): "custom" : {"foo": "bar"}, "foo": "bar"}', } - @run_sub_tests_with_dataset(valid_targetfiles) - def test_targetfile_serialization(self, test_case_data: str): + @utils.run_sub_tests_with_dataset(valid_targetfiles) + def test_targetfile_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) target_file = TargetFile.from_dict(copy.copy(case_dict), "file1.txt") self.assertDictEqual(case_dict, target_file.to_dict()) - - valid_targets: DataSet = { + valid_targets: utils.DataSet = { "all attributes": '{"_type": "targets", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "targets": { \ "file.txt": {"length": 12, "hashes": {"sha256" : "abc"} }, \ @@ -384,14 +546,14 @@ def test_targetfile_serialization(self, test_case_data: str): "targets": {}, "foo": "bar"}', } - @run_sub_tests_with_dataset(valid_targets) - def test_targets_serialization(self, test_case_data): + @utils.run_sub_tests_with_dataset(valid_targets) + def test_targets_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) targets = Targets.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, targets.to_dict()) # Run unit test. -if __name__ == '__main__': +if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() diff --git a/tests/test_mirrors.py b/tests/test_mirrors.py deleted file mode 100755 index ed87ff18e6..0000000000 --- a/tests/test_mirrors.py +++ /dev/null @@ -1,138 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_mirrors.py - - - Konstantin Andrianov. - - - March 26, 2012. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'mirrors.py'. -""" - -import unittest -import sys - -import tuf.mirrors as mirrors -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -import securesystemslib -import securesystemslib.util - - -class TestMirrors(unittest_toolbox.Modified_TestCase): - - def setUp(self): - - unittest_toolbox.Modified_TestCase.setUp(self) - - self.mirrors = \ - {'mirror1': {'url_prefix' : 'http://mirror1.com', - 'metadata_path' : 'metadata', - 'targets_path' : 'targets'}, - 'mirror2': {'url_prefix' : 'http://mirror2.com', - 'metadata_path' : 'metadata', - 'targets_path' : 'targets', - 'confined_target_dirs' : ['targets/release/', - 'targets/release/']}, - 'mirror3': {'url_prefix' : 'http://mirror3.com', - 'targets_path' : 'targets', - 'confined_target_dirs' : ['targets/release/v2/']}, - # confined_target_dirs = [] means that none of the targets on - # that mirror is available. 
- 'mirror4': {'url_prefix' : 'http://mirror4.com', - 'metadata_path' : 'metadata', - 'confined_target_dirs' : []}, - # Make sure we are testing when confined_target_dirs is [''] which means - # that all targets are available on that mirror. - 'mirror5': {'url_prefix' : 'http://mirror5.com', - 'targets_path' : 'targets', - 'confined_target_dirs' : ['']} - } - - - - def test_get_list_of_mirrors(self): - # Test: Normal case. - - # 1 match: a mirror without target directory confinement - mirror_list = mirrors.get_list_of_mirrors('target', 'a.txt', self.mirrors) - self.assertEqual(len(mirror_list), 2) - self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/targets/a.txt' in \ - mirror_list) - self.assertTrue(self.mirrors['mirror5']['url_prefix']+'/targets/a.txt' in \ - mirror_list) - - mirror_list = mirrors.get_list_of_mirrors('target', 'a/b', self.mirrors) - self.assertEqual(len(mirror_list), 2) - self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/targets/a/b' in \ - mirror_list) - self.assertTrue(self.mirrors['mirror5']['url_prefix']+'/targets/a/b' in \ - mirror_list) - - # 2 matches: One with non-confined targets and one with matching confinement - mirror_list = mirrors.get_list_of_mirrors('target', 'release/v2/c', self.mirrors) - self.assertEqual(len(mirror_list), 3) - self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/targets/release/v2/c' in \ - mirror_list) - self.assertTrue(self.mirrors['mirror3']['url_prefix']+'/targets/release/v2/c' in \ - mirror_list) - self.assertTrue(self.mirrors['mirror5']['url_prefix']+'/targets/release/v2/c' in \ - mirror_list) - - # 3 matches: Metadata found on 3 mirrors - mirror_list = mirrors.get_list_of_mirrors('meta', 'release.txt', self.mirrors) - self.assertEqual(len(mirror_list), 3) - self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/metadata/release.txt' in \ - mirror_list) - self.assertTrue(self.mirrors['mirror2']['url_prefix']+'/metadata/release.txt' in \ - mirror_list) - self.assertTrue(self.mirrors['mirror4']['url_prefix']+'/metadata/release.txt' in \ - mirror_list) - - # No matches - del self.mirrors['mirror1'] - del self.mirrors['mirror5'] - mirror_list = mirrors.get_list_of_mirrors('target', 'a/b', self.mirrors) - self.assertFalse(mirror_list) - - - # Test: Invalid 'file_type'. - self.assertRaises(securesystemslib.exceptions.Error, mirrors.get_list_of_mirrors, - self.random_string(), 'a', self.mirrors) - - self.assertRaises(securesystemslib.exceptions.Error, mirrors.get_list_of_mirrors, - 12345, 'a', self.mirrors) - - # Test: Improperly formatted 'file_path'. - self.assertRaises(securesystemslib.exceptions.FormatError, mirrors.get_list_of_mirrors, - 'meta', 12345, self.mirrors) - - # Test: Improperly formatted 'mirrors_dict' object. 
- self.assertRaises(securesystemslib.exceptions.FormatError, mirrors.get_list_of_mirrors, - 'meta', 'a', 12345) - - self.assertRaises(securesystemslib.exceptions.FormatError, mirrors.get_list_of_mirrors, - 'meta', 'a', ['a']) - - self.assertRaises(securesystemslib.exceptions.FormatError, mirrors.get_list_of_mirrors, - 'meta', 'a', {'a':'b'}) - - - -# Run the unittests -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_mix_and_match_attack.py b/tests/test_mix_and_match_attack.py deleted file mode 100755 index 2d9d672abe..0000000000 --- a/tests/test_mix_and_match_attack.py +++ /dev/null @@ -1,236 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_mix_and_match_attack.py - - - Konstantin Andrianov. - - - March 27, 2012. - - April 6, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. Modify the previous scenario - simulated for the mix-and-match attack. -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate a mix-and-match attack. In a mix-and-match attack, an attacker is - able to trick clients into using a combination of metadata that never existed - together on the repository at the same time. - - Note: There is no difference between 'updates' and 'target' files. -""" - -import os -import tempfile -import shutil -import logging -import unittest -import sys - -import tuf.exceptions -import tuf.log -import tuf.client.updater as updater -import tuf.repository_tool as repo_tool -import tuf.unittest_toolbox as unittest_toolbox -import tuf.roledb -import tuf.keydb - -from tests import utils - - -# The repository tool is imported and logs console messages by default. -# Disable console log messages generated by this unit test. -repo_tool.disable_console_log_messages() - -logger = logging.getLogger(__name__) - - - -class TestMixAndMatchAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and - # target files. 'temporary_directory' must be deleted in TearDownModule() - # so that temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. 
- unittest_toolbox.Modified_TestCase.setUp(self)
-
- self.repository_name = 'test_repository1'
-
- # Copy the original repository files provided in the test folder so that
- # any modifications made to repository files are restricted to the copies.
- # The 'repository_data' directory is expected to exist in 'tuf/tests/'.
- original_repository_files = os.path.join(os.getcwd(), 'repository_data')
- temporary_repository_root = \
- self.make_temp_directory(directory=self.temporary_directory)
-
- # The original repository, keystore, and client directories will be copied
- # for each test case.
- original_repository = os.path.join(original_repository_files, 'repository')
- original_client = os.path.join(original_repository_files, 'client')
- original_keystore = os.path.join(original_repository_files, 'keystore')
-
- # Save references to the often-needed client repository directories.
- # Test cases need these references to access metadata and target files.
- self.repository_directory = \
- os.path.join(temporary_repository_root, 'repository')
- self.client_directory = os.path.join(temporary_repository_root, 'client')
- self.keystore_directory = os.path.join(temporary_repository_root, 'keystore')
-
- # Copy the original 'repository', 'client', and 'keystore' directories
- # to the temporary repository the test cases can use.
- shutil.copytree(original_repository, self.repository_directory)
- shutil.copytree(original_client, self.client_directory)
- shutil.copytree(original_keystore, self.keystore_directory)
-
- # Set the url prefix required by the 'tuf/client/updater.py' updater.
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(self.server_process_handler.port) + repository_basepath
-
- # Setting 'tuf.settings.repositories_directory' with the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.client_directory
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata',
- 'targets_path': 'targets'}}
-
- # Create the repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
- def tearDown(self):
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
-
- # Logs stdout and stderr from the server subprocess.
- self.server_process_handler.flush_log()
-
- # Remove temporary directory
- unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
- def test_with_tuf(self):
- # Scenario:
- # An attacker tries to trick the client into installing files indicated by
- # a previous release of its corresponding metadata. The outdated metadata
- # is properly named and was previously valid, but is no longer current
- # according to the latest 'snapshot.json' role. Generate a new snapshot of
- # the repository after modifying a target file of 'role1.json'.
- # Backup 'role1.json' (the delegated role to be updated, and then inserted
- # again for the mix-and-match attack.)
- role1_path = os.path.join(self.repository_directory, 'metadata', 'role1.json')
- backup_role1 = os.path.join(self.repository_directory, 'role1.json.backup')
- shutil.copy(role1_path, backup_role1)
-
- # Backup 'file3.txt', specified by 'role1.json'. 
- file3_path = os.path.join(self.repository_directory, 'targets', 'file3.txt') - shutil.copy(file3_path, file3_path + '.backup') - - # Re-generate the required metadata on the remote repository. The affected - # metadata must be properly updated and signed with 'repository_tool.py', - # otherwise the client will reject them as invalid metadata. The resulting - # metadata should be valid metadata. - repository = repo_tool.load_repository(self.repository_directory) - - # Load the signing keys so that newly generated metadata is properly signed. - timestamp_keyfile = os.path.join(self.keystore_directory, 'timestamp_key') - role1_keyfile = os.path.join(self.keystore_directory, 'delegation_key') - snapshot_keyfile = os.path.join(self.keystore_directory, 'snapshot_key') - timestamp_private = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_keyfile, 'password') - role1_private = \ - repo_tool.import_ed25519_privatekey_from_file(role1_keyfile, 'password') - snapshot_private = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_keyfile, 'password') - - repository.targets('role1').load_signing_key(role1_private) - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - # Modify a 'role1.json' target file, and add it to its metadata so that a - # new version is generated. - with open(file3_path, 'wt') as file_object: - file_object.write('This is role2\'s target file.') - repository.targets('role1').add_target(os.path.basename(file3_path)) - - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Insert the previously valid 'role1.json'. The TUF client should reject it. - shutil.move(backup_role1, role1_path) - - # Verify that the TUF client detects unexpected metadata (previously valid, - # but not up-to-date with the latest snapshot of the repository) and - # refuses to continue the update process. Refresh top-level metadata so - # that the client is aware of the latest snapshot of the repository. - self.repository_updater.refresh() - - try: - with utils.ignore_deprecation_warnings('tuf.client.updater'): - self.repository_updater.targets_of_role('role1') - - # Verify that the specific - # 'tuf.exceptions.BadVersionNumberError' exception is raised by - # each mirror. - except tuf.exceptions.NoWorkingMirrorError as exception: - for mirror_url, mirror_error in exception.mirror_errors.items(): - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'metadata', 'role1.json') - - # Verify that 'role1.json' is the culprit. 
- self.assertEqual(url_file.replace('\\', '/'), mirror_url) - self.assertTrue(isinstance( - mirror_error, tuf.exceptions.BadVersionNumberError)) - - else: - self.fail('TUF did not prevent a mix-and-match attack.') - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_multiple_repositories_integration.py b/tests/test_multiple_repositories_integration.py deleted file mode 100755 index 0339a0d2d0..0000000000 --- a/tests/test_multiple_repositories_integration.py +++ /dev/null @@ -1,268 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_multiple_repositories_integration.py - - - Vladimir Diaz - - - February 2, 2017 - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Verify that clients and the repository tools are able to keep track of - multiple repositories and separate sets of metadata for each. -""" - -import os -import tempfile -import logging -import shutil -import unittest -import json -import sys - -import tuf -import tuf.log -import tuf.roledb -import tuf.client.updater as updater -import tuf.settings -import tuf.unittest_toolbox as unittest_toolbox -import tuf.repository_tool as repo_tool - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) - -repo_tool.disable_console_log_messages() - - -class TestMultipleRepositoriesIntegration(unittest_toolbox.Modified_TestCase): - - def setUp(self): - # Modified_Testcase can handle temp dir removal - unittest_toolbox.Modified_TestCase.setUp(self) - self.temporary_directory = self.make_temp_directory(directory=os.getcwd()) - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - - self.temporary_repository_root = tempfile.mkdtemp(dir=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client', 'test_repository1') - original_keystore = os.path.join(original_repository_files, 'keystore') - original_map_file = os.path.join(original_repository_files, 'map.json') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = os.path.join(self.temporary_repository_root, - 'repository_server1') - self.repository_directory2 = os.path.join(self.temporary_repository_root, - 'repository_server2') - - # Setting 'tuf.settings.repositories_directory' with the temporary client - # directory copied from the original repository files. 
- tuf.settings.repositories_directory = self.temporary_repository_root - - self.repository_name = 'test_repository1' - self.repository_name2 = 'test_repository2' - - self.client_directory = os.path.join(self.temporary_repository_root, - self.repository_name) - self.client_directory2 = os.path.join(self.temporary_repository_root, - self.repository_name2) - - self.keystore_directory = os.path.join(self.temporary_repository_root, 'keystore') - self.map_file = os.path.join(self.client_directory, 'map.json') - self.map_file2 = os.path.join(self.client_directory2, 'map.json') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_repository, self.repository_directory2) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_client, self.client_directory2) - shutil.copyfile(original_map_file, self.map_file) - shutil.copyfile(original_map_file, self.map_file2) - shutil.copytree(original_keystore, self.keystore_directory) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - - # Needed because in some tests simple_server.py cannot be found. - # The reason is that the current working directory - # has been changed when executing a subprocess. - SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), 'simple_server.py') - - # Creates a subprocess running a server. - self.server_process_handler = utils.TestServerProcess(log=logger, - server=SIMPLE_SERVER_PATH, popen_cwd=self.repository_directory) - - logger.debug('Server process started.') - - # Creates a subprocess running a server. - self.server_process_handler2 = utils.TestServerProcess(log=logger, - server=SIMPLE_SERVER_PATH, popen_cwd=self.repository_directory2) - - logger.debug('Server process 2 started.') - - url_prefix = \ - 'http://' + utils.TEST_HOST_ADDRESS + ':' + \ - str(self.server_process_handler.port) - url_prefix2 = \ - 'http://' + utils.TEST_HOST_ADDRESS + ':' + \ - str(self.server_process_handler2.port) - - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - self.repository_mirrors2 = {'mirror1': {'url_prefix': url_prefix2, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Create the repository instances. The test cases will use these client - # updaters to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - self.repository_updater2 = updater.Updater(self.repository_name2, - self.repository_mirrors2) - - - def tearDown(self): - # Cleans the resources and flush the logged lines (if any). 
- self.server_process_handler.clean() - self.server_process_handler2.clean() - - # updater.Updater() populates the roledb with the name "test_repository1" - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Remove top-level temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - def test_update(self): - self.assertEqual('test_repository1', str(self.repository_updater)) - self.assertEqual('test_repository2', str(self.repository_updater2)) - - self.assertEqual(sorted(['role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('test_repository1'))) - - self.assertEqual(sorted(['role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('test_repository2'))) - - # Note: refresh() resets the known metadata and updates the latest - # top-level metadata. - self.repository_updater.refresh() - - self.assertEqual(sorted(['root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('test_repository1'))) - - # test_repository2 wasn't refreshed and should still know about delegated - # roles. - self.assertEqual(sorted(['root', 'role1', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('test_repository2'))) - - # 'role1.json' should be downloaded, because it provides info for the - # requested 'file3.txt'. - valid_targetinfo = self.repository_updater.get_one_valid_targetinfo('file3.txt') - - self.assertEqual(sorted(['role2', 'role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('test_repository1'))) - - - - - def test_repository_tool(self): - - self.assertEqual(self.repository_name, str(self.repository_updater)) - self.assertEqual(self.repository_name2, str(self.repository_updater2)) - - repository = repo_tool.load_repository(self.repository_directory, - self.repository_name) - repository2 = repo_tool.load_repository(self.repository_directory2, - self.repository_name2) - - repository.timestamp.version = 88 - self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles( - self.repository_name)) - self.assertEqual([], tuf.roledb.get_dirty_roles(self.repository_name2)) - - repository2.timestamp.version = 100 - self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles( - self.repository_name2)) - - key_file = os.path.join(self.keystore_directory, 'timestamp_key') - timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file, "password") - - repository.timestamp.load_signing_key(timestamp_private) - repository2.timestamp.load_signing_key(timestamp_private) - - repository.write('timestamp', increment_version_number=False) - repository2.write('timestamp', increment_version_number=False) - - # And move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.rmtree(os.path.join(self.repository_directory2, 'metadata')) - - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory2, 'metadata.staged'), - os.path.join(self.repository_directory2, 'metadata')) - - # Verify that the client retrieves the expected updates. 
- logger.info('Downloading timestamp from server 1.') - self.repository_updater.refresh() - - self.assertEqual( - 88, self.repository_updater.metadata['current']['timestamp']['version']) - logger.info('Downloading timestamp from server 2.') - self.repository_updater2.refresh() - - self.assertEqual( - 100, self.repository_updater2.metadata['current']['timestamp']['version']) - - # Test the behavior of the multi-repository updater. - map_file = securesystemslib.util.load_json_file(self.map_file) - map_file['repositories'][self.repository_name] = ['http://localhost:' \ - + str(self.server_process_handler.port)] - map_file['repositories'][self.repository_name2] = ['http://localhost:' \ - + str(self.server_process_handler2.port)] - with open(self.map_file, 'w') as file_object: - file_object.write(json.dumps(map_file)) - - # Try to load a non-existent map file. - self.assertRaises(tuf.exceptions.Error, updater.MultiRepoUpdater, 'bad_path') - - multi_repo_updater = updater.MultiRepoUpdater(self.map_file) - valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file3.txt') - - for my_updater, my_targetinfo in valid_targetinfo.items(): - my_updater.download_target(my_targetinfo, self.temporary_directory) - self.assertTrue(os.path.exists(os.path.join(self.temporary_directory, 'file3.txt'))) - - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_replay_attack.py b/tests/test_replay_attack.py deleted file mode 100755 index 05cf572c7c..0000000000 --- a/tests/test_replay_attack.py +++ /dev/null @@ -1,321 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_replay_attack.py - - - Konstantin Andrianov. - - - February 22, 2012. - - April 5, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. Expanded comments. - -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate a replay, or rollback, attack. In a replay attack, a client is - tricked into installing software that is older than that which the client - previously knew to be available. - - Note: There is no difference between 'updates' and 'target' files. -""" - -import os -import tempfile -import datetime -import shutil -import logging -import unittest -import sys -from urllib import request - -import tuf.formats -import tuf.log -import tuf.client.updater as updater -import tuf.repository_tool as repo_tool -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -import securesystemslib - - -# The repository tool is imported and logs console messages by default. -# Disable console log messages generated by this unit test. -repo_tool.disable_console_log_messages() - -logger = logging.getLogger(__name__) - - - -class TestReplayAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). 
- # Test cases will request metadata and target files that have been
- # pre-generated in 'tuf/tests/repository_data', which will be served by the
- # SimpleHTTPServer launched here. The test cases of this unit test assume
- # the pre-generated metadata files have a specific structure, such
- # as a delegated role 'targets/role1', three target files, five key files,
- # etc.
- cls.server_process_handler = utils.TestServerProcess(log=logger)
-
-
-
- @classmethod
- def tearDownClass(cls):
- # Cleans the resources and flushes the logged lines (if any).
- cls.server_process_handler.clean()
-
- # Remove the temporary repository directory, which should contain all the
- # metadata, targets, and key files generated by all the test cases.
- shutil.rmtree(cls.temporary_directory)
-
-
-
-
- def setUp(self):
- # We are inheriting from custom class.
- unittest_toolbox.Modified_TestCase.setUp(self)
-
- self.repository_name = 'test_repository1'
-
- # Copy the original repository files provided in the test folder so that
- # any modifications made to repository files are restricted to the copies.
- # The 'repository_data' directory is expected to exist in 'tuf/tests/'.
- original_repository_files = os.path.join(os.getcwd(), 'repository_data')
- temporary_repository_root = \
- self.make_temp_directory(directory=self.temporary_directory)
-
- # The original repository, keystore, and client directories will be copied
- # for each test case.
- original_repository = os.path.join(original_repository_files, 'repository')
- original_client = os.path.join(original_repository_files, 'client')
- original_keystore = os.path.join(original_repository_files, 'keystore')
-
- # Save references to the often-needed client repository directories.
- # Test cases need these references to access metadata and target files.
- self.repository_directory = \
- os.path.join(temporary_repository_root, 'repository')
- self.client_directory = os.path.join(temporary_repository_root, 'client')
- self.keystore_directory = os.path.join(temporary_repository_root, 'keystore')
-
- # Copy the original 'repository', 'client', and 'keystore' directories
- # to the temporary repository the test cases can use.
- shutil.copytree(original_repository, self.repository_directory)
- shutil.copytree(original_client, self.client_directory)
- shutil.copytree(original_keystore, self.keystore_directory)
-
- # Set the url prefix required by the 'tuf/client/updater.py' updater.
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(self.server_process_handler.port) + repository_basepath
-
- # Setting 'tuf.settings.repositories_directory' with the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.client_directory
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata',
- 'targets_path': 'targets'}}
-
- # Create the repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
- def tearDown(self):
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
-
- # Logs stdout and stderr from the server subprocess. 
- self.server_process_handler.flush_log()
-
- # Remove temporary directory
- unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
- def test_without_tuf(self):
- # Scenario:
- # 'timestamp.json' specifies the latest version of the repository files.
- # A client should only accept the same version number (specified in the
- # file) of the metadata, or greater. A version number less than the one
- # currently trusted should be rejected. A non-TUF client may use a
- # different mechanism for determining versions of metadata, but version
- # numbers are used in this integration test because that is what TUF uses.
- #
- # Modify the repository's 'timestamp.json' so that a new version is generated
- # and accepted by the client, and backup the previous version. The previous
- # version is then returned the next time the client requests an update. A non-TUF
- # client (without a way to detect older versions of metadata, and thus
- # updates) is expected to download older metadata and outdated files.
- # Verify that the older version of 'timestamp.json' is downloaded by the
- # non-TUF client.
-
- # Backup the current version of 'timestamp'. It will be used as the
- # outdated version returned to the client. The repository tool removes
- # obsolete metadata, so do *not* save the backup version in the
- # repository's metadata directory.
- timestamp_path = os.path.join(self.repository_directory, 'metadata',
- 'timestamp.json')
- backup_timestamp = os.path.join(self.repository_directory,
- 'timestamp.json.backup')
- shutil.copy(timestamp_path, backup_timestamp)
-
- # The fileinfo of the previous version is saved to verify that it is indeed
- # accepted by the non-TUF client.
- length, hashes = securesystemslib.util.get_file_details(backup_timestamp)
- previous_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Modify the timestamp file on the remote repository.
- repository = repo_tool.load_repository(self.repository_directory)
- key_file = os.path.join(self.keystore_directory, 'timestamp_key')
- timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
- 'password')
- repository.timestamp.load_signing_key(timestamp_private)
-
- # Set an arbitrary expiration so that the repository tool generates a new
- # version.
- repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Save the fileinfo of the new version generated to verify that it is
- # saved by the client.
- length, hashes = securesystemslib.util.get_file_details(timestamp_path)
- new_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- url_prefix = self.repository_mirrors['mirror1']['url_prefix']
- url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json')
- client_timestamp_path = os.path.join(self.client_directory,
- self.repository_name, 'metadata', 'current', 'timestamp.json')
-
- # On Windows, the URL portion should not contain back slashes.
- request.urlretrieve(url_file.replace('\\', '/'), client_timestamp_path)
-
- length, hashes = securesystemslib.util.get_file_details(client_timestamp_path)
- download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Verify 'download_fileinfo' is equal to the new version. 
- self.assertEqual(download_fileinfo, new_fileinfo)
-
- # Restore the previous version of 'timestamp.json' on the remote repository
- # and verify that the non-TUF client downloads it (expected, but not ideal).
- shutil.move(backup_timestamp, timestamp_path)
-
- # On Windows, the URL portion should not contain back slashes.
- request.urlretrieve(url_file.replace('\\', '/'), client_timestamp_path)
-
- length, hashes = securesystemslib.util.get_file_details(client_timestamp_path)
- download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Verify 'download_fileinfo' is equal to the previous version.
- self.assertEqual(download_fileinfo, previous_fileinfo)
- self.assertNotEqual(download_fileinfo, new_fileinfo)
-
-
-
- def test_with_tuf(self):
- # The same scenario outlined in test_without_tuf() is followed here, except
- # with a TUF client (the scenario description is provided in the opening
- # comment block of that test case). The TUF client performs a refresh of
- # top-level metadata, which also includes 'timestamp.json'.
-
- # Backup the current version of 'timestamp'. It will be used as the
- # outdated version returned to the client. The repository tool removes
- # obsolete metadata, so do *not* save the backup version in the
- # repository's metadata directory.
- timestamp_path = os.path.join(self.repository_directory, 'metadata',
- 'timestamp.json')
- backup_timestamp = os.path.join(self.repository_directory,
- 'timestamp.json.backup')
- shutil.copy(timestamp_path, backup_timestamp)
-
- # Save the fileinfo of the previous version for later reference.
- length, hashes = securesystemslib.util.get_file_details(backup_timestamp)
- previous_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Modify the timestamp file on the remote repository.
- repository = repo_tool.load_repository(self.repository_directory)
- key_file = os.path.join(self.keystore_directory, 'timestamp_key')
- timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
- 'password')
- repository.timestamp.load_signing_key(timestamp_private)
-
- # Set an arbitrary expiration so that the repository tool generates a new
- # version.
- repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Save the fileinfo of the new version generated to verify that it is
- # saved by the client.
- length, hashes = securesystemslib.util.get_file_details(timestamp_path)
- new_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Refresh top-level metadata, including 'timestamp.json'. Installation of
- # the new version of 'timestamp.json' is expected.
- self.repository_updater.refresh()
-
- client_timestamp_path = os.path.join(self.client_directory,
- self.repository_name, 'metadata', 'current', 'timestamp.json')
- length, hashes = securesystemslib.util.get_file_details(client_timestamp_path)
- download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Verify 'download_fileinfo' is equal to the new version.
- self.assertEqual(download_fileinfo, new_fileinfo)
-
- # Restore the previous version of 'timestamp.json' on the remote repository
- # and verify that the TUF client rejects it. 
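The rejection verified just below reduces to a version comparison: once a version of 'timestamp.json' has been trusted, metadata carrying a lower version number is refused as a replay. A minimal sketch of that rule; the helper below is hypothetical, not the TUF API, and the real client also verifies signatures, hashes, and expiry:

# Hypothetical rollback check: 'trusted_version' is the version number
# recorded from previously verified metadata.
def reject_rollback(trusted_version, new_version):
    if new_version < trusted_version:
        raise ValueError(
            'replayed metadata: version %d is older than trusted version %d'
            % (new_version, trusted_version))

reject_rollback(trusted_version=88, new_version=100)   # accepted
# reject_rollback(trusted_version=100, new_version=88) # would raise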
- shutil.move(backup_timestamp, timestamp_path) - logger.info('Moving the timestamp.json backup to the current version.') - - # Verify that the TUF client detects replayed metadata and refuses to - # continue the update process. - try: - self.repository_updater.refresh() - - # Verify that the specific 'tuf.exceptions.ReplayedMetadataError' is raised by each - # mirror. - except tuf.exceptions.NoWorkingMirrorError as exception: - for mirror_url, mirror_error in exception.mirror_errors.items(): - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json') - - # Verify that 'timestamp.json' is the culprit. - self.assertEqual(url_file.replace('\\', '/'), mirror_url) - self.assertTrue(isinstance(mirror_error, tuf.exceptions.ReplayedMetadataError)) - - else: - self.fail('TUF did not prevent a replay attack.') - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_repository_lib.py b/tests/test_repository_lib.py deleted file mode 100755 index 96dcb0e0e0..0000000000 --- a/tests/test_repository_lib.py +++ /dev/null @@ -1,1102 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_repository_lib.py - - - Vladimir Diaz - - - June 1, 2014. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'repository_lib.py'. -""" - -import os -import time -import datetime -import logging -import tempfile -import json -import shutil -import unittest -import copy -import sys - -import tuf -import tuf.formats -import tuf.log -import tuf.formats -import tuf.roledb -import tuf.keydb -import tuf.settings - -import tuf.repository_lib as repo_lib -import tuf.repository_tool as repo_tool - -from tests import utils - -import securesystemslib -import securesystemslib.exceptions -import securesystemslib.rsa_keys -import securesystemslib.interface -import securesystemslib.storage - -logger = logging.getLogger(__name__) - -repo_lib.disable_console_log_messages() - -TOP_LEVEL_METADATA_FILES = ['root.json', 'targets.json', 'timestamp.json', - 'snapshot.json'] - - -class TestRepositoryToolFunctions(unittest.TestCase): - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownClass() so that - # temporary files are always removed, even when exceptions occur. - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - - - @classmethod - def tearDownClass(cls): - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - shutil.rmtree(cls.temporary_directory) - - - def setUp(self): - tuf.roledb.create_roledb('test_repository') - tuf.keydb.create_keydb('test_repository') - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - - - def test_import_rsa_privatekey_from_file(self): - # Test normal case. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - - # Load one of the pre-generated key files from 'tuf/tests/repository_data'. - # 'password' unlocks the pre-generated key files. 
- key_filepath = os.path.join('repository_data', 'keystore', - 'root_key') - self.assertTrue(os.path.exists(key_filepath)) - - imported_rsa_key = repo_lib.import_rsa_privatekey_from_file(key_filepath, - 'password') - self.assertTrue(securesystemslib.formats.RSAKEY_SCHEMA.matches(imported_rsa_key)) - - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.import_rsa_privatekey_from_file, 3, 'pw') - - - # Test invalid argument. - # Non-existent key file. - nonexistent_keypath = os.path.join(temporary_directory, - 'nonexistent_keypath') - self.assertRaises(securesystemslib.exceptions.StorageError, - repo_lib.import_rsa_privatekey_from_file, - nonexistent_keypath, 'pw') - - # Invalid key file argument. - invalid_keyfile = os.path.join(temporary_directory, 'invalid_keyfile') - with open(invalid_keyfile, 'wb') as file_object: - file_object.write(b'bad keyfile') - self.assertRaises(securesystemslib.exceptions.CryptoError, repo_lib.import_rsa_privatekey_from_file, - invalid_keyfile, 'pw') - - - - def test_import_ed25519_privatekey_from_file(self): - # Test normal case. - # Generate ed25519 keys that can be imported. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - ed25519_keypath = os.path.join(temporary_directory, 'ed25519_key') - securesystemslib.interface.generate_and_write_ed25519_keypair( - password='pw', filepath=ed25519_keypath) - - imported_ed25519_key = \ - repo_lib.import_ed25519_privatekey_from_file(ed25519_keypath, 'pw') - self.assertTrue(securesystemslib.formats.ED25519KEY_SCHEMA.matches(imported_ed25519_key)) - - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.import_ed25519_privatekey_from_file, 3, 'pw') - - - # Test invalid argument. - # Non-existent key file. - nonexistent_keypath = os.path.join(temporary_directory, - 'nonexistent_keypath') - self.assertRaises(securesystemslib.exceptions.StorageError, - repo_lib.import_ed25519_privatekey_from_file, - nonexistent_keypath, 'pw') - - # Invalid key file argument. - invalid_keyfile = os.path.join(temporary_directory, 'invalid_keyfile') - with open(invalid_keyfile, 'wb') as file_object: - file_object.write(b'bad keyfile') - - self.assertRaises(securesystemslib.exceptions.Error, - repo_lib.import_ed25519_privatekey_from_file, invalid_keyfile, 'pw') - - # Invalid private key imported (contains unexpected keytype.) - imported_ed25519_key['keytype'] = 'invalid_keytype' - - # Use 'rsa_keys.py' to bypass the key format validation performed by - # 'keys.py'. - salt, iterations, derived_key = \ - securesystemslib.rsa_keys._generate_derived_key('pw') - - # Store the derived key info in a dictionary, the object expected - # by the non-public _encrypt() routine. - derived_key_information = {'salt': salt, 'iterations': iterations, - 'derived_key': derived_key} - - # Convert the key object to json string format and encrypt it with the - # derived key. - encrypted_key = securesystemslib.rsa_keys._encrypt( - json.dumps(imported_ed25519_key), derived_key_information) - - with open(ed25519_keypath, 'wb') as file_object: - file_object.write(encrypted_key.encode('utf-8')) - - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.import_ed25519_privatekey_from_file, ed25519_keypath, 'pw') - - - - def test_get_top_level_metadata_filenames(self): - - # Test normal case. 
- metadata_directory = os.path.join('metadata/') - filenames = {'root.json': metadata_directory + 'root.json', - 'targets.json': metadata_directory + 'targets.json', - 'snapshot.json': metadata_directory + 'snapshot.json', - 'timestamp.json': metadata_directory + 'timestamp.json'} - - self.assertEqual(filenames, - repo_lib.get_top_level_metadata_filenames('metadata/')) - - # If a directory argument is not specified, the current working directory - # is used. - metadata_directory = os.getcwd() - filenames = {'root.json': os.path.join(metadata_directory, 'root.json'), - 'targets.json': os.path.join(metadata_directory, 'targets.json'), - 'snapshot.json': os.path.join(metadata_directory, 'snapshot.json'), - 'timestamp.json': os.path.join(metadata_directory, 'timestamp.json')} - self.assertEqual(filenames, - repo_lib.get_top_level_metadata_filenames(metadata_directory)) - - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.get_top_level_metadata_filenames, 3) - - - - def test_get_targets_metadata_fileinfo(self): - # Test normal case. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - test_filepath = os.path.join(temporary_directory, 'file.txt') - - with open(test_filepath, 'wt') as file_object: - file_object.write('test file') - - # Generate test fileinfo object. It is assumed SHA256 and SHA512 hashes - # are computed by get_targets_metadata_fileinfo(). - file_length = os.path.getsize(test_filepath) - sha256_digest_object = securesystemslib.hash.digest_filename(test_filepath) - sha512_digest_object = securesystemslib.hash.digest_filename(test_filepath, algorithm='sha512') - file_hashes = {'sha256': sha256_digest_object.hexdigest(), - 'sha512': sha512_digest_object.hexdigest()} - fileinfo = {'length': file_length, 'hashes': file_hashes} - self.assertTrue(tuf.formats.TARGETS_FILEINFO_SCHEMA.matches(fileinfo)) - - storage_backend = securesystemslib.storage.FilesystemBackend() - - self.assertEqual(fileinfo, repo_lib.get_targets_metadata_fileinfo(test_filepath, - storage_backend)) - - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.get_targets_metadata_fileinfo, 3, - storage_backend) - - - # Test non-existent file. - nonexistent_filepath = os.path.join(temporary_directory, 'oops.txt') - self.assertRaises(securesystemslib.exceptions.Error, - repo_lib.get_targets_metadata_fileinfo, - nonexistent_filepath, storage_backend) - - - - def test_get_target_hash(self): - # Test normal case. - expected_target_hashes = { - '/file1.txt': 'e3a3d89eb3b70ce3fbce6017d7b8c12d4abd5635427a0e8a238f53157df85b3d', - '/README.txt': '8faee106f1bb69f34aaf1df1e3c2e87d763c4d878cb96b91db13495e32ceb0b0', - '/packages/file2.txt': 'c9c4a5cdd84858dd6a23d98d7e6e6b2aec45034946c16b2200bc317c75415e92' - } - for filepath, target_hash in expected_target_hashes.items(): - self.assertTrue(tuf.formats.RELPATH_SCHEMA.matches(filepath)) - self.assertTrue(securesystemslib.formats.HASH_SCHEMA.matches(target_hash)) - self.assertEqual(repo_lib.get_target_hash(filepath), target_hash) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.get_target_hash, 8) - - - - def test_generate_root_metadata(self): - # Test normal case. - # Load the root metadata provided in 'tuf/tests/repository_data/'. 
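A note on test_get_target_hash() above: repo_lib.get_target_hash() digests the target's path string, not the file contents; hashed-bin delegations use this digest to assign targets to bins. A rough hashlib equivalent, a sketch assuming the library hashed the UTF-8 encoded relative path:

import hashlib

def target_path_hash(relative_path):
    # Digest the path string itself, not the file contents.
    return hashlib.sha256(relative_path.encode('utf-8')).hexdigest()

# Should reproduce the first digest listed in the test above.
print(target_path_hash('/file1.txt'))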
- root_filepath = os.path.join('repository_data', 'repository', - 'metadata', 'root.json') - root_signable = securesystemslib.util.load_json_file(root_filepath) - - # generate_root_metadata() expects the top-level roles and keys to be - # available in 'tuf.keydb' and 'tuf.roledb'. - tuf.roledb.create_roledb_from_root_metadata(root_signable['signed']) - tuf.keydb.create_keydb_from_root_metadata(root_signable['signed']) - expires = '1985-10-21T01:22:00Z' - - root_metadata = repo_lib.generate_root_metadata(1, expires, - consistent_snapshot=False) - self.assertTrue(tuf.formats.ROOT_SCHEMA.matches(root_metadata)) - - root_keyids = tuf.roledb.get_role_keyids('root') - tuf.keydb._keydb_dict['default'][root_keyids[0]]['keytype'] = 'bad_keytype' - self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_root_metadata, 1, - expires, consistent_snapshot=False) - - # Reset the root key's keytype, so that we can next verify that a different - # securesystemslib.exceptions.Error exception is raised for duplicate keyids. - tuf.keydb._keydb_dict['default'][root_keyids[0]]['keytype'] = 'rsa' - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_root_metadata, - '3', expires, False) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_root_metadata, - 1, '3', False) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_root_metadata, - 1, expires, 3) - - # Test for missing required roles and keys. - tuf.roledb.clear_roledb() - tuf.keydb.clear_keydb() - self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_root_metadata, - 1, expires, False) - - - - def test_generate_targets_metadata(self): - # Test normal case. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - targets_directory = os.path.join(temporary_directory, 'targets') - file1_path = os.path.join(targets_directory, 'file.txt') - securesystemslib.util.ensure_parent_dir(file1_path) - - with open(file1_path, 'wt') as file_object: - file_object.write('test file.') - - # Set valid generate_targets_metadata() arguments. Add a custom field for - # the 'target_files' target set below. - version = 1 - datetime_object = datetime.datetime(2030, 1, 1, 12, 0) - expiration_date = datetime_object.isoformat() + 'Z' - file_permissions = oct(os.stat(file1_path).st_mode)[4:] - target_files = {'file.txt': {'custom': {'file_permission': file_permissions}}} - - # Delegations data must be loaded into roledb since - # generate_targets_metadata tries to update delegations keyids - # and threshold - repository_path = os.path.join('repository_data', 'repository') - repository = repo_tool.load_repository(repository_path) - roleinfo = tuf.roledb.get_roleinfo('targets') - delegations = roleinfo['delegations'] - - targets_metadata = repo_lib.generate_targets_metadata(targets_directory, - target_files, version, expiration_date, delegations, False) - self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches(targets_metadata)) - - # Valid arguments with 'delegations' set to None. 
- targets_metadata = repo_lib.generate_targets_metadata(targets_directory, - target_files, version, expiration_date, None, False) - self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches(targets_metadata)) - - # Test update in targets' delegations - keystore_path = os.path.join('repository_data', 'keystore') - targets_public_keypath = os.path.join(keystore_path, 'targets_key.pub') - targets_public_key = securesystemslib.interface.\ - import_ed25519_publickey_from_file(targets_public_keypath) - - # Add new key and threshold to delegated role - repository.targets('role1').add_verification_key(targets_public_key) - repository.targets('role1').threshold = 2 - role1_keyids = tuf.roledb.get_role_keyids('role1') - role1_threshold = tuf.roledb.get_role_threshold('role1') - roleinfo = tuf.roledb.get_roleinfo('targets') - delegations = roleinfo['delegations'] - old_delegations = copy.deepcopy(delegations) - - targets_metadata = repo_lib.generate_targets_metadata(targets_directory, - target_files, version, expiration_date, delegations, False) - self.assertNotEqual(old_delegations, delegations) - self.assertEqual(role1_keyids, - targets_metadata['delegations']['roles'][0]['keyids']) - self.assertEqual(role1_threshold, - targets_metadata['delegations']['roles'][0]['threshold']) - for keyid in role1_keyids: - self.assertIn(keyid, targets_metadata['delegations']['keys']) - - - # Verify that 'digest.filename' file is saved to 'targets_directory' if - # the 'write_consistent_targets' argument is True. - list_targets_directory = os.listdir(targets_directory) - targets_metadata = repo_lib.generate_targets_metadata(targets_directory, - target_files, version, expiration_date, delegations, - write_consistent_targets=True) - new_list_targets_directory = os.listdir(targets_directory) - - # Verify that 'targets_directory' contains only one extra item. - self.assertTrue(len(list_targets_directory) + 1, - len(new_list_targets_directory)) - - # Verify that an exception is not raised if the target files already exist. - repo_lib.generate_targets_metadata(targets_directory, target_files, - version, expiration_date, delegations, - write_consistent_targets=True) - - - # Verify that 'targets_metadata' contains a 'custom' entry (optional) - # for 'file.txt'. - self.assertTrue('custom' in targets_metadata['targets']['file.txt']) - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata, - 3, target_files, version, expiration_date) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata, - targets_directory, 3, version, expiration_date) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata, - targets_directory, target_files, '3', expiration_date) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata, - targets_directory, target_files, version, '3') - - # Improperly formatted 'delegations' and 'write_consistent_targets' - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata, - targets_directory, target_files, version, expiration_date, - 3, False) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata, - targets_directory, target_files, version, expiration_date, - delegations, 3) - - # Test non-existent target file. 
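The target_files dictionaries used throughout these tests share one shape: a byte 'length', a 'hashes' dictionary of hex digests, and an optional 'custom' dictionary. A self-contained sketch of building such a fileinfo for a file on disk; the sha256/sha512 pair mirrors what the tests expect:

import hashlib
import os

def make_fileinfo(path, custom=None):
    # Compute the 'length' and 'hashes' fields the tests above expect.
    with open(path, 'rb') as file_object:
        data = file_object.read()
    fileinfo = {
        'length': os.path.getsize(path),
        'hashes': {
            'sha256': hashlib.sha256(data).hexdigest(),
            'sha512': hashlib.sha512(data).hexdigest(),
        },
    }
    if custom is not None:
        # Optional, application-specific metadata (e.g. file permissions).
        fileinfo['custom'] = custom
    return fileinfo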
- bad_target_file = \ - {'non-existent.txt': {'file_permission': file_permissions}} - - self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_targets_metadata, - targets_directory, bad_target_file, version, - expiration_date) - - - # Test use of an existing fileinfo structures - target1_hashes = {'sha256': 'c2986576f5fdfd43944e2b19e775453b96748ec4fe2638a6d2f32f1310967095'} - target2_hashes = {'sha256': '517c0ce943e7274a2431fa5751e17cfd5225accd23e479bfaad13007751e87ef'} - - # Test missing expected field, hashes, when use_existing_fileinfo - target_files = {'file.txt': {'length': 555}} - self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_targets_metadata, - targets_directory, target_files, version, expiration_date, - use_existing_fileinfo=True) - - # Test missing expected field, length, when use_existing_fileinfo - target_files = {'file.txt': {'hashes': target1_hashes}} - self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_targets_metadata, - targets_directory, target_files, version, expiration_date, - use_existing_fileinfo=True) - - # Test missing both expected fields when use_existing_fileinfo - target_files = {'file.txt': {}} - self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_targets_metadata, - targets_directory, target_files, version, expiration_date, - use_existing_fileinfo=True) - - target_files = {'file1.txt': {'custom': {'meta': 'foo'}, - 'hashes': target1_hashes, - 'length': 555}, - 'file2.txt': {'custom': {'meta': 'bar'}, - 'hashes': target2_hashes, - 'length': 42}} - targets_metadata = \ - repo_lib.generate_targets_metadata(targets_directory, target_files, - version, expiration_date, delegations, - False, use_existing_fileinfo=True) - - - def _setup_generate_snapshot_metadata_test(self): - # Test normal case. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - original_repository_path = os.path.join('repository_data', - 'repository') - repository_directory = os.path.join(temporary_directory, 'repository') - shutil.copytree(original_repository_path, repository_directory) - metadata_directory = os.path.join(repository_directory, - repo_lib.METADATA_STAGED_DIRECTORY_NAME) - - targets_directory = os.path.join(repository_directory, repo_lib.TARGETS_DIRECTORY_NAME) - - version = 1 - expiration_date = '1985-10-21T13:20:00Z' - - # Load a valid repository so that top-level roles exist in roledb and - # generate_snapshot_metadata() has roles to specify in snapshot metadata. - storage_backend = securesystemslib.storage.FilesystemBackend() - repository = repo_tool.Repository(repository_directory, metadata_directory, - targets_directory, storage_backend) - repository_junk = repo_tool.load_repository(repository_directory) - - # Load a valid repository so that top-level roles exist in roledb and - # generate_snapshot_metadata() has roles to specify in snapshot metadata. - storage_backend = securesystemslib.storage.FilesystemBackend() - - # For testing purposes, store an invalid metadata file in the metadata directory - # to verify that it isn't loaded by generate_snapshot_metadata(). Unknown - # metadata file extensions should be ignored. 
- invalid_metadata_file = os.path.join(metadata_directory, 'role_file.xml')
- with open(invalid_metadata_file, 'w') as file_object:
- file_object.write('bad extension on metadata file')
-
- return metadata_directory, version, expiration_date, \
- storage_backend
-
-
- def test_generate_snapshot_metadata(self):
- metadata_directory, version, expiration_date, storage_backend = \
- self._setup_generate_snapshot_metadata_test()
-
- snapshot_metadata = \
- repo_lib.generate_snapshot_metadata(metadata_directory, version,
- expiration_date,
- storage_backend,
- consistent_snapshot=False)
- self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches(snapshot_metadata))
-
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
- 3, version, expiration_date, consistent_snapshot=False,
- storage_backend=storage_backend)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
- metadata_directory, '3', expiration_date, storage_backend,
- consistent_snapshot=False)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
- metadata_directory, version, '3', storage_backend,
- consistent_snapshot=False)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
- metadata_directory, version, expiration_date, 3,
- storage_backend)
-
-
-
- def test_generate_snapshot_metadata_with_length(self):
- metadata_directory, version, expiration_date, storage_backend = \
- self._setup_generate_snapshot_metadata_test()
-
- snapshot_metadata = \
- repo_lib.generate_snapshot_metadata(metadata_directory, version,
- expiration_date,
- storage_backend,
- consistent_snapshot=False,
- use_length=True)
- self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches(snapshot_metadata))
-
- metadata_files_info_dict = snapshot_metadata['meta']
- for metadata_filename in sorted(os.listdir(metadata_directory), reverse=True):
-
- # In the metadata_directory, there are files with format:
- # 1.root.json. The prefix number should be removed.
- stripped_filename, version = \
- repo_lib._strip_version_number(metadata_filename,
- consistent_snapshot=True)
-
- # In the repository, the file "role_file.xml" has been added to make
- # sure that non-json files aren't loaded. This file should be filtered.
- if stripped_filename.endswith('.json'):
- if stripped_filename not in TOP_LEVEL_METADATA_FILES:
- # Check that length is included but hashes are not
- self.assertIn('length', metadata_files_info_dict[stripped_filename])
- self.assertNotIn('hashes', metadata_files_info_dict[stripped_filename])
-
-
-
- def test_generate_snapshot_metadata_with_hashes(self):
- metadata_directory, version, expiration_date, storage_backend = \
- self._setup_generate_snapshot_metadata_test()
-
- snapshot_metadata = \
- repo_lib.generate_snapshot_metadata(metadata_directory, version,
- expiration_date,
- storage_backend,
- consistent_snapshot=False,
- use_hashes=True)
- self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches(snapshot_metadata))
-
- metadata_files_info_dict = snapshot_metadata['meta']
- for metadata_filename in sorted(os.listdir(metadata_directory), reverse=True):
-
- # In the metadata_directory, there are files with format:
- # 1.root.json. The prefix number should be removed. 
- stripped_filename, version = \
- repo_lib._strip_version_number(metadata_filename,
- consistent_snapshot=True)
-
- # In the repository, the file "role_file.xml" has been added to make
- # sure that non-json files aren't loaded. This file should be filtered.
- if stripped_filename.endswith('.json'):
- if stripped_filename not in TOP_LEVEL_METADATA_FILES:
- # Check that hashes are included but length is not
- self.assertNotIn('length', metadata_files_info_dict[stripped_filename])
- self.assertIn('hashes', metadata_files_info_dict[stripped_filename])
-
-
-
- def test_generate_snapshot_metadata_with_hashes_and_length(self):
- metadata_directory, version, expiration_date, storage_backend = \
- self._setup_generate_snapshot_metadata_test()
-
- snapshot_metadata = \
- repo_lib.generate_snapshot_metadata(metadata_directory, version,
- expiration_date,
- storage_backend,
- consistent_snapshot=False,
- use_length=True,
- use_hashes=True)
- self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches(snapshot_metadata))
-
- metadata_files_info_dict = snapshot_metadata['meta']
- for metadata_filename in sorted(os.listdir(metadata_directory), reverse=True):
-
- # In the metadata_directory, there are files with format:
- # 1.root.json. The prefix number should be removed.
- stripped_filename, version = \
- repo_lib._strip_version_number(metadata_filename,
- consistent_snapshot=True)
-
- # In the repository, the file "role_file.xml" has been added to make
- # sure that non-json files aren't loaded. This file should be filtered.
- if stripped_filename.endswith('.json'):
- if stripped_filename not in TOP_LEVEL_METADATA_FILES:
- # Check that both length and hashes are included
- self.assertIn('length', metadata_files_info_dict[stripped_filename])
- self.assertIn('hashes', metadata_files_info_dict[stripped_filename])
-
-
-
- def _setup_generate_timestamp_metadata_test(self):
- # Test normal case.
- repository_name = 'test_repository'
- temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
- original_repository_path = os.path.join('repository_data',
- 'repository')
- repository_directory = os.path.join(temporary_directory, 'repository')
- shutil.copytree(original_repository_path, repository_directory)
- metadata_directory = os.path.join(repository_directory,
- repo_lib.METADATA_STAGED_DIRECTORY_NAME)
- targets_directory = os.path.join(repository_directory, repo_lib.TARGETS_DIRECTORY_NAME)
-
- snapshot_file_path = os.path.join(metadata_directory,
- repo_lib.SNAPSHOT_FILENAME)
-
- # Set valid generate_timestamp_metadata() arguments.
- version = 1
- expiration_date = '1985-10-21T13:20:00Z'
-
- storage_backend = securesystemslib.storage.FilesystemBackend()
- # Load a valid repository so that top-level roles exist in roledb and
- # generate_timestamp_metadata() has a staged snapshot file to reference. 
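The three snapshot variants above pin down one rule: 'length' and 'hashes' appear in a snapshot 'meta' entry only when use_length or use_hashes is set. That rule is compact enough to sketch; illustrative only, since the real generator also handles versioned filenames and treats top-level roles differently:

import hashlib
import os

def snapshot_meta_entry(path, version, use_length=False, use_hashes=False):
    # 'version' is always present; 'length' and 'hashes' are opt-in,
    # exactly as the three test variants above assert.
    entry = {'version': version}
    if use_length:
        entry['length'] = os.path.getsize(path)
    if use_hashes:
        with open(path, 'rb') as file_object:
            entry['hashes'] = {
                'sha256': hashlib.sha256(file_object.read()).hexdigest()}
    return entry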
- repository = repo_tool.Repository(repository_directory, metadata_directory, - targets_directory, repository_name) - - repository_junk = repo_tool.load_repository(repository_directory, - repository_name) - - return snapshot_file_path, version, expiration_date, storage_backend, \ - repository_name - - - def test_generate_timestamp_metadata(self): - snapshot_file_path, version, expiration_date, storage_backend, \ - repository_name = self._setup_generate_timestamp_metadata_test() - - timestamp_metadata = repo_lib.generate_timestamp_metadata(snapshot_file_path, - version, expiration_date, storage_backend, repository_name) - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches(timestamp_metadata)) - - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.generate_timestamp_metadata, 3, version, expiration_date, - storage_backend, repository_name) - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.generate_timestamp_metadata, snapshot_file_path, '3', - expiration_date, storage_backend, repository_name) - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.generate_timestamp_metadata, snapshot_file_path, version, '3', - storage_backend, repository_name) - - - - def test_generate_timestamp_metadata_without_length(self): - snapshot_file_path, version, expiration_date, storage_backend, \ - repository_name = self._setup_generate_timestamp_metadata_test() - - timestamp_metadata = repo_lib.generate_timestamp_metadata(snapshot_file_path, - version, expiration_date, storage_backend, repository_name, - use_length=False) - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches(timestamp_metadata)) - - # Check that length is not calculated but hashes is - timestamp_file_info = timestamp_metadata['meta'] - - self.assertNotIn('length', timestamp_file_info['snapshot.json']) - self.assertIn('hashes', timestamp_file_info['snapshot.json']) - - - - def test_generate_timestamp_metadata_without_hashes(self): - snapshot_file_path, version, expiration_date, storage_backend, \ - repository_name = self._setup_generate_timestamp_metadata_test() - - timestamp_metadata = repo_lib.generate_timestamp_metadata(snapshot_file_path, - version, expiration_date, storage_backend, repository_name, - use_hashes=False) - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches(timestamp_metadata)) - - # Check that hashes is not calculated but length is - timestamp_file_info = timestamp_metadata['meta'] - - self.assertIn('length', timestamp_file_info['snapshot.json']) - self.assertNotIn('hashes', timestamp_file_info['snapshot.json']) - - - - def test_generate_timestamp_metadata_without_length_and_hashes(self): - snapshot_file_path, version, expiration_date, storage_backend, \ - repository_name = self._setup_generate_timestamp_metadata_test() - - timestamp_metadata = repo_lib.generate_timestamp_metadata(snapshot_file_path, - version, expiration_date, storage_backend, repository_name, - use_hashes=False, use_length=False) - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches(timestamp_metadata)) - - # Check that length and hashes attributes are not added - timestamp_file_info = timestamp_metadata['meta'] - self.assertNotIn('length', timestamp_file_info['snapshot.json']) - self.assertNotIn('hashes', timestamp_file_info['snapshot.json']) - - - - def test_sign_metadata(self): - # Test normal case. 
- repository_name = 'test_repository' - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - metadata_path = os.path.join('repository_data', 'repository', 'metadata') - keystore_path = os.path.join('repository_data', 'keystore') - root_filename = os.path.join(metadata_path, 'root.json') - root_metadata = securesystemslib.util.load_json_file(root_filename)['signed'] - targets_filename = os.path.join(metadata_path, 'targets.json') - targets_metadata = securesystemslib.util.load_json_file(targets_filename)['signed'] - - tuf.keydb.create_keydb_from_root_metadata(root_metadata, repository_name) - tuf.roledb.create_roledb_from_root_metadata(root_metadata, repository_name) - root_keyids = tuf.roledb.get_role_keyids('root', repository_name) - targets_keyids = tuf.roledb.get_role_keyids('targets', repository_name) - - root_private_keypath = os.path.join(keystore_path, 'root_key') - root_private_key = repo_lib.import_rsa_privatekey_from_file(root_private_keypath, - 'password') - - # Sign with a valid, but not a threshold, key. - targets_public_keypath = os.path.join(keystore_path, 'targets_key.pub') - targets_public_key = securesystemslib.interface.\ - import_ed25519_publickey_from_file(targets_public_keypath) - - # sign_metadata() expects the private key 'root_metadata' to be in - # 'tuf.keydb'. Remove any public keys that may be loaded before - # adding private key, otherwise a 'tuf.KeyAlreadyExists' exception is - # raised. - tuf.keydb.remove_key(root_private_key['keyid'], - repository_name=repository_name) - tuf.keydb.add_key(root_private_key, repository_name=repository_name) - tuf.keydb.remove_key(targets_public_key['keyid'], repository_name=repository_name) - tuf.keydb.add_key(targets_public_key, repository_name=repository_name) - - # Verify that a valid root signable is generated. - root_signable = repo_lib.sign_metadata(root_metadata, root_keyids, - root_filename, repository_name) - self.assertTrue(tuf.formats.SIGNABLE_SCHEMA.matches(root_signable)) - - # Test for an unset private key (in this case, target's). - repo_lib.sign_metadata(targets_metadata, targets_keyids, targets_filename, - repository_name) - - # Add an invalid keytype to one of the root keys. - root_keyid = root_keyids[0] - tuf.keydb._keydb_dict[repository_name][root_keyid]['keytype'] = 'bad_keytype' - self.assertRaises(securesystemslib.exceptions.Error, repo_lib.sign_metadata, - root_metadata, root_keyids, root_filename, repository_name) - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.sign_metadata, 3, root_keyids, 'root.json', repository_name) - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.sign_metadata, root_metadata, 3, 'root.json', repository_name) - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.sign_metadata, root_metadata, root_keyids, 3, repository_name) - - - - def test_write_metadata_file(self): - # Test normal case. 
- temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - metadata_directory = os.path.join('repository_data', 'repository', 'metadata') - root_filename = os.path.join(metadata_directory, 'root.json') - root_signable = securesystemslib.util.load_json_file(root_filename) - - output_filename = os.path.join(temporary_directory, 'root.json') - version_number = root_signable['signed']['version'] + 1 - - self.assertFalse(os.path.exists(output_filename)) - storage_backend = securesystemslib.storage.FilesystemBackend() - repo_lib.write_metadata_file(root_signable, output_filename, version_number, - consistent_snapshot=False, storage_backend=storage_backend) - self.assertTrue(os.path.exists(output_filename)) - - # Attempt to over-write the previously written metadata file. An exception - # is not raised in this case, only a debug message is logged. - repo_lib.write_metadata_file(root_signable, output_filename, version_number, - consistent_snapshot=False, storage_backend=storage_backend) - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file, - 3, output_filename, version_number, False, storage_backend) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file, - root_signable, 3, version_number, False, storage_backend) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file, - root_signable, output_filename, '3', False, storage_backend) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file, - root_signable, output_filename, storage_backend, version_number, 3) - - - - def test_create_tuf_client_directory(self): - # Test normal case. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - repository_directory = os.path.join('repository_data', 'repository') - client_directory = os.path.join(temporary_directory, 'client') - - repo_lib.create_tuf_client_directory(repository_directory, client_directory) - - self.assertTrue(os.path.exists(client_directory)) - metadata_directory = os.path.join(client_directory, 'metadata') - current_directory = os.path.join(metadata_directory, 'current') - previous_directory = os.path.join(metadata_directory, 'previous') - self.assertTrue(os.path.exists(client_directory)) - self.assertTrue(os.path.exists(metadata_directory)) - self.assertTrue(os.path.exists(current_directory)) - self.assertTrue(os.path.exists(previous_directory)) - - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.create_tuf_client_directory, 3, client_directory) - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.create_tuf_client_directory, repository_directory, 3) - - - # Test invalid argument (i.e., client directory already exists.) - self.assertRaises(tuf.exceptions.RepositoryError, - repo_lib.create_tuf_client_directory, repository_directory, - client_directory) - - # Test invalid client metadata directory (i.e., non-errno.EEXIST exceptions - # should be re-raised.) - shutil.rmtree(metadata_directory) - - # Save the original metadata directory name so that it can be restored - # after testing. - metadata_directory_name = repo_lib.METADATA_DIRECTORY_NAME - repo_lib.METADATA_DIRECTORY_NAME = '/' - - # Creation of the '/' directory is forbidden on all supported OSs. The '/' - # argument to create_tuf_client_directory should cause it to re-raise a - # non-errno.EEXIST exception. 
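create_tuf_client_directory(), tested above, bootstraps the layout a client needs: a 'metadata' directory with 'current' (trusted copies) and 'previous' subdirectories. A bare-bones sketch of that layout; the real function also copies the repository's metadata into 'current' and raises RepositoryError when the directory already exists:

import os
import shutil

def make_client_layout(client_directory, repository_metadata_directory):
    # Create metadata/current and metadata/previous, the directories the
    # test above asserts on.
    previous = os.path.join(client_directory, 'metadata', 'previous')
    current = os.path.join(client_directory, 'metadata', 'current')
    os.makedirs(previous)
    # Seed the client's initial trust with the repository metadata.
    shutil.copytree(repository_metadata_directory, current)
    return current, previous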
- self.assertRaises((OSError, tuf.exceptions.RepositoryError),
- repo_lib.create_tuf_client_directory, repository_directory, '/')
-
- # Restore the metadata directory name in repo_lib.
- repo_lib.METADATA_DIRECTORY_NAME = metadata_directory_name
-
-
-
- def test__generate_and_write_metadata(self):
- # Test for an invalid, or unsupported, rolename.
- # Load the root metadata provided in 'tuf/tests/repository_data/'.
- repository_name = 'repository_name'
- root_filepath = os.path.join('repository_data', 'repository',
- 'metadata', 'root.json')
- root_signable = securesystemslib.util.load_json_file(root_filepath)
-
- # _generate_and_write_metadata() expects the top-level roles
- # (specifically 'snapshot') and keys to be available in 'tuf.roledb'.
- tuf.roledb.create_roledb_from_root_metadata(root_signable['signed'],
- repository_name)
- tuf.keydb.create_keydb_from_root_metadata(root_signable['signed'],
- repository_name)
- temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
- targets_directory = os.path.join(temporary_directory, 'targets')
- os.mkdir(targets_directory)
- repository_directory = os.path.join(temporary_directory, 'repository')
- metadata_directory = os.path.join(repository_directory,
- repo_lib.METADATA_STAGED_DIRECTORY_NAME)
- targets_metadata = os.path.join('repository_data', 'repository', 'metadata',
- 'targets.json')
- obsolete_metadata = os.path.join(metadata_directory, 'obsolete_role.json')
- securesystemslib.util.ensure_parent_dir(obsolete_metadata)
- shutil.copyfile(targets_metadata, obsolete_metadata)
-
- keystore_path = os.path.join('repository_data', 'keystore')
- targets_private_keypath = os.path.join(keystore_path, 'targets_key')
- targets_private_key = repo_lib.import_ed25519_privatekey_from_file(targets_private_keypath,
- 'password')
- tuf.keydb.remove_key(targets_private_key['keyid'],
- repository_name=repository_name)
- tuf.keydb.add_key(targets_private_key, repository_name=repository_name)
-
- # Verify that obsolete metadata is removed (a metadata file exists on
- # disk, but the role is unavailable in 'tuf.roledb'). First add the obsolete
- # role to 'tuf.roledb' so that its metadata file can be written to disk. 
- targets_roleinfo = tuf.roledb.get_roleinfo('targets', repository_name)
- targets_roleinfo['version'] = 1
- expiration = \
- tuf.formats.unix_timestamp_to_datetime(int(time.time() + 86400))
- expiration = expiration.isoformat() + 'Z'
- targets_roleinfo['expires'] = expiration
- targets_roleinfo['signing_keyids'] = targets_roleinfo['keyids']
- tuf.roledb.add_role('obsolete_role', targets_roleinfo,
- repository_name=repository_name)
-
- storage_backend = securesystemslib.storage.FilesystemBackend()
- repo_lib._generate_and_write_metadata('obsolete_role', obsolete_metadata,
- targets_directory, metadata_directory, storage_backend,
- consistent_snapshot=False, filenames=None,
- repository_name=repository_name)
-
- snapshot_filepath = os.path.join('repository_data', 'repository',
- 'metadata', 'snapshot.json')
- snapshot_signable = securesystemslib.util.load_json_file(snapshot_filepath)
- tuf.roledb.remove_role('obsolete_role', repository_name)
- self.assertTrue(os.path.exists(os.path.join(metadata_directory,
- 'obsolete_role.json')))
- tuf.repository_lib._delete_obsolete_metadata(metadata_directory,
- snapshot_signable['signed'], False, repository_name,
- storage_backend)
- self.assertFalse(os.path.exists(os.path.join(metadata_directory,
- 'obsolete_role.json')))
- shutil.copyfile(targets_metadata, obsolete_metadata)
-
-
-
- def test__delete_obsolete_metadata(self):
- repository_name = 'test_repository'
- temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
- repository_directory = os.path.join(temporary_directory, 'repository')
- metadata_directory = os.path.join(repository_directory,
- repo_lib.METADATA_STAGED_DIRECTORY_NAME)
- os.makedirs(metadata_directory)
- snapshot_filepath = os.path.join('repository_data', 'repository',
- 'metadata', 'snapshot.json')
- snapshot_signable = securesystemslib.util.load_json_file(snapshot_filepath)
- storage_backend = securesystemslib.storage.FilesystemBackend()
-
- # Create role metadata that should not exist in snapshot.json.
- role1_filepath = os.path.join('repository_data', 'repository', 'metadata',
- 'role1.json')
- shutil.copyfile(role1_filepath, os.path.join(metadata_directory, 'role2.json'))
-
- repo_lib._delete_obsolete_metadata(metadata_directory,
- snapshot_signable['signed'], True, repository_name, storage_backend)
-
- # _delete_obsolete_metadata should never delete root.json.
- root_filepath = os.path.join('repository_data', 'repository', 'metadata',
- 'root.json')
- shutil.copyfile(root_filepath, os.path.join(metadata_directory, 'root.json'))
- repo_lib._delete_obsolete_metadata(metadata_directory,
- snapshot_signable['signed'], True, repository_name, storage_backend)
- self.assertTrue(os.path.exists(os.path.join(metadata_directory, 'root.json')))
-
- # Verify that a non-existent metadata directory raises a
- # securesystemslib.exceptions.StorageError. 
- self.assertRaises(securesystemslib.exceptions.StorageError,
- repo_lib._delete_obsolete_metadata, 'non-existent',
- snapshot_signable['signed'], True, repository_name, storage_backend)
-
-
- def test__load_top_level_metadata(self):
- repository_name = 'test_repository'
-
- temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
- repository_directory = os.path.join(temporary_directory, 'repository')
- metadata_directory = os.path.join(repository_directory,
- repo_lib.METADATA_STAGED_DIRECTORY_NAME)
- targets_directory = os.path.join(repository_directory,
- repo_lib.TARGETS_DIRECTORY_NAME)
- shutil.copytree(os.path.join('repository_data', 'repository', 'metadata'),
- metadata_directory)
- shutil.copytree(os.path.join('repository_data', 'repository', 'targets'),
- targets_directory)
-
- # Add a duplicate signature to the Root file for testing purposes.
- root_file = os.path.join(metadata_directory, 'root.json')
- signable = securesystemslib.util.load_json_file(os.path.join(metadata_directory, 'root.json'))
- signable['signatures'].append(signable['signatures'][0])
-
- storage_backend = securesystemslib.storage.FilesystemBackend()
- repo_lib.write_metadata_file(signable, root_file, 8, False, storage_backend)
-
- filenames = repo_lib.get_top_level_metadata_filenames(metadata_directory)
- repository = repo_tool.create_new_repository(repository_directory, repository_name)
- repo_lib._load_top_level_metadata(repository, filenames, repository_name)
-
- # Manually add targets delegations to roledb since
- # repository.write('targets') will try to update its delegations.
- targets_filepath = os.path.join('repository_data', 'repository',
- 'metadata', 'targets.json')
- targets_signable = securesystemslib.util.load_json_file(targets_filepath)
- delegations = targets_signable['signed']['delegations']
-
- roleinfo = {}
- roleinfo['name'] = delegations['roles'][0]['name']
- roleinfo['keyids'] = delegations['roles'][0]['keyids']
- roleinfo['threshold'] = delegations['roles'][0]['threshold']
- roleinfo['version'] = 1
- tuf.roledb.add_role('role1', roleinfo, repository_name)
-
- keystore_path = os.path.join('repository_data', 'keystore')
- root_privkey_path = os.path.join(keystore_path, 'root_key')
- targets_privkey_path = os.path.join(keystore_path, 'targets_key')
- snapshot_privkey_path = os.path.join(keystore_path, 'snapshot_key')
- timestamp_privkey_path = os.path.join(keystore_path, 'timestamp_key')
-
- repository.root.load_signing_key(repo_lib.import_rsa_privatekey_from_file(root_privkey_path, 'password'))
- repository.targets.load_signing_key(repo_lib.import_ed25519_privatekey_from_file(targets_privkey_path, 'password'))
- repository.snapshot.load_signing_key(repo_lib.import_ed25519_privatekey_from_file(snapshot_privkey_path, 'password'))
- repository.timestamp.load_signing_key(repo_lib.import_ed25519_privatekey_from_file(timestamp_privkey_path, 'password'))
-
- # Partially write all top-level roles (we increase the threshold of each
- # top-level role so that they are flagged as partially written). 
- repository.root.threshold = repository.root.threshold + 1 - repository.snapshot.threshold = repository.snapshot.threshold + 1 - repository.targets.threshold = repository.targets.threshold + 1 - repository.timestamp.threshold = repository.timestamp.threshold + 1 - repository.write('root') - repository.write('snapshot') - repository.write('targets') - repository.write('timestamp') - - repo_lib._load_top_level_metadata(repository, filenames, repository_name) - - # Attempt to load a repository with missing top-level metadata. - for role_file in os.listdir(metadata_directory): - if role_file.endswith('.json') and not role_file.startswith('root'): - role_filename = os.path.join(metadata_directory, role_file) - os.remove(role_filename) - self.assertRaises(tuf.exceptions.RepositoryError, - repo_lib._load_top_level_metadata, repository, filenames, - repository_name) - - # Remove the required Root file and verify that an exception is raised. - os.remove(os.path.join(metadata_directory, 'root.json')) - self.assertRaises(tuf.exceptions.RepositoryError, - repo_lib._load_top_level_metadata, repository, filenames, - repository_name) - - - - def test__remove_invalid_and_duplicate_signatures(self): - # Remove duplicate PSS signatures (same key generates valid, but different - # signatures). First load a valid signable (in this case, the root role). - repository_name = 'test_repository' - root_filepath = os.path.join('repository_data', 'repository', - 'metadata', 'root.json') - root_signable = securesystemslib.util.load_json_file(root_filepath) - key_filepath = os.path.join('repository_data', 'keystore', 'root_key') - root_rsa_key = repo_lib.import_rsa_privatekey_from_file(key_filepath, - 'password') - - # Add 'root_rsa_key' to tuf.keydb, since - # _remove_invalid_and_duplicate_signatures() checks for unknown keys in - # tuf.keydb. - tuf.keydb.add_key(root_rsa_key, repository_name=repository_name) - - # Append the new valid, but duplicate PSS signature, and test that - # duplicates are removed. create_signature() generates a key for the - # key type of the first argument (i.e., root_rsa_key). - data = securesystemslib.formats.encode_canonical(root_signable['signed']).encode('utf-8') - new_pss_signature = securesystemslib.keys.create_signature(root_rsa_key, - data) - root_signable['signatures'].append(new_pss_signature) - - expected_number_of_signatures = len(root_signable['signatures']) - tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable, - repository_name) - self.assertEqual(len(root_signable), expected_number_of_signatures) - - # Test for an invalid keyid. - root_signable['signatures'][0]['keyid'] = '404' - tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable, - repository_name) - - # Re-add a valid signature for the following test condition. - root_signable['signatures'].append(new_pss_signature) - - # Test that an exception is not raised if an invalid sig is present, - # and that the duplicate key is removed 'root_signable'. - root_signable['signatures'][0]['sig'] = '4040' - invalid_keyid = root_signable['signatures'][0]['keyid'] - tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable, - repository_name) - - for signature in root_signable['signatures']: - self.assertFalse(invalid_keyid == signature['keyid']) - - - -# Run the test cases. 
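The behaviour that test__remove_invalid_and_duplicate_signatures() pins down reduces to: keep at most one signature per keyid, and only for keys known to the keydb. A standalone sketch under those assumptions; the deleted helper additionally verified each signature cryptographically:

def remove_invalid_and_duplicate_signatures(signable, known_keyids):
    # Keep one signature per keyid, dropping unknown keyids; a
    # simplified model of the helper exercised above.
    seen = set()
    kept = []
    for signature in signable['signatures']:
        keyid = signature['keyid']
        if keyid in known_keyids and keyid not in seen:
            seen.add(keyid)
            kept.append(signature)
    signable['signatures'] = kept

signable = {'signatures': [{'keyid': 'abc', 'sig': '00'},
                           {'keyid': 'abc', 'sig': '11'},
                           {'keyid': '404', 'sig': '22'}]}
remove_invalid_and_duplicate_signatures(signable, known_keyids={'abc'})
assert [s['keyid'] for s in signable['signatures']] == ['abc']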
-if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_repository_tool.py b/tests/test_repository_tool.py deleted file mode 100755 index be0333c351..0000000000 --- a/tests/test_repository_tool.py +++ /dev/null @@ -1,2199 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_repository_tool.py - - - Vladimir Diaz - - - April 7, 2014. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'repository_tool.py'. -""" - -import os -import time -import datetime -import unittest -import logging -import tempfile -import shutil -import sys - -import tuf -import tuf.log -import tuf.formats -import tuf.roledb -import tuf.keydb -import tuf.repository_tool as repo_tool - -from tests import utils - -import securesystemslib -import securesystemslib.exceptions -import securesystemslib.storage - -logger = logging.getLogger(__name__) - -repo_tool.disable_console_log_messages() - - -class TestRepository(unittest.TestCase): - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownClass() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - - @classmethod - def tearDownClass(cls): - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - shutil.rmtree(cls.temporary_directory) - - - - def setUp(self): - tuf.roledb.create_roledb('test_repository') - tuf.keydb.create_keydb('test_repository') - - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - - def test_init(self): - # Test normal case. - repository_name = 'test_repository' - storage_backend = securesystemslib.storage.FilesystemBackend() - repository = repo_tool.Repository('repository_directory/', - 'metadata_directory/', 'targets_directory/', storage_backend, - repository_name) - self.assertTrue(isinstance(repository.root, repo_tool.Root)) - self.assertTrue(isinstance(repository.snapshot, repo_tool.Snapshot)) - self.assertTrue(isinstance(repository.timestamp, repo_tool.Timestamp)) - self.assertTrue(isinstance(repository.targets, repo_tool.Targets)) - - # Test improperly formatted arguments. 
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - storage_backend, 3, 'metadata_directory/', 'targets_directory') - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - 'repository_directory', storage_backend, 3, 'targets_directory') - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - 'repository_directory', 'metadata_directory', 3, storage_backend) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - 'repository_directory/', 'metadata_directory/', 'targets_directory/', - storage_backend, repository_name, use_timestamp_length=3) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - 'repository_directory/', 'metadata_directory/', 'targets_directory/', - storage_backend, repository_name, use_timestamp_length=False, - use_timestamp_hashes=3) - - - - def create_repository_directory(self): - # Create a repository directory and copy in test targets data - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - targets_directory = os.path.join(temporary_directory, 'repository', - repo_tool.TARGETS_DIRECTORY_NAME) - original_targets_directory = os.path.join('repository_data', - 'repository', 'targets') - shutil.copytree(original_targets_directory, targets_directory) - - # In this case, create_new_repository() creates the 'repository/' - # sub-directory in 'temporary_directory' if it does not exist. - return os.path.join(temporary_directory, 'repository') - - - - - def test_writeall(self): - # Test creation of a TUF repository. - # - # 1. Import public and private keys. - # 2. Add verification keys. - # 3. Load signing keys. - # 4. Add target files. - # 5. Perform delegation. - # 6. writeall() - # - # Copy the target files from 'tuf/tests/repository_data' so that writeall() - # has target fileinfo to include in metadata. - repository_name = 'test_repository' - repository_directory = self.create_repository_directory() - metadata_directory = os.path.join(repository_directory, - repo_tool.METADATA_STAGED_DIRECTORY_NAME) - - repository = repo_tool.create_new_repository(repository_directory, repository_name) - - # (1) Load the public and private keys of the top-level roles, and one - # delegated role. - keystore_directory = os.path.join('repository_data', 'keystore') - - # Load the public keys. - root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub') - targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub') - snapshot_pubkey_path = os.path.join(keystore_directory, 'snapshot_key.pub') - timestamp_pubkey_path = os.path.join(keystore_directory, 'timestamp_key.pub') - role1_pubkey_path = os.path.join(keystore_directory, 'delegation_key.pub') - - root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path) - targets_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path) - snapshot_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path) - timestamp_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path) - role1_pubkey = repo_tool.import_ed25519_publickey_from_file(role1_pubkey_path) - - # Load the private keys. 
- root_privkey_path = os.path.join(keystore_directory, 'root_key') - targets_privkey_path = os.path.join(keystore_directory, 'targets_key') - snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key') - timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key') - role1_privkey_path = os.path.join(keystore_directory, 'delegation_key') - - root_privkey = \ - repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password') - targets_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, - 'password') - snapshot_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path, - 'password') - timestamp_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path, - 'password') - role1_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(role1_privkey_path, - 'password') - - - # (2) Add top-level verification keys. - repository.root.add_verification_key(root_pubkey) - repository.targets.add_verification_key(targets_pubkey) - repository.snapshot.add_verification_key(snapshot_pubkey) - - # Verify that repository.writeall() fails for insufficient threshold - # of signatures (default threshold = 1). - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - - repository.timestamp.add_verification_key(timestamp_pubkey) - - - # (3) Load top-level signing keys. - repository.status() - repository.root.load_signing_key(root_privkey) - repository.status() - repository.targets.load_signing_key(targets_privkey) - repository.status() - repository.snapshot.load_signing_key(snapshot_privkey) - repository.status() - - # Verify that repository.writeall() fails for insufficient threshold - # of signatures (default threshold = 1). - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - - repository.timestamp.load_signing_key(timestamp_privkey) - - - # (4) Add target files. - target1 = 'file1.txt' - target2 = 'file2.txt' - target3 = 'file3.txt' - repository.targets.add_target(target1) - repository.targets.add_target(target2) - - # (5) Perform delegation. - repository.targets.delegate('role1', [role1_pubkey], [target3]) - repository.targets('role1').load_signing_key(role1_privkey) - - # (6) Write repository. - repository.writeall() - - # Verify that the expected metadata is written. - for role in ['root.json', 'targets.json', 'snapshot.json', 'timestamp.json']: - role_filepath = os.path.join(metadata_directory, role) - role_signable = securesystemslib.util.load_json_file(role_filepath) - - # Raise 'securesystemslib.exceptions.FormatError' if 'role_signable' is - # an invalid signable. - tuf.formats.check_signable_object_format(role_signable) - - self.assertTrue(os.path.exists(role_filepath)) - - # Verify the 'role1.json' delegation is also written. - role1_filepath = os.path.join(metadata_directory, 'role1.json') - role1_signable = securesystemslib.util.load_json_file(role1_filepath) - tuf.formats.check_signable_object_format(role1_signable) - - # Verify that an exception is *not* raised for multiple - # repository.writeall(). - repository.writeall() - - # Verify that status() does not raise an exception. - repository.status() - - # Verify that status() does not raise - # 'tuf.exceptions.InsufficientKeysError' if a top-level role - # does not contain a threshold of keys. 
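# Aside: condensed to its essentials, the bootstrap flow exercised by
# test_writeall() is the sequence below.  This is an illustrative sketch,
# not part of the test suite: the paths and repository name are
# hypothetical, and a single ed25519 key pair is reused for every role
# purely to keep the sketch short (real repositories use one key pair per
# role).
repo = repo_tool.create_new_repository('repository/', 'example')
pubkey = repo_tool.import_ed25519_publickey_from_file('keys/role_key.pub')
privkey = repo_tool.import_ed25519_privatekey_from_file('keys/role_key',
    'password')
for role in [repo.root, repo.targets, repo.snapshot, repo.timestamp]:
  role.add_verification_key(pubkey)   # (2) declare the trusted public key
  role.load_signing_key(privkey)      # (3) provide the matching private key
repo.targets.add_target('file1.txt')  # (4) register a target file
repo.writeall()                       # (6) sign and write all metadata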
-    targets_roleinfo = tuf.roledb.get_roleinfo('targets', repository_name)
-    old_threshold = targets_roleinfo['threshold']
-    targets_roleinfo['threshold'] = 10
-    tuf.roledb.update_roleinfo('targets', targets_roleinfo,
-        repository_name=repository_name)
-    repository.status()
-
-    # Restore the original threshold values.
-    targets_roleinfo = tuf.roledb.get_roleinfo('targets', repository_name)
-    targets_roleinfo['threshold'] = old_threshold
-    tuf.roledb.update_roleinfo('targets', targets_roleinfo,
-        repository_name=repository_name)
-
-    # Verify that status() does not raise
-    # 'tuf.exceptions.InsufficientKeysError' if a delegated role
-    # does not contain a threshold of keys.
-    role1_roleinfo = tuf.roledb.get_roleinfo('role1', repository_name)
-    old_role1_threshold = role1_roleinfo['threshold']
-    role1_roleinfo['threshold'] = 10
-    tuf.roledb.update_roleinfo('role1', role1_roleinfo,
-        repository_name=repository_name)
-    repository.status()
-
-    # Restore role1's threshold.
-    role1_roleinfo = tuf.roledb.get_roleinfo('role1', repository_name)
-    role1_roleinfo['threshold'] = old_role1_threshold
-    tuf.roledb.update_roleinfo('role1', role1_roleinfo,
-        repository_name=repository_name)
-
-    # Verify that status() does not raise
-    # 'tuf.exceptions.UnsignedMetadataError' if any of the top-level roles
-    # are improperly signed.  Test that 'root' is improperly signed.
-    repository.root.unload_signing_key(root_privkey)
-    repository.root.load_signing_key(targets_privkey)
-    repository.status()
-
-    repository.targets('role1').unload_signing_key(role1_privkey)
-    repository.targets('role1').load_signing_key(targets_privkey)
-    repository.status()
-
-    # Reset Root and 'role1', and verify Targets.
-    repository.root.unload_signing_key(targets_privkey)
-    repository.root.load_signing_key(root_privkey)
-    repository.targets('role1').unload_signing_key(targets_privkey)
-    repository.targets('role1').load_signing_key(role1_privkey)
-    repository.targets.unload_signing_key(targets_privkey)
-    repository.targets.load_signing_key(snapshot_privkey)
-    repository.status()
-
-    # Reset Targets and verify Snapshot.
-    repository.targets.unload_signing_key(snapshot_privkey)
-    repository.targets.load_signing_key(targets_privkey)
-    repository.snapshot.unload_signing_key(snapshot_privkey)
-    repository.snapshot.load_signing_key(timestamp_privkey)
-    repository.status()
-
-    # Reset Snapshot and verify Timestamp.
-    repository.snapshot.unload_signing_key(timestamp_privkey)
-    repository.snapshot.load_signing_key(snapshot_privkey)
-    repository.timestamp.unload_signing_key(timestamp_privkey)
-    repository.timestamp.load_signing_key(root_privkey)
-    repository.status()
-
-    # Reset Timestamp.
-    repository.timestamp.unload_signing_key(root_privkey)
-    repository.timestamp.load_signing_key(timestamp_privkey)
-
-    # Verify that a writeall() fails if a repository is loaded and a change
-    # is made to a role.
-    repo_tool.load_repository(repository_directory, repository_name)
-
-    repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 0)
-    self.assertRaises(tuf.exceptions.UnsignedMetadataError,
-        repository.writeall)
-
-    # Load the required Timestamp key so that a valid repository can be
-    # written.
-    repository.timestamp.load_signing_key(timestamp_privkey)
-    repository.writeall()
-
-    # Test creation of a consistent snapshot repository.  Writing a
-    # consistent snapshot modifies the Root metadata, which specifies whether
-    # a repository supports consistent snapshots.  Verify that an exception
-    # is raised due to the missing signature of Root.
-    self.assertRaises(tuf.exceptions.UnsignedMetadataError,
-        repository.writeall, True)
-
-    # Make sure the private keys of Root (a new version is required since
-    # Root will change to enable consistent snapshots), Snapshot, role1, and
-    # Timestamp are loaded before writing the consistent snapshot.
-    repository.root.load_signing_key(root_privkey)
-    repository.snapshot.load_signing_key(snapshot_privkey)
-    # Must also load the Targets signing key, because Targets is re-signed
-    # when updating 'role1'.
-    repository.targets.load_signing_key(targets_privkey)
-    repository.targets('role1').load_signing_key(role1_privkey)
-
-    # Verify that a consistent snapshot can be written and loaded.  The roles
-    # above must be marked as dirty, otherwise writeall() will not create a
-    # consistent snapshot for them.
-    repository.mark_dirty(['role1', 'targets', 'root', 'snapshot',
-        'timestamp'])
-    repository.writeall(consistent_snapshot=True)
-
-    # Verify that the newly written consistent snapshot can be loaded
-    # successfully.
-    repo_tool.load_repository(repository_directory, repository_name)
-
-    # Verify the behavior of marking and unmarking roles as dirty.
-    # We begin by ensuring that writeall() cleared the list of dirty roles.
-    self.assertEqual([], tuf.roledb.get_dirty_roles(repository_name))
-
-    repository.mark_dirty(['root', 'timestamp'])
-    self.assertEqual(['root', 'timestamp'],
-        tuf.roledb.get_dirty_roles(repository_name))
-    repository.unmark_dirty(['root'])
-    self.assertEqual(['timestamp'],
-        tuf.roledb.get_dirty_roles(repository_name))
-
-    # Ensure status() does not leave behind any dirty roles.
-    repository.status()
-    self.assertEqual(['timestamp'],
-        tuf.roledb.get_dirty_roles(repository_name))
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repository.writeall, 3)
-
-
-  def test_writeall_no_files(self):
-    # Test writeall() when using pre-supplied fileinfo.
-    repository_name = 'test_repository'
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    repository_directory = os.path.join(temporary_directory, 'repository')
-    targets_directory = os.path.join(repository_directory,
-        repo_tool.TARGETS_DIRECTORY_NAME)
-
-    repository = repo_tool.create_new_repository(repository_directory,
-        repository_name)
-
-    # (1) Load the public and private keys of the top-level roles.
-    keystore_directory = os.path.join('repository_data', 'keystore')
-
-    # Load the public keys.
-    root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub')
-    targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub')
-    snapshot_pubkey_path = os.path.join(keystore_directory,
-        'snapshot_key.pub')
-    timestamp_pubkey_path = os.path.join(keystore_directory,
-        'timestamp_key.pub')
-
-    root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path)
-    targets_pubkey = \
-      repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path)
-    snapshot_pubkey = \
-      repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path)
-    timestamp_pubkey = \
-      repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path)
-
-    # Load the private keys.
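# Aside: the mark_dirty()/unmark_dirty() bookkeeping checked above is
# essentially set arithmetic over role names.  A minimal sketch of the idea
# (illustrative only; tuf.roledb tracks this per repository and in more
# detail):
class DirtyRoleTracker:
  def __init__(self):
    self._dirty = set()

  def mark_dirty(self, rolenames):
    self._dirty.update(rolenames)

  def unmark_dirty(self, rolenames):
    self._dirty.difference_update(rolenames)

  def get_dirty_roles(self):
    return sorted(self._dirty)

tracker = DirtyRoleTracker()
tracker.mark_dirty(['root', 'timestamp'])
tracker.unmark_dirty(['root'])
assert tracker.get_dirty_roles() == ['timestamp']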
- root_privkey_path = os.path.join(keystore_directory, 'root_key') - targets_privkey_path = os.path.join(keystore_directory, 'targets_key') - snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key') - timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key') - - root_privkey = \ - repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password') - targets_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, - 'password') - snapshot_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path, - 'password') - timestamp_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path, - 'password') - - - # (2) Add top-level verification keys. - repository.root.add_verification_key(root_pubkey) - repository.targets.add_verification_key(targets_pubkey) - repository.snapshot.add_verification_key(snapshot_pubkey) - - # Verify that repository.writeall() fails for insufficient threshold - # of signatures (default threshold = 1). - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - - repository.timestamp.add_verification_key(timestamp_pubkey) - - - # (3) Load top-level signing keys. - repository.status() - repository.root.load_signing_key(root_privkey) - repository.status() - repository.targets.load_signing_key(targets_privkey) - repository.status() - repository.snapshot.load_signing_key(snapshot_privkey) - repository.status() - - # Verify that repository.writeall() fails for insufficient threshold - # of signatures (default threshold = 1). - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - - repository.timestamp.load_signing_key(timestamp_privkey) - - # Add target fileinfo - target1_hashes = {'sha256': 'c2986576f5fdfd43944e2b19e775453b96748ec4fe2638a6d2f32f1310967095'} - target2_hashes = {'sha256': '517c0ce943e7274a2431fa5751e17cfd5225accd23e479bfaad13007751e87ef'} - target1_fileinfo = tuf.formats.make_targets_fileinfo(555, target1_hashes) - target2_fileinfo = tuf.formats.make_targets_fileinfo(37, target2_hashes) - target1 = 'file1.txt' - target2 = 'file2.txt' - repository.targets.add_target(target1, fileinfo=target1_fileinfo) - repository.targets.add_target(target2, fileinfo=target2_fileinfo) - - repository.writeall(use_existing_fileinfo=True) - - # Verify that the expected metadata is written. - metadata_directory = os.path.join(repository_directory, - repo_tool.METADATA_STAGED_DIRECTORY_NAME) - - for role in ['root.json', 'targets.json', 'snapshot.json', 'timestamp.json']: - role_filepath = os.path.join(metadata_directory, role) - role_signable = securesystemslib.util.load_json_file(role_filepath) - - # Raise 'securesystemslib.exceptions.FormatError' if 'role_signable' is - # an invalid signable. - tuf.formats.check_signable_object_format(role_signable) - - self.assertTrue(os.path.exists(role_filepath)) - - - - def test_get_filepaths_in_directory(self): - # Test normal case. - # Use the pre-generated metadata directory for testing. - # Set 'repo' reference to improve readability. - repo = repo_tool.Repository - metadata_directory = os.path.join('repository_data', - 'repository', 'metadata') - - # Verify the expected filenames. get_filepaths_in_directory() returns - # a list of absolute paths. - metadata_files = repo.get_filepaths_in_directory(metadata_directory) - - # Construct list of file paths expected, determining absolute paths. 
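# Aside: with use_existing_fileinfo=True the caller supplies length and
# hashes rather than having writeall() read each target from disk; the
# hard-coded hashes above simulate exactly that.  A sketch of deriving the
# same structure from a local file (illustrative; assumes 'path' names an
# existing file and that tuf.formats is importable as in this module):
import hashlib
import tuf.formats

def fileinfo_for(path):
  with open(path, 'rb') as source_file:
    data = source_file.read()
  # make_targets_fileinfo() wraps the length and hash dict into the
  # fileinfo format expected by add_target(fileinfo=...).
  return tuf.formats.make_targets_fileinfo(
      len(data), {'sha256': hashlib.sha256(data).hexdigest()})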
- expected_files = [] - for filepath in ['1.root.json', 'root.json', 'targets.json', - 'snapshot.json', 'timestamp.json', 'role1.json', 'role2.json']: - expected_files.append(os.path.abspath(os.path.join( - 'repository_data', 'repository', 'metadata', filepath))) - - self.assertEqual(sorted(expected_files), sorted(metadata_files)) - - - # Test when the 'recursive_walk' argument is True. - # In this case, recursive walk should yield the same results as the - # previous, non-recursive call. - metadata_files = repo.get_filepaths_in_directory(metadata_directory, - recursive_walk=True) - self.assertEqual(sorted(expected_files), sorted(metadata_files)) - - # And this recursive call from the directory above should yield the same - # results as well, plus extra files. - metadata_files = repo.get_filepaths_in_directory( - os.path.join('repository_data', 'repository'), recursive_walk=True) - for expected_file in expected_files: - self.assertIn(expected_file, metadata_files) - # self.assertEqual(sorted(expected_files), sorted(metadata_files)) - - # Now let's check it against the full list of expected files for the parent - # directory.... We'll add to the existing list. Expect the same files in - # metadata.staged/ as in metadata/, and a few target files in targets/ - # This is somewhat redundant with the previous test, but together they're - # probably more future-proof. - for filepath in ['file1.txt', 'file2.txt', 'file3.txt']: - expected_files.append(os.path.abspath(os.path.join( - 'repository_data', 'repository', 'targets', filepath))) - for filepath in [ '1.root.json', 'root.json', 'targets.json', - 'snapshot.json', 'timestamp.json', 'role1.json', 'role2.json']: - expected_files.append(os.path.abspath(os.path.join( - 'repository_data', 'repository', 'metadata.staged', filepath))) - - self.assertEqual(sorted(expected_files), sorted(metadata_files)) - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, repo.get_filepaths_in_directory, - 3, recursive_walk=False, followlinks=False) - self.assertRaises(securesystemslib.exceptions.FormatError, repo.get_filepaths_in_directory, - metadata_directory, 3, followlinks=False) - self.assertRaises(securesystemslib.exceptions.FormatError, repo.get_filepaths_in_directory, - metadata_directory, recursive_walk=False, followlinks=3) - - # Test invalid directory argument. - # A non-directory. - self.assertRaises(securesystemslib.exceptions.Error, repo.get_filepaths_in_directory, - os.path.join(metadata_directory, 'root.json')) - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - nonexistent_directory = os.path.join(temporary_directory, 'nonexistent/') - self.assertRaises(securesystemslib.exceptions.Error, repo.get_filepaths_in_directory, - nonexistent_directory, recursive_walk=False, - followlinks=False) - - - - def test_writeall_abstract_storage(self): - # Test creation of a TUF repository with a custom storage backend to ensure - # that functions relying on a storage backend being supplied operate - # correctly - - - class TestStorageBackend(securesystemslib.storage.StorageBackendInterface): - """ - An implementation of securesystemslib.storage.StorageBackendInterface - which mutates filenames on put()/get(), translating filename in memory - to filename + '.tst' on-disk, such that trying to read the - expected/canonical file paths from local storage doesn't find the TUF - metadata files. 
- """ - - from contextlib import contextmanager - - - @contextmanager - def get(self, filepath): - file_object = open(filepath + '.tst', 'rb') - yield file_object - file_object.close() - - - def put(self, fileobj, filepath): - if not fileobj.closed: - fileobj.seek(0) - - with open(filepath + '.tst', 'wb') as destination_file: - shutil.copyfileobj(fileobj, destination_file) - destination_file.flush() - os.fsync(destination_file.fileno()) - - - def remove(self, filepath): - os.remove(filepath + '.tst') - - - def getsize(self, filepath): - return os.path.getsize(filepath + '.tst') - - - def create_folder(self, filepath): - if not filepath: - return - try: - os.makedirs(filepath) - except OSError as err: - pass - - - def list_folder(self, filepath): - contents = [] - files = os.listdir(filepath) - - for fi in files: - if fi.endswith('.tst'): - contents.append(fi.split('.tst')[0]) - else: - contents.append(fi) - - return contents - - - - # Set up the repository directory - repository_name = 'test_repository' - repository_directory = self.create_repository_directory() - metadata_directory = os.path.join(repository_directory, - repo_tool.METADATA_STAGED_DIRECTORY_NAME) - targets_directory = os.path.join(repository_directory, - repo_tool.TARGETS_DIRECTORY_NAME) - - # TestStorageBackend expects all files on disk to have an additional '.tst' - # file extension - for target in os.listdir(targets_directory): - src = os.path.join(targets_directory, target) - dst = os.path.join(targets_directory, target + '.tst') - os.rename(src, dst) - - # (0) Create a repository with TestStorageBackend() - storage_backend = TestStorageBackend() - repository = repo_tool.create_new_repository(repository_directory, - repository_name, - storage_backend) - - # (1) Load the public and private keys of the top-level roles, and one - # delegated role. - keystore_directory = os.path.join('repository_data', 'keystore') - - # Load the public keys. - root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub') - targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub') - snapshot_pubkey_path = os.path.join(keystore_directory, 'snapshot_key.pub') - timestamp_pubkey_path = os.path.join(keystore_directory, 'timestamp_key.pub') - - root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path) - targets_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path) - snapshot_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path) - timestamp_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path) - - # Load the private keys. - root_privkey_path = os.path.join(keystore_directory, 'root_key') - targets_privkey_path = os.path.join(keystore_directory, 'targets_key') - snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key') - timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key') - - root_privkey = \ - repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password') - targets_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, - 'password') - snapshot_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path, - 'password') - timestamp_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path, - 'password') - - - # (2) Add top-level verification keys. 
-    repository.root.add_verification_key(root_pubkey)
-    repository.targets.add_verification_key(targets_pubkey)
-    repository.snapshot.add_verification_key(snapshot_pubkey)
-    repository.timestamp.add_verification_key(timestamp_pubkey)
-
-
-    # (3) Load top-level signing keys.
-    repository.root.load_signing_key(root_privkey)
-    repository.targets.load_signing_key(targets_privkey)
-    repository.snapshot.load_signing_key(snapshot_privkey)
-    repository.timestamp.load_signing_key(timestamp_privkey)
-
-
-    # (4) Add target files.
-    target1 = 'file1.txt'
-    target2 = 'file2.txt'
-    target3 = 'file3.txt'
-    repository.targets.add_target(target1)
-    repository.targets.add_target(target2)
-    repository.targets.add_target(target3)
-
-    # (5) Write repository.
-    repository.writeall()
-
-
-    # Ensure all of the metadata files exist at the mutated file location and
-    # that those files are valid metadata.
-    for role in ['root.json.tst', 'targets.json.tst', 'snapshot.json.tst',
-        'timestamp.json.tst']:
-      role_filepath = os.path.join(metadata_directory, role)
-      self.assertTrue(os.path.exists(role_filepath))
-
-      role_signable = securesystemslib.util.load_json_file(role_filepath)
-      # Raise 'securesystemslib.exceptions.FormatError' if 'role_signable' is
-      # an invalid signable.
-      tuf.formats.check_signable_object_format(role_signable)
-
-
-  def test_signature_order(self):
-    """Test that signatures are added to metadata in alphabetical order."""
-    # Create an empty repo dir and init a default repo in memory.
-    repo_dir = tempfile.mkdtemp(dir=self.temporary_directory)
-    repo = repo_tool.create_new_repository(repo_dir)
-
-    # Dedicate any two existing test keys as root signing keys.
-    for key_name in ["targets_key", "snapshot_key"]:
-      repo.root.load_signing_key(
-          repo_tool.import_ed25519_privatekey_from_file(
-              os.path.join("repository_data", "keystore", key_name),
-              "password"))
-
-    # Write root metadata with two signatures.
-    repo.write("root")
-
-    # Load the signed and written json metadata back into memory.
-    root_metadata_path = os.path.join(
-        repo_dir, repo_tool.METADATA_STAGED_DIRECTORY_NAME, "root.json")
-    root_metadata = securesystemslib.util.load_json_file(root_metadata_path)
-
-    # Assert that signatures are ordered alphabetically (by signing key
-    # keyid).
-    self.assertListEqual(
-        [sig["keyid"] for sig in root_metadata["signatures"]],
-        [
-          "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d",
-          "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093"
-        ])
-
-
-
-class TestMetadata(unittest.TestCase):
-  def setUp(self):
-    # Inherit from the repo_tool.Metadata() base class.  All of the methods
-    # to be tested in TestMetadata require at least 1 role, so create it here
-    # and set its roleinfo.
-    tuf.roledb.create_roledb('test_repository')
-    tuf.keydb.create_keydb('test_repository')
-
-    class MetadataRole(repo_tool.Metadata):
-      def __init__(self):
-        super(MetadataRole, self).__init__()
-
-        self._rolename = 'metadata_role'
-        self._repository_name = 'test_repository'
-
-        # Expire in 86400 seconds (1 day).
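# Aside: the alphabetical ordering asserted in test_signature_order()
# amounts to sorting the signature list by keyid before the metadata is
# written.  A sketch of the invariant (with made-up keyids):
signatures = [{'keyid': 'ffee', 'sig': 'abc'}, {'keyid': 'aabb', 'sig': 'def'}]
ordered = sorted(signatures, key=lambda signature: signature['keyid'])
assert [signature['keyid'] for signature in ordered] == ['aabb', 'ffee']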
-        expiration = \
-          tuf.formats.unix_timestamp_to_datetime(int(time.time() + 86400))
-        expiration = expiration.isoformat() + 'Z'
-        roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1,
-                    'signatures': [], 'version': 0,
-                    'consistent_snapshot': False,
-                    'expires': expiration,
-                    'partial_loaded': False}
-
-        tuf.roledb.add_role(self._rolename, roleinfo,
-            repository_name='test_repository')
-
-    self.metadata = MetadataRole()
-
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-    self.metadata = None
-
-
-
-  def test_rolename(self):
-    base_metadata = repo_tool.Metadata()
-
-    self.assertEqual(base_metadata.rolename, None)
-
-    # Test the sub-classed MetadataRole().
-    self.assertEqual(self.metadata.rolename, 'metadata_role')
-
-
-
-  def test_version(self):
-    # Test the version getter, and the default version number.
-    self.assertEqual(self.metadata.version, 0)
-
-    # Test the version setter, and verify the updated version number.
-    self.metadata.version = 8
-    self.assertEqual(self.metadata.version, 8)
-
-
-
-  def test_threshold(self):
-    # Test the threshold getter, and the default threshold number.
-    self.assertEqual(self.metadata.threshold, 1)
-
-    # Test the threshold setter, and verify the updated threshold number.
-    self.metadata.threshold = 3
-    self.assertEqual(self.metadata.threshold, 3)
-
-
-
-  def test_expiration(self):
-    # Test the expiration getter.
-    expiration = self.metadata.expiration
-    self.assertTrue(isinstance(expiration, datetime.datetime))
-
-    # Test the expiration setter.
-    self.metadata.expiration = datetime.datetime(2030, 1, 1, 12, 0)
-    expiration = self.metadata.expiration
-    self.assertTrue(isinstance(expiration, datetime.datetime))
-
-    # Test the setter with a datetime that carries microseconds; the setter
-    # is expected to truncate them.
-    expiration = datetime.datetime.today() + datetime.timedelta(weeks = 1)
-    # Force a nonzero microseconds value if we are unlucky enough to get a 0.
-    if expiration.microsecond == 0:
-      expiration = expiration.replace(microsecond = 1)
-
-    self.metadata.expiration = expiration
-    new_expiration = self.metadata.expiration
-    self.assertTrue(isinstance(new_expiration, datetime.datetime))
-
-    # Check that the microseconds were truncated by the setter.
-    self.assertTrue(new_expiration.microsecond == 0)
-
-    # Test improperly formatted datetime.
-    try:
-      self.metadata.expiration = '3'
-
-    except securesystemslib.exceptions.FormatError:
-      pass
-
-    else:
-      self.fail('Setter failed to detect improperly formatted datetime.')
-
-
-    # Test an invalid argument (i.e., an expiration that has already passed).
-    expired_datetime = tuf.formats.unix_timestamp_to_datetime(
-        int(time.time() - 1))
-    try:
-      self.metadata.expiration = expired_datetime
-
-    except securesystemslib.exceptions.Error:
-      pass
-
-    else:
-      self.fail('Setter failed to detect an expired datetime.')
-
-
-
-  def test_keys(self):
-    # Test the default case, where a verification key has not been added.
-    self.assertEqual(self.metadata.keys, [])
-
-
-    # Test the keys() getter after a verification key has been loaded.
-    key_path = os.path.join('repository_data',
-        'keystore', 'snapshot_key.pub')
-    key_object = repo_tool.import_ed25519_publickey_from_file(key_path)
-    self.metadata.add_verification_key(key_object)
-
-    keyid = key_object['keyid']
-    self.assertEqual([keyid], self.metadata.keys)
-
-
-
-  def test_signing_keys(self):
-    # Test the default case, where a signing key has not been added.
-    self.assertEqual(self.metadata.signing_keys, [])
-
-
-    # Test the signing_keys() getter after a signing key has been loaded.
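# Aside: the truncation verified in test_expiration() simply drops the
# microsecond component before the datetime is stored.  Sketch:
import datetime

moment = datetime.datetime(2030, 1, 1, 12, 0, 0, 123456)
truncated = moment.replace(microsecond=0)
assert truncated.microsecond == 0
assert truncated.replace(microsecond=moment.microsecond) == moment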
-    key_path = os.path.join('repository_data',
-        'keystore', 'root_key')
-    key_object = repo_tool.import_rsa_privatekey_from_file(key_path,
-        'password')
-    self.metadata.load_signing_key(key_object)
-
-    keyid = key_object['keyid']
-    self.assertEqual([keyid], self.metadata.signing_keys)
-
-
-
-
-
-  def test_add_verification_key(self):
-    # Add a verification key and verify that it was added via (role).keys.
-    key_path = os.path.join('repository_data', 'keystore', 'snapshot_key.pub')
-    key_object = repo_tool.import_ed25519_publickey_from_file(key_path)
-    self.metadata.add_verification_key(key_object)
-
-    keyid = key_object['keyid']
-    self.assertEqual([keyid], self.metadata.keys)
-
-    expiration = \
-      tuf.formats.unix_timestamp_to_datetime(int(time.time() + 86400))
-    expiration = expiration.isoformat() + 'Z'
-    roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1,
-                'signatures': [], 'version': 0,
-                'consistent_snapshot': False, 'expires': expiration,
-                'partial_loaded': False}
-
-    tuf.roledb.add_role('Root', roleinfo, 'test_repository')
-    tuf.roledb.add_role('Targets', roleinfo, 'test_repository')
-    tuf.roledb.add_role('Snapshot', roleinfo, 'test_repository')
-    tuf.roledb.add_role('Timestamp', roleinfo, 'test_repository')
-
-    # Test for different top-level role names.
-    self.metadata._rolename = 'Targets'
-    self.metadata.add_verification_key(key_object)
-    self.metadata._rolename = 'Snapshot'
-    self.metadata.add_verification_key(key_object)
-    self.metadata._rolename = 'Timestamp'
-    self.metadata.add_verification_key(key_object)
-
-    # Test for a given 'expires' argument.
-    expires = datetime.datetime(2030, 1, 1, 12, 0)
-    self.metadata.add_verification_key(key_object, expires)
-
-
-    # Test for an expired 'expires'.
-    expired = datetime.datetime(1984, 1, 1, 12, 0)
-    self.assertRaises(securesystemslib.exceptions.Error,
-        self.metadata.add_verification_key, key_object, expired)
-
-    # Test improperly formatted key arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.metadata.add_verification_key, 3)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.metadata.add_verification_key, key_object, 3)
-
-
-
-  def test_remove_verification_key(self):
-    # Add a verification key so that remove_verification_key() can be tested.
-    key_path = os.path.join('repository_data',
-        'keystore', 'snapshot_key.pub')
-    key_object = repo_tool.import_ed25519_publickey_from_file(key_path)
-    self.metadata.add_verification_key(key_object)
-
-    keyid = key_object['keyid']
-    self.assertEqual([keyid], self.metadata.keys)
-
-
-    # Test successful removal of the verification key added above.
-    self.metadata.remove_verification_key(key_object)
-    self.assertEqual(self.metadata.keys, [])
-
-
-    # Test improperly formatted argument.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.metadata.remove_verification_key, 3)
-
-
-    # Test non-existent public key argument.
-    key_path = os.path.join('repository_data',
-        'keystore', 'targets_key.pub')
-    unused_key_object = repo_tool.import_ed25519_publickey_from_file(key_path)
-
-    self.assertRaises(securesystemslib.exceptions.Error,
-        self.metadata.remove_verification_key, unused_key_object)
-
-
-
-  def test_load_signing_key(self):
-    # Test normal case.
- key_path = os.path.join('repository_data', - 'keystore', 'snapshot_key') - key_object = repo_tool.import_ed25519_privatekey_from_file(key_path, 'password') - self.metadata.load_signing_key(key_object) - - keyid = key_object['keyid'] - self.assertEqual([keyid], self.metadata.signing_keys) - - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.load_signing_key, 3) - - - # Test non-private key. - key_path = os.path.join('repository_data', - 'keystore', 'snapshot_key.pub') - key_object = repo_tool.import_ed25519_publickey_from_file(key_path) - self.assertRaises(securesystemslib.exceptions.Error, self.metadata.load_signing_key, key_object) - - - - def test_unload_signing_key(self): - # Load a signing key so that unload_signing_key() can have a key to unload. - key_path = os.path.join('repository_data', - 'keystore', 'snapshot_key') - key_object = repo_tool.import_ed25519_privatekey_from_file(key_path, 'password') - self.metadata.load_signing_key(key_object) - - keyid = key_object['keyid'] - self.assertEqual([keyid], self.metadata.signing_keys) - - self.metadata.unload_signing_key(key_object) - - self.assertEqual(self.metadata.signing_keys, []) - - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.unload_signing_key, 3) - - - # Test non-existent key argument. - key_path = os.path.join('repository_data', - 'keystore', 'targets_key') - unused_key_object = repo_tool.import_ed25519_privatekey_from_file(key_path, - 'password') - - self.assertRaises(securesystemslib.exceptions.Error, self.metadata.unload_signing_key, - unused_key_object) - - - - def test_add_signature(self): - # Test normal case. - # Load signature list from any of pre-generated metadata; needed for - # testing. - metadata_directory = os.path.join('repository_data', - 'repository', 'metadata') - root_filepath = os.path.join(metadata_directory, 'root.json') - root_signable = securesystemslib.util.load_json_file(root_filepath) - signatures = root_signable['signatures'] - - # Add the first signature from the list, as only one is needed. - self.metadata.add_signature(signatures[0]) - self.assertEqual(signatures, self.metadata.signatures) - - # Verify that a signature is added if a 'signatures' entry is not present. - tuf.roledb.create_roledb_from_root_metadata(root_signable['signed'], repository_name='test_repository') - del tuf.roledb._roledb_dict['test_repository']['root']['signatures'] - self.metadata._rolename = 'root' - self.metadata.add_signature(signatures[0]) - - # Add a duplicate signature. - self.metadata.add_signature(signatures[0]) - - # Test improperly formatted signature argument. - self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.add_signature, 3) - self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.add_signature, signatures[0], 3) - - - - def test_remove_signature(self): - # Test normal case. - # Add a signature so remove_signature() has some signature to remove. - metadata_directory = os.path.join('repository_data', - 'repository', 'metadata') - root_filepath = os.path.join(metadata_directory, 'root.json') - root_signable = securesystemslib.util.load_json_file(root_filepath) - signatures = root_signable['signatures'] - self.metadata.add_signature(signatures[0]) - - self.metadata.remove_signature(signatures[0]) - self.assertEqual(self.metadata.signatures, []) - - - # Test improperly formatted signature argument. 
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.metadata.remove_signature, 3)
-
-    # Test invalid signature argument (i.e., a signature that has not been
-    # added).  Load an unused signature to be tested.
-    targets_filepath = os.path.join(metadata_directory, 'targets.json')
-    targets_signable = securesystemslib.util.load_json_file(targets_filepath)
-    signatures = targets_signable['signatures']
-
-    self.assertRaises(securesystemslib.exceptions.Error,
-        self.metadata.remove_signature, signatures[0])
-
-
-
-  def test_signatures(self):
-    # Test the default case, where no signatures have been added yet.
-    self.assertEqual(self.metadata.signatures, [])
-
-
-    # Test the getter after adding an example signature.
-    metadata_directory = os.path.join('repository_data',
-        'repository', 'metadata')
-    root_filepath = os.path.join(metadata_directory, 'root.json')
-    root_signable = securesystemslib.util.load_json_file(root_filepath)
-    signatures = root_signable['signatures']
-
-    # Add the first signature from the list, as only one is needed.
-    self.metadata.add_signature(signatures[0])
-    self.assertEqual(signatures, self.metadata.signatures)
-
-
-
-class TestRoot(unittest.TestCase):
-  def setUp(self):
-    tuf.roledb.create_roledb('test_repository')
-    tuf.keydb.create_keydb('test_repository')
-
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-
-
-
-  def test_init(self):
-    # Test normal case.
-    # Root() subclasses Metadata(), and creates a 'root' role in 'tuf.roledb'.
-    repository_name = 'test_repository'
-    root_object = repo_tool.Root(repository_name)
-    self.assertTrue(isinstance(root_object, repo_tool.Metadata))
-    self.assertTrue(tuf.roledb.role_exists('root', repository_name))
-
-
-
-class TestTimestamp(unittest.TestCase):
-  def setUp(self):
-    tuf.roledb.create_roledb('test_repository')
-    tuf.keydb.create_keydb('test_repository')
-
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-
-
-
-  def test_init(self):
-    # Test normal case.
-    # Timestamp() subclasses Metadata(), and creates a 'timestamp' role in
-    # 'tuf.roledb'.
-    timestamp_object = repo_tool.Timestamp('test_repository')
-    self.assertTrue(isinstance(timestamp_object, repo_tool.Metadata))
-    self.assertTrue(tuf.roledb.role_exists('timestamp', 'test_repository'))
-
-
-
-
-
-class TestSnapshot(unittest.TestCase):
-  def setUp(self):
-    tuf.roledb.create_roledb('test_repository')
-    tuf.keydb.create_keydb('test_repository')
-
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-
-
-
-  def test_init(self):
-    # Test normal case.
-    # Snapshot() subclasses Metadata(), and creates a 'snapshot' role in
-    # 'tuf.roledb'.
-    snapshot_object = repo_tool.Snapshot('test_repository')
-    self.assertTrue(isinstance(snapshot_object, repo_tool.Metadata))
-    self.assertTrue(tuf.roledb.role_exists('snapshot', 'test_repository'))
-
-
-
-
-
-class TestTargets(unittest.TestCase):
-  @classmethod
-  def setUpClass(cls):
-    # Create a temporary directory to store the repository, metadata, and
-    # target files.  'temporary_directory' must be deleted in tearDownClass()
-    # so that temporary files are always removed, even when exceptions occur.
-    cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd())
-
-
-
-  @classmethod
-  def tearDownClass(cls):
-    # Remove the temporary repository directory, which should contain all the
-    # metadata, targets, and key files generated for the test cases.
-    shutil.rmtree(cls.temporary_directory)
-
-
-
-  def setUp(self):
-    tuf.roledb.create_roledb('test_repository')
-    tuf.keydb.create_keydb('test_repository')
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    self.targets_directory = os.path.join(temporary_directory, 'repository',
-        'targets')
-    original_targets_directory = os.path.join('repository_data',
-        'repository', 'targets')
-    shutil.copytree(original_targets_directory, self.targets_directory)
-    self.targets_object = repo_tool.Targets(self.targets_directory,
-        repository_name='test_repository')
-
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-    self.targets_object = None
-
-
-
-  def test_init(self):
-    # Test normal case.
-    # Targets() subclasses Metadata(), and creates a 'targets' role in
-    # 'tuf.roledb'.
-    targets_object = repo_tool.Targets('targets_directory/')
-    self.assertTrue(isinstance(targets_object, repo_tool.Metadata))
-    self.assertTrue(tuf.roledb.role_exists('targets'))
-
-    # Custom Targets object rolename.
-    targets_object = repo_tool.Targets('targets_directory/', 'project')
-    self.assertTrue(isinstance(targets_object, repo_tool.Metadata))
-    self.assertTrue(tuf.roledb.role_exists('project'))
-
-    # Custom roleinfo object (i.e., tuf.formats.ROLEDB_SCHEMA).  'keyids' and
-    # 'threshold' are required, the rest are optional.
-    roleinfo = {'keyids':
-          ['66c4cb5fef5e4d62b7013ef1cab4b8a827a36c14056d5603c3a970e21eb30e6f'],
-        'threshold': 8}
-    self.assertTrue(tuf.formats.ROLEDB_SCHEMA.matches(roleinfo))
-
-    targets_object = repo_tool.Targets('targets_directory/', 'package',
-        roleinfo)
-    self.assertTrue(isinstance(targets_object, repo_tool.Metadata))
-    self.assertTrue(tuf.roledb.role_exists('package'))
-
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_tool.Targets, 3)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_tool.Targets, 'targets_directory/', 3)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_tool.Targets, 'targets_directory/', 'targets', 3)
-
-
-
-  def test_call(self):
-    # Test normal case.
-    # Perform a delegation so that a delegated role can be accessed and
-    # tested through __call__().  Example: {targets_object}('role1').
-    keystore_directory = os.path.join('repository_data', 'keystore')
-    public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub')
-    public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath)
-
-    # Create a Targets() object to be tested.
-    targets_object = repo_tool.Targets(self.targets_directory)
-    targets_object.delegate('role1', [public_key], ['file1.txt'])
-
-    self.assertTrue(isinstance(targets_object('role1'), repo_tool.Targets))
-
-    # Test invalid (i.e., non-delegated) rolename argument.
-    self.assertRaises(tuf.exceptions.UnknownRoleError, targets_object,
-        'unknown_role')
-
-    # Test improperly formatted argument.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        targets_object, 1)
-
-
-
-  def test_get_delegated_rolenames(self):
-    # Test normal case.
-    # Perform two delegations so that get_delegated_rolenames() has roles to
-    # return.
-    keystore_directory = os.path.join('repository_data', 'keystore')
-    public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub')
-    public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath)
-
-    # Set needed arguments by delegate().
-    public_keys = [public_key]
-    threshold = 1
-
-    self.targets_object.delegate('tuf', public_keys, [], threshold, False,
-        ['file1.txt'], path_hash_prefixes=None)
-
-    self.targets_object.delegate('warehouse', public_keys, [], threshold,
-        False, ['file2.txt'], path_hash_prefixes=None)
-
-    # Test that get_delegated_rolenames() returns the expected delegations.
-    expected_delegated_rolenames = ['tuf', 'warehouse']
-    for delegated_rolename in self.targets_object.get_delegated_rolenames():
-      self.assertIn(delegated_rolename, expected_delegated_rolenames)
-
-
-
-  def test_target_files(self):
-    # Test normal case.
-    # Verify the targets object initially contains zero target files.
-    self.assertEqual(self.targets_object.target_files, {})
-
-    target_filepath = 'file1.txt'
-    self.targets_object.add_target(target_filepath)
-
-    self.assertEqual(len(self.targets_object.target_files), 1)
-    self.assertTrue(target_filepath in self.targets_object.target_files)
-
-
-
-  def test_delegations(self):
-    # Test normal case.
-    # Perform a delegation so that delegations() has a Targets() object to
-    # return.
-    keystore_directory = os.path.join('repository_data', 'keystore')
-    public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub')
-    public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath)
-
-    # Set needed arguments by delegate().
-    public_keys = [public_key]
-    rolename = 'tuf'
-    paths = ['file1.txt']
-    threshold = 1
-
-    self.targets_object.delegate(rolename, public_keys, paths, threshold,
-        terminating=False, list_of_targets=None, path_hash_prefixes=None)
-
-    # Test that a valid Targets() object is returned by delegations().
-    for delegated_object in self.targets_object.delegations:
-      self.assertTrue(isinstance(delegated_object, repo_tool.Targets))
-
-    # For testing / coverage purposes, try to remove a delegated role with
-    # the remove_delegated_role() method.
-    self.targets_object.remove_delegated_role(rolename)
-
-
-
-  def test_add_delegated_role(self):
-    # Test for an invalid targets object.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.targets_object.add_delegated_role, 'targets', 'bad_object')
-
-
-
-  def test_add_target(self):
-    # Test normal case.
-    # Verify the targets object initially contains zero target files.
-    self.assertEqual(self.targets_object.target_files, {})
-
-    target_filepath = 'file1.txt'
-    self.targets_object.add_target(target_filepath)
-
-    self.assertEqual(len(self.targets_object.target_files), 1)
-    self.assertTrue(target_filepath in self.targets_object.target_files)
-
-    # Test the 'custom' parameter of add_target(), where additional
-    # information may be specified for the target.
-    target2_filepath = 'file2.txt'
-    target2_fullpath = os.path.join(self.targets_directory, target2_filepath)
-
-    # The file permissions of the target (an octal number specifying file
-    # access for owner, group, and others; e.g., 0755).
-    octal_file_permissions = oct(os.stat(target2_fullpath).st_mode)[4:]
-    custom_file_permissions = {'file_permissions': octal_file_permissions}
-    self.targets_object.add_target(target2_filepath, custom_file_permissions)
-
-    self.assertEqual(len(self.targets_object.target_files), 2)
-    self.assertTrue(target2_filepath in self.targets_object.target_files)
-    self.assertEqual(self.targets_object.target_files['file2.txt']['custom'],
-        custom_file_permissions)
-
-    # Attempt to replace a target that has already been added.
-    octal_file_permissions2 = oct(os.stat(target2_fullpath).st_mode)[4:]
-    custom_file_permissions2 = {'file_permissions': octal_file_permissions2}
-    self.targets_object.add_target(target2_filepath, custom_file_permissions2)
-    self.assertEqual(
-        self.targets_object.target_files[target2_filepath]['custom'],
-        custom_file_permissions2)
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.targets_object.add_target, 3)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.targets_object.add_target, 3, custom_file_permissions)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.targets_object.add_target, target_filepath, 3)
-
-    # A target path starting with a directory separator.
-    self.assertRaises(tuf.exceptions.InvalidNameError,
-        self.targets_object.add_target, '/file1.txt')
-
-    # A target path using a backward slash as a separator.
-    self.assertRaises(tuf.exceptions.InvalidNameError,
-        self.targets_object.add_target, 'subdir\\file1.txt')
-
-    # Should not access the file system to check for non-existent files.
-    self.targets_object.add_target('non-existent')
-
-
-
-  def test_add_targets(self):
-    # Test normal case.
-    # Verify the targets object initially contains zero target files.
-    self.assertEqual(self.targets_object.target_files, {})
-
-    target1_filepath = 'file1.txt'
-    target2_filepath = 'file2.txt'
-    target3_filepath = 'file3.txt'
-
-    # Add a 'target1_filepath' duplicate for testing purposes
-    # ('target1_filepath' should not be added twice).
-    target_files = \
-      [target1_filepath, target2_filepath, 'file3.txt', target1_filepath]
-    self.targets_object.add_targets(target_files)
-
-    self.assertEqual(len(self.targets_object.target_files), 3)
-    self.assertEqual(self.targets_object.target_files,
-        {target1_filepath: {}, target2_filepath: {}, target3_filepath: {}})
-
-    # Attempt to replace targets that have already been added.
-    self.targets_object.add_targets(target_files)
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.targets_object.add_targets, 3)
-
-    # A target path starting with a directory separator.
-    self.assertRaises(tuf.exceptions.InvalidNameError,
-        self.targets_object.add_targets, ['/file1.txt'])
-
-    # A target path using a backward slash as a separator.
-    self.assertRaises(tuf.exceptions.InvalidNameError,
-        self.targets_object.add_targets, ['subdir\\file1.txt'])
-
-    # Check that the addition of the whole list is rolled back in case of a
-    # wrong target path.
-    target_files = self.targets_object.target_files
-    self.assertRaises(tuf.exceptions.InvalidNameError,
-        self.targets_object.add_targets, ['file4.txt', '/file5.txt'])
-    self.assertEqual(self.targets_object.target_files, target_files)
-
-    # Should not access the file system to check for non-existent files.
-    self.targets_object.add_targets(['non-existent'])
-
-
-  def test_remove_target(self):
-    # Test normal case.
-    # Verify the targets object initially contains zero target files.
-    self.assertEqual(self.targets_object.target_files, {})
-
-    # Add a target so that remove_target() has something to remove.
-    target_filepath = 'file1.txt'
-    self.targets_object.add_target(target_filepath)
-
-    # Test remove_target()'s behavior.
-    self.targets_object.remove_target(target_filepath)
-    self.assertEqual(self.targets_object.target_files, {})
-
-    # Test improperly formatted arguments.
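# Aside: the InvalidNameError cases above enforce one simple rule: target
# paths must be relative and use '/' as the separator.  A sketch of such a
# check (illustrative; not the library's actual validation code):
def validate_targets_path(path):
  # Reject absolute paths and backslash separators, mirroring the two
  # InvalidNameError cases exercised by the tests.
  if path.startswith('/') or '\\' in path:
    raise ValueError('invalid target path: ' + repr(path))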
- self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.remove_target, 3) - - # Test for filepath that hasn't been added yet. - target5_filepath = 'file5.txt' - self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.remove_target, - target5_filepath) - - - - def test_clear_targets(self): - # Test normal case. - # Verify the targets object initially contains zero target files. - self.assertEqual(self.targets_object.target_files, {}) - - # Add targets, to be tested by clear_targets(). - target1_filepath = 'file1.txt' - target2_filepath = 'file2.txt' - self.targets_object.add_targets([target1_filepath, target2_filepath]) - - self.targets_object.clear_targets() - self.assertEqual(self.targets_object.target_files, {}) - - - - def test_delegate(self): - # Test normal case. - # Need at least one public key and valid target paths required by - # delegate(). - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - - # Set needed arguments by delegate(). - public_keys = [public_key] - rolename = 'tuf' - list_of_targets = ['file1.txt', 'file2.txt'] - threshold = 1 - paths = ['*'] - path_hash_prefixes = ['e3a3', '8fae', 'd543'] - - self.targets_object.delegate(rolename, public_keys, paths, - threshold, terminating=False, list_of_targets=list_of_targets, - path_hash_prefixes=path_hash_prefixes) - - self.assertEqual(self.targets_object.get_delegated_rolenames(), - ['tuf']) - - # Test for delegated paths that do not exist. - # An exception should not be raised for non-existent delegated paths, since - # these paths may not necessarily exist when the delegation is done, - # and also because the delegated paths can be glob patterns. - self.targets_object.delegate(rolename, public_keys, ['non-existent'], - threshold, terminating=False, list_of_targets=list_of_targets, - path_hash_prefixes=path_hash_prefixes) - - # Test for delegated targets that do not exist. - # An exception should not be raised for non-existent delegated targets, - # since at this point the file system should not be accessed yet - self.targets_object.delegate(rolename, public_keys, [], threshold, - terminating=False, list_of_targets=['non-existent.txt'], - path_hash_prefixes=path_hash_prefixes) - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, 3, public_keys, paths, threshold, - list_of_targets, path_hash_prefixes) - - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, rolename, 3, paths, threshold, - list_of_targets, path_hash_prefixes) - - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, rolename, public_keys, 3, threshold, - list_of_targets, path_hash_prefixes) - - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, rolename, public_keys, paths, '3', - list_of_targets, path_hash_prefixes) - - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, rolename, public_keys, paths, threshold, - 3, path_hash_prefixes) - - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, rolename, public_keys, paths, threshold, - list_of_targets, 3) - - # Test invalid arguments (e.g., already delegated 'rolename', non-existent - # files, etc.). 
-    # Test duplicate 'rolename' delegation, which should have been delegated
-    # in the normal case above.
-    self.assertRaises(securesystemslib.exceptions.Error,
-        self.targets_object.delegate, rolename, public_keys, paths, threshold,
-        list_of_targets, path_hash_prefixes)
-
-    # A path or target starting with a directory separator.
-    self.assertRaises(tuf.exceptions.InvalidNameError,
-        self.targets_object.delegate, rolename, public_keys, ['/*'])
-    self.assertRaises(tuf.exceptions.InvalidNameError,
-        self.targets_object.delegate, rolename, public_keys, [],
-        list_of_targets=['/file1.txt'])
-
-    # A path or target using '\' as a directory separator.
-    self.assertRaises(tuf.exceptions.InvalidNameError,
-        self.targets_object.delegate, rolename, public_keys, ['subpath\\*'])
-    self.assertRaises(tuf.exceptions.InvalidNameError,
-        self.targets_object.delegate, rolename, public_keys, [],
-        list_of_targets=['subpath\\file1.txt'])
-
-
-
-  def test_delegate_hashed_bins(self):
-    # Test normal case.
-    keystore_directory = os.path.join('repository_data', 'keystore')
-    public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub')
-    public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath)
-
-    # Set needed arguments by delegate_hashed_bins().
-    public_keys = [public_key]
-    list_of_targets = ['file1.txt']
-
-
-    # A helper function to check that the range of prefixes the role is
-    # delegated for, specified in path_hash_prefixes, matches the range
-    # implied by the bin, or delegation role, name.
-    def check_prefixes_match_range():
-      roleinfo = tuf.roledb.get_roleinfo(self.targets_object.rolename,
-          'test_repository')
-      have_prefixes = False
-
-      for delegated_role in roleinfo['delegations']['roles']:
-        if len(delegated_role['path_hash_prefixes']) > 0:
-          rolename = delegated_role['name']
-          prefixes = delegated_role['path_hash_prefixes']
-          have_prefixes = True
-
-          if len(prefixes) > 1:
-            prefix_range = "{}-{}".format(prefixes[0], prefixes[-1])
-          else:
-            prefix_range = prefixes[0]
-
-          self.assertEqual(rolename, prefix_range)
-
-      # We expect at least one delegation with some path_hash_prefixes.
-      self.assertTrue(have_prefixes)
-
-
-    # Test delegate_hashed_bins() and verify that 16 hashed bins have
-    # been delegated in the parent's roleinfo.
-    self.targets_object.delegate_hashed_bins(list_of_targets, public_keys,
-        number_of_bins=16)
-
-    # The expected child rolenames, since 'number_of_bins' = 16.
-    delegated_rolenames = ['0', '1', '2', '3', '4', '5', '6', '7',
-        '8', '9', 'a', 'b', 'c', 'd', 'e', 'f']
-
-    self.assertEqual(sorted(self.targets_object.get_delegated_rolenames()),
-        sorted(delegated_rolenames))
-    check_prefixes_match_range()
-
-    # For testing / coverage purposes, try to create delegated bins that
-    # hold a range of hash prefixes (e.g., bin name: 000-007, since each of
-    # the 512 bins below covers eight of the 4096 three-digit hash prefixes).
-    self.targets_object.delegate_hashed_bins(list_of_targets, public_keys,
-        number_of_bins=512)
-    check_prefixes_match_range()
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.targets_object.delegate_hashed_bins, 3, public_keys,
-        number_of_bins=1)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.targets_object.delegate_hashed_bins,
-        list_of_targets, 3, number_of_bins=1)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        self.targets_object.delegate_hashed_bins,
-        list_of_targets, public_keys, '1')
-
-    # Test invalid arguments.
-    # Invalid number of bins, which must be a power of 2.
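# Aside: hashed-bin delegation assigns each target to a bin keyed by a
# prefix of the SHA-256 digest of the target's path, which is why 16 bins
# produce the rolenames '0' through 'f' and why the bin count must be a
# power of two.  A sketch of the prefix computation (illustrative;
# repository_lib implements the real assignment):
import hashlib

def hash_prefix_for_target(target_path, number_of_bins=16):
  # 16 bins need one hex digit, 256 need two, 4096 need three, and so on.
  prefix_length = len('{:x}'.format(number_of_bins - 1))
  digest = hashlib.sha256(target_path.encode('utf-8')).hexdigest()
  return digest[:prefix_length]

# For bin counts that are not powers of 16, several consecutive prefixes
# map to a single bin, giving range-style rolenames such as '000-007' when
# number_of_bins is 512.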
- self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.delegate_hashed_bins, - list_of_targets, public_keys, number_of_bins=3) - - # Invalid 'list_of_targets'. - # A path or target starting with a directory separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.delegate_hashed_bins, - ['/file1.txt'], public_keys, - number_of_bins=2) - - # A path or target using '\' as a directory separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.delegate_hashed_bins, - ['subpath\\file1.txt'], public_keys, - number_of_bins=2) - - - def test_add_target_to_bin(self): - # Test normal case. - # Delegate the hashed bins so that add_target_to_bin() can be tested. - repository_name = 'test_repository' - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'targets_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - target1_filepath = 'file1.txt' - - # Set needed arguments by delegate_hashed_bins(). - public_keys = [public_key] - - # Delegate to hashed bins. The target filepath to be tested is expected - # to contain a hash prefix of 'e', and should be available at: - # repository.targets('e'). - self.targets_object.delegate_hashed_bins([], public_keys, - number_of_bins=16) - - # Ensure each hashed bin initially contains zero targets. - for delegation in self.targets_object.delegations: - self.assertEqual(delegation.target_files, {}) - - # Add 'target1_filepath' and verify that the relative path of - # 'target1_filepath' is added to the correct bin. - rolename = self.targets_object.add_target_to_bin(target1_filepath, 16) - - for delegation in self.targets_object.delegations: - if delegation.rolename == rolename: - self.assertTrue('file1.txt' in delegation.target_files) - - else: - self.assertFalse('file1.txt' in delegation.target_files) - - # Test for non-existent delegations and hashed bins. - empty_targets_role = repo_tool.Targets(self.targets_directory, 'empty', - repository_name=repository_name) - - self.assertRaises(securesystemslib.exceptions.Error, - empty_targets_role.add_target_to_bin, - target1_filepath, 16) - - # Test for a required hashed bin that does not exist. - self.targets_object.revoke(rolename) - self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.add_target_to_bin, - target1_filepath, 16) - - # Test adding a target with fileinfo - target2_hashes = {'sha256': '517c0ce943e7274a2431fa5751e17cfd5225accd23e479bfaad13007751e87ef'} - target2_fileinfo = tuf.formats.make_targets_fileinfo(37, target2_hashes) - target2_filepath = 'file2.txt' - - rolename = self.targets_object.add_target_to_bin(target2_filepath, 16, - fileinfo=target2_fileinfo) - - for delegation in self.targets_object.delegations: - if delegation.rolename == rolename: - self.assertTrue(target2_filepath in delegation.target_files) - - else: - self.assertFalse(target2_filepath in delegation.target_files) - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.add_target_to_bin, 3, 'foo') - - - - def test_remove_target_from_bin(self): - # Test normal case. - # Delegate the hashed bins so that add_target_to_bin() can be tested. 
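# How a target lands in a particular bin, sketched under the assumption
# implied by the 'hash prefix of e' comments in these tests: the bin is
# selected by the SHA-256 digest of the target's relative path.
import hashlib

path_digest = hashlib.sha256('file1.txt'.encode('utf-8')).hexdigest()
# With 16 single-digit bins, 'file1.txt' belongs to the bin named after the
# digest's first hex digit ('e', per the comments in test_add_target_to_bin).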
- keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'targets_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - target1_filepath = 'file1.txt' - - # Set needed arguments by delegate_hashed_bins(). - public_keys = [public_key] - - # Delegate to hashed bins. The target filepath to be tested is expected - # to contain a hash prefix of 'e', and can be accessed as: - # repository.targets('e'). - self.targets_object.delegate_hashed_bins([], public_keys, - number_of_bins=16) - - # Ensure each hashed bin initially contains zero targets. - for delegation in self.targets_object.delegations: - self.assertEqual(delegation.target_files, {}) - - # Add 'target1_filepath' and verify that the relative path of - # 'target1_filepath' is added to the correct bin. - added_rolename = self.targets_object.add_target_to_bin(target1_filepath, 16) - - for delegation in self.targets_object.delegations: - if delegation.rolename == added_rolename: - self.assertTrue('file1.txt' in delegation.target_files) - self.assertTrue(len(delegation.target_files) == 1) - else: - self.assertTrue('file1.txt' not in delegation.target_files) - - # Test the remove_target_from_bin() method. Verify that 'target1_filepath' - # has been removed. - removed_rolename = self.targets_object.remove_target_from_bin(target1_filepath, 16) - self.assertEqual(added_rolename, removed_rolename) - - for delegation in self.targets_object.delegations: - self.assertTrue(target1_filepath not in delegation.target_files) - - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.remove_target_from_bin, 3, 'foo') - - # Invalid target file path argument. - self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.remove_target_from_bin, 'non-existent', 16) - - - - def test_default_bin_num(self): - # Test creating, adding to and removing from hashed bins with the default - # number of bins - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - target1_filepath = os.path.join(self.targets_directory, 'file1.txt') - - # Set needed arguments by delegate_hashed_bins(). - public_keys = [public_key] - - # Test default parameters for number_of_bins - self.targets_object.delegate_hashed_bins([], public_keys) - - # Ensure each hashed bin initially contains zero targets. - for delegation in self.targets_object.delegations: - self.assertEqual(delegation.target_files, {}) - - # Add 'target1_filepath' and verify that the relative path of - # 'target1_filepath' is added to the correct bin. - added_rolename = self.targets_object.add_target_to_bin(os.path.basename(target1_filepath)) - - for delegation in self.targets_object.delegations: - if delegation.rolename == added_rolename: - self.assertTrue('file1.txt' in delegation.target_files) - - else: - self.assertFalse('file1.txt' in delegation.target_files) - - # Remove target1_filepath and verify that all bins are now empty - removed_rolename = self.targets_object.remove_target_from_bin( - os.path.basename(target1_filepath)) - self.assertEqual(added_rolename, removed_rolename) - - for delegation in self.targets_object.delegations: - self.assertEqual(delegation.target_files, {}) - - - def test_add_paths(self): - # Test normal case. 
- # Perform a delegation so that add_paths() has a child role to delegate a - # path to. - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - - # Set needed arguments by delegate(). - public_keys = [public_key] - rolename = 'tuf' - threshold = 1 - - self.targets_object.delegate(rolename, public_keys, [], threshold, - list_of_targets=None, path_hash_prefixes=None) - - # Delegate an extra role for test coverage (i.e., to later verify that - # delegated paths are not added to a child role that was not requested). - self.targets_object.delegate('junk_role', public_keys, []) - - paths = ['tuf_files/*'] - self.targets_object.add_paths(paths, 'tuf') - - # Retrieve 'targets_object' roleinfo, and verify the roleinfo contains the - # expected delegated paths of the delegated role. - targets_object_roleinfo = tuf.roledb.get_roleinfo(self.targets_object.rolename, - 'test_repository') - - delegated_role = targets_object_roleinfo['delegations']['roles'][0] - self.assertEqual(['tuf_files/*'], delegated_role['paths']) - - # Try to add a delegated path that has already been set. - # add_paths() should simply log a message in this case. - self.targets_object.add_paths(paths, 'tuf') - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.add_paths, 3, 'tuf') - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.add_paths, paths, 3) - - - # Test invalid arguments. - # A non-delegated child role. - self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.add_paths, paths, 'non_delegated_rolename') - - # A path starting with a directory separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.add_paths, ['/tuf_files/*'], 'tuf') - - # A path using a backward slash as a separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.add_paths, ['tuf_files\\*'], 'tuf') - - # add_paths() should not raise an exception for non-existent - # paths, which it previously did. - self.targets_object.add_paths(['non-existent'], 'tuf') - - - - - def test_revoke(self): - # Test normal case. - # Perform a delegation so that revoke() has a delegation to revoke. - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - - # Set needed arguments by delegate(). - public_keys = [public_key] - rolename = 'tuf' - paths = ['file1.txt'] - threshold = 1 - - self.targets_object.delegate(rolename, public_keys, [], threshold, False, - paths, path_hash_prefixes=None) - - # Test revoke() - self.targets_object.revoke('tuf') - self.assertEqual(self.targets_object.get_delegated_rolenames(), []) - - - # Test improperly formatted rolename argument. 
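# The path rules that delegate(), delegate_hashed_bins(), add_paths() and
# _check_path() all enforce, restated as a standalone predicate (a sketch
# of the checks these tests exercise, not tuf's actual implementation):
def sketch_is_valid_rolepath(path):
  return (isinstance(path, str)
      and not path.startswith('/')  # no leading directory separator
      and '\\' not in path)         # forward slashes only

assert sketch_is_valid_rolepath('tuf_files/*')
assert not sketch_is_valid_rolepath('/tuf_files/*')
assert not sketch_is_valid_rolepath('tuf_files\\*')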
- self.assertRaises(securesystemslib.exceptions.FormatError, self.targets_object.revoke, 3) - - - - def test_check_path(self): - # Test that correct path does not raise exception: using '/' as a separator - # and does not start with a directory separator - self.targets_object._check_path('file1.txt') - - # Test that non-existent path does not raise exception (_check_path - # checks only the path string for compliance) - self.targets_object._check_path('non-existent.txt') - self.targets_object._check_path('subdir/non-existent') - - # Test improperly formatted pathname argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object._check_path, 3) - - # Test invalid pathname - # Starting with os separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object._check_path, '/file1.txt') - - # Starting with Windows-style separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object._check_path, '\\file1.txt') - - # Using Windows-style separator ('\') - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object._check_path, 'subdir\\non-existent') - - - -class TestRepositoryToolFunctions(unittest.TestCase): - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownClass() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - - - @classmethod - def tearDownClass(cls): - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - shutil.rmtree(cls.temporary_directory) - - - - def setUp(self): - tuf.roledb.create_roledb('test_repository') - tuf.keydb.create_keydb('test_repository') - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - - - def test_create_new_repository(self): - # Test normal case. - # Setup the temporary repository directories needed by - # create_new_repository(). - repository_name = 'test_repository' - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - repository_directory = os.path.join(temporary_directory, 'repository') - metadata_directory = os.path.join(repository_directory, - repo_tool.METADATA_STAGED_DIRECTORY_NAME) - targets_directory = os.path.join(repository_directory, - repo_tool.TARGETS_DIRECTORY_NAME) - - repository = repo_tool.create_new_repository(repository_directory, - repository_name) - self.assertTrue(isinstance(repository, repo_tool.Repository)) - - # Verify that the 'repository/', 'repository/metadata', and - # 'repository/targets' directories were created. - self.assertTrue(os.path.exists(repository_directory)) - self.assertTrue(os.path.exists(metadata_directory)) - self.assertTrue(os.path.exists(targets_directory)) - - # Test that the 'repository' directory is created (along with the other - # sub-directories) when it does not exist yet. The repository tool creates - # the non-existent directory. - shutil.rmtree(repository_directory) - - repository = repo_tool.create_new_repository(repository_directory, - repository_name) - self.assertTrue(isinstance(repository, repo_tool.Repository)) - - # Verify that the 'repository/', 'repository/metadata', and - # 'repository/targets' directories were created. 
-    self.assertTrue(os.path.exists(repository_directory))
-    self.assertTrue(os.path.exists(metadata_directory))
-    self.assertTrue(os.path.exists(targets_directory))
-
-    # Test passing custom arguments to control the computation
-    # of length and hashes for timestamp and snapshot roles.
-    repository = repo_tool.create_new_repository(repository_directory,
-        repository_name, use_timestamp_length=True, use_timestamp_hashes=True,
-        use_snapshot_length=True, use_snapshot_hashes=True)
-
-    # Verify that the arguments for the optional length and hashes of
-    # snapshot and timestamp are properly set.
-    self.assertTrue(repository._use_timestamp_length)
-    self.assertTrue(repository._use_timestamp_hashes)
-    self.assertTrue(repository._use_snapshot_length)
-    self.assertTrue(repository._use_snapshot_hashes)
-
-    # Test for a repository name that doesn't exist yet.  Note:
-    # The 'test_repository' repository name is created in setUp() before this
-    # test case is run.
-    repository = repo_tool.create_new_repository(repository_directory, 'my-repo')
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_tool.create_new_repository, 3, repository_name)
-
-    # For testing purposes, try to create a repository directory whose
-    # creation fails with an exception other than errno.EEXIST.
-    self.assertRaises(securesystemslib.exceptions.StorageError,
-        repo_tool.create_new_repository, 'bad' * 2000, repository_name)
-
-    # Reset the 'repository_directory' so that the metadata and targets
-    # directories can be tested likewise.
-    repository_directory = os.path.join(temporary_directory, 'repository')
-
-    # The same test as before, but for the metadata and targets directories.
-    original_metadata_staged_directory = \
-      tuf.repository_tool.METADATA_STAGED_DIRECTORY_NAME
-    tuf.repository_tool.METADATA_STAGED_DIRECTORY_NAME = 'bad' * 2000
-
-    self.assertRaises(securesystemslib.exceptions.StorageError,
-        repo_tool.create_new_repository, repository_directory, repository_name)
-
-    # Reset the metadata staged directory so that the targets directory can
-    # be tested likewise.
-    tuf.repository_tool.METADATA_STAGED_DIRECTORY_NAME = \
-      original_metadata_staged_directory
-
-    original_targets_directory = tuf.repository_tool.TARGETS_DIRECTORY_NAME
-    tuf.repository_tool.TARGETS_DIRECTORY_NAME = 'bad' * 2000
-
-    self.assertRaises(securesystemslib.exceptions.StorageError,
-        repo_tool.create_new_repository, repository_directory, repository_name)
-
-    tuf.repository_tool.TARGETS_DIRECTORY_NAME = \
-      original_targets_directory
-
-
-
-  def test_load_repository(self):
-    # Test normal case.
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    original_repository_directory = os.path.join('repository_data',
-        'repository')
-
-    repository_directory = os.path.join(temporary_directory, 'repository')
-    metadata_directory = os.path.join(repository_directory, 'metadata.staged')
-    shutil.copytree(original_repository_directory, repository_directory)
-
-    # For testing purposes, add a metadata file with an extension that is
-    # not supported, and another with invalid JSON content.
- invalid_metadata_file = os.path.join(metadata_directory, 'root.xml') - root_file = os.path.join(metadata_directory, 'root.json') - shutil.copyfile(root_file, invalid_metadata_file) - bad_root_content = os.path.join(metadata_directory, 'root_bad.json') - - with open(bad_root_content, 'wb') as file_object: - file_object.write(b'bad') - - repository = repo_tool.load_repository(repository_directory) - self.assertTrue(isinstance(repository, repo_tool.Repository)) - self.assertTrue(isinstance(repository.targets('role1'), - repo_tool.Targets)) - self.assertTrue(isinstance(repository.targets('role1')('role2'), - repo_tool.Targets)) - - # Verify the expected roles have been loaded. See - # 'tuf/tests/repository_data/repository/'. - expected_roles = \ - ['root', 'targets', 'snapshot', 'timestamp', 'role1', 'role2'] - for role in tuf.roledb.get_rolenames(): - self.assertTrue(role in expected_roles) - - self.assertTrue(len(repository.root.keys)) - self.assertTrue(len(repository.targets.keys)) - self.assertTrue(len(repository.snapshot.keys)) - self.assertTrue(len(repository.timestamp.keys)) - self.assertEqual(1, repository.targets('role1').version) - - # It is assumed that the targets (tuf/tests/repository_data/) role contains - # 'file1.txt' and 'file2.txt'. - self.assertTrue('file1.txt' in repository.targets.target_files) - self.assertTrue('file2.txt' in repository.targets.target_files) - self.assertTrue('file3.txt' in repository.targets('role1').target_files) - - # Test if targets file info is loaded correctly: read the JSON metadata - # files separately and then compare with the loaded repository data. - targets_path = os.path.join(metadata_directory, 'targets.json') - role1_path = os.path.join(metadata_directory, 'role1.json') - - targets_object = securesystemslib.util.load_json_file(targets_path) - role1_object = securesystemslib.util.load_json_file(role1_path) - - targets_fileinfo = targets_object['signed']['targets'] - role1_fileinfo = role1_object['signed']['targets'] - - repository = repo_tool.load_repository(repository_directory) - - self.assertEqual(targets_fileinfo, repository.targets.target_files) - self.assertEqual(role1_fileinfo, repository.targets('role1').target_files) - - # Test for a non-default repository name. - repository = repo_tool.load_repository(repository_directory, 'my-repo') - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_tool.load_repository, 3) - - - # Test passing custom arguments to control the computation - # of length and hashes for timestamp and snapshot roles. - repository = repo_tool.load_repository(repository_directory, - 'my-repo', use_timestamp_length=True, use_timestamp_hashes=True, - use_snapshot_length=True, use_snapshot_hashes=True) - - # Verify that the argument for optional hashes and length for - # snapshot and timestamp are properly set. - self.assertTrue(repository._use_timestamp_length) - self.assertTrue(repository._use_timestamp_hashes) - self.assertTrue(repository._use_snapshot_length) - self.assertTrue(repository._use_snapshot_hashes) - - # Test for invalid 'repository_directory' (i.e., does not contain the - # minimum required metadata. 
-    root_filepath = os.path.join(repository_directory,
-        repo_tool.METADATA_STAGED_DIRECTORY_NAME, 'root.json')
-    os.remove(root_filepath)
-    self.assertRaises(tuf.exceptions.RepositoryError,
-        repo_tool.load_repository, repository_directory)
-
-
-
-  def test_dirty_roles(self):
-    repository_name = 'test_repository'
-    original_repository_directory = os.path.join('repository_data',
-        'repository')
-    repository = repo_tool.load_repository(original_repository_directory,
-        repository_name)
-
-    # dirty_roles() only logs the list of dirty roles.
-    repository.dirty_roles()
-
-
-
-  def test_dump_signable_metadata(self):
-    metadata_directory = os.path.join('repository_data',
-        'repository', 'metadata')
-    targets_metadata_file = os.path.join(metadata_directory, 'targets.json')
-
-    metadata_content = repo_tool.dump_signable_metadata(targets_metadata_file)
-
-    # Test for an invalid targets metadata file.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_tool.dump_signable_metadata, 1)
-    self.assertRaises(securesystemslib.exceptions.StorageError,
-        repo_tool.dump_signable_metadata, 'bad file path')
-
-
-
-  def test_append_signature(self):
-    metadata_directory = os.path.join('repository_data',
-        'repository', 'metadata')
-    targets_metadata_path = os.path.join(metadata_directory, 'targets.json')
-
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    tmp_targets_metadata_path = os.path.join(temporary_directory, 'targets.json')
-    shutil.copyfile(targets_metadata_path, tmp_targets_metadata_path)
-
-    # Test for normal case.
-    targets_metadata = securesystemslib.util.load_json_file(tmp_targets_metadata_path)
-    num_signatures = len(targets_metadata['signatures'])
-    signature = targets_metadata['signatures'][0]
-
-    repo_tool.append_signature(signature, tmp_targets_metadata_path)
-
-    # append_signature() should have added exactly one signature.
-    targets_metadata = securesystemslib.util.load_json_file(tmp_targets_metadata_path)
-    self.assertEqual(num_signatures + 1, len(targets_metadata['signatures']))
-
-    # Test for invalid arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_tool.append_signature, 1, tmp_targets_metadata_path)
-
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_tool.append_signature, signature, 1)
-
-
-# Run the test cases.
-if __name__ == '__main__':
-  utils.configure_test_logging(sys.argv)
-  unittest.main()
diff --git a/tests/test_roledb.py b/tests/test_roledb.py
deleted file mode 100755
index 73405b21bd..0000000000
--- a/tests/test_roledb.py
+++ /dev/null
@@ -1,787 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-  test_roledb.py
-
-  Vladimir Diaz
-
-  October 2012.
-
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-  Unit test for 'roledb.py'.
-"""
-
-import unittest
-import logging
-import sys
-
-import tuf
-import tuf.formats
-import tuf.roledb
-import tuf.exceptions
-import tuf.log
-
-from tests import utils
-
-import securesystemslib
-import securesystemslib.keys
-
-logger = logging.getLogger(__name__)
-
-
-# Generate the three keys to use in our test cases.
-KEYS = []
-for junk in range(3):
-  KEYS.append(securesystemslib.keys.generate_rsa_key(2048))
-
-
-
-class TestRoledb(unittest.TestCase):
-  def setUp(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-
-
-
-  def test_create_roledb(self):
-    # Verify that a roledb is created for a named repository.
- self.assertTrue('default' in tuf.roledb._roledb_dict) - self.assertEqual(1, len(tuf.roledb._roledb_dict)) - - repository_name = 'example_repository' - tuf.roledb.create_roledb(repository_name) - self.assertEqual(2, len(tuf.roledb._roledb_dict)) - self.assertTrue(repository_name in tuf.roledb._roledb_dict) - - # Test for invalid and improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.create_roledb, 123) - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.create_roledb, 'default') - - # Reset the roledb so that subsequent test functions have access to the - # original, default roledb. - tuf.roledb.remove_roledb(repository_name) - - - - def test_remove_roledb(self): - # Verify that the named repository is removed from the roledb. - repository_name = 'example_repository' - - rolename = 'targets' - roleinfo = {'keyids': ['123'], 'threshold': 1} - - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.remove_roledb, 'default') - tuf.roledb.create_roledb(repository_name) - - tuf.roledb.remove_roledb(repository_name) - - # remove_roledb() should not raise an exception if a non-existent - # 'repository_name' is specified. - tuf.roledb.remove_roledb(repository_name) - - # Ensure the roledb is reset to its original, default state. Subsequent - # test functions expect only the 'default' repository to exist in the roledb. - tuf.roledb.remove_roledb(repository_name) - - - - def test_clear_roledb(self): - # Test for an empty roledb, a length of 1 after adding a key, and finally - # an empty roledb after calling 'clear_roledb()'. - self.assertEqual(0, len(tuf.roledb._roledb_dict['default'])) - tuf.roledb._roledb_dict['default']['Root'] = {'keyids': ['123'], 'threshold': 1} - self.assertEqual(1, len(tuf.roledb._roledb_dict['default'])) - tuf.roledb.clear_roledb() - self.assertEqual(0, len(tuf.roledb._roledb_dict['default'])) - - # Verify that the roledb can be cleared for a non-default repository. - rolename = 'targets' - roleinfo = {'keyids': ['123'], 'threshold': 1} - - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.clear_roledb, repository_name) - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertEqual(roleinfo['keyids'], tuf.roledb.get_role_keyids(rolename, repository_name)) - tuf.roledb.clear_roledb(repository_name) - self.assertFalse(tuf.roledb.role_exists(rolename, repository_name)) - - # Reset the roledb so that subsequent tests have access to the original, - # default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test condition for invalid and unexpected arguments. - self.assertRaises(TypeError, tuf.roledb.clear_roledb, 'default', False, 'unexpected_argument') - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.clear_roledb, 123) - - - - def test_add_role(self): - # Test conditions where the arguments are valid. - self.assertEqual(0, len(tuf.roledb._roledb_dict['default'])) - rolename = 'targets' - roleinfo = {'keyids': ['123'], 'threshold': 1} - rolename2 = 'role1' - self.assertEqual(None, tuf.roledb.add_role(rolename, roleinfo)) - self.assertEqual(1, len(tuf.roledb._roledb_dict['default'])) - tuf.roledb.clear_roledb() - self.assertEqual(None, tuf.roledb.add_role(rolename, roleinfo)) - self.assertEqual(1, len(tuf.roledb._roledb_dict['default'])) - - # Verify that a role can be added to a non-default repository. 
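# The model these tests poke at directly through tuf.roledb._roledb_dict,
# sketched as plain data: a dict of repository name -> (rolename -> roleinfo).
sketch_roledb = {
    'default': {'targets': {'keyids': ['123'], 'threshold': 1}},
    'example_repository': {},
}
assert 'targets' in sketch_roledb['default']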
- repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.clear_roledb, - repository_name) - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertEqual(roleinfo['keyids'], tuf.roledb.get_role_keyids(rolename, - repository_name)) - - # Reset the roledb so that subsequent tests have access to a default - # roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where the arguments are improperly formatted. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, None, roleinfo) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, 123, roleinfo) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, [''], roleinfo) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, rolename, None) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, rolename, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, rolename, ['']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, rolename, roleinfo, 123) - - - # Test condition where the rolename already exists in the role database. - self.assertRaises(tuf.exceptions.RoleAlreadyExistsError, tuf.roledb.add_role, - rolename, roleinfo) - - # Test where the repository name does not exist in the role database. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.add_role, - 'new_role', roleinfo, 'non-existent') - - # Test conditions for invalid rolenames. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.add_role, ' badrole ', - roleinfo) - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.add_role, '/badrole/', - roleinfo) - - - - - - def test_role_exists(self): - # Test conditions where the arguments are valid. - rolename = 'targets' - roleinfo = {'keyids': ['123'], 'threshold': 1} - rolename2 = 'role1' - - self.assertEqual(False, tuf.roledb.role_exists(rolename)) - tuf.roledb.add_role(rolename, roleinfo) - tuf.roledb.add_role(rolename2, roleinfo) - self.assertEqual(True, tuf.roledb.role_exists(rolename)) - self.assertEqual(True, tuf.roledb.role_exists(rolename2)) - - # Verify that a role can be queried for a non-default repository. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.clear_roledb, repository_name) - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.role_exists, rolename, repository_name) - - tuf.roledb.create_roledb(repository_name) - self.assertEqual(False, tuf.roledb.role_exists(rolename, repository_name)) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertTrue(tuf.roledb.role_exists(rolename, repository_name)) - - # Reset the roledb so that subsequent tests have access to the original, - # default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where the arguments are improperly formatted. 
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.role_exists, None) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.role_exists, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.role_exists, ['rolename']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.role_exists, rolename, 123) - - # Test conditions for invalid rolenames. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.role_exists, '') - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.role_exists, ' badrole ') - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.role_exists, '/badrole/') - - - - - - def test_remove_role(self): - # Test conditions where the arguments are valid. - rolename = 'targets' - rolename2 = 'release' - rolename3 = 'django' - roleinfo = {'keyids': ['123'], 'threshold': 1} - roleinfo2 = {'keyids': ['123'], 'threshold': 1, 'delegations': - {'roles': [{'name': 'django', 'keyids': ['456'], 'threshold': 1}], - 'keys': {'456': {'keytype': 'rsa', 'keyval': {'public': '456'}}, - }}} - - tuf.roledb.add_role(rolename, roleinfo) - tuf.roledb.add_role(rolename2, roleinfo2) - tuf.roledb.add_role(rolename3, roleinfo) - - self.assertEqual(None, tuf.roledb.remove_role(rolename)) - self.assertEqual(True, rolename not in tuf.roledb._roledb_dict) - - # Verify that a role can be removed from a non-default repository. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.remove_role, rolename, repository_name) - tuf.roledb.create_roledb(repository_name) - - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertEqual(roleinfo['keyids'], tuf.roledb.get_role_keyids(rolename, repository_name)) - self.assertEqual(None, tuf.roledb.remove_role(rolename, repository_name)) - - # Verify that a role cannot be removed from a non-existent repository name. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.remove_role, rolename, 'non-existent') - - # Reset the roledb so that subsequent test have access to the original, - # default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where removing a role does not cause the removal of its - # delegated roles. The 'django' role should now only exist (after the - # removal of 'targets' in the previous test condition, and the removal - # of 'release' in the remove_role() call next. - self.assertEqual(None, tuf.roledb.remove_role(rolename2)) - self.assertEqual(1, len(tuf.roledb._roledb_dict['default'])) - - # Test conditions where the arguments are improperly formatted, - # contain invalid names, or haven't been added to the role database. - self._test_rolename(tuf.roledb.remove_role) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.remove_role, rolename, 123) - - - - - def test_get_rolenames(self): - # Test conditions where the arguments are valid. - rolename = 'targets' - rolename2 = 'role1' - roleinfo = {'keyids': ['123'], 'threshold': 1} - self.assertEqual([], tuf.roledb.get_rolenames()) - tuf.roledb.add_role(rolename, roleinfo) - tuf.roledb.add_role(rolename2, roleinfo) - self.assertEqual(set(['targets', 'role1']), - set(tuf.roledb.get_rolenames())) - - # Verify that rolenames can be retrieved for a role in a non-default - # repository. 
- repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_rolenames, repository_name) - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - tuf.roledb.add_role(rolename2, roleinfo, repository_name) - - self.assertEqual(set(['targets', 'role1']), - set(tuf.roledb.get_rolenames())) - - # Reset the roledb so that subsequent tests have access to the original, - # default repository. - tuf.roledb.remove_roledb(repository_name) - - # Test for invalid or improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_rolenames, 123) - - - - def test_get_role_info(self): - # Test conditions where the arguments are valid. - rolename = 'targets' - rolename2 = 'role1' - roleinfo = {'keyids': ['123'], 'threshold': 1} - roleinfo2 = {'keyids': ['456', '789'], 'threshold': 2} - self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.get_roleinfo, rolename) - tuf.roledb.add_role(rolename, roleinfo) - tuf.roledb.add_role(rolename2, roleinfo2) - - self.assertEqual(roleinfo, tuf.roledb.get_roleinfo(rolename)) - self.assertEqual(roleinfo2, tuf.roledb.get_roleinfo(rolename2)) - - # Verify that a roleinfo can be retrieved for a role in a non-default - # repository. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_roleinfo, - rolename, repository_name) - - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertEqual(roleinfo, tuf.roledb.get_roleinfo(rolename, repository_name)) - - # Verify that a roleinfo cannot be retrieved for a non-existent repository - # name. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_roleinfo, rolename, - 'non-existent') - - # Reset the roledb so that subsequent tests have access to the original, - # default roledb - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where the arguments are improperly formatted, contain - # invalid names, or haven't been added to the role database. - self._test_rolename(tuf.roledb.get_roleinfo) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_roleinfo, rolename, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_roleinfo, 123) - - - - def test_get_role_keyids(self): - # Test conditions where the arguments are valid. - rolename = 'targets' - rolename2 = 'role1' - roleinfo = {'keyids': ['123'], 'threshold': 1} - roleinfo2 = {'keyids': ['456', '789'], 'threshold': 2} - self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.get_role_keyids, rolename) - tuf.roledb.add_role(rolename, roleinfo) - tuf.roledb.add_role(rolename2, roleinfo2) - - self.assertEqual(['123'], tuf.roledb.get_role_keyids(rolename)) - self.assertEqual(set(['456', '789']), - set(tuf.roledb.get_role_keyids(rolename2))) - - # Verify that the role keyids can be retrieved for a role in a non-default - # repository. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_role_keyids, - rolename, repository_name) - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertEqual(['123'], tuf.roledb.get_role_keyids(rolename, repository_name)) - - # Verify that rolekeyids cannot be retrieved from a non-existent repository - # name. 
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_role_keyids, rolename, - 'non-existent') - - # Reset the roledb so that subsequent tests have access to the original, - # default roledb - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where the arguments are improperly formatted, contain - # invalid names, or haven't been added to the role database. - self._test_rolename(tuf.roledb.get_role_keyids) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_role_keyids, rolename, 123) - - - - def test_get_role_threshold(self): - # Test conditions where the arguments are valid. - rolename = 'targets' - rolename2 = 'role1' - roleinfo = {'keyids': ['123'], 'threshold': 1} - roleinfo2 = {'keyids': ['456', '789'], 'threshold': 2} - self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.get_role_threshold, rolename) - tuf.roledb.add_role(rolename, roleinfo) - tuf.roledb.add_role(rolename2, roleinfo2) - - self.assertEqual(1, tuf.roledb.get_role_threshold(rolename)) - self.assertEqual(2, tuf.roledb.get_role_threshold(rolename2)) - - # Verify that the threshold can be retrieved for a role in a non-default - # repository. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_role_threshold, - rolename, repository_name) - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertEqual(roleinfo['threshold'], tuf.roledb.get_role_threshold(rolename, repository_name)) - - # Verify that a role's threshold cannot be retrieved from a non-existent - # repository name. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_role_threshold, - rolename, 'non-existent') - - # Reset the roledb so that subsequent tests have access to the original, - # default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where the arguments are improperly formatted, - # contain invalid names, or haven't been added to the role database. - self._test_rolename(tuf.roledb.get_role_threshold) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_role_threshold, rolename, 123) - - - def test_get_role_paths(self): - # Test conditions where the arguments are valid. - rolename = 'targets' - rolename2 = 'role1' - roleinfo = {'keyids': ['123'], 'threshold': 1} - paths = ['a/b', 'c/d'] - roleinfo2 = {'keyids': ['456', '789'], 'threshold': 2, 'paths': paths} - self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.get_role_paths, rolename) - tuf.roledb.add_role(rolename, roleinfo) - tuf.roledb.add_role(rolename2, roleinfo2) - - self.assertEqual({}, tuf.roledb.get_role_paths(rolename)) - self.assertEqual(paths, tuf.roledb.get_role_paths(rolename2)) - - # Verify that role paths can be queried for roles in non-default - # repositories. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_role_paths, - rolename, repository_name) - - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename2, roleinfo2, repository_name) - self.assertEqual(roleinfo2['paths'], tuf.roledb.get_role_paths(rolename2, - repository_name)) - - # Reset the roledb so that subsequent roles have access to the original, - # default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where the arguments are improperly formatted, - # contain invalid names, or haven't been added to the role database. 
- self._test_rolename(tuf.roledb.get_role_paths) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_role_paths, rolename, 123) - - - - def test_get_delegated_rolenames(self): - # Test conditions where the arguments are valid. - rolename = 'unclaimed' - rolename2 = 'django' - rolename3 = 'release' - rolename4 = 'tuf' - - # unclaimed's roleinfo. - roleinfo = {'keyids': ['123'], 'threshold': 1, 'delegations': - {'roles': [{'name': 'django', 'keyids': ['456'], 'threshold': 1}, - {'name': 'tuf', 'keyids': ['888'], 'threshold': 1}], - 'keys': {'456': {'keytype': 'rsa', 'keyval': {'public': '456'}}, - }}} - - # django's roleinfo. - roleinfo2 = {'keyids': ['456'], 'threshold': 1, 'delegations': - {'roles': [{'name': 'release', 'keyids': ['789'], 'threshold': 1}], - 'keys': {'789': {'keytype': 'rsa', 'keyval': {'public': '789'}}, - }}} - - # release's roleinfo. - roleinfo3 = {'keyids': ['789'], 'threshold': 1, 'delegations': - {'roles': [], - 'keys': {}}} - - # tuf's roleinfo. - roleinfo4 = {'keyids': ['888'], 'threshold': 1, 'delegations': - {'roles': [], - 'keys': {}}} - - self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.get_delegated_rolenames, - rolename) - - tuf.roledb.add_role(rolename, roleinfo) - tuf.roledb.add_role(rolename2, roleinfo2) - tuf.roledb.add_role(rolename3, roleinfo3) - tuf.roledb.add_role(rolename4, roleinfo4) - - self.assertEqual(set(['django', 'tuf']), - set(tuf.roledb.get_delegated_rolenames(rolename))) - - self.assertEqual(set(['release']), - set(tuf.roledb.get_delegated_rolenames(rolename2))) - - self.assertEqual(set([]), - set(tuf.roledb.get_delegated_rolenames(rolename3))) - - self.assertEqual(set([]), - set(tuf.roledb.get_delegated_rolenames(rolename4))) - - # Verify that the delegated rolenames of a role in a non-default - # repository can be accessed. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_delegated_rolenames, - rolename, repository_name) - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertEqual(set(['django', 'tuf']), - set(tuf.roledb.get_delegated_rolenames(rolename, repository_name))) - - # Reset the roledb so that subsequent tests have access to the original, - # default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where the arguments are improperly formatted, - # contain invalid names, or haven't been added to the role database. - self._test_rolename(tuf.roledb.get_delegated_rolenames) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_delegated_rolenames, rolename, 123) - - - - def test_create_roledb_from_root_metadata(self): - # Test condition using a valid 'root_metadata' argument. 
- rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - rsakey2 = KEYS[1] - keyid2 = KEYS[1]['keyid'] - rsakey3 = KEYS[2] - keyid3 = KEYS[2]['keyid'] - keydict = {keyid: rsakey, keyid2: rsakey2} - roledict = {'root': {'keyids': [keyid], 'threshold': 1}, - 'targets': {'keyids': [keyid2], 'threshold': 1}} - version = 8 - consistent_snapshot = False - expires = '1985-10-21T01:21:00Z' - - root_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version='1.0.0', - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - self.assertEqual(None, - tuf.roledb.create_roledb_from_root_metadata(root_metadata)) - - # Ensure 'Root' and 'Targets' were added to the role database. - self.assertEqual([keyid], tuf.roledb.get_role_keyids('root')) - self.assertEqual([keyid2], tuf.roledb.get_role_keyids('targets')) - - # Test that a roledb is created for a non-default repository. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.clear_roledb, - repository_name) - tuf.roledb.create_roledb_from_root_metadata(root_metadata, repository_name) - self.assertEqual([keyid], tuf.roledb.get_role_keyids('root', repository_name)) - self.assertEqual([keyid2], tuf.roledb.get_role_keyids('targets', repository_name)) - - # Remove the example repository added to the roledb so that subsequent - # tests have access to an original, default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions for arguments with invalid formats. - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, None) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, '') - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, ['123']) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, {'bad': '123'}) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, root_metadata, 123) - - # Verify that the expected roles of a Root file are properly loaded. - tuf.roledb.clear_roledb() - roledict = {'root': {'keyids': [keyid], 'threshold': 1}, - 'release': {'keyids': [keyid3], 'threshold': 1}} - version = 8 - - # Add a third key for 'release'. - keydict[keyid3] = rsakey3 - - # Generate 'root_metadata' to verify that 'release' and 'root' are added - # to the role database. - - root_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version='1.0.0', - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - self.assertEqual(None, - tuf.roledb.create_roledb_from_root_metadata(root_metadata)) - - # Ensure only 'root' and 'release' were added to the role database. - self.assertEqual(2, len(tuf.roledb._roledb_dict['default'])) - self.assertEqual(True, tuf.roledb.role_exists('root')) - self.assertEqual(True, tuf.roledb.role_exists('release')) - - - - def test_update_roleinfo(self): - rolename = 'targets' - roleinfo = {'keyids': ['123'], 'threshold': 1} - tuf.roledb.add_role(rolename, roleinfo) - - # Test normal case. 
- tuf.roledb.update_roleinfo(rolename, roleinfo) - - # Verify that a roleinfo can be updated for a role in a non-default - # repository. - repository_name = 'example_repository' - mark_role_as_dirty = True - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.clear_roledb, repository_name) - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - tuf.roledb.update_roleinfo(rolename, roleinfo, mark_role_as_dirty, repository_name) - self.assertEqual(roleinfo['keyids'], tuf.roledb.get_role_keyids(rolename, repository_name)) - - # Reset the roledb so that subsequent tests can access the default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test for an unknown role. - self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.update_roleinfo, - 'unknown_rolename', roleinfo) - - # Verify that a roleinfo cannot be updated to a non-existent repository - # name. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.update_roleinfo, - 'new_rolename', roleinfo, False, 'non-existent') - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.update_roleinfo, 1, roleinfo) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.update_roleinfo, rolename, 1) - - repository_name = 'example_repository' - mark_role_as_dirty = True - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.update_roleinfo, rolename, - roleinfo, 1, repository_name) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.update_roleinfo, - rolename, mark_role_as_dirty, 123) - - - - def test_get_dirty_roles(self): - # Verify that the dirty roles of a role are returned. - rolename = 'targets' - roleinfo1 = {'keyids': ['123'], 'threshold': 1} - tuf.roledb.add_role(rolename, roleinfo1) - roleinfo2 = {'keyids': ['123'], 'threshold': 2} - mark_role_as_dirty = True - tuf.roledb.update_roleinfo(rolename, roleinfo2, mark_role_as_dirty) - # Note: The 'default' repository is searched if the repository name is - # not given to get_dirty_roles(). - self.assertEqual([rolename], tuf.roledb.get_dirty_roles()) - - # Verify that a list of dirty roles is returned for a non-default - # repository. - repository_name = 'example_repository' - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo1, repository_name) - tuf.roledb.update_roleinfo(rolename, roleinfo2, mark_role_as_dirty, repository_name) - self.assertEqual([rolename], tuf.roledb.get_dirty_roles(repository_name)) - - # Verify that dirty roles are not returned for a non-existent repository. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_dirty_roles, 'non-existent') - - # Reset the roledb so that subsequent tests have access to a default - # roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_dirty_roles, 123) - - - - def test_mark_dirty(self): - # Add a dirty role to roledb. - rolename = 'targets' - roleinfo1 = {'keyids': ['123'], 'threshold': 1} - tuf.roledb.add_role(rolename, roleinfo1) - rolename2 = 'dirty_role' - roleinfo2 = {'keyids': ['123'], 'threshold': 2} - mark_role_as_dirty = True - tuf.roledb.update_roleinfo(rolename, roleinfo1, mark_role_as_dirty) - # Note: The 'default' repository is searched if the repository name is - # not given to get_dirty_roles(). 
- self.assertEqual([rolename], tuf.roledb.get_dirty_roles()) - - tuf.roledb.mark_dirty(['dirty_role']) - self.assertEqual([rolename2, rolename], tuf.roledb.get_dirty_roles()) - - # Verify that a role cannot be marked as dirty for a non-existent - # repository. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.mark_dirty, - ['dirty_role'], 'non-existent') - - - - def test_unmark_dirty(self): - # Add a dirty role to roledb. - rolename = 'targets' - roleinfo1 = {'keyids': ['123'], 'threshold': 1} - tuf.roledb.add_role(rolename, roleinfo1) - rolename2 = 'dirty_role' - roleinfo2 = {'keyids': ['123'], 'threshold': 2} - tuf.roledb.add_role(rolename2, roleinfo2) - mark_role_as_dirty = True - tuf.roledb.update_roleinfo(rolename, roleinfo1, mark_role_as_dirty) - # Note: The 'default' repository is searched if the repository name is - # not given to get_dirty_roles(). - self.assertEqual([rolename], tuf.roledb.get_dirty_roles()) - tuf.roledb.update_roleinfo(rolename2, roleinfo2, mark_role_as_dirty) - - tuf.roledb.unmark_dirty(['dirty_role']) - self.assertEqual([rolename], tuf.roledb.get_dirty_roles()) - tuf.roledb.unmark_dirty(['targets']) - self.assertEqual([], tuf.roledb.get_dirty_roles()) - - # What happens for a role that isn't dirty? unmark_dirty() should just - # log a message. - tuf.roledb.unmark_dirty(['unknown_role']) - - # Verify that a role cannot be unmarked as dirty for a non-existent - # repository. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.unmark_dirty, - ['dirty_role'], 'non-existent') - - - def _test_rolename(self, test_function): - # Private function that tests the 'rolename' argument of 'test_function' - # for format, invalid name, and unknown role exceptions. - - # Test conditions where the arguments are improperly formatted. - self.assertRaises(securesystemslib.exceptions.FormatError, test_function, None) - self.assertRaises(securesystemslib.exceptions.FormatError, test_function, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, test_function, ['rolename']) - self.assertRaises(securesystemslib.exceptions.FormatError, test_function, {'a': 'b'}) - self.assertRaises(securesystemslib.exceptions.FormatError, test_function, ('a', 'b')) - self.assertRaises(securesystemslib.exceptions.FormatError, test_function, True) - - # Test condition where the 'rolename' has not been added to the role database. - self.assertRaises(tuf.exceptions.UnknownRoleError, test_function, 'badrole') - - # Test conditions for invalid rolenames. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, test_function, '') - self.assertRaises(securesystemslib.exceptions.InvalidNameError, test_function, ' badrole ') - self.assertRaises(securesystemslib.exceptions.InvalidNameError, test_function, '/badrole/') - - - -def setUpModule(): - # setUpModule() is called before any test cases run. - # Ensure the roledb has not been modified by a previous test, which may - # affect assumptions (i.e., empty roledb) made by the tests cases in this - # unit test. - tuf.roledb.clear_roledb() - -def tearDownModule(): - # tearDownModule() is called after all the tests have run. - # Ensure we clean up roledb. Courtesy is contagious, and it begins with - # test_roledb.py. - tuf.roledb.clear_roledb() - - - -# Run the unit tests. 
-if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_root_versioning_integration.py b/tests/test_root_versioning_integration.py deleted file mode 100755 index 5012029802..0000000000 --- a/tests/test_root_versioning_integration.py +++ /dev/null @@ -1,230 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_root_versioning_integration.py - - - Evan Cordell. - - - July 21, 2016. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Test root versioning for efficient root key rotation. -""" - - -import os -import logging -import tempfile -import shutil -import unittest -import sys - -import tuf -import tuf.log -import tuf.formats -import tuf.exceptions -import tuf.roledb -import tuf.keydb -import tuf.repository_tool as repo_tool - -from tests import utils - -import securesystemslib -import securesystemslib.storage - -logger = logging.getLogger(__name__) - -repo_tool.disable_console_log_messages() - - -class TestRepository(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.temporary_directory) - - def tearDown(self): - tuf.roledb.clear_roledb() - tuf.keydb.clear_keydb() - - def test_init(self): - # Test normal case. - storage_backend = securesystemslib.storage.FilesystemBackend() - repository = repo_tool.Repository('repository_directory/', - 'metadata_directory/', - 'targets_directory/', - storage_backend) - self.assertTrue(isinstance(repository.root, repo_tool.Root)) - self.assertTrue(isinstance(repository.snapshot, repo_tool.Snapshot)) - self.assertTrue(isinstance(repository.timestamp, repo_tool.Timestamp)) - self.assertTrue(isinstance(repository.targets, repo_tool.Targets)) - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, 3, - 'metadata_directory/', 'targets_directory', storage_backend) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - 'repository_directory', 3, 'targets_directory', storage_backend) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - 'repository_directory', 'metadata_directory', storage_backend, 3) - - - - def test_root_role_versioning(self): - # Test root role versioning - # - # 1. Import public and private keys. - # 2. Add verification keys. - # 3. Load signing keys. - # 4. Add target files. - # 5. Perform delegation. - # 6. writeall() - # - # Copy the target files from 'tuf/tests/repository_data' so that writeall() - # has target fileinfo to include in metadata. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - targets_directory = os.path.join(temporary_directory, 'repository', - repo_tool.TARGETS_DIRECTORY_NAME) - original_targets_directory = os.path.join('repository_data', - 'repository', 'targets') - shutil.copytree(original_targets_directory, targets_directory) - - # In this case, create_new_repository() creates the 'repository/' - # sub-directory in 'temporary_directory' if it does not exist. 
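# Condensed sketch of the six-step flow listed above, using the same
# repo_tool calls the test spells out in full below:
#
#   repository = repo_tool.create_new_repository(repository_directory)
#   pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path)  # (1)
#   repository.root.add_verification_key(pubkey)                         # (2)
#   repository.root.load_signing_key(root_privkey)                       # (3)
#   repository.targets.add_target('file1.txt')                           # (4)
#   repository.targets.delegate('role1', [role1_pubkey], ['file3.txt'])  # (5)
#   repository.writeall()                                                # (6)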
- repository_directory = os.path.join(temporary_directory, 'repository') - metadata_directory = os.path.join(repository_directory, - repo_tool.METADATA_STAGED_DIRECTORY_NAME) - repository = repo_tool.create_new_repository(repository_directory) - - - - - # (1) Load the public and private keys of the top-level roles, and one - # delegated role. - keystore_directory = os.path.join('repository_data', 'keystore') - - # Load the public keys. - root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub') - targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub') - snapshot_pubkey_path = os.path.join(keystore_directory, 'snapshot_key.pub') - timestamp_pubkey_path = os.path.join(keystore_directory, 'timestamp_key.pub') - role1_pubkey_path = os.path.join(keystore_directory, 'delegation_key.pub') - - root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path) - targets_pubkey = repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path) - snapshot_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path) - timestamp_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path) - role1_pubkey = repo_tool.import_ed25519_publickey_from_file(role1_pubkey_path) - - # Load the private keys. - root_privkey_path = os.path.join(keystore_directory, 'root_key') - targets_privkey_path = os.path.join(keystore_directory, 'targets_key') - snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key') - timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key') - role1_privkey_path = os.path.join(keystore_directory, 'delegation_key') - - root_privkey = \ - repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password') - targets_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, 'password') - snapshot_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path, - 'password') - timestamp_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path, - 'password') - role1_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(role1_privkey_path, - 'password') - - - # (2) Add top-level verification keys. - repository.root.add_verification_key(root_pubkey) - repository.targets.add_verification_key(targets_pubkey) - repository.snapshot.add_verification_key(snapshot_pubkey) - repository.timestamp.add_verification_key(timestamp_pubkey) - - - # (3) Load top-level signing keys. - repository.root.load_signing_key(root_privkey) - repository.targets.load_signing_key(targets_privkey) - repository.snapshot.load_signing_key(snapshot_privkey) - repository.timestamp.load_signing_key(timestamp_privkey) - - # (4) Add target files. - target1 = 'file1.txt' - target2 = 'file2.txt' - target3 = 'file3.txt' - repository.targets.add_target(target1) - repository.targets.add_target(target2) - - - # (5) Perform delegation. - repository.targets.delegate('role1', [role1_pubkey], [target3]) - repository.targets('role1').load_signing_key(role1_privkey) - - # (6) Write repository. - repository.writeall() - - self.assertTrue(os.path.exists(os.path.join(metadata_directory, 'root.json'))) - self.assertTrue(os.path.exists(os.path.join(metadata_directory, '1.root.json'))) - - - # Verify that the expected metadata is written. 
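# The version-prefixed naming convention verified below, as a sketch: after
# the Nth write of root, '<N>.root.json' is a frozen copy of that version
# and 'root.json' matches the newest one, which is what lets clients walk
# 1.root.json, 2.root.json, ... forward during root key rotation.
def sketch_versioned_root_name(version):
  return '{}.root.json'.format(version)

assert sketch_versioned_root_name(2) == '2.root.json'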
- root_filepath = os.path.join(metadata_directory, 'root.json') - root_1_filepath = os.path.join(metadata_directory, '1.root.json') - root_2_filepath = os.path.join(metadata_directory, '2.root.json') - old_root_signable = securesystemslib.util.load_json_file(root_filepath) - root_1_signable = securesystemslib.util.load_json_file(root_1_filepath) - - # Make a change to the root keys - repository.root.add_verification_key(targets_pubkey) - repository.root.load_signing_key(targets_privkey) - repository.root.threshold = 2 - repository.writeall() - - new_root_signable = securesystemslib.util.load_json_file(root_filepath) - root_2_signable = securesystemslib.util.load_json_file(root_2_filepath) - - for role_signable in [old_root_signable, new_root_signable, root_1_signable, root_2_signable]: - # Raise 'securesystemslib.exceptions.FormatError' if 'role_signable' is an - # invalid signable. - tuf.formats.check_signable_object_format(role_signable) - - # Verify contents of versioned roots - self.assertEqual(old_root_signable, root_1_signable) - self.assertEqual(new_root_signable, root_2_signable) - - self.assertEqual(root_1_signable['signed']['version'], 1) - self.assertEqual(root_2_signable['signed']['version'], 2) - - repository.root.remove_verification_key(root_pubkey) - repository.root.unload_signing_key(root_privkey) - repository.root.threshold = 2 - - # Errors, not enough signing keys to satisfy old threshold - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - - # No error, write() ignores root's threshold and allows it to be written - # to disk partially signed. - repository.write('root') - - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_sig.py b/tests/test_sig.py deleted file mode 100755 index a49c59c21c..0000000000 --- a/tests/test_sig.py +++ /dev/null @@ -1,546 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_sig.py - - - Geremy Condra - Vladimir Diaz - - - February 28, 2012. Based on a previous version of this module. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Test cases for sig.py. -""" - -import unittest -import logging -import copy -import sys - -import tuf -import tuf.log -import tuf.formats -import tuf.keydb -import tuf.roledb -import tuf.sig -import tuf.exceptions - -from tests import utils - -import securesystemslib -import securesystemslib.keys - -logger = logging.getLogger(__name__) - -# Set up the keys to use in our test cases. -KEYS = [] -for _ in range(3): - KEYS.append(securesystemslib.keys.generate_rsa_key(2048)) - - - -class TestSig(unittest.TestCase): - def setUp(self): - pass - - def tearDown(self): - tuf.roledb.clear_roledb() - tuf.keydb.clear_keydb() - - - def test_get_signature_status_no_role(self): - signable = {'signed': 'test', 'signatures': []} - - # A valid, but empty signature status. - sig_status = tuf.sig.get_signature_status(signable) - self.assertTrue(tuf.formats.SIGNATURESTATUS_SCHEMA.matches(sig_status)) - - self.assertEqual(0, sig_status['threshold']) - self.assertEqual([], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - # A valid signable, but non-existent role argument.
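# [Editor's sketch, derived from the assertions above: a dict matching SIGNATURESTATUS_SCHEMA has the rough shape {'threshold': 0, 'good_sigs': [], 'bad_sigs': [], 'unknown_sigs': [], 'untrusted_sigs': [], 'unknown_signing_schemes': []}, where each list holds keyids. As the call below shows, asking about a role that is not in the roledb raises tuf.exceptions.UnknownRoleError.]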
- self.assertRaises(tuf.exceptions.UnknownRoleError, - tuf.sig.get_signature_status, signable, 'unknown_role') - - # Should verify we are not adding a duplicate signature - # when doing the following action. Here we know 'signable' - # has only one signature so it's okay. - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - tuf.keydb.add_key(KEYS[0]) - - # Improperly formatted role. - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.sig.get_signature_status, signable, 1) - - # Not allowed to call verify() without having specified a role. - args = (signable, None) - self.assertRaises(securesystemslib.exceptions.Error, tuf.sig.verify, *args) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - - - def test_get_signature_status_bad_sig(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - signable['signed'] += 'signature no longer matches signed data' - - tuf.keydb.add_key(KEYS[0]) - threshold = 1 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, keyids=[KEYS[0]['keyid']], threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertEqual(1, sig_status['threshold']) - self.assertEqual([], sig_status['good_sigs']) - self.assertEqual([KEYS[0]['keyid']], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - self.assertFalse(tuf.sig.verify(signable, 'Root')) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - # Remove the role. - tuf.roledb.remove_role('Root') - - - def test_get_signature_status_unknown_signing_scheme(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - valid_scheme = KEYS[0]['scheme'] - KEYS[0]['scheme'] = 'unknown_signing_scheme' - tuf.keydb.add_key(KEYS[0]) - threshold = 1 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, keyids=[KEYS[0]['keyid']], threshold=threshold) - - tuf.roledb.add_role('root', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'root') - - self.assertEqual(1, sig_status['threshold']) - self.assertEqual([], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([KEYS[0]['keyid']], - sig_status['unknown_signing_schemes']) - - self.assertFalse(tuf.sig.verify(signable, 'root')) - - # Done. Let's remove the added key(s) from the key database. - KEYS[0]['scheme'] = valid_scheme - tuf.keydb.remove_key(KEYS[0]['keyid']) - # Remove the role. 
- tuf.roledb.remove_role('root') - - - def test_get_signature_status_single_key(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - threshold = 1 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, keyids=[KEYS[0]['keyid']], threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - tuf.keydb.add_key(KEYS[0]) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertEqual(1, sig_status['threshold']) - self.assertEqual([KEYS[0]['keyid']], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - self.assertTrue(tuf.sig.verify(signable, 'Root')) - - # Test for an unknown signature when 'role' is left unspecified. - sig_status = tuf.sig.get_signature_status(signable) - - self.assertEqual(0, sig_status['threshold']) - self.assertEqual([], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([KEYS[0]['keyid']], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - # Remove the role. - tuf.roledb.remove_role('Root') - - - def test_get_signature_status_below_threshold(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - tuf.keydb.add_key(KEYS[0]) - threshold = 2 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, - keyids=[KEYS[0]['keyid'], KEYS[2]['keyid']], - threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertEqual(2, sig_status['threshold']) - self.assertEqual([KEYS[0]['keyid']], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - self.assertFalse(tuf.sig.verify(signable, 'Root')) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - - # Remove the role. - tuf.roledb.remove_role('Root') - - - def test_get_signature_status_below_threshold_unrecognized_sigs(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - # Two keys sign it, but only one of them will be trusted. 
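# [Editor's note on the set-up below: KEYS[0] is listed in the role's keyids and added to the keydb, while KEYS[2] signs without being known to either, so its signature is expected to land in 'unknown_sigs' and must not count towards the threshold of 2.]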
- signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[2], signed)) - - tuf.keydb.add_key(KEYS[0]) - tuf.keydb.add_key(KEYS[1]) - threshold = 2 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, - keyids=[KEYS[0]['keyid'], KEYS[1]['keyid']], - threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertEqual(2, sig_status['threshold']) - self.assertEqual([KEYS[0]['keyid']], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([KEYS[2]['keyid']], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - self.assertFalse(tuf.sig.verify(signable, 'Root')) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - tuf.keydb.remove_key(KEYS[1]['keyid']) - - # Remove the role. - tuf.roledb.remove_role('Root') - - - def test_get_signature_status_below_threshold_unauthorized_sigs(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - # Two keys sign it, but one of them is only trusted for a different - # role. - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[1], signed)) - - tuf.keydb.add_key(KEYS[0]) - tuf.keydb.add_key(KEYS[1]) - threshold = 2 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, - keyids=[KEYS[0]['keyid'], KEYS[2]['keyid']], - threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, - keyids=[KEYS[1]['keyid'], KEYS[2]['keyid']], - threshold=threshold) - - tuf.roledb.add_role('Release', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertEqual(2, sig_status['threshold']) - self.assertEqual([KEYS[0]['keyid']], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([KEYS[1]['keyid']], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - self.assertFalse(tuf.sig.verify(signable, 'Root')) - - self.assertRaises(tuf.exceptions.UnknownRoleError, - tuf.sig.get_signature_status, signable, 'unknown_role') - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - tuf.keydb.remove_key(KEYS[1]['keyid']) - - # Remove the roles. - tuf.roledb.remove_role('Root') - tuf.roledb.remove_role('Release') - - - - def test_check_signatures_no_role(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - tuf.keydb.add_key(KEYS[0]) - - # No specific role we're considering. It's invalid to use the - # function tuf.sig.verify() without a role specified because - # tuf.sig.verify() is checking trust, as well. - args = (signable, None) - self.assertRaises(securesystemslib.exceptions.Error, tuf.sig.verify, *args) - - # Done. 
Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - - - - def test_verify_single_key(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - tuf.keydb.add_key(KEYS[0]) - threshold = 1 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, keyids=[KEYS[0]['keyid']], threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - # This will call verify() and return True if 'signable' is valid, - # False otherwise. - self.assertTrue(tuf.sig.verify(signable, 'Root')) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - - # Remove the roles. - tuf.roledb.remove_role('Root') - - - - def test_verify_must_not_count_duplicate_keyids_towards_threshold(self): - # Create and sign dummy metadata twice with same key - # Note that we use the non-deterministic rsassa-pss signing scheme, so - # creating the signature twice shows that we don't only detect duplicate - # signatures but also different signatures from the same key. - signable = {"signed" : "test", "signatures" : []} - signed = securesystemslib.formats.encode_canonical( - signable["signed"]).encode("utf-8") - signable["signatures"].append( - securesystemslib.keys.create_signature(KEYS[0], signed)) - signable["signatures"].append( - securesystemslib.keys.create_signature(KEYS[0], signed)) - - # 'get_signature_status' uses keys from keydb for verification - tuf.keydb.add_key(KEYS[0]) - - # Assert that 'get_signature_status' returns two good signatures ... - status = tuf.sig.get_signature_status( - signable, "root", keyids=[KEYS[0]["keyid"]], threshold=2) - self.assertTrue(len(status["good_sigs"]) == 2) - - # ... but only one counts towards the threshold - self.assertFalse( - tuf.sig.verify(signable, "root", keyids=[KEYS[0]["keyid"]], threshold=2)) - - # Clean-up keydb - tuf.keydb.remove_key(KEYS[0]["keyid"]) - - - - def test_verify_count_different_keyids_for_same_key_towards_threshold(self): - # Create and sign dummy metadata twice with same key but different keyids - signable = {"signed" : "test", "signatures" : []} - key_sha256 = copy.deepcopy(KEYS[0]) - key_sha256["keyid"] = "deadbeef256" - - key_sha512 = copy.deepcopy(KEYS[0]) - key_sha512["keyid"] = "deadbeef512" - - signed = securesystemslib.formats.encode_canonical( - signable["signed"]).encode("utf-8") - signable["signatures"].append( - securesystemslib.keys.create_signature(key_sha256, signed)) - signable["signatures"].append( - securesystemslib.keys.create_signature(key_sha512, signed)) - - # 'get_signature_status' uses keys from keydb for verification - tuf.keydb.add_key(key_sha256) - tuf.keydb.add_key(key_sha512) - - # Assert that the key only counts toward the threshold once - keyids = [key_sha256["keyid"], key_sha512["keyid"]] - self.assertFalse( - tuf.sig.verify(signable, "root", keyids=keyids, threshold=2)) - - # Clean-up keydb - tuf.keydb.remove_key(key_sha256["keyid"]) - tuf.keydb.remove_key(key_sha512["keyid"]) - - - - def test_verify_unrecognized_sig(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - # Two keys sign it, but only one of them will be trusted. 
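# [Editor's note: this test mirrors the unrecognized-signature status test above but checks only the boolean result of tuf.sig.verify(); with one good signature and one unrecognized one, a threshold of 2 is expected to fail.]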
- signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[2], signed)) - - tuf.keydb.add_key(KEYS[0]) - tuf.keydb.add_key(KEYS[1]) - threshold = 2 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, - keyids=[KEYS[0]['keyid'], KEYS[1]['keyid']], - threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - self.assertFalse(tuf.sig.verify(signable, 'Root')) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - tuf.keydb.remove_key(KEYS[1]['keyid']) - - # Remove the roles. - tuf.roledb.remove_role('Root') - - - - def test_generate_rsa_signature(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - self.assertEqual(1, len(signable['signatures'])) - signature = signable['signatures'][0] - self.assertEqual(KEYS[0]['keyid'], signature['keyid']) - - returned_signature = tuf.sig.generate_rsa_signature(signable['signed'], KEYS[0]) - self.assertTrue(securesystemslib.formats.SIGNATURE_SCHEMA.matches(returned_signature)) - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[1], signed)) - - self.assertEqual(2, len(signable['signatures'])) - signature = signable['signatures'][1] - self.assertEqual(KEYS[1]['keyid'], signature['keyid']) - - - - def test_may_need_new_keys(self): - # One untrusted key in 'signable'. - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - tuf.keydb.add_key(KEYS[1]) - threshold = 1 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, keyids=[KEYS[1]['keyid']], threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertTrue(tuf.sig.may_need_new_keys(sig_status)) - - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[1]['keyid']) - - # Remove the roles. - tuf.roledb.remove_role('Root') - - - def test_signable_has_invalid_format(self): - # get_signature_status() and verify() validate 'signable' before continuing. - # 'signable' must be of the form: {'signed': , 'signatures': [{}]}. - # Object types are checked as well. - signable = {'not_signed' : 'test', 'signatures' : []} - args = (signable['not_signed'], KEYS[0]) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.sig.get_signature_status, *args) - - # 'signatures' value must be a list. Let's try a dict. - signable = {'signed' : 'test', 'signatures' : {}} - args = (signable['signed'], KEYS[0]) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.sig.get_signature_status, *args) - - - -# Run unit test. 
-if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_slow_retrieval_attack.py b/tests/test_slow_retrieval_attack.py deleted file mode 100755 index 6cf2e1a837..0000000000 --- a/tests/test_slow_retrieval_attack.py +++ /dev/null @@ -1,216 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_slow_retrieval_attack.py - - - Konstantin Andrianov. - - - March 13, 2012. - - April 5, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. Expanded comments and modified - previous setup. -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate a slow retrieval attack, where an attacker is able to prevent clients - from receiving updates by responding to client requests so slowly that updates - never complete. Test cases included for two types of slow retrievals: data - that slowly trickles in, and data that is only returned after a long time - delay. TUF prevents slow retrieval attacks by ensuring the download rate - does not fall below a required rate (tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED). - - Note: There is no difference between 'updates' and 'target' files. - - # TODO: Consider additional tests for slow metadata download. Tests here only - use slow target download. -""" - -import os -import tempfile -import shutil -import logging -import unittest -import sys - -import tuf.log -import tuf.client.updater as updater -import tuf.unittest_toolbox as unittest_toolbox -import tuf.repository_tool as repo_tool -import tuf.roledb -import tuf.keydb - -from tests import utils - -logger = logging.getLogger(__name__) -repo_tool.disable_console_log_messages() - - - -class TestSlowRetrieval(unittest_toolbox.Modified_TestCase): - - def setUp(self): - # Modified_Testcase can handle temp dir removal - unittest_toolbox.Modified_TestCase.setUp(self) - self.temporary_directory = self.make_temp_directory(directory=os.getcwd()) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = tempfile.mkdtemp(dir=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client') - original_keystore = os.path.join(original_repository_files, 'keystore') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.client_directory = os.path.join(temporary_repository_root, 'client') - self.keystore_directory = os.path.join(temporary_repository_root, 'keystore') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. 
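# [Editor's note: the copytree() calls below operate on throwaway copies, so each test mutates only its own tree under the temporary directory and the pristine 'repository_data' fixtures remain intact for other tests.]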
- shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_keystore, self.keystore_directory) - - - # Produce a longer target file than exists in the other test repository - # data, to provide for a long-duration slow attack. Then we'll write new - # top-level metadata that includes a hash over that file, and provide that - # metadata to the client as well. - - # The slow retrieval server, in mode 2 (1 byte per second), will only - # sleep for a total of (target file size) seconds. Add a target file - # that contains a sufficient number of bytes to trigger a slow retrieval - # error. A transfer should not be permitted to take 1 second per byte - # transferred. Because this test is currently expected to fail, I'm - # limiting the size to 10 bytes (10 seconds) to avoid expected testing - # delays. Consider increasing it again after the fix, to e.g. 400. - total_bytes = 10 - - repository = repo_tool.load_repository(self.repository_directory) - file1_filepath = os.path.join(self.repository_directory, 'targets', - 'file1.txt') - with open(file1_filepath, 'wb') as file_object: - data = 'a' * int(round(total_bytes)) - file_object.write(data.encode('utf-8')) - - key_file = os.path.join(self.keystore_directory, 'timestamp_key') - timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file, - 'password') - key_file = os.path.join(self.keystore_directory, 'snapshot_key') - snapshot_private = repo_tool.import_ed25519_privatekey_from_file(key_file, - 'password') - key_file = os.path.join(self.keystore_directory, 'targets_key') - targets_private = repo_tool.import_ed25519_privatekey_from_file(key_file, - 'password') - - repository.targets.load_signing_key(targets_private) - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Since we've changed the repository metadata in this setup (by lengthening - # a target file and then writing new metadata), we also have to update the - # client metadata to get to the expected initial state, where the client - # knows the right target info (and so expects the right, longer target - # length). - # We'll skip using updater.refresh since we don't have a server running, - # and we'll update the metadata locally, manually. - shutil.rmtree(os.path.join( - self.client_directory, self.repository_name, 'metadata', 'current')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata'), - os.path.join(self.client_directory, self.repository_name, 'metadata', - 'current')) - - # Set the url prefix required by the 'tuf/client/updater.py' updater. - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - - self.server_process_handler = utils.TestServerProcess(log=logger, - server='slow_retrieval_server.py') - - logger.info('Slow Retrieval Server process started.') - - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repositories_directory' with the temporary client - # directory copied from the original repository files.
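# [Editor's sketch: the mirror configuration assigned below follows the mirror dict shape consumed by tuf.client.updater, e.g. {'mirror1': {'url_prefix': 'http://localhost:<port>', 'metadata_path': 'metadata', 'targets_path': 'targets'}}. The slow-retrieval defence under test is tuned separately through tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED, as noted in the module docstring above.]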
- tuf.settings.repositories_directory = self.client_directory - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Create the repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Clean up the resources and flush the logged lines (if any). - self.server_process_handler.clean() - - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - def test_delay_before_send(self): - # Simulate a slow retrieval attack. - # When download begins, the server blocks the download for a long - # time by doing nothing before it sends the first byte of data. - - # Verify that the TUF client detects the excessively slow retrieval and - # refuses to continue the update process. - try: - file1_target = self.repository_updater.get_one_valid_targetinfo('file1.txt') - self.repository_updater.download_target(file1_target, self.client_directory) - - # Verify that the specific 'tuf.exceptions.SlowRetrievalError' exception is raised by - # each mirror. - except tuf.exceptions.NoWorkingMirrorError as exception: - for mirror_url, mirror_error in exception.mirror_errors.items(): - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'targets', 'file1.txt') - - # Verify that 'file1.txt' is the culprit. - self.assertEqual(url_file.replace('\\', '/'), mirror_url) - self.assertTrue(isinstance(mirror_error, tuf.exceptions.SlowRetrievalError)) - - else: - self.fail('TUF did not prevent a slow retrieval attack.') - - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_trusted_metadata_set.py b/tests/test_trusted_metadata_set.py index a9bce8f833..24e38a7bb5 100644 --- a/tests/test_trusted_metadata_set.py +++ b/tests/test_trusted_metadata_set.py @@ -1,115 +1,127 @@ +"""Unit tests for 'tuf/ngclient/_internal/trusted_metadata_set.py'.""" import logging -from typing import Optional, Union, Callable import os import sys import unittest from datetime import datetime +from typing import Callable, ClassVar, Dict, List, Optional, Tuple -from tuf import exceptions +from securesystemslib.interface import ( + import_ed25519_privatekey_from_file, + import_rsa_privatekey_from_file, +) +from securesystemslib.signer import SSlibSigner + +from tests import utils +from tuf.api import exceptions from tuf.api.metadata import ( Metadata, - Signed, + MetaFile, Root, - Timestamp, Snapshot, - MetaFile, - Targets + Targets, + Timestamp, ) +from tuf.api.serialization.json import JSONSerializer from tuf.ngclient._internal.trusted_metadata_set import TrustedMetadataSet -from securesystemslib.signer import SSlibSigner -from securesystemslib.interface import( - import_ed25519_privatekey_from_file, - import_rsa_privatekey_from_file -) - -from tests import utils - logger = logging.getLogger(__name__) +# pylint: disable=too-many-public-methods class TestTrustedMetadataSet(unittest.TestCase): + """Tests for all public API of the TrustedMetadataSet class.""" + keystore: ClassVar[Dict[str, SSlibSigner]] + metadata: ClassVar[Dict[str, bytes]] + repo_dir: ClassVar[str] + + @classmethod def modify_metadata( - self, rolename: str, modification_func: Callable[["Signed"], None] + cls, rolename: str,
modification_func: Callable ) -> bytes: """Instantiate metadata from rolename type, call modification_func and sign it again with cls.keystore[rolename] signer. Args: - rolename: A denoting the name of the metadata which will be modified. + rolename: Name of the metadata to be modified. modification_func: Function that will be called to modify the signed portion of metadata bytes. """ - metadata = Metadata.from_bytes(self.metadata[rolename]) + metadata = Metadata.from_bytes(cls.metadata[rolename]) modification_func(metadata.signed) - metadata.sign(self.keystore[rolename]) - return metadata.to_bytes() + metadata.sign(cls.keystore[rolename]) + return metadata.to_bytes(JSONSerializer(validate=True)) @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: cls.repo_dir = os.path.join( - os.getcwd(), 'repository_data', 'repository', 'metadata' + utils.TESTS_DIR, "repository_data", "repository", "metadata" ) cls.metadata = {} - for md in ["root", "timestamp", "snapshot", "targets", "role1", "role2"]: + for md in [ + Root.type, + Timestamp.type, + Snapshot.type, + Targets.type, + "role1", + "role2", + ]: with open(os.path.join(cls.repo_dir, f"{md}.json"), "rb") as f: cls.metadata[md] = f.read() - keystore_dir = os.path.join(os.getcwd(), 'repository_data', 'keystore') + keystore_dir = os.path.join( + utils.TESTS_DIR, "repository_data", "keystore" + ) cls.keystore = {} root_key_dict = import_rsa_privatekey_from_file( - os.path.join(keystore_dir, "root" + '_key'), - password="password" + os.path.join(keystore_dir, Root.type + "_key"), password="password" ) - cls.keystore["root"] = SSlibSigner(root_key_dict) - for role in ["delegation", "snapshot", "targets", "timestamp"]: + cls.keystore[Root.type] = SSlibSigner(root_key_dict) + for role in ["delegation", Snapshot.type, Targets.type, Timestamp.type]: key_dict = import_ed25519_privatekey_from_file( - os.path.join(keystore_dir, role + '_key'), - password="password" + os.path.join(keystore_dir, role + "_key"), password="password" ) cls.keystore[role] = SSlibSigner(key_dict) def hashes_length_modifier(timestamp: Timestamp) -> None: - timestamp.meta["snapshot.json"].hashes = None - timestamp.meta["snapshot.json"].length = None + timestamp.snapshot_meta.hashes = None + timestamp.snapshot_meta.length = None - cls.metadata["timestamp"] = cls.modify_metadata( - cls, "timestamp", hashes_length_modifier + cls.metadata[Timestamp.type] = cls.modify_metadata( + Timestamp.type, hashes_length_modifier ) def setUp(self) -> None: - self.trusted_set = TrustedMetadataSet(self.metadata["root"]) - + self.trusted_set = TrustedMetadataSet(self.metadata[Root.type]) def _update_all_besides_targets( self, timestamp_bytes: Optional[bytes] = None, snapshot_bytes: Optional[bytes] = None, - ): + ) -> None: """Update all metadata roles besides targets. Args: timestamp_bytes: Bytes used when calling trusted_set.update_timestamp(). - Default self.metadata["timestamp"]. + Default self.metadata[Timestamp.type]. snapshot_bytes: Bytes used when calling trusted_set.update_snapshot(). - Default self.metadata["snapshot"]. + Default self.metadata[Snapshot.type].
""" - timestamp_bytes = timestamp_bytes or self.metadata["timestamp"] + timestamp_bytes = timestamp_bytes or self.metadata[Timestamp.type] self.trusted_set.update_timestamp(timestamp_bytes) - snapshot_bytes = snapshot_bytes or self.metadata["snapshot"] + snapshot_bytes = snapshot_bytes or self.metadata[Snapshot.type] self.trusted_set.update_snapshot(snapshot_bytes) - - def test_update(self): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) - self.trusted_set.update_snapshot(self.metadata["snapshot"]) - self.trusted_set.update_targets(self.metadata["targets"]) + def test_update(self) -> None: + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) + self.trusted_set.update_targets(self.metadata[Targets.type]) self.trusted_set.update_delegated_targets( - self.metadata["role1"], "role1", "targets" + self.metadata["role1"], "role1", Targets.type ) self.trusted_set.update_delegated_targets( self.metadata["role2"], "role2", "role1" @@ -124,279 +136,322 @@ def test_update(self): self.assertTrue(count, 6) - def test_out_of_order_ops(self): + def test_update_metadata_output(self) -> None: + timestamp = self.trusted_set.update_timestamp( + self.metadata["timestamp"] + ) + snapshot = self.trusted_set.update_snapshot(self.metadata["snapshot"]) + targets = self.trusted_set.update_targets(self.metadata["targets"]) + delegeted_targets_1 = self.trusted_set.update_delegated_targets( + self.metadata["role1"], "role1", "targets" + ) + delegeted_targets_2 = self.trusted_set.update_delegated_targets( + self.metadata["role2"], "role2", "role1" + ) + self.assertIsInstance(timestamp.signed, Timestamp) + self.assertIsInstance(snapshot.signed, Snapshot) + self.assertIsInstance(targets.signed, Targets) + self.assertIsInstance(delegeted_targets_1.signed, Targets) + self.assertIsInstance(delegeted_targets_2.signed, Targets) + + def test_out_of_order_ops(self) -> None: # Update snapshot before timestamp with self.assertRaises(RuntimeError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) # Update root after timestamp with self.assertRaises(RuntimeError): - self.trusted_set.update_root(self.metadata["root"]) + self.trusted_set.update_root(self.metadata[Root.type]) # Update targets before snapshot with self.assertRaises(RuntimeError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) # update timestamp after snapshot with self.assertRaises(RuntimeError): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) # Update delegated targets before targets with self.assertRaises(RuntimeError): self.trusted_set.update_delegated_targets( - self.metadata["role1"], "role1", "targets" + self.metadata["role1"], "role1", Targets.type ) - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) # Update snapshot after sucessful targets update with self.assertRaises(RuntimeError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) 
self.trusted_set.update_delegated_targets( - self.metadata["role1"], "role1", "targets" + self.metadata["role1"], "role1", Targets.type ) + def test_root_with_invalid_json(self) -> None: + # Test loading initial root and root update + for test_func in [TrustedMetadataSet, self.trusted_set.update_root]: + # root is not json + with self.assertRaises(exceptions.RepositoryError): + test_func(b"") - def test_update_with_invalid_json(self): - # root.json not a json file at all - with self.assertRaises(exceptions.RepositoryError): - TrustedMetadataSet(b"") - # root.json is invalid - root = Metadata.from_bytes(self.metadata["root"]) - root.signed.version += 1 - with self.assertRaises(exceptions.RepositoryError): - TrustedMetadataSet(root.to_bytes()) + # root is invalid + root = Metadata.from_bytes(self.metadata[Root.type]) + root.signed.version += 1 + with self.assertRaises(exceptions.UnsignedMetadataError): + test_func(root.to_bytes()) - # update_root called with the wrong metadata type - with self.assertRaises(exceptions.RepositoryError): - self.trusted_set.update_root(self.metadata["snapshot"]) + # metadata is of wrong type + with self.assertRaises(exceptions.RepositoryError): + test_func(self.metadata[Snapshot.type]) - top_level_md = [ - (self.metadata["timestamp"], self.trusted_set.update_timestamp), - (self.metadata["snapshot"], self.trusted_set.update_snapshot), - (self.metadata["targets"], self.trusted_set.update_targets), + def test_top_level_md_with_invalid_json(self) -> None: + top_level_md: List[Tuple[bytes, Callable[[bytes], Metadata]]] = [ + (self.metadata[Timestamp.type], self.trusted_set.update_timestamp), + (self.metadata[Snapshot.type], self.trusted_set.update_snapshot), + (self.metadata[Targets.type], self.trusted_set.update_targets), ] for metadata, update_func in top_level_md: md = Metadata.from_bytes(metadata) # metadata is not json with self.assertRaises(exceptions.RepositoryError): update_func(b"") + # metadata is invalid md.signed.version += 1 - with self.assertRaises(exceptions.RepositoryError): + with self.assertRaises(exceptions.UnsignedMetadataError): update_func(md.to_bytes()) # metadata is of wrong type with self.assertRaises(exceptions.RepositoryError): - update_func(self.metadata["root"]) + update_func(self.metadata[Root.type]) update_func(metadata) - def test_update_root_new_root(self): + def test_update_root_new_root(self) -> None: # test that root can be updated with a new valid version def root_new_version_modifier(root: Root) -> None: root.version += 1 - root = self.modify_metadata("root", root_new_version_modifier) + root = self.modify_metadata(Root.type, root_new_version_modifier) self.trusted_set.update_root(root) - def test_update_root_new_root_cannot_be_verified_with_threshold(self): - # new_root data with threshold which cannot be verified. 
- root = Metadata.from_bytes(self.metadata["root"]) - # remove root role keyids representing root signatures - root.signed.roles["root"].keyids = [] + def test_update_root_new_root_fail_threshold_verification(self) -> None: + # Increase threshold in new root, do not add enough keys + def root_threshold_bump(root: Root) -> None: + root.version += 1 + root.roles[Root.type].threshold += 1 + + root = self.modify_metadata(Root.type, root_threshold_bump) with self.assertRaises(exceptions.UnsignedMetadataError): - self.trusted_set.update_root(root.to_bytes()) + self.trusted_set.update_root(root) - def test_update_root_new_root_ver_same_as_trusted_root_ver(self): - with self.assertRaises(exceptions.ReplayedMetadataError): - self.trusted_set.update_root(self.metadata["root"]) + def test_update_root_new_root_ver_same_as_trusted_root_ver(self) -> None: + with self.assertRaises(exceptions.BadVersionNumberError): + self.trusted_set.update_root(self.metadata[Root.type]) - def test_root_expired_final_root(self): + def test_root_expired_final_root(self) -> None: def root_expired_modifier(root: Root) -> None: root.expires = datetime(1970, 1, 1) - + # intermediate root can be expired - root = self.modify_metadata("root", root_expired_modifier) + root = self.modify_metadata(Root.type, root_expired_modifier) tmp_trusted_set = TrustedMetadataSet(root) # update timestamp to trigger final root expiry check with self.assertRaises(exceptions.ExpiredMetadataError): - tmp_trusted_set.update_timestamp(self.metadata["timestamp"]) + tmp_trusted_set.update_timestamp(self.metadata[Timestamp.type]) - - def test_update_timestamp_new_timestamp_ver_below_trusted_ver(self): + def test_update_timestamp_new_timestamp_ver_below_trusted_ver(self) -> None: # new_timestamp.version < trusted_timestamp.version def version_modifier(timestamp: Timestamp) -> None: timestamp.version = 3 - - timestamp = self.modify_metadata("timestamp", version_modifier) + + timestamp = self.modify_metadata(Timestamp.type, version_modifier) self.trusted_set.update_timestamp(timestamp) - with self.assertRaises(exceptions.ReplayedMetadataError): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + with self.assertRaises(exceptions.BadVersionNumberError): + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) - def test_update_timestamp_snapshot_ver_below_current(self): + def test_update_timestamp_snapshot_ver_below_current(self) -> None: def bump_snapshot_version(timestamp: Timestamp) -> None: - timestamp.meta["snapshot.json"].version = 2 + timestamp.snapshot_meta.version = 2 # set current known snapshot.json version to 2 - timestamp = self.modify_metadata("timestamp", bump_snapshot_version) + timestamp = self.modify_metadata(Timestamp.type, bump_snapshot_version) self.trusted_set.update_timestamp(timestamp) - # newtimestamp.meta["snapshot.json"].version < trusted_timestamp.meta["snapshot.json"].version - with self.assertRaises(exceptions.ReplayedMetadataError): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + # newtimestamp.meta.version < trusted_timestamp.meta.version + with self.assertRaises(exceptions.BadVersionNumberError): + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) - def test_update_timestamp_expired(self): + def test_update_timestamp_expired(self) -> None: # new_timestamp has expired def timestamp_expired_modifier(timestamp: Timestamp) -> None: timestamp.expires = datetime(1970, 1, 1) - # intermediate timestamp is allowed to be expired - timestamp = self.modify_metadata("timestamp", 
timestamp_expired_modifier) - self.trusted_set.update_timestamp(timestamp) + # expired intermediate timestamp is loaded but raises + timestamp = self.modify_metadata( + Timestamp.type, timestamp_expired_modifier + ) + with self.assertRaises(exceptions.ExpiredMetadataError): + self.trusted_set.update_timestamp(timestamp) - # update snapshot to trigger final timestamp expiry check + # snapshot update does start but fails because timestamp is expired with self.assertRaises(exceptions.ExpiredMetadataError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) - def test_update_snapshot_length_or_hash_mismatch(self): + def test_update_snapshot_length_or_hash_mismatch(self) -> None: def modify_snapshot_length(timestamp: Timestamp) -> None: - timestamp.meta["snapshot.json"].length = 1 + timestamp.snapshot_meta.length = 1 # set known snapshot.json length to 1 - timestamp = self.modify_metadata("timestamp", modify_snapshot_length) + timestamp = self.modify_metadata(Timestamp.type, modify_snapshot_length) self.trusted_set.update_timestamp(timestamp) with self.assertRaises(exceptions.RepositoryError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) - def test_update_snapshot_cannot_verify_snapshot_with_threshold(self): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) - snapshot = Metadata.from_bytes(self.metadata["snapshot"]) + def test_update_snapshot_fail_threshold_verification(self) -> None: + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) + snapshot = Metadata.from_bytes(self.metadata[Snapshot.type]) snapshot.signatures.clear() with self.assertRaises(exceptions.UnsignedMetadataError): self.trusted_set.update_snapshot(snapshot.to_bytes()) - def test_update_snapshot_version_different_timestamp_snapshot_version(self): + def test_update_snapshot_version_diverge_timestamp_snapshot_version( + self, + ) -> None: def timestamp_version_modifier(timestamp: Timestamp) -> None: - timestamp.meta["snapshot.json"].version = 2 + timestamp.snapshot_meta.version = 2 - timestamp = self.modify_metadata("timestamp", timestamp_version_modifier) + timestamp = self.modify_metadata( + Timestamp.type, timestamp_version_modifier + ) self.trusted_set.update_timestamp(timestamp) - #intermediate snapshot is allowed to not match meta version - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + # if intermediate snapshot version is incorrect, load it but also raise + with self.assertRaises(exceptions.BadVersionNumberError): + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) - # final snapshot must match meta version + # targets update starts but fails if snapshot version does not match with self.assertRaises(exceptions.BadVersionNumberError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) + def test_update_snapshot_file_removed_from_meta(self) -> None: + self._update_all_besides_targets(self.metadata[Timestamp.type]) - def test_update_snapshot_file_removed_from_meta(self): - self._update_all_besides_targets(self.metadata["timestamp"]) def remove_file_from_meta(snapshot: Snapshot) -> None: del snapshot.meta["targets.json"] # Test removing a meta_file in new_snapshot compared to the old snapshot - snapshot = self.modify_metadata("snapshot", remove_file_from_meta) + snapshot = self.modify_metadata(Snapshot.type, remove_file_from_meta) with 
self.assertRaises(exceptions.RepositoryError): self.trusted_set.update_snapshot(snapshot) - def test_update_snapshot_meta_version_decreases(self): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + def test_update_snapshot_meta_version_decreases(self) -> None: + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) def version_meta_modifier(snapshot: Snapshot) -> None: snapshot.meta["targets.json"].version += 1 - snapshot = self.modify_metadata("snapshot", version_meta_modifier) + snapshot = self.modify_metadata(Snapshot.type, version_meta_modifier) self.trusted_set.update_snapshot(snapshot) with self.assertRaises(exceptions.BadVersionNumberError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) + + def test_update_snapshot_expired_new_snapshot(self) -> None: + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) - def test_update_snapshot_expired_new_snapshot(self): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) def snapshot_expired_modifier(snapshot: Snapshot) -> None: snapshot.expires = datetime(1970, 1, 1) - # intermediate snapshot is allowed to be expired - snapshot = self.modify_metadata("snapshot", snapshot_expired_modifier) - self.trusted_set.update_snapshot(snapshot) + # expired intermediate snapshot is loaded but will raise + snapshot = self.modify_metadata( + Snapshot.type, snapshot_expired_modifier + ) + with self.assertRaises(exceptions.ExpiredMetadataError): + self.trusted_set.update_snapshot(snapshot) - # update targets to trigger final snapshot expiry check + # targets update does start but fails because snapshot is expired with self.assertRaises(exceptions.ExpiredMetadataError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_snapshot_successful_rollback_checks(self): + def test_update_snapshot_successful_rollback_checks(self) -> None: def meta_version_bump(timestamp: Timestamp) -> None: - timestamp.meta["snapshot.json"].version += 1 + timestamp.snapshot_meta.version += 1 def version_bump(snapshot: Snapshot) -> None: snapshot.version += 1 # load a "local" timestamp, then update to newer one: - self.trusted_set.update_timestamp(self.metadata["timestamp"]) - new_timestamp = self.modify_metadata("timestamp", meta_version_bump) + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) + new_timestamp = self.modify_metadata(Timestamp.type, meta_version_bump) self.trusted_set.update_timestamp(new_timestamp) - # load a "local" snapshot, then update to newer one: - self.trusted_set.update_snapshot(self.metadata["snapshot"]) - new_snapshot = self.modify_metadata("snapshot", version_bump) + # load a "local" snapshot with mismatching version (loading happens but + # BadVersionNumberError is raised), then update to newer one: + with self.assertRaises(exceptions.BadVersionNumberError): + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) + new_snapshot = self.modify_metadata(Snapshot.type, version_bump) self.trusted_set.update_snapshot(new_snapshot) # update targets to trigger final snapshot meta version check - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_targets_no_meta_in_snapshot(self): + def test_update_targets_no_meta_in_snapshot(self) -> None: def no_meta_modifier(snapshot: Snapshot) -> None: snapshot.meta = {} - snapshot = 
self.modify_metadata("snapshot", no_meta_modifier) - self._update_all_besides_targets(self.metadata["timestamp"], snapshot) + snapshot = self.modify_metadata(Snapshot.type, no_meta_modifier) + self._update_all_besides_targets( + self.metadata[Timestamp.type], snapshot + ) # remove meta information with information about targets from snapshot with self.assertRaises(exceptions.RepositoryError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_targets_hash_different_than_snapshot_meta_hash(self): + def test_update_targets_hash_diverge_from_snapshot_meta_hash(self) -> None: def meta_length_modifier(snapshot: Snapshot) -> None: for metafile_path in snapshot.meta: snapshot.meta[metafile_path] = MetaFile(version=1, length=1) - snapshot = self.modify_metadata("snapshot", meta_length_modifier) - self._update_all_besides_targets(self.metadata["timestamp"], snapshot) + snapshot = self.modify_metadata(Snapshot.type, meta_length_modifier) + self._update_all_besides_targets( + self.metadata[Timestamp.type], snapshot + ) # observed_hash != stored hash in snapshot meta for targets with self.assertRaises(exceptions.RepositoryError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_targets_version_different_snapshot_meta_version(self): + def test_update_targets_version_diverge_snapshot_meta_version(self) -> None: def meta_modifier(snapshot: Snapshot) -> None: for metafile_path in snapshot.meta: snapshot.meta[metafile_path] = MetaFile(version=2) - snapshot = self.modify_metadata("snapshot", meta_modifier) - self._update_all_besides_targets(self.metadata["timestamp"], snapshot) + snapshot = self.modify_metadata(Snapshot.type, meta_modifier) + self._update_all_besides_targets( + self.metadata[Timestamp.type], snapshot + ) # new_delegate.signed.version != meta.version stored in snapshot with self.assertRaises(exceptions.BadVersionNumberError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_targets_expired_new_target(self): + def test_update_targets_expired_new_target(self) -> None: self._update_all_besides_targets() # new_delegated_target has expired def target_expired_modifier(target: Targets) -> None: target.expires = datetime(1970, 1, 1) - targets = self.modify_metadata("targets", target_expired_modifier) + targets = self.modify_metadata(Targets.type, target_expired_modifier) with self.assertRaises(exceptions.ExpiredMetadataError): self.trusted_set.update_targets(targets) # TODO test updating over initial metadata (new keys, newer timestamp, etc) -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() +if __name__ == "__main__": + utils.configure_test_logging(sys.argv) + unittest.main() diff --git a/tests/test_tutorial.py b/tests/test_tutorial.py deleted file mode 100755 index fcdee24bba..0000000000 --- a/tests/test_tutorial.py +++ /dev/null @@ -1,412 +0,0 @@ -#!/usr/bin/env python - -""" - - test_tutorial.py - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Regression test for the TUF tutorial as laid out in TUTORIAL.md. - This essentially runs the tutorial and checks some results. 
- - There are a few deviations from the TUTORIAL.md instructions: - - steps that involve user input (like passphrases) are modified slightly - to not require user input - - use of path separators '/' is replaced by join() calls. (We assume that - when following the tutorial, users will correctly deal with path - separators for their system if they happen to be using non-Linux systems.) - - shell instructions are mimicked using Python commands - -""" - - -import unittest -import datetime # part of TUTORIAL.md -import os # part of TUTORIAL.md, but also needed separately -import shutil -import tempfile -import sys - -if sys.version_info >= (3, 3): - import unittest.mock as mock - -else: - import mock - -from tuf.repository_tool import * # part of TUTORIAL.md - -from tests import utils - -import securesystemslib.exceptions - -from securesystemslib.formats import encode_canonical # part of TUTORIAL.md -from securesystemslib.keys import create_signature # part of TUTORIAL.md - - -class TestTutorial(unittest.TestCase): - def setUp(self): - self.working_dir = os.getcwd() - self.test_dir = os.path.realpath(tempfile.mkdtemp()) - os.chdir(self.test_dir) - - def tearDown(self): - os.chdir(self.working_dir) - shutil.rmtree(self.test_dir) - - def test_tutorial(self): - """ - Run the TUTORIAL.md tutorial. - Note that anywhere the tutorial provides a command that prompts for the - user to enter a passphrase/password, this test is changed to simply provide - that as an argument. It's not worth trying to arrange automated testing of - the interactive password entry process here. Anywhere user entry has been - skipped from the tutorial instructions, "# Skipping user entry of password" - is written, with the original line below it, starting with ##. - """ - - # ----- Tutorial Section: Keys - - generate_and_write_rsa_keypair(password='password', filepath='root_key', bits=2048) - - # Skipping user entry of password - ## generate_and_write_rsa_keypair_with_prompt('root_key2') - generate_and_write_rsa_keypair(password='password', filepath='root_key2') - - # Tutorial tells users to expect these files to exist: - # ['root_key', 'root_key.pub', 'root_key2', 'root_key2.pub'] - for fname in ['root_key', 'root_key.pub', 'root_key2', 'root_key2.pub']: - self.assertTrue(os.path.exists(fname)) - - # Generate key pair at /path/to/KEYID - fname = generate_and_write_rsa_keypair(password="password") - self.assertTrue(os.path.exists(fname)) - - - # ----- Tutorial Section: Import RSA Keys - - public_root_key = import_rsa_publickey_from_file('root_key.pub') - - # Skipping user entry of password - ## private_root_key = import_rsa_privatekey_from_file('root_key') - private_root_key = import_rsa_privatekey_from_file('root_key', 'password') - - # Skipping user entry of password - ## import_rsa_privatekey_from_file('root_key') - with self.assertRaises(securesystemslib.exceptions.CryptoError): - import_rsa_privatekey_from_file('root_key', 'not_the_real_pw') - - - - # ----- Tutorial Section: Create and Import Ed25519 Keys - - # Skipping user entry of password - ## generate_and_write_ed25519_keypair_with_prompt('ed25519_key') - generate_and_write_ed25519_keypair(password='password', filepath='ed25519_key') - - public_ed25519_key = import_ed25519_publickey_from_file('ed25519_key.pub') - - # Skipping user entry of password - ## private_ed25519_key = import_ed25519_privatekey_from_file('ed25519_key') - private_ed25519_key = import_ed25519_privatekey_from_file( - 'ed25519_key', 'password') - - - - # ----- Tutorial Section: Create Top-level 
Metadata - repository = create_new_repository('repository') - repository.root.add_verification_key(public_root_key) - self.assertTrue(repository.root.keys) - - public_root_key2 = import_rsa_publickey_from_file('root_key2.pub') - repository.root.add_verification_key(public_root_key2) - - repository.root.threshold = 2 - private_root_key2 = import_rsa_privatekey_from_file( - 'root_key2', password='password') - - repository.root.load_signing_key(private_root_key) - repository.root.load_signing_key(private_root_key2) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - mock_logger.info.assert_called_with("Dirty roles: " + str(['root'])) - - # Patch logger to assert that it accurately logs the repo's status. Since - # the logger is called multiple times, we have to assert for the accurate - # sequence of calls or rather its call arguments. - with mock.patch("tuf.repository_lib.logger") as mock_logger: - repository.status() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - self.assertListEqual([ - repr('targets') + " role contains 0 / 1 public keys.", - repr('snapshot') + " role contains 0 / 1 public keys.", - repr('timestamp') + " role contains 0 / 1 public keys.", - repr('root') + " role contains 2 / 2 signatures.", - repr('targets') + " role contains 0 / 1 signatures." - ], [args[0] for args, _ in mock_logger.info.call_args_list]) - - generate_and_write_rsa_keypair(password='password', filepath='targets_key') - generate_and_write_rsa_keypair(password='password', filepath='snapshot_key') - generate_and_write_rsa_keypair(password='password', filepath='timestamp_key') - - repository.targets.add_verification_key(import_rsa_publickey_from_file( - 'targets_key.pub')) - repository.snapshot.add_verification_key(import_rsa_publickey_from_file( - 'snapshot_key.pub')) - repository.timestamp.add_verification_key(import_rsa_publickey_from_file( - 'timestamp_key.pub')) - - # Skipping user entry of password - ## private_targets_key = import_rsa_privatekey_from_file('targets_key') - private_targets_key = import_rsa_privatekey_from_file( - 'targets_key', 'password') - - # Skipping user entry of password - ## private_snapshot_key = import_rsa_privatekey_from_file('snapshot_key') - private_snapshot_key = import_rsa_privatekey_from_file( - 'snapshot_key', 'password') - - # Skipping user entry of password - ## private_timestamp_key = import_rsa_privatekey_from_file('timestamp_key') - private_timestamp_key = import_rsa_privatekey_from_file( - 'timestamp_key', 'password') - - repository.targets.load_signing_key(private_targets_key) - repository.snapshot.load_signing_key(private_snapshot_key) - repository.timestamp.load_signing_key(private_timestamp_key) - - repository.timestamp.expiration = datetime.datetime(2080, 10, 28, 12, 8) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. 
u'') - mock_logger.info.assert_called_with("Dirty roles: " + - str(['root', 'snapshot', 'targets', 'timestamp'])) - - repository.writeall() - - - - # ----- Tutorial Section: Targets - # These next commands in the tutorial are shown as bash commands, so I'll - # just simulate this with some Python commands. - ## $ cd repository/targets/ - ## $ echo 'file1' > file1.txt - ## $ echo 'file2' > file2.txt - ## $ echo 'file3' > file3.txt - ## $ mkdir myproject; echo 'file4' > myproject/file4.txt - ## $ cd ../../ - - with open(os.path.join('repository', 'targets', 'file1.txt'), 'w') as fobj: - fobj.write('file1') - with open(os.path.join('repository', 'targets', 'file2.txt'), 'w') as fobj: - fobj.write('file2') - with open(os.path.join('repository', 'targets', 'file3.txt'), 'w') as fobj: - fobj.write('file3') - - os.mkdir(os.path.join('repository', 'targets', 'myproject')) - with open(os.path.join('repository', 'targets', 'myproject', 'file4.txt'), - 'w') as fobj: - fobj.write('file4') - - - repository = load_repository('repository') - - # TODO: replace the hard-coded list of targets with a helper - # method that returns a list of normalized relative target paths - list_of_targets = ['file1.txt', 'file2.txt', 'file3.txt'] - - repository.targets.add_targets(list_of_targets) - - self.assertTrue('file1.txt' in repository.targets.target_files) - self.assertTrue('file2.txt' in repository.targets.target_files) - self.assertTrue('file3.txt' in repository.targets.target_files) - - target4_filepath = 'myproject/file4.txt' - target4_abspath = os.path.abspath(os.path.join( - 'repository', 'targets', target4_filepath)) - octal_file_permissions = oct(os.stat(target4_abspath).st_mode)[4:] - custom_file_permissions = {'file_permissions': octal_file_permissions} - repository.targets.add_target(target4_filepath, custom_file_permissions) - # Note that target filepaths specified in the repo use '/' even on Windows. - # (This is important to make metadata platform-independent.) - self.assertTrue( - os.path.join(target4_filepath) in repository.targets.target_files) - - - # Skipping user entry of password - ## private_targets_key = import_rsa_privatekey_from_file('targets_key') - private_targets_key = import_rsa_privatekey_from_file( - 'targets_key', 'password') - repository.targets.load_signing_key(private_targets_key) - - # Skipping user entry of password - ## private_snapshot_key = import_rsa_privatekey_from_file('snapshot_key') - private_snapshot_key = import_rsa_privatekey_from_file( - 'snapshot_key', 'password') - repository.snapshot.load_signing_key(private_snapshot_key) - - # Skipping user entry of password - ## private_timestamp_key = import_rsa_privatekey_from_file('timestamp_key') - private_timestamp_key = import_rsa_privatekey_from_file( - 'timestamp_key', 'password') - repository.timestamp.load_signing_key(private_timestamp_key) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - mock_logger.info.assert_called_with( - "Dirty roles: " + str(['snapshot', 'targets', 'timestamp'])) - - repository.writeall() - - repository.targets.remove_target('myproject/file4.txt') - self.assertTrue(os.path.exists(os.path.join( - 'repository','targets', 'myproject', 'file4.txt'))) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. 
We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - mock_logger.info.assert_called_with( - "Dirty roles: " + str(['targets'])) - - repository.mark_dirty(['snapshot', 'timestamp']) - repository.writeall() - - - # ----- Tutorial Section: Excursion: Dump Metadata and Append Signature - signable_content = dump_signable_metadata( - os.path.join('repository', 'metadata.staged', 'timestamp.json')) - - # Skipping user entry of password - ## private_ed25519_key = import_ed25519_privatekey_from_file('ed25519_key') - private_ed25519_key = import_ed25519_privatekey_from_file('ed25519_key', 'password') - signature = create_signature( - private_ed25519_key, encode_canonical(signable_content).encode()) - append_signature( - signature, - os.path.join('repository', 'metadata.staged', 'timestamp.json')) - - - - # ----- Tutorial Section: Delegations - generate_and_write_rsa_keypair( - password='password', filepath='unclaimed_key', bits=2048) - public_unclaimed_key = import_rsa_publickey_from_file('unclaimed_key.pub') - repository.targets.delegate( - 'unclaimed', [public_unclaimed_key], ['myproject/*.txt']) - - repository.targets("unclaimed").add_target("myproject/file4.txt") - - # Skipping user entry of password - ## private_unclaimed_key = import_rsa_privatekey_from_file('unclaimed_key') - private_unclaimed_key = import_rsa_privatekey_from_file( - 'unclaimed_key', 'password') - repository.targets("unclaimed").load_signing_key(private_unclaimed_key) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - mock_logger.info.assert_called_with( - "Dirty roles: " + str(['targets', 'unclaimed'])) - - repository.mark_dirty(["snapshot", "timestamp"]) - repository.writeall() - - - # Simulate the following shell command: - ## $ cp -r "repository/metadata.staged/" "repository/metadata/" - shutil.copytree( - os.path.join('repository', 'metadata.staged'), - os.path.join('repository', 'metadata')) - - - # ----- Tutorial Section: Delegate to Hashed Bins - repository.targets('unclaimed').remove_target("myproject/file4.txt") - - targets = ['myproject/file4.txt'] - - # Patch logger to assert that it accurately logs the output of hashed bin - # delegation. The logger is called multiple times, first with info level - # then with warning level. So we have to assert for the accurate sequence - # of calls or rather its call arguments. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.targets('unclaimed').delegate_hashed_bins( - targets, [public_unclaimed_key], 32) - - self.assertListEqual([ - "Creating hashed bin delegations.\n" - "1 total targets.\n" - "32 hashed bins.\n" - "256 total hash prefixes.\n" - "Each bin ranges over 8 hash prefixes." - ] + ["Adding a verification key that has already been used."] * 32, - [ - args[0] for args, _ in - mock_logger.info.call_args_list + mock_logger.warning.call_args_list - ]) - - - for delegation in repository.targets('unclaimed').delegations: - delegation.load_signing_key(private_unclaimed_key) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. 
We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - mock_logger.info.assert_called_with( - "Dirty roles: " + str(['00-07', '08-0f', '10-17', '18-1f', '20-27', - '28-2f', '30-37', '38-3f', '40-47', '48-4f', '50-57', '58-5f', - '60-67', '68-6f', '70-77', '78-7f', '80-87', '88-8f', '90-97', - '98-9f', 'a0-a7', 'a8-af', 'b0-b7', 'b8-bf', 'c0-c7', 'c8-cf', - 'd0-d7', 'd8-df', 'e0-e7', 'e8-ef', 'f0-f7', 'f8-ff', 'unclaimed'])) - - repository.mark_dirty(["snapshot", "timestamp"]) - repository.writeall() - - # ----- Tutorial Section: How to Perform an Update - - # A separate tutorial is linked to for client use. That is not tested here. - create_tuf_client_directory("repository/", "client/tufrepo/") - - - - # ----- Tutorial Section: Test TUF Locally - - # TODO: Run subprocess to simulate the following bash instructions: - - # $ cd "repository/"; python3 -m http.server 8001 - # We next retrieve targets from the TUF repository and save them to client/. The client.py script is available to download metadata and files from a specified repository. In a different command-line prompt . . . - - # $ cd "client/" - # $ ls - # metadata/ - - # $ client.py --repo http://localhost:8001 file1.txt - # $ ls . targets/ - # .: - # metadata targets - - # targets/: - # file1.txt - - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_unittest_toolbox.py b/tests/test_unittest_toolbox.py deleted file mode 100755 index 5bd4169c87..0000000000 --- a/tests/test_unittest_toolbox.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_unittest_toolbox.py - - - Vladimir Diaz - - - July 14, 2017. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Test cases for unittest_toolbox.py. -""" - -import unittest -import logging -import shutil -import sys - -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -logger = logging.getLogger(__name__) - - -class TestUnittestToolbox(unittest_toolbox.Modified_TestCase): - def setUp(self): - unittest_toolbox.Modified_TestCase.setUp(self) - - def tearDown(self): - unittest_toolbox.Modified_TestCase.tearDown(self) - - - def test_tear_down_already_deleted_dir(self): - temp_directory = self.make_temp_directory() - - # Delete the temp directory to make sure unittest_toolbox doesn't - # complain about the missing temp_directory. - shutil.rmtree(temp_directory) - - -# Run the unit tests. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_updater.py b/tests/test_updater.py deleted file mode 100755 index 0c28e6ca5f..0000000000 --- a/tests/test_updater.py +++ /dev/null @@ -1,2138 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_updater.py - - - Konstantin Andrianov. - - - October 15, 2012. - - March 11, 2014. - Refactored to remove mocked modules and old repository tool dependence, use - exact repositories, and add realistic retrieval of files. -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. 
- - - 'test_updater.py' provides a collection of methods that test the public / - non-public methods and functions of 'tuf.client.updater.py'. - - The 'unittest_toolbox.py' module was created to provide additional testing - tools, such as automatically deleting temporary files created in test cases. - For more information, see 'tests/unittest_toolbox.py'. - - - Test cases here should follow a specific order (i.e., independent methods are - tested before dependent methods). More accurately, least dependent methods - are tested before most dependent methods. There is no reason to rewrite or - construct other methods that replicate already-tested methods solely for - testing purposes. This is possible because the 'unittest.TestCase' class - guarantees the order of unit tests. The 'test_something_A' method would - be tested before 'test_something_B'. To ensure the expected order of tests, - a number is placed after 'test' and before the method's name like so: - 'test_1_check_directory'. The number is a measure of dependence, where 1 is - less dependent than 2. -""" - -import os -import time -import shutil -import copy -import tempfile -import logging -import errno -import sys -import unittest -import json - -if sys.version_info >= (3, 3): - import unittest.mock as mock -else: - import mock - -import tuf -import tuf.exceptions -import tuf.log -import tuf.formats -import tuf.keydb -import tuf.roledb -import tuf.repository_tool as repo_tool -import tuf.repository_lib as repo_lib -import tuf.unittest_toolbox as unittest_toolbox -import tuf.client.updater as updater - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) -repo_tool.disable_console_log_messages() - - -class TestUpdater(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Needed because in some tests simple_server.py cannot be found. - # The reason is that the current working directory - # has been changed when executing a subprocess. - cls.SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), 'simple_server.py') - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served - # by the SimpleHTTPServer launched here. The test cases of 'test_updater.py' - # assume the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger, - server=cls.SIMPLE_SERVER_PATH) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flushes the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - shutil.rmtree(cls.temporary_directory) - - - - def setUp(self): - # We are inheriting from a custom class. 
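The server helper used above can be pictured with this rough stand-in; it is a sketch only: the real utils.TestServerProcess also captures the subprocess's log output and picks a free port, and the fixed port and sleep below are assumptions.

import subprocess
import sys
import time

def start_test_server(directory, port=8001):
    # Serve 'directory' over HTTP with the stdlib server, roughly what the
    # test helper does via simple_server.py. The fixed port is an assumption.
    proc = subprocess.Popen(
        [sys.executable, '-m', 'http.server', str(port)], cwd=directory)
    time.sleep(1)  # crude readiness wait; the real helper polls instead
    return proc

# Usage sketch: server = start_test_server('repository'); ...; server.kill()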
- unittest_toolbox.Modified_TestCase.setUp(self) - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_keystore = os.path.join(original_repository_files, 'keystore') - original_client = os.path.join(original_repository_files, 'client') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.keystore_directory = \ - os.path.join(temporary_repository_root, 'keystore') - - self.client_directory = os.path.join(temporary_repository_root, - 'client') - self.client_metadata = os.path.join(self.client_directory, - self.repository_name, 'metadata') - self.client_metadata_current = os.path.join(self.client_metadata, - 'current') - self.client_metadata_previous = os.path.join(self.client_metadata, - 'previous') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_keystore, self.keystore_directory) - - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repositories_directory' to the temporary client - # directory copied from the original repository files. - tuf.settings.repositories_directory = self.client_directory - - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Creating a repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - - # Metadata role keys are needed by the test cases to make changes to the - # repository (e.g., adding a new target file to 'targets.json' and then - # requesting a refresh()). - self.role_keys = _load_role_keys(self.keystore_directory) - - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Logs stdout and stderr from the server subprocess. - self.server_process_handler.flush_log() - - # Remove the temporary directory. - unittest_toolbox.Modified_TestCase.tearDown(self) - - - # UNIT TESTS. - - def test_1__init__exceptions(self): - # The client's repository requires a metadata directory (and the 'current' - # and 'previous' sub-directories), and at least the 'root.json' file. - # setUp(), called before each test case, instantiates the required updater - # objects and keys. 
The needed objects/data are available in - # 'self.repository_updater', 'self.client_directory', etc. - - - # Test: Invalid arguments. - # Invalid 'updater_name' argument. String expected. - self.assertRaises(securesystemslib.exceptions.FormatError, updater.Updater, 8, - self.repository_mirrors) - - # Invalid 'repository_mirrors' argument. 'tuf.formats.MIRRORDICT_SCHEMA' - # expected. - self.assertRaises(securesystemslib.exceptions.FormatError, updater.Updater, updater.Updater, 8) - - - # 'tuf.client.updater.py' requires that the client's repositories directory - # be configured in 'tuf.settings.py'. - tuf.settings.repositories_directory = None - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - # Restore 'tuf.settings.repositories_directory' to the original client - # directory. - tuf.settings.repositories_directory = self.client_directory - - # Test: repository does not exist. - self.assertRaises(tuf.exceptions.MissingLocalRepositoryError, updater.Updater, - 'test_non_existing_repository', self.repository_mirrors) - - # Test: empty client repository (i.e., no metadata directory). - metadata_backup = self.client_metadata + '.backup' - shutil.move(self.client_metadata, metadata_backup) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - # Restore the client's metadata directory. - shutil.move(metadata_backup, self.client_metadata) - - - # Test: repository with only a '{repository_directory}/metadata' directory. - # (i.e., missing the required 'current' and 'previous' sub-directories). - current_backup = self.client_metadata_current + '.backup' - previous_backup = self.client_metadata_previous + '.backup' - - shutil.move(self.client_metadata_current, current_backup) - shutil.move(self.client_metadata_previous, previous_backup) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - - # Restore the client's previous directory. The required 'current' directory - # is still missing. - shutil.move(previous_backup, self.client_metadata_previous) - - # Test: repository with only a '{repository_directory}/metadata/previous' - # directory. - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - # Restore the client's current directory. - shutil.move(current_backup, self.client_metadata_current) - - # Test: repository with a '{repository_directory}/metadata/current' - # directory, but the 'previous' directory is missing. - shutil.move(self.client_metadata_previous, previous_backup) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - shutil.move(previous_backup, self.client_metadata_previous) - - # Test: repository missing the required 'root.json' file. - client_root_file = os.path.join(self.client_metadata_current, 'root.json') - backup_root_file = client_root_file + '.backup' - shutil.move(client_root_file, backup_root_file) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - # Restore the client's 'root.json' file. - shutil.move(backup_root_file, client_root_file) - - # Test: Normal 'tuf.client.updater.Updater' instantiation. - updater.Updater('test_repository1', self.repository_mirrors) - - - - - - def test_1__load_metadata_from_file(self): - - # Setup - # Get the 'role1.json' filepath. 
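For orientation on the checks in test_1__init__exceptions above, the on-disk layout they enforce can be sketched as follows; the helper name and the exist_ok handling are illustrative, not part of the test fixtures.

import os
import shutil

def init_client_metadata(repositories_dir, repo_name, trusted_root_json):
    # The legacy client expects, per repository:
    #   <repositories_dir>/<repo_name>/metadata/current/root.json
    #   <repositories_dir>/<repo_name>/metadata/previous/
    # Removing any of these pieces triggers the errors asserted above.
    metadata = os.path.join(repositories_dir, repo_name, 'metadata')
    os.makedirs(os.path.join(metadata, 'current'), exist_ok=True)
    os.makedirs(os.path.join(metadata, 'previous'), exist_ok=True)
    shutil.copy(trusted_root_json,
                os.path.join(metadata, 'current', 'root.json'))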
Manually load the role metadata, and - compare it against the metadata loaded by '_load_metadata_from_file()'. - role1_filepath = \ - os.path.join(self.client_metadata_current, 'role1.json') - role1_meta = securesystemslib.util.load_json_file(role1_filepath) - - # Load the 'role1.json' file with _load_metadata_from_file, which should - # store the loaded metadata in the 'self.repository_updater.metadata' - # store. - self.assertEqual(len(self.repository_updater.metadata['current']), 4) - self.repository_updater._load_metadata_from_file('current', 'role1') - - # Verify that the correct number of metadata objects has been loaded - # (i.e., only the 'role1.json' file should have been added). - self.assertEqual(len(self.repository_updater.metadata['current']), 5) - - # Verify that the content of the loaded role1 metadata is valid. - self.assertEqual(self.repository_updater.metadata['current']['role1'], - role1_meta['signed']) - - # Verify that _load_metadata_from_file() doesn't raise an exception for - # improperly formatted metadata, and doesn't load the bad file. - with open(role1_filepath, 'ab') as file_object: - file_object.write(b'bad JSON data') - - self.repository_updater._load_metadata_from_file('current', 'role1') - self.assertEqual(len(self.repository_updater.metadata['current']), 5) - - # Test that we fail gracefully if we can't deserialize a meta file. - self.repository_updater._load_metadata_from_file('current', 'empty_file') - self.assertFalse('empty_file' in self.repository_updater.metadata['current']) - - # Test invalid metadata set argument (must be either - # 'current' or 'previous'.) - self.assertRaises(securesystemslib.exceptions.Error, - self.repository_updater._load_metadata_from_file, - 'bad_metadata_set', 'role1') - - - - - def test_1__rebuild_key_and_role_db(self): - # Setup - root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) - root_metadata = self.repository_updater.metadata['current']['root'] - root_threshold = root_metadata['roles']['root']['threshold'] - number_of_root_keys = len(root_metadata['keys']) - - self.assertEqual(root_roleinfo['threshold'], root_threshold) - - # Ensure we add 2 to the number of root keys (actually, the number of root - # keys multiplied by the number of keyid hash algorithms), to include the - # delegated targets key (+1 for its sha512 keyid). The delegated roles of - # 'targets.json' are also loaded when the repository object is - # instantiated. - - self.assertEqual(number_of_root_keys + 1, len(tuf.keydb._keydb_dict[self.repository_name])) - - # Test: normal case. - self.repository_updater._rebuild_key_and_role_db() - - root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) - self.assertEqual(root_roleinfo['threshold'], root_threshold) - - # _rebuild_key_and_role_db() will only rebuild the keys and roles specified - # in the 'root.json' file, unlike __init__(). Instantiating an updater - # object calls both _rebuild_key_and_role_db() and _import_delegations(). - self.assertEqual(number_of_root_keys, len(tuf.keydb._keydb_dict[self.repository_name])) - - # Test: properly updated roledb and keydb dicts if the Root role changes. 
- root_metadata = self.repository_updater.metadata['current']['root'] - root_metadata['roles']['root']['threshold'] = 8 - root_metadata['keys'].popitem() - - self.repository_updater._rebuild_key_and_role_db() - - root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) - self.assertEqual(root_roleinfo['threshold'], 8) - self.assertEqual(number_of_root_keys - 1, len(tuf.keydb._keydb_dict[self.repository_name])) - - - - - def test_1__update_versioninfo(self): - # Tests - # Verify that the 'self.versioninfo' dictionary is empty (it starts off - # empty and is only populated if _update_versioninfo() is called). - versioninfo_dict = self.repository_updater.versioninfo - self.assertEqual(len(versioninfo_dict), 0) - - # Load the versioninfo of the top-level Targets role. This action - # populates the 'self.versioninfo' dictionary. - self.repository_updater._update_versioninfo('targets.json') - self.assertEqual(len(versioninfo_dict), 1) - self.assertTrue(tuf.formats.FILEINFODICT_SCHEMA.matches(versioninfo_dict)) - - # The Snapshot role stores the version numbers of all the roles available - # on the repository. Load Snapshot to extract the version number of - # 'targets.json', and compare it against the one loaded by - # 'self.repository_updater'. - snapshot_filepath = os.path.join(self.client_metadata_current, 'snapshot.json') - snapshot_signable = securesystemslib.util.load_json_file(snapshot_filepath) - targets_versioninfo = snapshot_signable['signed']['meta']['targets.json'] - - # Verify that the manually loaded version number of targets.json matches - # the one loaded by the updater object. - self.assertTrue('targets.json' in versioninfo_dict) - self.assertEqual(versioninfo_dict['targets.json'], targets_versioninfo) - - # Verify that 'self.versioninfo' is incremented if another role is updated. - self.repository_updater._update_versioninfo('role1.json') - self.assertEqual(len(versioninfo_dict), 2) - - # Verify that 'self.versioninfo' is incremented if a non-existent role is - # requested, and has its versioninfo entry set to 'None'. - self.repository_updater._update_versioninfo('bad_role.json') - self.assertEqual(len(versioninfo_dict), 3) - self.assertEqual(versioninfo_dict['bad_role.json'], None) - - # Verify that the versioninfo specified in Timestamp is used if the Snapshot - # role hasn't been downloaded yet. - del self.repository_updater.metadata['current']['snapshot'] - self.repository_updater._update_versioninfo('snapshot.json') - self.assertEqual(versioninfo_dict['snapshot.json']['version'], 1) - - - - def test_1__refresh_must_not_count_duplicate_keyids_towards_threshold(self): - # Update the root threshold on the server repository and sign twice with 1 key. - repository = repo_tool.load_repository(self.repository_directory) - repository.root.threshold = 2 - repository.root.load_signing_key(self.role_keys['root']['private']) - - storage_backend = securesystemslib.storage.FilesystemBackend() - # The client uses the threshold from the previous root file to verify the - # new root. Thus we need to make two updates so that the threshold used for - # verification becomes 2. I.e. we bump the version, sign twice with the - # same key and write to disk '2.root.json' and '3.root.json'. 
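The property exercised by the loop below reduces to counting distinct verified keyids rather than raw signatures. A simplified illustration, where verify_fn is a placeholder for real signature verification:

def meets_threshold(signatures, threshold, verify_fn):
    # Appending a duplicate signature, as the loop below does, must not turn
    # a 1-of-2 root into an apparent 2-of-2: only unique keyids may count.
    verified_keyids = {sig['keyid'] for sig in signatures if verify_fn(sig)}
    return len(verified_keyids) >= threshold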
- for version in [2, 3]: - repository.root.version = version - info = tuf.roledb.get_roleinfo("root") - metadata = repo_lib.generate_root_metadata( - info["version"], info["expires"], False) - signed_metadata = repo_lib.sign_metadata( - metadata, info["keyids"], "root.json", "default") - signed_metadata["signatures"].append(signed_metadata["signatures"][0]) - live_root_path = os.path.join( - self.repository_directory, "metadata", "root.json") - - # Bypass server-side verification in 'write' or 'writeall', which would - # catch the unmet threshold. - # We also skip writing to 'metadata.staged' and copying to 'metadata', and - # instead write directly to 'metadata'. - repo_lib.write_metadata_file(signed_metadata, live_root_path, - info["version"], True, storage_backend) - - - # Update from the current '1.root.json' to '3.root.json' on the client and - # assert a raise of 'BadSignatureError' (caused by the unmet signature - # threshold). - try: - self.repository_updater.refresh() - - except tuf.exceptions.NoWorkingMirrorError as e: - mirror_errors = list(e.mirror_errors.values()) - self.assertTrue(len(mirror_errors) == 1) - self.assertTrue( - isinstance(mirror_errors[0], - securesystemslib.exceptions.BadSignatureError)) - self.assertEqual( - str(mirror_errors[0]), - repr("root") + " metadata has bad signature.") - - else: - self.fail( - "Expected a NoWorkingMirrorError composed of one BadSignatureError") - - - def test_1__update_fileinfo(self): - # Tests - # Verify that the 'self.fileinfo' dictionary is empty (it starts off empty - # and is only populated if _update_fileinfo() is called). - fileinfo_dict = self.repository_updater.fileinfo - self.assertEqual(len(fileinfo_dict), 0) - - # Load the fileinfo of the top-level root role. This populates the - # 'self.fileinfo' dictionary. - self.repository_updater._update_fileinfo('root.json') - self.assertEqual(len(fileinfo_dict), 1) - self.assertTrue(tuf.formats.FILEDICT_SCHEMA.matches(fileinfo_dict)) - root_filepath = os.path.join(self.client_metadata_current, 'root.json') - length, hashes = securesystemslib.util.get_file_details(root_filepath) - root_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - self.assertTrue('root.json' in fileinfo_dict) - self.assertEqual(fileinfo_dict['root.json'], root_fileinfo) - - # Verify that 'self.fileinfo' is incremented if another role is updated. - self.repository_updater._update_fileinfo('targets.json') - self.assertEqual(len(fileinfo_dict), 2) - - # Verify that 'self.fileinfo' is incremented if a non-existent role is - # requested, and has its fileinfo entry set to 'None'. - self.repository_updater._update_fileinfo('bad_role.json') - self.assertEqual(len(fileinfo_dict), 3) - self.assertEqual(fileinfo_dict['bad_role.json'], None) - - - - - def test_2__fileinfo_has_changed(self): - # Verify that the method returns 'False' if the file info was not changed. - root_filepath = os.path.join(self.client_metadata_current, 'root.json') - length, hashes = securesystemslib.util.get_file_details(root_filepath) - root_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - self.assertFalse(self.repository_updater._fileinfo_has_changed('root.json', - root_fileinfo)) - - # Verify that the method returns 'True' if the length or hashes were changed. - new_length = 8 - new_root_fileinfo = tuf.formats.make_targets_fileinfo(new_length, hashes) - self.assertTrue(self.repository_updater._fileinfo_has_changed('root.json', - new_root_fileinfo)) - # Hashes were changed. 
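The comparison this test drives can be summarized by a small predicate; the field names follow the TUF fileinfo format, but the exact control flow is an assumption, not the updater's own code:

def fileinfo_has_changed(trusted, new):
    # True when nothing was recorded before, or when the length or the
    # digest of any hash algorithm present in 'new' differs.
    if trusted is None:
        return True
    if trusted['length'] != new['length']:
        return True
    return any(trusted['hashes'].get(alg) != digest
               for alg, digest in new['hashes'].items())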
- new_hashes = {'sha256': self.random_string()} - new_root_fileinfo = tuf.formats.make_targets_fileinfo(length, new_hashes) - self.assertTrue(self.repository_updater._fileinfo_has_changed('root.json', - new_root_fileinfo)) - - # Verify that _fileinfo_has_changed() returns True if no fileinfo (or set - # to None) exists for some role. - self.assertTrue(self.repository_updater._fileinfo_has_changed('bad.json', - new_root_fileinfo)) - - saved_fileinfo = self.repository_updater.fileinfo['root.json'] - self.repository_updater.fileinfo['root.json'] = None - self.assertTrue(self.repository_updater._fileinfo_has_changed('root.json', - new_root_fileinfo)) - - - self.repository_updater.fileinfo['root.json'] = saved_fileinfo - new_root_fileinfo['hashes']['sha666'] = '666' - self.repository_updater._fileinfo_has_changed('root.json', - new_root_fileinfo) - - - - def test_2__import_delegations(self): - # Setup. - # In order to test '_import_delegations' the parent of the delegation - # has to be in Repository.metadata['current'], but it has to be inserted - # there without using '_load_metadata_from_file()' since it calls - # '_import_delegations()'. - repository_name = self.repository_updater.repository_name - tuf.keydb.clear_keydb(repository_name) - tuf.roledb.clear_roledb(repository_name) - - self.assertEqual(len(tuf.roledb._roledb_dict[repository_name]), 0) - self.assertEqual(len(tuf.keydb._keydb_dict[repository_name]), 0) - - self.repository_updater._rebuild_key_and_role_db() - - self.assertEqual(len(tuf.roledb._roledb_dict[repository_name]), 4) - - # Take into account the number of keyids algorithms supported by default, - # which this test condition expects to be two (sha256 and sha512). - self.assertEqual(4, len(tuf.keydb._keydb_dict[repository_name])) - - # Test: pass a role without delegations. - self.repository_updater._import_delegations('root') - - # Verify that there was no change to the roledb and keydb dictionaries by - # checking the number of elements in the dictionaries. - self.assertEqual(len(tuf.roledb._roledb_dict[repository_name]), 4) - # Take into account the number of keyid hash algorithms, which this - # test condition expects to be one - self.assertEqual(len(tuf.keydb._keydb_dict[repository_name]), 4) - - # Test: normal case, first level delegation. - self.repository_updater._import_delegations('targets') - - self.assertEqual(len(tuf.roledb._roledb_dict[repository_name]), 5) - # The number of root keys (times the number of key hash algorithms) + - # delegation's key (+1 for its sha512 keyid). - self.assertEqual(len(tuf.keydb._keydb_dict[repository_name]), 4 + 1) - - # Verify that roledb dictionary was added. - self.assertTrue('role1' in tuf.roledb._roledb_dict[repository_name]) - - # Verify that keydb dictionary was updated. - role1_signable = \ - securesystemslib.util.load_json_file(os.path.join(self.client_metadata_current, - 'role1.json')) - keyids = [] - for signature in role1_signable['signatures']: - keyids.append(signature['keyid']) - - for keyid in keyids: - self.assertTrue(keyid in tuf.keydb._keydb_dict[repository_name]) - - # Verify that _import_delegations() ignores invalid keytypes in the 'keys' - # field of parent role's 'delegations'. - existing_keyid = keyids[0] - - self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keytype'] = 'bad_keytype' - self.repository_updater._import_delegations('targets') - - # Restore the keytype of 'existing_keyid'. 
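For reference, the 'delegations' structure being patched in this test has roughly the shape walked below; the generator is a sketch of what importing involves, not the library's own loop:

def iter_delegations(targets_signed):
    # 'delegations' carries a key dictionary plus an ordered role list;
    # importing loads each key into the keydb and each role into the
    # roledb, which is why a bad 'keytype' or role 'name' matters here.
    delegations = targets_signed.get('delegations', {'keys': {}, 'roles': []})
    for keyid, key in delegations['keys'].items():
        yield 'key', keyid, key
    for role in delegations['roles']:
        yield 'role', role['name'], role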
- self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keytype'] = 'ed25519' - - # Verify that _import_delegations() raises an exception if one of the - # delegated keys is malformed. - valid_keyval = self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keyval'] - - self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keyval'] = 1 - self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._import_delegations, 'targets') - - self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keyval'] = valid_keyval - - # Verify that _import_delegations() raises an exception if one of the - # delegated roles is malformed. - self.repository_updater.metadata['current']['targets']\ - ['delegations']['roles'][0]['name'] = 1 - self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._import_delegations, 'targets') - - - - def test_2__versioninfo_has_been_updated(self): - # Verify that the method returns 'False' if a versioninfo was not changed. - snapshot_filepath = os.path.join(self.client_metadata_current, 'snapshot.json') - snapshot_signable = securesystemslib.util.load_json_file(snapshot_filepath) - targets_versioninfo = snapshot_signable['signed']['meta']['targets.json'] - - self.assertFalse(self.repository_updater._versioninfo_has_been_updated('targets.json', - targets_versioninfo)) - - # Verify that the method returns 'True' if the version number of - # 'targets.json' changes. - targets_versioninfo['version'] = 8 - self.assertTrue(self.repository_updater._versioninfo_has_been_updated('targets.json', - targets_versioninfo)) - - - - - - def test_2__move_current_to_previous(self): - # The test case will consist of removing a metadata file from the client's - # '{client_repository}/metadata/previous' directory, executing the method, - # and then verifying that the 'previous' directory contains the snapshot - # file. - previous_snapshot_filepath = os.path.join(self.client_metadata_previous, - 'snapshot.json') - os.remove(previous_snapshot_filepath) - self.assertFalse(os.path.exists(previous_snapshot_filepath)) - - # Verify that the current 'snapshot.json' is moved to the previous directory. - self.repository_updater._move_current_to_previous('snapshot') - self.assertTrue(os.path.exists(previous_snapshot_filepath)) - - - - - - def test_2__delete_metadata(self): - # This test will verify that 'root' metadata is never deleted. When a role - # is deleted, verify that the file is not present in the - # 'self.repository_updater.metadata' dictionary. - self.repository_updater._delete_metadata('root') - self.assertTrue('root' in self.repository_updater.metadata['current']) - - self.repository_updater._delete_metadata('timestamp') - self.assertFalse('timestamp' in self.repository_updater.metadata['current']) - - - - - - def test_2__ensure_not_expired(self): - # This test condition will verify that nothing is raised when a metadata - # file has a future expiration date. 
- root_metadata = self.repository_updater.metadata['current']['root'] - self.repository_updater._ensure_not_expired(root_metadata, 'root') - - # Metadata with an expiration time in the future should, of course, not - # count as expired - expires = tuf.formats.unix_timestamp_to_datetime(int(time.time() + 10)) - expires = expires.isoformat() + 'Z' - root_metadata['expires'] = expires - self.assertTrue(tuf.formats.ROOT_SCHEMA.matches(root_metadata)) - self.repository_updater._ensure_not_expired(root_metadata, 'root') - - # Metadata that expires at the exact current time is considered expired - expire_time = int(time.time()) - expires = \ - tuf.formats.unix_timestamp_to_datetime(expire_time).isoformat()+'Z' - root_metadata['expires'] = expires - mock_time = mock.Mock() - mock_time.return_value = expire_time - self.assertTrue(tuf.formats.ROOT_SCHEMA.matches(root_metadata)) - with mock.patch('time.time', mock_time): - self.assertRaises(tuf.exceptions.ExpiredMetadataError, - self.repository_updater._ensure_not_expired, - root_metadata, 'root') - - # Metadata that expires in the past is considered expired - expires = tuf.formats.unix_timestamp_to_datetime(int(time.time() - 10)) - expires = expires.isoformat() + 'Z' - root_metadata['expires'] = expires - self.assertTrue(tuf.formats.ROOT_SCHEMA.matches(root_metadata)) - self.assertRaises(tuf.exceptions.ExpiredMetadataError, - self.repository_updater._ensure_not_expired, - root_metadata, 'root') - - - - - - def test_3__update_metadata(self): - # Setup - # _update_metadata() downloads, verifies, and installs the specified - # metadata role. Remove knowledge of currently installed metadata and - # verify that they are re-installed after calling _update_metadata(). - - # This is the default metadata that we would create for the timestamp role, - # because it has no signed metadata for itself. - DEFAULT_TIMESTAMP_FILELENGTH = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH - - # This is the upper bound length for Targets metadata. - DEFAULT_TARGETS_FILELENGTH = tuf.settings.DEFAULT_TARGETS_REQUIRED_LENGTH - - # Save the versioninfo of 'targets.json,' needed later when re-installing - # with _update_metadata(). - targets_versioninfo = \ - self.repository_updater.metadata['current']['snapshot']['meta']\ - ['targets.json'] - - # Remove the currently installed metadata from the store and disk. Verify - # that the metadata dictionary is re-populated after calling - # _update_metadata(). - del self.repository_updater.metadata['current']['timestamp'] - del self.repository_updater.metadata['current']['targets'] - - timestamp_filepath = \ - os.path.join(self.client_metadata_current, 'timestamp.json') - targets_filepath = os.path.join(self.client_metadata_current, 'targets.json') - root_filepath = os.path.join(self.client_metadata_current, 'root.json') - os.remove(timestamp_filepath) - os.remove(targets_filepath) - - # Test: normal case. - # Verify 'timestamp.json' is properly installed. - self.assertFalse('timestamp' in self.repository_updater.metadata) - - logger.info('\nroleinfo: ' + repr(tuf.roledb.get_rolenames(self.repository_name))) - self.repository_updater._update_metadata('timestamp', - DEFAULT_TIMESTAMP_FILELENGTH) - self.assertTrue('timestamp' in self.repository_updater.metadata['current']) - os.path.exists(timestamp_filepath) - - # Verify 'targets.json' is properly installed. 
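Looking back at test_2__ensure_not_expired above, the rule it pins down (expiry at or before the current time is an error) reduces to the check below; stdlib time parsing is used as a simplified stand-in for the updater's own implementation:

import calendar
import time

def is_expired(expires_iso8601, now=None):
    # Boundary behavior matters: metadata expiring exactly 'now' counts as
    # expired, which is what the mocked time.time() case asserts above.
    now = time.time() if now is None else now
    expires = calendar.timegm(
        time.strptime(expires_iso8601, '%Y-%m-%dT%H:%M:%SZ'))
    return expires <= now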
- self.assertFalse('targets' in self.repository_updater.metadata['current']) - self.repository_updater._update_metadata('targets', - DEFAULT_TARGETS_FILELENGTH, - targets_versioninfo['version']) - self.assertTrue('targets' in self.repository_updater.metadata['current']) - - targets_signable = securesystemslib.util.load_json_file(targets_filepath) - loaded_targets_version = targets_signable['signed']['version'] - self.assertEqual(targets_versioninfo['version'], loaded_targets_version) - - # Test: Invalid / untrusted version numbers. - # Invalid version number for 'targets.json'. - self.assertRaises(tuf.exceptions.NoWorkingMirrorError, - self.repository_updater._update_metadata, - 'targets', DEFAULT_TARGETS_FILELENGTH, 88) - - # Verify that the specific exception raised is correct for the previous - # case. - try: - self.repository_updater._update_metadata('targets', - DEFAULT_TARGETS_FILELENGTH, 88) - - except tuf.exceptions.NoWorkingMirrorError as e: - for mirror_error in e.mirror_errors.values(): - assert isinstance(mirror_error, tuf.exceptions.BadVersionNumberError) - - else: - self.fail( - 'Expected a NoWorkingMirrorError composed of BadVersionNumberErrors') - - # Verify that the specific exception raised is correct for the previous - # case. The version number is checked, so the specific error in - # this case should be 'tuf.exceptions.BadVersionNumberError'. - try: - self.repository_updater._update_metadata('targets', - DEFAULT_TARGETS_FILELENGTH, - 88) - - except tuf.exceptions.NoWorkingMirrorError as e: - for mirror_error in e.mirror_errors.values(): - assert isinstance(mirror_error, tuf.exceptions.BadVersionNumberError) - - else: - self.fail( - 'Expected a NoWorkingMirrorError composed of BadVersionNumberErrors') - - - - - - def test_3__get_metadata_file(self): - - ''' - This test focuses on making sure that the updater rejects unknown or - badly-formatted TUF specification version numbers.... - ''' - - # Make note of the correct supported TUF specification version. - correct_specification_version = tuf.SPECIFICATION_VERSION - - # Change it long enough to write new metadata. - tuf.SPECIFICATION_VERSION = '0.9.0' - - repository = repo_tool.load_repository(self.repository_directory) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - - # Change the supported TUF specification version back to what it should be - # so that we can parse the metadata and see that the spec version in the - # metadata does not match the code's expected spec version. - tuf.SPECIFICATION_VERSION = correct_specification_version - - upperbound_filelength = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH - try: - self.repository_updater._get_metadata_file('timestamp', 'timestamp.json', - upperbound_filelength, 1) - - except tuf.exceptions.NoWorkingMirrorError as e: - # Note that this test provides a piece of metadata which would fail to - # be accepted -- with a different error -- if the specification version - # number were not a problem. 
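The gate this test exercises reduces to a major-version comparison; a simplified sketch, assuming 'MAJOR.MINOR.PATCH' strings rather than the updater's exact parsing:

def spec_version_compatible(metadata_spec_version, supported_spec_version):
    # '0.9.0' metadata against a '1.x.y' client fails this check, which is
    # why an UnsupportedSpecificationError is the expected mirror error here.
    return (metadata_spec_version.split('.')[0]
            == supported_spec_version.split('.')[0])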
- for mirror_error in e.mirror_errors.values(): - assert isinstance( - mirror_error, tuf.exceptions.UnsupportedSpecificationError) - - else: - self.fail( - 'Expected a failure to verify metadata when the metadata had a ' - 'specification version number that was unexpected. ' - 'No error was raised.') - - - - - - def test_3__update_metadata_if_changed(self): - # Setup. - # The client repository is initially loaded with only four top-level roles. - # Verify that the metadata store contains the metadata of only these four - # roles before updating the metadata of 'targets.json'. - self.assertEqual(len(self.repository_updater.metadata['current']), 4) - self.assertTrue('targets' in self.repository_updater.metadata['current']) - targets_path = os.path.join(self.client_metadata_current, 'targets.json') - self.assertTrue(os.path.exists(targets_path)) - self.assertEqual(self.repository_updater.metadata['current']['targets']['version'], 1) - - # Test: normal case. Update 'targets.json'. The version number should not - # change. - self.repository_updater._update_metadata_if_changed('targets') - - # Verify the current version of 'targets.json' has not changed. - self.assertEqual(self.repository_updater.metadata['current']['targets']['version'], 1) - - # Modify one target file on the remote repository. - repository = repo_tool.load_repository(self.repository_directory) - target3 = 'file3.txt' - - repository.targets.add_target(target3) - repository.root.version = repository.root.version + 1 - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.targets.load_signing_key(self.role_keys['targets']['private']) - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Update 'targets.json' and verify that the client's current 'targets.json' - # has been updated. 'timestamp' and 'snapshot' must be manually updated - # so that new 'targets' can be recognized. - DEFAULT_TIMESTAMP_FILELENGTH = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH - - self.repository_updater._update_metadata('timestamp', DEFAULT_TIMESTAMP_FILELENGTH) - self.repository_updater._update_metadata_if_changed('snapshot', 'timestamp') - self.repository_updater._update_metadata_if_changed('targets') - targets_path = os.path.join(self.client_metadata_current, 'targets.json') - self.assertTrue(os.path.exists(targets_path)) - self.assertTrue(self.repository_updater.metadata['current']['targets']) - self.assertEqual(self.repository_updater.metadata['current']['targets']['version'], 2) - - # Test for an invalid 'referenced_metadata' argument. - self.assertRaises(tuf.exceptions.RepositoryError, - self.repository_updater._update_metadata_if_changed, 'snapshot', 'bad_role') - - - - def test_3__targets_of_role(self): - # Setup. - # Extract the list of targets from 'targets.json', to be compared to what - # is returned by _targets_of_role('targets'). - targets_in_metadata = \ - self.repository_updater.metadata['current']['targets']['targets'] - - # Test: normal case. - targetinfos_list = self.repository_updater._targets_of_role('targets') - - # Verify that the list of targets was returned, and that it contains valid - # target files. 
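Stepping back, the manual sequence used in test_3__update_metadata_if_changed mirrors the client's top-level update order, since each role authenticates the version info of the next. In outline, using the same methods the test calls:

import tuf.settings

def refresh_top_level(repository_updater):
    # Timestamp first (fixed upper-bound length), then snapshot as
    # referenced by timestamp, then targets as referenced by snapshot.
    repository_updater._update_metadata(
        'timestamp', tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH)
    repository_updater._update_metadata_if_changed('snapshot', 'timestamp')
    repository_updater._update_metadata_if_changed('targets')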
- self.assertTrue(tuf.formats.TARGETINFOS_SCHEMA.matches(targetinfos_list)) - for targetinfo in targetinfos_list: - self.assertTrue((targetinfo['filepath'], targetinfo['fileinfo']) in targets_in_metadata.items()) - - - - - - def test_4_refresh(self): - # This unit test is based on adding an extra target file to the - # server and rebuilding all server-side metadata. All top-level metadata - # should be updated when the client calls refresh(). - - # First verify that an expired root metadata is updated. - expired_date = '1960-01-01T12:00:00Z' - self.repository_updater.metadata['current']['root']['expires'] = expired_date - self.repository_updater.refresh() - - # Second, verify that expired root metadata is not updated if - # 'unsafely_update_root_if_necessary' is explicitly set to 'False'. - expired_date = '1960-01-01T12:00:00Z' - self.repository_updater.metadata['current']['root']['expires'] = expired_date - self.assertRaises(tuf.exceptions.ExpiredMetadataError, - self.repository_updater.refresh, - unsafely_update_root_if_necessary=False) - - repository = repo_tool.load_repository(self.repository_directory) - target3 = 'file3.txt' - - repository.targets.add_target(target3) - repository.targets.load_signing_key(self.role_keys['targets']['private']) - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Reference 'self.repository_updater.metadata['current']['targets']'. Ensure - # 'target3' is not already specified. - targets_metadata = self.repository_updater.metadata['current']['targets'] - self.assertFalse(target3 in targets_metadata['targets']) - - # Verify the expected version numbers of the roles to be modified. - self.assertEqual(self.repository_updater.metadata['current']['targets']\ - ['version'], 1) - self.assertEqual(self.repository_updater.metadata['current']['snapshot']\ - ['version'], 1) - self.assertEqual(self.repository_updater.metadata['current']['timestamp']\ - ['version'], 1) - - # Test: normal case. 'targets.json' should now specify 'target3', and the - # following top-level metadata should have also been updated: - # 'snapshot.json' and 'timestamp.json'. - self.repository_updater.refresh() - - # Verify that the client's metadata was updated. - targets_metadata = self.repository_updater.metadata['current']['targets'] - self.assertTrue(target3 in targets_metadata['targets']) - - # Verify the expected version numbers of the updated roles. - self.assertEqual(self.repository_updater.metadata['current']['targets']\ - ['version'], 2) - self.assertEqual(self.repository_updater.metadata['current']['snapshot']\ - ['version'], 2) - self.assertEqual(self.repository_updater.metadata['current']['timestamp']\ - ['version'], 2) - - - - - - def test_4__refresh_targets_metadata(self): - # Setup. - # It is assumed that the client repository has only loaded the top-level - # metadata. Refresh the 'targets.json' metadata, including all delegated - # roles (i.e., the client should add the missing 'role1.json' metadata). - self.assertEqual(len(self.repository_updater.metadata['current']), 4) - - # Test: normal case. 
- self.repository_updater._refresh_targets_metadata(refresh_all_delegated_roles=True) - - # Verify that client's metadata files were refreshed successfully. - self.assertEqual(len(self.repository_updater.metadata['current']), 6) - - # Test for non-existing rolename. - self.repository_updater._refresh_targets_metadata('bad_rolename', - refresh_all_delegated_roles=False) - - # Test that non-json metadata in Snapshot is ignored. - self.repository_updater.metadata['current']['snapshot']['meta']['bad_role.xml'] = {} - self.repository_updater._refresh_targets_metadata(refresh_all_delegated_roles=True) - - - - def test_5_all_targets(self): - # Setup - # As with '_refresh_targets_metadata()', - - # Update top-level metadata before calling one of the "targets" methods, as - # recommended by 'updater.py'. - self.repository_updater.refresh() - - # Test: normal case. - with utils.ignore_deprecation_warnings('tuf.client.updater'): - all_targets = self.repository_updater.all_targets() - - # Verify format of 'all_targets', it should correspond to - # 'TARGETINFOS_SCHEMA'. - self.assertTrue(tuf.formats.TARGETINFOS_SCHEMA.matches(all_targets)) - - # Verify that there is a correct number of records in 'all_targets' list, - # and the expected filepaths specified in the metadata. On the targets - # directory of the repository, there should be 3 target files (2 of - # which are specified by 'targets.json'.) The delegated role 'role1' - # specifies 1 target file. The expected total number targets in - # 'all_targets' should be 3. - self.assertEqual(len(all_targets), 3) - - target_filepaths = [] - for target in all_targets: - target_filepaths.append(target['filepath']) - - self.assertTrue('file1.txt' in target_filepaths) - self.assertTrue('file2.txt' in target_filepaths) - self.assertTrue('file3.txt' in target_filepaths) - - - - - - def test_5_targets_of_role(self): - # Setup - # Remove knowledge of 'targets.json' from the metadata store. - self.repository_updater.metadata['current']['targets'] - - # Remove the metadata of the delegated roles. - #shutil.rmtree(os.path.join(self.client_metadata, 'targets')) - os.remove(os.path.join(self.client_metadata_current, 'targets.json')) - - # Extract the target files specified by the delegated role, 'role1.json', - # as available on the server-side version of the role. - role1_filepath = os.path.join(self.repository_directory, 'metadata', - 'role1.json') - role1_signable = securesystemslib.util.load_json_file(role1_filepath) - expected_targets = role1_signable['signed']['targets'] - - - # Test: normal case. - with utils.ignore_deprecation_warnings('tuf.client.updater'): - targetinfos = self.repository_updater.targets_of_role('role1') - - # Verify that the expected role files were downloaded and installed. - os.path.exists(os.path.join(self.client_metadata_current, 'targets.json')) - os.path.exists(os.path.join(self.client_metadata_current, 'targets', - 'role1.json')) - self.assertTrue('targets' in self.repository_updater.metadata['current']) - self.assertTrue('role1' in self.repository_updater.metadata['current']) - - # Verify that list of targets was returned and that it contains valid - # target files. - self.assertTrue(tuf.formats.TARGETINFOS_SCHEMA.matches(targetinfos)) - for targetinfo in targetinfos: - self.assertTrue((targetinfo['filepath'], targetinfo['fileinfo']) in expected_targets.items()) - - # Test: Invalid arguments. - # targets_of_role() expected a string rolename. 
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater.targets_of_role,
- 8)
- self.assertRaises(tuf.exceptions.UnknownRoleError, self.repository_updater.targets_of_role,
- 'unknown_rolename')
-
-
-
-
-
- def test_6_get_one_valid_targetinfo(self):
- # Setup.
- # Unlike some of the other tests, start up a fresh server here.
- # The SimpleHTTPServer started in setUpClass tends to time out on
- # Windows after a few tests.
-
- # Create a subprocess running a server.
- server_process_handler = utils.TestServerProcess(log=logger,
- server=self.SIMPLE_SERVER_PATH)
-
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(server_process_handler.port) + repository_basepath
-
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata', 'targets_path': 'targets'}}
-
- # Create a repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
- # Extract the file information of the targets specified in 'targets.json'.
- self.repository_updater.refresh()
- targets_metadata = self.repository_updater.metadata['current']['targets']
-
- target_files = targets_metadata['targets']
- # Extract a random target from 'target_files', which will be compared to
- # what is returned by get_one_valid_targetinfo(). Restore the popped target
- # (dict value stored in the metadata store) so that it can be found later.
- filepath, fileinfo = target_files.popitem()
- target_files[filepath] = fileinfo
-
- target_targetinfo = self.repository_updater.get_one_valid_targetinfo(filepath)
- self.assertTrue(tuf.formats.TARGETINFO_SCHEMA.matches(target_targetinfo))
- self.assertEqual(target_targetinfo['filepath'], filepath)
- self.assertEqual(target_targetinfo['fileinfo'], fileinfo)
-
- # Test: invalid target path.
- self.assertRaises(tuf.exceptions.UnknownTargetError,
- self.repository_updater.get_one_valid_targetinfo,
- self.random_path().lstrip(os.sep).lstrip('/'))
-
- # Test updater.get_one_valid_targetinfo() backtracking behavior (enabled
- # by default).
- targets_directory = os.path.join(self.repository_directory, 'targets')
- os.makedirs(os.path.join(targets_directory, 'foo'))
-
- foo_package = 'foo/foo1.1.tar.gz'
- foo_pattern = 'foo/foo*.tar.gz'
-
- foo_fullpath = os.path.join(targets_directory, foo_package)
- with open(foo_fullpath, 'wb') as file_object:
- file_object.write(b'new release')
-
- # Modify delegations on the remote repository to test backtracking behavior.
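- # 'role3' is delegated the 'foo*' pattern but signs no matching target,
- # while 'role4' signs 'foo/foo1.1.tar.gz'; resolving the target thus
- # requires backtracking past 'role3' into 'role4'.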
- repository = repo_tool.load_repository(self.repository_directory)
-
-
- repository.targets.delegate('role3', [self.role_keys['targets']['public']],
- [foo_pattern])
-
- repository.targets.delegate('role4', [self.role_keys['targets']['public']],
- [foo_pattern], list_of_targets=[foo_package])
- repository.targets('role4').add_target(foo_package)
-
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.targets('role3').load_signing_key(self.role_keys['targets']['private'])
- repository.targets('role4').load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
-
- # updater.get_one_valid_targetinfo() should find 'foo1.1.tar.gz' by
- # backtracking past 'role3' to 'role4'; the non-terminating 'role3'
- # allows backtracking.
- self.repository_updater.refresh()
- self.repository_updater.get_one_valid_targetinfo('foo/foo1.1.tar.gz')
-
- # A leading path separator is disallowed.
- self.assertRaises(tuf.exceptions.FormatError,
- self.repository_updater.get_one_valid_targetinfo, '/foo/foo1.1.tar.gz')
-
- # Test when 'role3' does *not* allow backtracking. If 'foo/foo1.1.tar.gz'
- # is not provided by the authoritative, terminating 'role3',
- # updater.get_one_valid_targetinfo() should raise a
- # 'tuf.exceptions.UnknownTargetError' exception.
- repository = repo_tool.load_repository(self.repository_directory)
-
- repository.targets.revoke('role3')
- repository.targets.revoke('role4')
-
- # Ensure we delegate in trusted order (i.e., 'role3' has higher priority).
- repository.targets.delegate('role3', [self.role_keys['targets']['public']],
- [foo_pattern], terminating=True, list_of_targets=[])
-
- repository.targets.delegate('role4', [self.role_keys['targets']['public']],
- [foo_pattern], list_of_targets=[foo_package])
-
- repository.targets('role3').load_signing_key(self.role_keys['targets']['private'])
- repository.targets('role4').load_signing_key(self.role_keys['targets']['private'])
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Verify that 'tuf.exceptions.UnknownTargetError' is raised by
- # updater.get_one_valid_targetinfo().
- self.repository_updater.refresh()
- self.assertRaises(tuf.exceptions.UnknownTargetError,
- self.repository_updater.get_one_valid_targetinfo,
- 'foo/foo1.1.tar.gz')
-
- # Verify that a 'tuf.exceptions.FormatError' is raised for delegated paths
- # that contain a leading path separator.
- self.assertRaises(tuf.exceptions.FormatError,
- self.repository_updater.get_one_valid_targetinfo,
- '/foo/foo1.1.tar.gz')
-
- # Clean up the resources and flush the logged lines (if any).
- server_process_handler.clean()
-
-
-
-
- def test_6_download_target(self):
- # Create a temporary directory (destination directory of downloaded
- # targets) that will be passed as an argument to 'download_target()'.
- destination_directory = self.make_temp_directory()
- target_filepaths = \
- list(self.repository_updater.metadata['current']['targets']['targets'].keys())
-
- # Test: normal case.
- # Get the target info, which is an argument to 'download_target()'.
-
- # 'target_filepaths' is expected to have at least two targets. The first
- # target will be used to test against download_target(). The second
- # will be used to test against download_target() and a repository with
- # 'consistent_snapshot' set to True.
- target_filepath1 = target_filepaths.pop()
- targetinfo = self.repository_updater.get_one_valid_targetinfo(target_filepath1)
- self.repository_updater.download_target(targetinfo,
- destination_directory)
-
- download_filepath = \
- os.path.join(destination_directory, target_filepath1.lstrip('/'))
- self.assertTrue(os.path.exists(download_filepath))
- length, hashes = \
- securesystemslib.util.get_file_details(download_filepath,
- securesystemslib.settings.HASH_ALGORITHMS)
- download_targetfileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Add any 'custom' data from the repository's target fileinfo to the
- # 'download_targetfileinfo' object being tested.
- if 'custom' in targetinfo['fileinfo']:
- download_targetfileinfo['custom'] = targetinfo['fileinfo']['custom']
-
- self.assertEqual(targetinfo['fileinfo'], download_targetfileinfo)
-
- # Test when consistent snapshot is set. First, create a valid repository
- # with consistent snapshot enabled (root.json contains a
- # "consistent_snapshot" entry that the updater uses to correctly fetch
- # snapshots). The updater expects the existence of
- # '<version_number>.filename' metadata files if root.json sets
- # 'consistent_snapshot = True'.
-
- # The repository must be rewritten with 'consistent_snapshot' set.
- repository = repo_tool.load_repository(self.repository_directory)
-
- # Write metadata for all the top-level roles, since consistent snapshot
- # is now being set to true (i.e., the pre-generated repository isn't set
- # to support consistent snapshots). A new version of targets.json is needed
- # to ensure '<hash>.filename' target files are written to disk.
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.root.load_signing_key(self.role_keys['root']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
-
- repository.writeall(consistent_snapshot=True)
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # And ensure the client has the latest top-level metadata.
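- # (A repository written with consistent_snapshot=True also stores each
- # target under a '<hash>.filename' copy; the download tests below rely
- # on those copies.)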
- self.repository_updater.refresh()
-
- target_filepath2 = target_filepaths.pop()
- targetinfo2 = self.repository_updater.get_one_valid_targetinfo(target_filepath2)
- self.repository_updater.download_target(targetinfo2,
- destination_directory)
-
- # Check that the file has been successfully downloaded.
- download_filepath = os.path.join(destination_directory, target_filepath2)
- self.assertTrue(os.path.exists(download_filepath))
-
- # Remove the file so that it can be downloaded again in the next test.
- os.remove(download_filepath)
-
- # Test downloading with consistent snapshot enabled, but without adding
- # the hash of the file as a prefix to its name.
-
- file1_path = targetinfo2['filepath']
- file1_hashes = securesystemslib.util.get_file_hashes(
- os.path.join(self.repository_directory, 'targets', file1_path),
- hash_algorithms=['sha256', 'sha512'])
-
- # Currently, these three files exist in the repository directory:
- # "file1.txt", "<sha256_digest>.file1.txt" and "<sha512_digest>.file1.txt",
- # where the digests are the sha256 and sha512 hashes of file1.txt.
- # Remove the files with the hash digest prefix to ensure that
- # the served target file is not prefixed.
- os.remove(os.path.join(self.repository_directory, 'targets',
- file1_hashes['sha256'] + '.' + file1_path))
- os.remove(os.path.join(self.repository_directory, 'targets',
- file1_hashes['sha512'] + '.' + file1_path))
-
-
- self.repository_updater.download_target(targetinfo2,
- destination_directory,
- prefix_filename_with_hash=False)
-
- # Check that the file has been successfully downloaded.
- self.assertTrue(os.path.exists(download_filepath))
-
- # Test for a destination that cannot be written to (apart from a target
- # file that already exists at the destination), which raises an
- # exception.
- bad_destination_directory = 'bad' * 2000
-
- try:
- self.repository_updater.download_target(targetinfo, bad_destination_directory)
-
- except OSError as e:
- self.assertTrue(
- e.errno in [errno.ENAMETOOLONG, errno.ENOENT, errno.EINVAL],
- "wrong errno: " + str(e.errno))
-
- else:
- self.fail('No OSError raised')
-
-
- # Test: Invalid arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.repository_updater.download_target,
- 8, destination_directory)
-
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.repository_updater.download_target,
- targetinfo, 8)
-
- # Test:
- # Attempt a file download of a valid target; a download exception occurs
- # because the target is not within the mirror's confined target
- # directories. Adjust the mirrors dictionary so that the
- # 'confined_target_dirs' field contains at least one confined target
- # directory and excludes the needed target file.
- mirrors = self.repository_updater.mirrors
- for mirror_name, mirror_info in mirrors.items():
- mirrors[mirror_name]['confined_target_dirs'] = [self.random_path()]
-
- try:
- self.repository_updater.download_target(targetinfo,
- destination_directory)
-
- except tuf.exceptions.NoWorkingMirrorError as exception:
- # Ensure that no mirrors were found due to a mismatch in confined target
- # directories. get_list_of_mirrors() returns an empty list in this case,
- # which does not generate specific exception errors.
- self.assertEqual(len(exception.mirror_errors), 0)
-
- else:
- self.fail(
- 'Expected a NoWorkingMirrorError with zero mirror errors in it.')
-
-
-
-
-
- def test_7_updated_targets(self):
- # Verify that the list of targets returned by updated_targets() contains
- # all the files that need to be updated; these include both modified and
- # new target files. Also, confirm that files that need not be updated are
- # absent from the list.
- # Setup.
-
- # Unlike some of the other tests, start up a fresh server here.
- # The SimpleHTTPServer started in setUpClass tends to time out on
- # Windows after a few tests.
-
- # Create a subprocess running a server.
- server_process_handler = utils.TestServerProcess(log=logger,
- server=self.SIMPLE_SERVER_PATH)
-
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(server_process_handler.port) + repository_basepath
-
- # Set 'tuf.settings.repositories_directory' to the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.client_directory
-
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata', 'targets_path': 'targets'}}
-
- # Create a repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
- # Create a temporary directory which will hold the client's target files.
- destination_directory = self.make_temp_directory()
-
- # Get the list of target files. It will be used as an argument to the
- # 'updated_targets()' function.
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- all_targets = self.repository_updater.all_targets()
-
- # Test for duplicates and targets in the root directory of the repository.
- additional_target = all_targets[0].copy()
- all_targets.append(additional_target)
- additional_target_in_root_directory = additional_target.copy()
- additional_target_in_root_directory['filepath'] = 'file1.txt'
- all_targets.append(additional_target_in_root_directory)
-
- # At this point the client needs to update and download all targets.
- # Test: normal cases.
- updated_targets = \
- self.repository_updater.updated_targets(all_targets, destination_directory)
-
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- all_targets = self.repository_updater.all_targets()
-
- # It is assumed that the pre-generated repository specifies two target
- # files in 'targets.json' and one delegated target file in 'role1.json'.
- self.assertEqual(len(updated_targets), 3)
-
- # Test: download one of the targets.
- download_target = copy.deepcopy(updated_targets).pop()
- self.repository_updater.download_target(download_target,
- destination_directory)
-
- updated_targets = \
- self.repository_updater.updated_targets(all_targets, destination_directory)
-
- self.assertEqual(len(updated_targets), 2)
-
- # Test: download all the targets.
- for download_target in all_targets:
- self.repository_updater.download_target(download_target,
- destination_directory)
- updated_targets = \
- self.repository_updater.updated_targets(all_targets, destination_directory)
-
- self.assertEqual(len(updated_targets), 0)
-
-
- # Test: Invalid arguments.
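- # updated_targets() expects a list of targetinfo dicts and a string
- # destination directory; any other argument types should raise a
- # FormatError.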
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.repository_updater.updated_targets,
- 8, destination_directory)
-
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.repository_updater.updated_targets,
- all_targets, 8)
-
- # Modify one target file on the remote repository.
- repository = repo_tool.load_repository(self.repository_directory)
-
- target1 = os.path.join(self.repository_directory, 'targets', 'file1.txt')
- repository.targets.remove_target(os.path.basename(target1))
-
- length, hashes = securesystemslib.util.get_file_details(target1)
-
- repository.targets.add_target(os.path.basename(target1))
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
-
- with open(target1, 'ab') as file_object:
- file_object.write(b'append extra text')
-
- length, hashes = securesystemslib.util.get_file_details(target1)
-
- repository.targets.add_target(os.path.basename(target1))
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Ensure the client has up-to-date metadata.
- self.repository_updater.refresh()
-
- # Verify that the new target file is considered updated.
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- all_targets = self.repository_updater.all_targets()
- updated_targets = \
- self.repository_updater.updated_targets(all_targets, destination_directory)
- self.assertEqual(len(updated_targets), 1)
-
- # Clean up the resources and flush the logged lines (if any).
- server_process_handler.clean()
-
-
-
-
- def test_8_remove_obsolete_targets(self):
- # Setup.
- # Unlike some of the other tests, start up a fresh server here.
- # The SimpleHTTPServer started in setUpClass tends to time out on
- # Windows after a few tests.
-
- # Create a subprocess running a server.
- server_process_handler = utils.TestServerProcess(log=logger,
- server=self.SIMPLE_SERVER_PATH)
-
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(server_process_handler.port) + repository_basepath
-
- # Set 'tuf.settings.repositories_directory' to the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.client_directory
-
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata', 'targets_path': 'targets'}}
-
- # Create a repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
- # Create a temporary directory that will hold the client's target files.
- destination_directory = self.make_temp_directory()
-
- # Populate 'destination_directory' with all target files.
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- all_targets = self.repository_updater.all_targets()
-
- self.assertEqual(len(os.listdir(destination_directory)), 0)
-
- for target in all_targets:
- self.repository_updater.download_target(target, destination_directory)
-
- self.assertEqual(len(os.listdir(destination_directory)), 3)
-
- # Remove one target file from the server's repository.
- repository = repo_tool.load_repository(self.repository_directory)
- target1 = os.path.join(self.repository_directory, 'targets', 'file1.txt')
- repository.targets.remove_target(os.path.basename(target1))
-
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Update the client's metadata.
- self.repository_updater.refresh()
-
- # Test: normal case.
- # Verify the number of target files in 'destination_directory' (one of
- # them is now obsolete after the update made to the remote repository),
- # and call 'remove_obsolete_targets()'.
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- all_targets = self.repository_updater.all_targets()
-
- updated_targets = \
- self.repository_updater.updated_targets(all_targets,
- destination_directory)
-
- for updated_target in updated_targets:
- self.repository_updater.download_target(updated_target,
- destination_directory)
-
- self.assertEqual(len(os.listdir(destination_directory)), 3)
- self.repository_updater.remove_obsolete_targets(destination_directory)
- self.assertEqual(len(os.listdir(destination_directory)), 2)
-
- # Verify that, if there are no obsolete files, the number of files
- # in 'destination_directory' remains the same.
- self.repository_updater.remove_obsolete_targets(destination_directory)
- self.assertEqual(len(os.listdir(destination_directory)), 2)
-
- # Test coverage for a destination path that raises an exception for a
- # reason other than an already-removed target.
- bad_destination_directory = 'bad' * 2000
- self.repository_updater.remove_obsolete_targets(bad_destination_directory)
-
- # Test coverage for a target that is not specified in current metadata.
- del self.repository_updater.metadata['current']['targets']['targets']['file2.txt']
- self.repository_updater.remove_obsolete_targets(destination_directory)
-
- # Test coverage for a role that doesn't exist in the previously trusted set
- # of metadata.
- del self.repository_updater.metadata['previous']['targets']
- self.repository_updater.remove_obsolete_targets(destination_directory)
-
- # Clean up the resources and flush the logged lines (if any).
- server_process_handler.clean()
-
-
-
- def test_9__get_target_hash(self):
- # Test: normal case.
- # Test target filepaths with ASCII and non-ASCII characters.
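- # Note that the digests below are sha256 hashes of the target *filepaths*
- # themselves (as used for hashed bin delegations), not hashes of the
- # file contents.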
- expected_target_hashes = { - '/file1.txt': 'e3a3d89eb3b70ce3fbce6017d7b8c12d4abd5635427a0e8a238f53157df85b3d', - '/Jalape\xc3\xb1o': '78bfd5c314680545eb48ecad508aceb861f8d6e680f4fe1b791da45c298cda88' - } - for filepath, target_hash in expected_target_hashes.items(): - self.assertTrue(tuf.formats.RELPATH_SCHEMA.matches(filepath)) - self.assertTrue(securesystemslib.formats.HASH_SCHEMA.matches(target_hash)) - self.assertEqual(self.repository_updater._get_target_hash(filepath), target_hash) - - # Test for improperly formatted argument. - #self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._get_target_hash, 8) - - - - - def test_10__check_file_length(self): - # Test for exception if file object is not equal to trusted file length. - with tempfile.TemporaryFile() as temp_file_object: - temp_file_object.write(b'X') - temp_file_object.seek(0) - self.assertRaises(tuf.exceptions.DownloadLengthMismatchError, - self.repository_updater._check_file_length, - temp_file_object, 10) - - - - - - def test_10__targets_of_role(self): - # Test for non-existent role. - self.assertRaises(tuf.exceptions.UnknownRoleError, - self.repository_updater._targets_of_role, - 'non-existent-role') - - # Test for role that hasn't been loaded yet. - del self.repository_updater.metadata['current']['targets'] - self.assertEqual(len(self.repository_updater._targets_of_role('targets', - skip_refresh=True)), 0) - - # 'targets.json' tracks two targets. - self.assertEqual(len(self.repository_updater._targets_of_role('targets')), - 2) - - - - def test_10__preorder_depth_first_walk(self): - - # Test that infinite loop is prevented if the target file is not found and - # the max number of delegations is reached. - valid_max_number_of_delegations = tuf.settings.MAX_NUMBER_OF_DELEGATIONS - tuf.settings.MAX_NUMBER_OF_DELEGATIONS = 0 - self.assertEqual(None, self.repository_updater._preorder_depth_first_walk('unknown.txt')) - - # Reset the setting for max number of delegations so that subsequent unit - # tests reference the expected setting. - tuf.settings.MAX_NUMBER_OF_DELEGATIONS = valid_max_number_of_delegations - - # Attempt to create a circular delegation, where role1 performs a - # delegation to the top-level Targets role. The updater should ignore the - # delegation and not raise an exception. 
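- # Rewire the client-side copies of 'targets.json', 'role1.json' and
- # 'role2.json' so that 'role1' delegates back to 'targets', closing the
- # cycle that the depth-first walk must detect and skip.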
- targets_path = os.path.join(self.client_metadata_current, 'targets.json')
- targets_metadata = securesystemslib.util.load_json_file(targets_path)
- targets_metadata['signed']['delegations']['roles'][0]['paths'] = ['/file8.txt']
- with open(targets_path, 'wb') as file_object:
- file_object.write(repo_lib._get_written_metadata(targets_metadata))
-
- role1_path = os.path.join(self.client_metadata_current, 'role1.json')
- role1_metadata = securesystemslib.util.load_json_file(role1_path)
- role1_metadata['signed']['delegations']['roles'][0]['name'] = 'targets'
- role1_metadata['signed']['delegations']['roles'][0]['paths'] = ['/file8.txt']
- with open(role1_path, 'wb') as file_object:
- file_object.write(repo_lib._get_written_metadata(role1_metadata))
-
- role2_path = os.path.join(self.client_metadata_current, 'role2.json')
- role2_metadata = securesystemslib.util.load_json_file(role2_path)
- role2_metadata['signed']['delegations']['roles'] = role1_metadata['signed']['delegations']['roles']
- role2_metadata['signed']['delegations']['roles'][0]['paths'] = ['/file8.txt']
- with open(role2_path, 'wb') as file_object:
- file_object.write(repo_lib._get_written_metadata(role2_metadata))
-
- logger.debug('attempting circular delegation')
- self.assertEqual(None, self.repository_updater._preorder_depth_first_walk('/file8.txt'))
-
-
-
-
-
-
- def test_10__visit_child_role(self):
- # Call _visit_child_role and test the dict keys: 'paths',
- # 'path_hash_prefixes', and the case where both are missing.
-
- targets_role = self.repository_updater.metadata['current']['targets']
- targets_role['delegations']['roles'][0]['paths'] = ['/*.txt', '/target.exe']
- child_role = targets_role['delegations']['roles'][0]
-
- role1_path = os.path.join(self.client_metadata_current, 'role1.json')
- role1_metadata = securesystemslib.util.load_json_file(role1_path)
- role1_metadata['signed']['delegations']['roles'][0]['paths'] = ['/*.exe']
- with open(role1_path, 'wb') as file_object:
- file_object.write(repo_lib._get_written_metadata(role1_metadata))
-
- self.assertEqual(self.repository_updater._visit_child_role(child_role,
- '/target.exe'), child_role['name'])
-
- # Test for a valid path hash prefix...
- child_role['path_hash_prefixes'] = ['8baf']
- self.assertEqual(self.repository_updater._visit_child_role(child_role,
- '/file3.txt'), child_role['name'])
-
- # ... and an invalid one, as well.
- child_role['path_hash_prefixes'] = ['badd']
- self.assertEqual(self.repository_updater._visit_child_role(child_role,
- '/file3.txt'), None)
-
- # Test for a forbidden target.
- del child_role['path_hash_prefixes']
- self.repository_updater._visit_child_role(child_role, '/forbidden.tgz')
-
- # Verify that unequal path_hash_prefixes are skipped.
- child_role['path_hash_prefixes'] = ['bad', 'bad']
- self.assertEqual(None, self.repository_updater._visit_child_role(child_role,
- '/unknown.exe'))
-
- # Test the case where both 'paths' and 'path_hash_prefixes' are missing.
- del child_role['paths']
- del child_role['path_hash_prefixes']
- self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._visit_child_role,
- child_role, child_role['name'])
-
-
-
- def test_11__verify_metadata_file(self):
- # Test for invalid metadata content.
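- # A one-byte file is not valid JSON, so _verify_metadata_file() is
- # expected to reject it with an InvalidMetadataJSONError.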
- with tempfile.TemporaryFile() as metadata_file_object:
- metadata_file_object.write(b'X')
- metadata_file_object.seek(0)
-
- self.assertRaises(tuf.exceptions.InvalidMetadataJSONError,
- self.repository_updater._verify_metadata_file,
- metadata_file_object, 'root')
-
-
- def test_13__targets_of_role(self):
- # Test the case where a list of targets is given. By default, the
- # 'targets' parameter is None.
- targets = [{'filepath': 'file1.txt', 'fileinfo': {'length': 1, 'hashes': {'sha256': 'abc'}}}]
- self.repository_updater._targets_of_role('targets',
- targets=targets, skip_refresh=False)
-
-
-
-
-class TestMultiRepoUpdater(unittest_toolbox.Modified_TestCase):
-
- def setUp(self):
- # Modified_TestCase can handle temp dir removal.
- unittest_toolbox.Modified_TestCase.setUp(self)
- self.temporary_directory = self.make_temp_directory(directory=os.getcwd())
-
- # Copy the original repository files provided in the test folder so that
- # any modifications made to repository files are restricted to the copies.
- # The 'repository_data' directory is expected to exist in 'tuf/tests/'.
- original_repository_files = os.path.join(os.getcwd(), 'repository_data')
-
- self.temporary_repository_root = tempfile.mkdtemp(dir=self.temporary_directory)
-
- # Needed because in some tests simple_server.py cannot otherwise be
- # found: the current working directory is changed when executing a
- # subprocess.
- self.SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), 'simple_server.py')
-
- # The original repository, keystore, and client directories will be copied
- # for each test case.
- original_repository = os.path.join(original_repository_files, 'repository')
- original_client = os.path.join(original_repository_files, 'client', 'test_repository1')
- original_keystore = os.path.join(original_repository_files, 'keystore')
- original_map_file = os.path.join(original_repository_files, 'map.json')
-
- # Save references to the often-needed client repository directories.
- # Test cases need these references to access metadata and target files.
- self.repository_directory = os.path.join(self.temporary_repository_root,
- 'repository_server1')
- self.repository_directory2 = os.path.join(self.temporary_repository_root,
- 'repository_server2')
-
- # Set 'tuf.settings.repositories_directory' to the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.temporary_repository_root
-
- repository_name = 'test_repository1'
- repository_name2 = 'test_repository2'
-
- self.client_directory = os.path.join(self.temporary_repository_root,
- repository_name)
- self.client_directory2 = os.path.join(self.temporary_repository_root,
- repository_name2)
-
- self.keystore_directory = os.path.join(self.temporary_repository_root,
- 'keystore')
- self.map_file = os.path.join(self.client_directory, 'map.json')
- self.map_file2 = os.path.join(self.client_directory2, 'map.json')
-
- # Copy the original 'repository', 'client', and 'keystore' directories
- # to the temporary repository the test cases can use.
- shutil.copytree(original_repository, self.repository_directory)
- shutil.copytree(original_repository, self.repository_directory2)
- shutil.copytree(original_client, self.client_directory)
- shutil.copytree(original_client, self.client_directory2)
- shutil.copyfile(original_map_file, self.map_file)
- shutil.copyfile(original_map_file, self.map_file2)
- shutil.copytree(original_keystore, self.keystore_directory)
-
- # Launch a SimpleHTTPServer (serves files in the current directory).
- # Test cases will request metadata and target files that have been
- # pre-generated in 'tuf/tests/repository_data', which will be served by the
- # SimpleHTTPServer launched here. The test cases of this unit test assume
- # the pre-generated metadata files have a specific structure, such
- # as a delegated role 'targets/role1', three target files, five key files,
- # etc.
-
- # Create a subprocess running a server.
- self.server_process_handler = utils.TestServerProcess(log=logger,
- server=self.SIMPLE_SERVER_PATH, popen_cwd=self.repository_directory)
-
- logger.debug('Server process started.')
-
- # Create a subprocess running a second server.
- self.server_process_handler2 = utils.TestServerProcess(log=logger,
- server=self.SIMPLE_SERVER_PATH, popen_cwd=self.repository_directory2)
-
- logger.debug('Server process 2 started.')
-
- url_prefix = \
- 'http://' + utils.TEST_HOST_ADDRESS + ':' + \
- str(self.server_process_handler.port)
- url_prefix2 = \
- 'http://' + utils.TEST_HOST_ADDRESS + ':' + \
- str(self.server_process_handler2.port)
-
- # We have all of the necessary information for two repository mirrors
- # in map.json, except for the url prefixes. For the url prefixes, we
- # create subprocesses that run a server script; in the server scripts we
- # get a free port from the OS, which is sent back to the parent process.
- # That's why we dynamically add the ports to the url prefixes and change
- # the content of map.json.
- self.map_file_path = os.path.join(self.client_directory, 'map.json')
- data = securesystemslib.util.load_json_file(self.map_file_path)
-
- data['repositories']['test_repository1'] = [url_prefix]
- data['repositories']['test_repository2'] = [url_prefix2]
- with open(self.map_file_path, 'w') as f:
- json.dump(data, f)
-
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata', 'targets_path': 'targets'}}
-
- self.repository_mirrors2 = {'mirror1': {'url_prefix': url_prefix2,
- 'metadata_path': 'metadata', 'targets_path': 'targets'}}
-
- # Create the repository instances. The test cases will use these client
- # updaters to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(repository_name,
- self.repository_mirrors)
- self.repository_updater2 = updater.Updater(repository_name2,
- self.repository_mirrors2)
-
- # Create a multi-repo updater instance. The test cases will use this
- # client updater to refresh metadata, fetch target files, etc.
- self.multi_repo_updater = updater.MultiRepoUpdater(self.map_file)
-
- # Metadata role keys are needed by the test cases to make changes to the
- # repository (e.g., adding a new target file to 'targets.json' and then
- # requesting a refresh()).
- self.role_keys = _load_role_keys(self.keystore_directory)
-
-
-
- def tearDown(self):
-
- # Clean up the resources and flush the logged lines (if any).
- self.server_process_handler.clean()
- self.server_process_handler2.clean()
-
- # updater.Updater() populates the roledb with the name "test_repository1".
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
-
- # Remove the top-level temporary directory.
- unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
-
- # UNIT TESTS.
- def test__init__(self):
- # The client's repository requires a metadata directory (and the 'current'
- # and 'previous' sub-directories), and at least the 'root.json' file.
- # setUp(), called before each test case, instantiates the required updater
- # objects and keys. The needed objects/data are available in
- # 'self.repository_updater', 'self.client_directory', etc.
-
- # Test: Invalid arguments.
- # Invalid 'map_file' argument. A string is expected.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- updater.MultiRepoUpdater, 8)
-
- # Restore 'tuf.settings.repositories_directory' to the original client
- # directory.
- tuf.settings.repositories_directory = self.client_directory
-
- # Test for a non-existent map file.
- self.assertRaises(tuf.exceptions.Error, updater.MultiRepoUpdater,
- 'non-existent.json')
-
- # Test for a map file that doesn't contain the required fields.
- root_filepath = os.path.join(
- self.repository_directory, 'metadata', 'root.json')
- self.assertRaises(securesystemslib.exceptions.FormatError,
- updater.MultiRepoUpdater, root_filepath)
-
- # Test for a valid instantiation.
- multi_repo_updater = updater.MultiRepoUpdater(self.map_file_path)
-
-
-
- def test__target_matches_path_pattern(self):
- multi_repo_updater = updater.MultiRepoUpdater(self.map_file_path)
- paths = ['foo*.tgz', 'bar*.tgz', 'file1.txt']
- self.assertTrue(
- multi_repo_updater._target_matches_path_pattern('bar-1.0.tgz', paths))
- self.assertTrue(
- multi_repo_updater._target_matches_path_pattern('file1.txt', paths))
- self.assertFalse(
- multi_repo_updater._target_matches_path_pattern('baz-1.0.tgz', paths))
-
-
-
- def test_get_valid_targetinfo(self):
- multi_repo_updater = updater.MultiRepoUpdater(self.map_file_path)
-
- # Verify that the multi repo updater refuses to save targetinfo if
- # required local repositories are missing.
- repo_dir = os.path.join(tuf.settings.repositories_directory,
- 'test_repository1')
- backup_repo_dir = os.path.join(tuf.settings.repositories_directory,
- 'test_repository1.backup')
- shutil.move(repo_dir, backup_repo_dir)
- self.assertRaises(tuf.exceptions.Error,
- multi_repo_updater.get_valid_targetinfo, 'file3.txt')
-
- # Restore the client's repository directory.
- shutil.move(backup_repo_dir, repo_dir)
-
- # Verify that the Root file must exist.
- root_filepath = os.path.join(repo_dir, 'metadata', 'current', 'root.json')
- backup_root_filepath = root_filepath + '.backup'
- shutil.move(root_filepath, backup_root_filepath)
- self.assertRaises(tuf.exceptions.Error,
- multi_repo_updater.get_valid_targetinfo, 'file3.txt')
-
- # Restore the Root file.
- shutil.move(backup_root_filepath, root_filepath)
-
- # Test that the first mapping is skipped if it's irrelevant to the target
- # file.
- self.assertRaises(tuf.exceptions.UnknownTargetError,
- multi_repo_updater.get_valid_targetinfo, 'non-existent.txt')
-
- # Verify that a targetinfo is not returned for a non-existent target.
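- # A mapping with 'terminating' set to False lets the search fall through
- # to later mappings, while a terminating mapping stops the search there.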
- multi_repo_updater.map_file['mapping'][1]['terminating'] = False
- self.assertRaises(tuf.exceptions.UnknownTargetError,
- multi_repo_updater.get_valid_targetinfo, 'non-existent.txt')
- multi_repo_updater.map_file['mapping'][1]['terminating'] = True
-
- # Test for a mapping that sets terminating = True, and that appears before
- # the final mapping.
- multi_repo_updater.map_file['mapping'][0]['terminating'] = True
- self.assertRaises(tuf.exceptions.UnknownTargetError,
- multi_repo_updater.get_valid_targetinfo, 'bad3.txt')
- multi_repo_updater.map_file['mapping'][0]['terminating'] = False
-
- # Test for the case where multiple repos sign for the same target.
- valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file1.txt')
-
- multi_repo_updater.map_file['mapping'][0]['threshold'] = 2
- valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file1.txt')
-
- # Verify that a valid targetinfo is matched for two repositories that
- # provide different custom fields. Make sure to set the
- # 'match_custom_field' argument to 'False' when calling
- # get_valid_targetinfo().
- repository = repo_tool.load_repository(self.repository_directory2)
-
- target1 = os.path.join(self.repository_directory2, 'targets', 'file1.txt')
- repository.targets.remove_target(os.path.basename(target1))
-
- custom_field = {"custom": "my_custom_data"}
- repository.targets.add_target(os.path.basename(target1), custom_field)
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory2, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory2, 'metadata.staged'),
- os.path.join(self.repository_directory2, 'metadata'))
-
- # Do we get the expected match for the two targetinfos that differ only
- # in the custom field?
- valid_targetinfo = multi_repo_updater.get_valid_targetinfo(
- 'file1.txt', match_custom_field=False)
-
- # Verify the case where two repositories provide different targetinfo.
- # Modify file1.txt so that different length and hashes are reported by the
- # two repositories.
- repository = repo_tool.load_repository(self.repository_directory2)
- target1 = os.path.join(self.repository_directory2, 'targets', 'file1.txt')
- with open(target1, 'ab') as file_object:
- file_object.write(b'append extra text')
-
- repository.targets.remove_target(os.path.basename(target1))
-
- repository.targets.add_target(os.path.basename(target1))
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory2, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory2, 'metadata.staged'),
- os.path.join(self.repository_directory2, 'metadata'))
-
- # Ensure the threshold is modified to 2 (assumed to be 1, by default) and
- # verify that get_valid_targetinfo() raises an UnknownTargetError
- # despite both repos signing for file1.txt.
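- # With a threshold of 2, both repositories must agree on identical
- # fileinfo (length and hashes); the diverging copies of file1.txt make
- # agreement impossible.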
- multi_repo_updater.map_file['mapping'][0]['threshold'] = 2 - self.assertRaises(tuf.exceptions.UnknownTargetError, - multi_repo_updater.get_valid_targetinfo, 'file1.txt') - - - - - - def test_get_updater(self): - multi_repo_updater = updater.MultiRepoUpdater(self.map_file_path) - - # Test for a non-existent repository name. - self.assertEqual(None, multi_repo_updater.get_updater('bad_repo_name')) - - # Test get_updater indirectly via the "private" _update_from_repository(). - self.assertRaises(tuf.exceptions.Error, multi_repo_updater._update_from_repository, 'bad_repo_name', 'file3.txt') - - # Test for a repository that doesn't exist. - multi_repo_updater.map_file['repositories']['bad_repo_name'] = ['https://bogus:30002'] - self.assertEqual(None, multi_repo_updater.get_updater('bad_repo_name')) - - - -def _load_role_keys(keystore_directory): - - # Populating 'self.role_keys' by importing the required public and private - # keys of 'tuf/tests/repository_data/'. The role keys are needed when - # modifying the remote repository used by the test cases in this unit test. - - # The pre-generated key files in 'repository_data/keystore' are all encrypted with - # a 'password' passphrase. - EXPECTED_KEYFILE_PASSWORD = 'password' - - # Store and return the cryptography keys of the top-level roles, including 1 - # delegated role. - role_keys = {} - - root_key_file = os.path.join(keystore_directory, 'root_key') - targets_key_file = os.path.join(keystore_directory, 'targets_key') - snapshot_key_file = os.path.join(keystore_directory, 'snapshot_key') - timestamp_key_file = os.path.join(keystore_directory, 'timestamp_key') - delegation_key_file = os.path.join(keystore_directory, 'delegation_key') - - role_keys = {'root': {}, 'targets': {}, 'snapshot': {}, 'timestamp': {}, - 'role1': {}} - - # Import the top-level and delegated role public keys. - role_keys['root']['public'] = \ - repo_tool.import_rsa_publickey_from_file(root_key_file+'.pub') - role_keys['targets']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(targets_key_file+'.pub') - role_keys['snapshot']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(snapshot_key_file+'.pub') - role_keys['timestamp']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(timestamp_key_file+'.pub') - role_keys['role1']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(delegation_key_file+'.pub') - - # Import the private keys of the top-level and delegated roles. 
- role_keys['root']['private'] = \ - repo_tool.import_rsa_privatekey_from_file(root_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['targets']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(targets_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['snapshot']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['timestamp']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['role1']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(delegation_key_file, - EXPECTED_KEYFILE_PASSWORD) - - return role_keys - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_updater_consistent_snapshot.py b/tests/test_updater_consistent_snapshot.py new file mode 100644 index 0000000000..e4bab8a8c7 --- /dev/null +++ b/tests/test_updater_consistent_snapshot.py @@ -0,0 +1,266 @@ +#!/usr/bin/env python + +# Copyright 2021, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +"""Test ngclient Updater toggling consistent snapshot""" + +import os +import sys +import tempfile +import unittest +from typing import Any, Dict, Iterable, List, Optional + +from tests import utils +from tests.repository_simulator import RepositorySimulator +from tuf.api.metadata import ( + SPECIFICATION_VERSION, + TOP_LEVEL_ROLE_NAMES, + DelegatedRole, + TargetFile, + Targets, +) +from tuf.ngclient import Updater + + +class TestConsistentSnapshot(unittest.TestCase): + """Test different combinations of 'consistent_snapshot' and + 'prefix_targets_with_hash' and verify that the correct URLs + are formed for each combination""" + + # set dump_dir to trigger repository state dumps + dump_dir: Optional[str] = None + + def setUp(self) -> None: + # pylint: disable=consider-using-with + self.subtest_count = 0 + self.temp_dir = tempfile.TemporaryDirectory() + self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") + self.targets_dir = os.path.join(self.temp_dir.name, "targets") + os.mkdir(self.metadata_dir) + os.mkdir(self.targets_dir) + self.sim: RepositorySimulator + + def tearDown(self) -> None: + self.temp_dir.cleanup() + + def setup_subtest( + self, consistent_snapshot: bool, prefix_targets: bool = True + ) -> None: + self.sim = self._init_repo(consistent_snapshot, prefix_targets) + + self.subtest_count += 1 + if self.dump_dir is not None: + # create subtest dumpdir + name = f"{self.id().split('.')[-1]}-{self.subtest_count}" + self.sim.dump_dir = os.path.join(self.dump_dir, name) + os.mkdir(self.sim.dump_dir) + + def teardown_subtest(self) -> None: + if self.dump_dir is not None: + self.sim.write() + + utils.cleanup_dir(self.metadata_dir) + + def _init_repo( + self, consistent_snapshot: bool, prefix_targets: bool = True + ) -> RepositorySimulator: + """Create a new RepositorySimulator instance""" + sim = RepositorySimulator() + sim.root.consistent_snapshot = consistent_snapshot + sim.root.version += 1 + sim.publish_root() + sim.prefix_targets_with_hash = prefix_targets + + # Init trusted root with the latest consistent_snapshot + with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: + f.write(sim.signed_roots[-1]) + + return sim + + def _init_updater(self) -> Updater: + """Create a new Updater instance""" + return Updater( + self.metadata_dir, + "https://example.com/metadata/", + self.targets_dir, + "https://example.com/targets/", + self.sim, + ) + + 
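+ # NOTE: RepositorySimulator implements the FetcherInterface, so passing
+ # it as the fetcher above keeps these tests offline while still
+ # exercising the real Updater code paths.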
+ def _assert_metadata_files_exist(self, roles: Iterable[str]) -> None:
+ """Assert that local metadata files exist for 'roles'"""
+ local_metadata_files = os.listdir(self.metadata_dir)
+ for role in roles:
+ self.assertIn(f"{role}.json", local_metadata_files)
+
+ def _assert_targets_files_exist(self, filenames: Iterable[str]) -> None:
+ """Assert that local files with 'filenames' exist"""
+ local_target_files = os.listdir(self.targets_dir)
+ for filename in filenames:
+ self.assertIn(filename, local_target_files)
+
+ top_level_roles_data: utils.DataSet = {
+ "consistent_snapshot disabled": {
+ "consistent_snapshot": False,
+ "calls": [
+ ("root", 3),
+ ("timestamp", None),
+ ("snapshot", None),
+ ("targets", None),
+ ],
+ },
+ "consistent_snapshot enabled": {
+ "consistent_snapshot": True,
+ "calls": [
+ ("root", 3),
+ ("timestamp", None),
+ ("snapshot", 1),
+ ("targets", 1),
+ ],
+ },
+ }
+
+ @utils.run_sub_tests_with_dataset(top_level_roles_data)
+ def test_top_level_roles_update(
+ self, test_case_data: Dict[str, Any]
+ ) -> None:
+ # Test if the client fetches and stores metadata files with the
+ # correct version prefix, depending on 'consistent_snapshot' config
+ try:
+ consistent_snapshot: bool = test_case_data["consistent_snapshot"]
+ exp_calls: List[Any] = test_case_data["calls"]
+
+ self.setup_subtest(consistent_snapshot)
+ updater = self._init_updater()
+
+ # cleanup fetch tracker metadata
+ self.sim.fetch_tracker.metadata.clear()
+ updater.refresh()
+
+ # metadata files are fetched with the expected version (or None)
+ self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls)
+ # metadata files are always persisted without a version prefix
+ self._assert_metadata_files_exist(TOP_LEVEL_ROLE_NAMES)
+ finally:
+ self.teardown_subtest()
+
+ delegated_roles_data: utils.DataSet = {
+ "consistent_snapshot disabled": {
+ "consistent_snapshot": False,
+ "expected_version": None,
+ },
+ "consistent_snapshot enabled": {
+ "consistent_snapshot": True,
+ "expected_version": 1,
+ },
+ }
+
+ @utils.run_sub_tests_with_dataset(delegated_roles_data)
+ def test_delegated_roles_update(
+ self, test_case_data: Dict[str, Any]
+ ) -> None:
+ # Test if the client fetches and stores delegated metadata files with
+ # the correct version prefix, depending on 'consistent_snapshot' config
+ try:
+ consistent_snapshot: bool = test_case_data["consistent_snapshot"]
+ exp_version: Optional[int] = test_case_data["expected_version"]
+ rolenames = ["role1", "..", "."]
+ exp_calls = [(role, exp_version) for role in rolenames]
+
+ self.setup_subtest(consistent_snapshot)
+ # Add new delegated targets
+ spec_version = ".".join(SPECIFICATION_VERSION)
+ for role in rolenames:
+ delegated_role = DelegatedRole(role, [], 1, False, ["*"], None)
+ targets = Targets(
+ 1, spec_version, self.sim.safe_expiry, {}, None
+ )
+ self.sim.add_delegation("targets", delegated_role, targets)
+ self.sim.update_snapshot()
+ updater = self._init_updater()
+ updater.refresh()
+
+ # cleanup fetch tracker metadata
+ self.sim.fetch_tracker.metadata.clear()
+ # trigger updater to fetch the delegated metadata
+ updater.get_targetinfo("anything")
+ # metadata files are fetched with the expected version (or None)
+ self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls)
+ # metadata files are always persisted without a version prefix
+ self._assert_metadata_files_exist(rolenames)
+ finally:
+ self.teardown_subtest()
+
+ targets_download_data: utils.DataSet = {
+ "consistent_snapshot disabled": {
+ "consistent_snapshot": False,
+ "prefix_targets": True,
+ "hash_algo": None,
+ "targetpaths": ["file", "file.txt", "..file.ext", "f.le"],
+ },
+ "consistent_snapshot enabled without prefixed targets": {
+ "consistent_snapshot": True,
+ "prefix_targets": False,
+ "hash_algo": None,
+ "targetpaths": ["file", "file.txt", "..file.ext", "f.le"],
+ },
+ "consistent_snapshot enabled with prefixed targets": {
+ "consistent_snapshot": True,
+ "prefix_targets": True,
+ "hash_algo": "sha256",
+ "targetpaths": ["file", "file.txt", "..file.ext", "f.le"],
+ },
+ }
+
+ @utils.run_sub_tests_with_dataset(targets_download_data)
+ def test_download_targets(self, test_case_data: Dict[str, Any]) -> None:
+ # Test if the client fetches and stores target files with
+ # the correct hash prefix, depending on 'consistent_snapshot'
+ # and 'prefix_targets_with_hash' config
+ try:
+ consistent_snapshot: bool = test_case_data["consistent_snapshot"]
+ prefix_targets_with_hash: bool = test_case_data["prefix_targets"]
+ hash_algo: Optional[str] = test_case_data["hash_algo"]
+ targetpaths: List[str] = test_case_data["targetpaths"]
+
+ self.setup_subtest(consistent_snapshot, prefix_targets_with_hash)
+ # Add targets to repository
+ for targetpath in targetpaths:
+ self.sim.targets.version += 1
+ self.sim.add_target("targets", b"content", targetpath)
+ self.sim.update_snapshot()
+
+ updater = self._init_updater()
+ updater.config.prefix_targets_with_hash = prefix_targets_with_hash
+ updater.refresh()
+
+ for path in targetpaths:
+ info = updater.get_targetinfo(path)
+ assert isinstance(info, TargetFile)
+ updater.download_target(info)
+
+ # target files are always persisted without hash prefix
+ self._assert_targets_files_exist([info.path])
+
+ # files are fetched with the expected hash prefix (or None)
+ exp_calls = [
+ (path, None if not hash_algo else info.hashes[hash_algo])
+ ]
+
+ self.assertListEqual(self.sim.fetch_tracker.targets, exp_calls)
+ self.sim.fetch_tracker.targets.clear()
+ finally:
+ self.teardown_subtest()
+
+
+if __name__ == "__main__":
+ if "--dump" in sys.argv:
+ TestConsistentSnapshot.dump_dir = tempfile.mkdtemp()
+ print(
+ f"Repository Simulator dumps in {TestConsistentSnapshot.dump_dir}"
+ )
+ sys.argv.remove("--dump")
+
+ utils.configure_test_logging(sys.argv)
+ unittest.main()
diff --git a/tests/test_updater_delegation_graphs.py b/tests/test_updater_delegation_graphs.py
new file mode 100644
index 0000000000..b1ab219687
--- /dev/null
+++ b/tests/test_updater_delegation_graphs.py
@@ -0,0 +1,512 @@
+#!/usr/bin/env python
+
+# Copyright 2021, New York University and the TUF contributors
+# SPDX-License-Identifier: MIT OR Apache-2.0
+
+"""Test updating delegated targets roles and searching for
+target files with various delegation graphs"""
+
+import os
+import sys
+import tempfile
+import unittest
+from dataclasses import astuple, dataclass, field
+from typing import Iterable, List, Optional
+
+from tests import utils
+from tests.repository_simulator import RepositorySimulator
+from tuf.api.exceptions import UnsignedMetadataError
+from tuf.api.metadata import (
+ SPECIFICATION_VERSION,
+ TOP_LEVEL_ROLE_NAMES,
+ DelegatedRole,
+ Targets,
+)
+from tuf.ngclient import Updater
+
+
+@dataclass
+class TestDelegation:
+ delegator: str
+ rolename: str
+ keyids: List[str] = field(default_factory=list)
+ threshold: int = 1
+ terminating: bool = False
+ paths: Optional[List[str]] = field(default_factory=lambda: ["*"])
+ path_hash_prefixes: Optional[List[str]] = None
+
+
+@dataclass
+class TestTarget:
+ rolename: str
+ content: bytes
+
targetpath: str + + +@dataclass +class DelegationsTestCase: + """A delegations graph as lists of delegations and target files + and the expected order of traversal as a list of role names.""" + + delegations: List[TestDelegation] + target_files: List[TestTarget] = field(default_factory=list) + visited_order: List[str] = field(default_factory=list) + + +@dataclass +class TargetTestCase: + targetpath: str + found: bool + visited_order: List[str] = field(default_factory=list) + + +class TestDelegations(unittest.TestCase): + """Base class for delegation tests""" + + # set dump_dir to trigger repository state dumps + dump_dir: Optional[str] = None + + def setUp(self) -> None: + # pylint: disable=consider-using-with + self.subtest_count = 0 + self.temp_dir = tempfile.TemporaryDirectory() + self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") + self.targets_dir = os.path.join(self.temp_dir.name, "targets") + os.mkdir(self.metadata_dir) + os.mkdir(self.targets_dir) + self.sim: RepositorySimulator + + def tearDown(self) -> None: + self.temp_dir.cleanup() + + def setup_subtest(self) -> None: + self.subtest_count += 1 + if self.dump_dir is not None: + # create subtest dumpdir + name = f"{self.id().split('.')[-1]}-{self.subtest_count}" + self.sim.dump_dir = os.path.join(self.dump_dir, name) + os.mkdir(self.sim.dump_dir) + # dump the repo simulator metadata + self.sim.write() + + def teardown_subtest(self) -> None: + utils.cleanup_dir(self.metadata_dir) + + def _init_repo(self, test_case: DelegationsTestCase) -> None: + """Create a new RepositorySimulator instance and + populate it with delegations and target files""" + + self.sim = RepositorySimulator() + spec_version = ".".join(SPECIFICATION_VERSION) + for d in test_case.delegations: + if d.rolename in self.sim.md_delegates: + targets = self.sim.md_delegates[d.rolename].signed + else: + targets = Targets( + 1, spec_version, self.sim.safe_expiry, {}, None + ) + # unpack 'd' but skip "delegator" + role = DelegatedRole(*astuple(d)[1:]) + self.sim.add_delegation(d.delegator, role, targets) + + for target in test_case.target_files: + self.sim.add_target(*astuple(target)) + + if test_case.target_files: + self.sim.targets.version += 1 + self.sim.update_snapshot() + + def _init_updater(self) -> Updater: + """Create a new Updater instance""" + # Init trusted root for Updater + with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: + f.write(self.sim.signed_roots[0]) + + return Updater( + self.metadata_dir, + "https://example.com/metadata/", + self.targets_dir, + "https://example.com/targets/", + self.sim, + ) + + def _assert_files_exist(self, roles: Iterable[str]) -> None: + """Assert that local metadata files exist for 'roles'""" + expected_files = sorted([f"{role}.json" for role in roles]) + local_metadata_files = sorted(os.listdir(self.metadata_dir)) + self.assertListEqual(local_metadata_files, expected_files) + + +class TestDelegationsGraphs(TestDelegations): + """Test creating delegations graphs with different complexity + and successfully updating the delegated roles metadata""" + + graphs: utils.DataSet = { + "basic delegation": DelegationsTestCase( + delegations=[TestDelegation("targets", "A")], + visited_order=["A"], + ), + "single level delegations": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + ], + visited_order=["A", "B"], + ), + "two-level delegations": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + 
TestDelegation("B", "C"), + ], + visited_order=["A", "B", "C"], + ), + "two-level test DFS order of traversal": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + TestDelegation("A", "C"), + TestDelegation("A", "D"), + ], + visited_order=["A", "C", "D", "B"], + ), + "three-level delegation test DFS order of traversal": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + TestDelegation("A", "C"), + TestDelegation("C", "D"), + ], + visited_order=["A", "C", "D", "B"], + ), + "two-level terminating ignores all but role's descendants": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + TestDelegation("A", "C", terminating=True), + TestDelegation("A", "D"), + ], + visited_order=["A", "C"], + ), + "three-level terminating ignores all but role's descendants": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + TestDelegation("A", "C", terminating=True), + TestDelegation("C", "D"), + ], + visited_order=["A", "C", "D"], + ), + "two-level ignores all branches not matching 'paths'": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A", paths=["*.py"]), + TestDelegation("targets", "B"), + TestDelegation("A", "C"), + ], + visited_order=["B"], + ), + "three-level ignores all branches not matching 'paths'": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + TestDelegation("A", "C", paths=["*.py"]), + TestDelegation("C", "D"), + ], + visited_order=["A", "B"], + ), + "cyclic graph": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + TestDelegation("B", "C"), + TestDelegation("C", "D"), + TestDelegation("D", "B"), + ], + visited_order=["A", "B", "C", "D"], + ), + "two roles delegating to a third": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + TestDelegation("B", "C"), + TestDelegation("A", "C"), + ], + # Under all same conditions, 'C' is reached through 'A' first" + visited_order=["A", "C", "B"], + ), + "two roles delegating to a third different 'paths'": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + TestDelegation("B", "C"), + TestDelegation("A", "C", paths=["*.py"]), + ], + # 'C' is reached through 'B' since 'A' does not delegate a matching pattern" + visited_order=["A", "B", "C"], + ), + "max number of delegations": DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A"), + TestDelegation("targets", "B"), + TestDelegation("targets", "C"), + TestDelegation("C", "D"), + TestDelegation("C", "E"), + ], + # "E" is skipped, max_delegations is 4 + visited_order=["A", "B", "C", "D"], + ), + } + + @utils.run_sub_tests_with_dataset(graphs) + def test_graph_traversal(self, test_data: DelegationsTestCase) -> None: + """Test that delegated roles are traversed in the order of appearance + in the delegator's metadata, using pre-order depth-first search""" + + try: + exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order] + exp_calls = [(role, 1) for role in test_data.visited_order] + + self._init_repo(test_data) + self.setup_subtest() + + updater = self._init_updater() + # restrict the max number of delegations to simplify the test + updater.config.max_delegations = 4 + # Call explicitly refresh to simplify the expected_calls list + 
+
+ invalid_metadata: utils.DataSet = {
+ "unsigned delegated role": DelegationsTestCase(
+ delegations=[
+ TestDelegation("targets", "invalid"),
+ TestDelegation("targets", "B"),
+ TestDelegation("invalid", "C"),
+ ],
+ # The traversal stops after visiting an invalid role
+ visited_order=["invalid"],
+ )
+ }
+
+ @utils.run_sub_tests_with_dataset(invalid_metadata)
+ def test_invalid_metadata(self, test_data: DelegationsTestCase) -> None:
+ try:
+ self._init_repo(test_data)
+ # The invalid role is the last visited
+ invalid_role = test_data.visited_order[-1]
+ self.sim.signers[invalid_role].clear()
+
+ self.setup_subtest()
+ # The invalid role metadata must not be persisted
+ exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order[:-1]]
+ exp_calls = [(role, 1) for role in test_data.visited_order]
+
+ updater = self._init_updater()
+ # Call refresh explicitly to simplify the expected_calls list
+ updater.refresh()
+ self.sim.fetch_tracker.metadata.clear()
+
+ with self.assertRaises(UnsignedMetadataError):
+ updater.get_targetinfo("missingpath")
+ # Check that there were no visited roles after the invalid one
+ # and only the valid metadata files were persisted
+ self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls)
+ self._assert_files_exist(exp_files)
+ finally:
+ self.teardown_subtest()
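The invariant behind test_invalid_metadata, that metadata failing verification must never be persisted, is commonly implemented by verifying the raw bytes before they reach the metadata directory and writing through a temporary file plus an atomic replace. A minimal sketch of that pattern, where verify_fn is a stand-in for the real signature and threshold check:

    import os
    import tempfile
    from typing import Callable

    def persist_verified(data: bytes, filename: str,
                         verify_fn: Callable[[bytes], None]) -> None:
        """Write 'data' to 'filename' only if 'verify_fn' accepts it."""
        verify_fn(data)  # raises before anything touches the metadata dir
        fd, tmp = tempfile.mkstemp(dir=os.path.dirname(filename) or ".")
        try:
            with os.fdopen(fd, "wb") as f:
                f.write(data)
            os.replace(tmp, filename)  # atomic rename
        except Exception:
            os.remove(tmp)  # never leave a partial file behind
            raise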
+ """ + + roles_to_filenames = { + "../a": "..%2Fa.json", + ".": "..json", + "/": "%2F.json", + "ö": "%C3%B6.json", + } + + delegations = [] + for rolename in roles_to_filenames: + delegations.append(TestDelegation("targets", rolename)) + + delegated_rolenames = DelegationsTestCase(delegations) + self._init_repo(delegated_rolenames) + updater = self._init_updater() + updater.refresh() + + # trigger updater to fetch the delegated metadata + self.sim.fetch_tracker.metadata.clear() + updater.get_targetinfo("anything") + + # assert that local delegated metadata filenames are expected + local_metadata = os.listdir(self.metadata_dir) + for fname in roles_to_filenames.values(): + self.assertTrue(fname in local_metadata) + + # assert that requested URLs are quoted without extension + exp_calls = [(quoted[:-5], 1) for quoted in roles_to_filenames.values()] + self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) + + hash_bins_graph: utils.DataSet = { + "delegations": DelegationsTestCase( + delegations=[ + TestDelegation( + "targets", + "role1", + paths=None, + path_hash_prefixes=["8", "9", "a", "b"], + ), + TestDelegation( + "targets", + "role2", + paths=None, + path_hash_prefixes=["0", "1", "2", "3"], + ), + TestDelegation( + "targets", + "role3", + paths=None, + path_hash_prefixes=["c", "d", "e", "f"], + ), + ], + visited_order=["role1", "role2", "role3"], + ), + } + + @utils.run_sub_tests_with_dataset(hash_bins_graph) + def test_hash_bins_graph_traversal( + self, test_data: DelegationsTestCase + ) -> None: + """Test that delegated roles are traversed in the order of appearance + in the delegator's metadata, using pre-order depth-first search and that + they correctly reffer to the corresponding hash bin prefixes""" + + try: + exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order] + exp_calls = [(role, 1) for role in test_data.visited_order] + + self._init_repo(test_data) + self.setup_subtest() + + updater = self._init_updater() + # Call explicitly refresh to simplify the expected_calls list + updater.refresh() + self.sim.fetch_tracker.metadata.clear() + # Check that metadata dir contains only top-level roles + self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) + + # Looking for a non-existing targetpath forces updater + # to visit a correspondning delegated role + targetfile = updater.get_targetinfo("missingpath") + self.assertIsNone(targetfile) + targetfile = updater.get_targetinfo("othermissingpath") + self.assertIsNone(targetfile) + targetfile = updater.get_targetinfo("thirdmissingpath") + self.assertIsNone(targetfile) + # Check that the delegated roles were visited in the expected + # order and the corresponding metadata files were persisted + self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) + self._assert_files_exist(exp_files) + finally: + self.teardown_subtest() + + +class TestTargetFileSearch(TestDelegations): + r""" + Create a single repository with the following delegations: + + targets + *.doc, *md / \ release/*/* + A B + release/x/* / \ release/y/*.zip + C D + + Test that Updater successfully finds the target files metadata, + traversing the delegations as expected. 
+ """ + + delegations_tree = DelegationsTestCase( + delegations=[ + TestDelegation("targets", "A", paths=["*.doc", "*.md"]), + TestDelegation("targets", "B", paths=["releases/*/*"]), + TestDelegation("B", "C", paths=["releases/x/*"]), + TestDelegation("B", "D", paths=["releases/y/*.zip"]), + ], + target_files=[ + TestTarget("targets", b"targetfile content", "targetfile"), + TestTarget("A", b"README by A", "README.md"), + TestTarget("C", b"x release by C", "releases/x/x_v1"), + TestTarget("D", b"y release by D", "releases/y/y_v1.zip"), + TestTarget("D", b"z release by D", "releases/z/z_v1.zip"), + ], + ) + + def setUp(self) -> None: + super().setUp() + self._init_repo(self.delegations_tree) + + # fmt: off + targets: utils.DataSet = { + "no delegations": + TargetTestCase("targetfile", True, []), + "targetpath matches wildcard": + TargetTestCase("README.md", True, ["A"]), + "targetpath with separators x": + TargetTestCase("releases/x/x_v1", True, ["B", "C"]), + "targetpath with separators y": + TargetTestCase("releases/y/y_v1.zip", True, ["B", "D"]), + "targetpath is not delegated by all roles in the chain": + TargetTestCase("releases/z/z_v1.zip", False, ["B"]), + } + # fmt: on + + @utils.run_sub_tests_with_dataset(targets) + def test_targetfile_search(self, test_data: TargetTestCase) -> None: + try: + self.setup_subtest() + exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order] + exp_calls = [(role, 1) for role in test_data.visited_order] + exp_target = self.sim.target_files[test_data.targetpath].target_file + + updater = self._init_updater() + # Call explicitly refresh to simplify the expected_calls list + updater.refresh() + self.sim.fetch_tracker.metadata.clear() + target = updater.get_targetinfo(test_data.targetpath) + if target is not None: + # Confirm that the expected TargetFile is found + self.assertTrue(test_data.found) + self.assertDictEqual(target.to_dict(), exp_target.to_dict()) + else: + self.assertFalse(test_data.found) + # Check that the delegated roles were visited in the expected + # order and the corresponding metadata files were persisted + self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) + self._assert_files_exist(exp_files) + finally: + self.teardown_subtest() + + +if __name__ == "__main__": + if "--dump" in sys.argv: + TestDelegations.dump_dir = tempfile.mkdtemp() + print(f"Repository Simulator dumps in {TestDelegations.dump_dir}") + sys.argv.remove("--dump") + + utils.configure_test_logging(sys.argv) + unittest.main() diff --git a/tests/test_updater_fetch_target.py b/tests/test_updater_fetch_target.py new file mode 100644 index 0000000000..a7af0ec157 --- /dev/null +++ b/tests/test_updater_fetch_target.py @@ -0,0 +1,190 @@ +#!/usr/bin/env python + +# Copyright 2021, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +"""Test 'Fetch target' from 'Detailed client workflow' as well as +target files storing/loading from cache. 
+""" +import os +import sys +import tempfile +import unittest +from dataclasses import dataclass +from typing import Optional + +from tests import utils +from tests.repository_simulator import RepositorySimulator +from tuf.api.exceptions import RepositoryError +from tuf.ngclient import Updater + + +@dataclass +class TestTarget: + path: str + content: bytes + encoded_path: str + + +class TestFetchTarget(unittest.TestCase): + """Test ngclient downloading and caching target files.""" + + # set dump_dir to trigger repository state dumps + dump_dir: Optional[str] = None + + def setUp(self) -> None: + # pylint: disable-next=consider-using-with + self.temp_dir = tempfile.TemporaryDirectory() + self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") + self.targets_dir = os.path.join(self.temp_dir.name, "targets") + os.mkdir(self.metadata_dir) + os.mkdir(self.targets_dir) + + # Setup the repository, bootstrap client root.json + self.sim = RepositorySimulator() + with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: + f.write(self.sim.signed_roots[0]) + + if self.dump_dir is not None: + # create test specific dump directory + name = self.id().split(".")[-1] + self.sim.dump_dir = os.path.join(self.dump_dir, name) + os.mkdir(self.sim.dump_dir) + + def tearDown(self) -> None: + self.temp_dir.cleanup() + + def _init_updater(self) -> Updater: + """Creates a new updater instance.""" + if self.sim.dump_dir is not None: + self.sim.write() + + updater = Updater( + self.metadata_dir, + "https://example.com/metadata/", + self.targets_dir, + "https://example.com/targets/", + self.sim, + ) + return updater + + targets: utils.DataSet = { + "standard case": TestTarget( + path="targetpath", + content=b"target content", + encoded_path="targetpath", + ), + "non-asci case": TestTarget( + path="åäö", + content=b"more content", + encoded_path="%C3%A5%C3%A4%C3%B6", + ), + "subdirectory case": TestTarget( + path="a/b/c/targetpath", + content=b"dir target content", + encoded_path="a%2Fb%2Fc%2Ftargetpath", + ), + } + + @utils.run_sub_tests_with_dataset(targets) + def test_fetch_target(self, target: TestTarget) -> None: + path = os.path.join(self.targets_dir, target.encoded_path) + + updater = self._init_updater() + # target does not exist yet + self.assertIsNone(updater.get_targetinfo(target.path)) + + # Add targets to repository + self.sim.targets.version += 1 + self.sim.add_target("targets", target.content, target.path) + self.sim.update_snapshot() + + updater = self._init_updater() + # target now exists, is not in cache yet + info = updater.get_targetinfo(target.path) + assert info is not None + # Test without and with explicit local filepath + self.assertIsNone(updater.find_cached_target(info)) + self.assertIsNone(updater.find_cached_target(info, path)) + + # download target, assert it is in cache and content is correct + self.assertEqual(path, updater.download_target(info)) + self.assertEqual(path, updater.find_cached_target(info)) + self.assertEqual(path, updater.find_cached_target(info, path)) + + with open(path, "rb") as f: + self.assertEqual(f.read(), target.content) + + # download using explicit filepath as well + os.remove(path) + self.assertEqual(path, updater.download_target(info, path)) + self.assertEqual(path, updater.find_cached_target(info)) + self.assertEqual(path, updater.find_cached_target(info, path)) + + def test_invalid_target_download(self) -> None: + target = TestTarget("targetpath", b"content", "targetpath") + + # Add target to repository + self.sim.targets.version += 1 + 
self.sim.add_target("targets", target.content, target.path) + self.sim.update_snapshot() + + updater = self._init_updater() + info = updater.get_targetinfo(target.path) + assert info is not None + + # Corrupt the file content to not match the hash + self.sim.target_files[target.path].data = b"conten@" + with self.assertRaises(RepositoryError): + updater.download_target(info) + + # Corrupt the file content to not match the length + self.sim.target_files[target.path].data = b"cont" + with self.assertRaises(RepositoryError): + updater.download_target(info) + + # Verify the file is not persisted in cache + self.assertIsNone(updater.find_cached_target(info)) + + def test_invalid_target_cache(self) -> None: + target = TestTarget("targetpath", b"content", "targetpath") + + # Add target to repository + self.sim.targets.version += 1 + self.sim.add_target("targets", target.content, target.path) + self.sim.update_snapshot() + + # Download the target + updater = self._init_updater() + info = updater.get_targetinfo(target.path) + assert info is not None + path = updater.download_target(info) + self.assertEqual(path, updater.find_cached_target(info)) + + # Add newer content to the same targetpath + target.content = b"contentv2" + self.sim.targets.version += 1 + self.sim.add_target("targets", target.content, target.path) + self.sim.update_snapshot() + + # Newer content is detected, old cached version is not used + updater = self._init_updater() + info = updater.get_targetinfo(target.path) + assert info is not None + self.assertIsNone(updater.find_cached_target(info)) + + # Download target, assert it is in cache and content is the newer + path = updater.download_target(info) + self.assertEqual(path, updater.find_cached_target(info)) + with open(path, "rb") as f: + self.assertEqual(f.read(), target.content) + + +if __name__ == "__main__": + if "--dump" in sys.argv: + TestFetchTarget.dump_dir = tempfile.mkdtemp() + print(f"Repository Simulator dumps in {TestFetchTarget.dump_dir}") + sys.argv.remove("--dump") + + utils.configure_test_logging(sys.argv) + unittest.main() diff --git a/tests/test_updater_key_rotations.py b/tests/test_updater_key_rotations.py new file mode 100644 index 0000000000..c78e5b65aa --- /dev/null +++ b/tests/test_updater_key_rotations.py @@ -0,0 +1,290 @@ +#!/usr/bin/env python + +# Copyright 2021, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +"""Test ngclient Updater key rotation handling""" + +import os +import sys +import tempfile +import unittest +from dataclasses import dataclass +from typing import ClassVar, Dict, List, Optional, Type + +from securesystemslib.signer import SSlibSigner + +from tests import utils +from tests.repository_simulator import RepositorySimulator +from tests.utils import run_sub_tests_with_dataset +from tuf.api.exceptions import UnsignedMetadataError +from tuf.api.metadata import Key, Root +from tuf.ngclient import Updater + + +@dataclass +class MdVersion: + keys: List[int] + threshold: int + sigs: List[int] + res: Optional[Type[Exception]] = None + + +class TestUpdaterKeyRotations(unittest.TestCase): + """Test ngclient root rotation handling""" + + # set dump_dir to trigger repository state dumps + dump_dir: Optional[str] = None + temp_dir: ClassVar[tempfile.TemporaryDirectory] + keys: ClassVar[List[Key]] + signers: ClassVar[List[SSlibSigner]] + + @classmethod + def setUpClass(cls) -> None: + # pylint: disable-next=consider-using-with + cls.temp_dir = tempfile.TemporaryDirectory() + + # Pre-create a bunch of 
keys and signers
+ cls.keys = []
+ cls.signers = []
+ for _ in range(10):
+ key, signer = RepositorySimulator.create_key()
+ cls.keys.append(key)
+ cls.signers.append(signer)
+
+ @classmethod
+ def tearDownClass(cls) -> None:
+ cls.temp_dir.cleanup()
+
+ def setup_subtest(self) -> None:
+ # Setup repository for subtest: make sure no roots have been published
+ # pylint: disable=attribute-defined-outside-init
+ self.sim = RepositorySimulator()
+ self.sim.signed_roots.clear()
+ self.sim.root.version = 0
+
+ if self.dump_dir is not None:
+ # create subtest dumpdir
+ # pylint: disable=no-member
+ name = f"{self.id().split('.')[-1]}-{self.case_name}"
+ self.sim.dump_dir = os.path.join(self.dump_dir, name)
+ os.mkdir(self.sim.dump_dir)
+
+ def _run_refresh(self) -> None:
+ """Create a new updater and run refresh"""
+ if self.sim.dump_dir is not None:
+ self.sim.write()
+
+ # bootstrap with initial root
+ # pylint: disable=attribute-defined-outside-init
+ self.metadata_dir = tempfile.mkdtemp(dir=self.temp_dir.name)
+ with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f:
+ f.write(self.sim.signed_roots[0])
+
+ updater = Updater(
+ self.metadata_dir,
+ "https://example.com/metadata/",
+ fetcher=self.sim,
+ )
+ updater.refresh()
+
+ # fmt: off
+ root_rotation_cases = {
+ "1-of-1 key rotation": [
+ MdVersion(keys=[1], threshold=1, sigs=[1]),
+ MdVersion(keys=[2], threshold=1, sigs=[2, 1]),
+ MdVersion(keys=[2], threshold=1, sigs=[2]),
+ ],
+ "1-of-1 key rotation, unused signatures": [
+ MdVersion(keys=[1], threshold=1, sigs=[3, 1, 4]),
+ MdVersion(keys=[2], threshold=1, sigs=[3, 2, 1, 4]),
+ MdVersion(keys=[2], threshold=1, sigs=[3, 2, 4]),
+ ],
+ "1-of-1 key rotation fail: not signed with old key": [
+ MdVersion(keys=[1], threshold=1, sigs=[1]),
+ MdVersion(keys=[2], threshold=1, sigs=[2, 3, 4], res=UnsignedMetadataError),
+ ],
+ "1-of-1 key rotation fail: not signed with new key": [
+ MdVersion(keys=[1], threshold=1, sigs=[1]),
+ MdVersion(keys=[2], threshold=1, sigs=[1, 3, 4], res=UnsignedMetadataError),
+ ],
+ "3-of-5, sign with different keycombos": [
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 4, 1]),
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 1, 3]),
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 1, 3]),
+ ],
+ "3-of-5, one key rotated": [
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
+ MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 4, 1]),
+ ],
+ "3-of-5, one key rotation fails: not signed with 3 new keys": [
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
+ MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 2, 4], res=UnsignedMetadataError),
+ ],
+ "3-of-5, one key rotation fails: not signed with 3 old keys": [
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
+ MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 4, 5], res=UnsignedMetadataError),
+ ],
+ "3-of-5, one key rotated, with intermediate step": [
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
+ MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 2, 4, 5]),
+ MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 4, 5]),
+ ],
+ "3-of-5, all keys rotated, with intermediate step": [
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
+ MdVersion(keys=[5, 6, 7, 8, 9], threshold=3, sigs=[0, 2, 4, 5, 6, 7]),
+ MdVersion(keys=[5, 6, 7, 8, 9], threshold=3, sigs=[5, 6, 7]),
+ ],
+ "1-of-3 threshold increase to 2-of-3": [
+ MdVersion(keys=[1, 2, 3], 
threshold=1, sigs=[1]),
+ MdVersion(keys=[1, 2, 3], threshold=2, sigs=[1, 2]),
+ ],
+ "1-of-3 threshold bump to 2-of-3 fails: new threshold not reached": [
+ MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1]),
+ MdVersion(keys=[1, 2, 3], threshold=2, sigs=[2], res=UnsignedMetadataError),
+ ],
+ "2-of-3 threshold decrease to 1-of-3": [
+ MdVersion(keys=[1, 2, 3], threshold=2, sigs=[1, 2]),
+ MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1, 2]),
+ MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1]),
+ ],
+ "2-of-3 threshold decr. to 1-of-3 fails: old threshold not reached": [
+ MdVersion(keys=[1, 2, 3], threshold=2, sigs=[1, 2]),
+ MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1], res=UnsignedMetadataError),
+ ],
+ "1-of-2 threshold increase to 2-of-2": [
+ MdVersion(keys=[1], threshold=1, sigs=[1]),
+ MdVersion(keys=[1, 2], threshold=2, sigs=[1, 2]),
+ ],
+ }
+ # fmt: on
+
+ @run_sub_tests_with_dataset(root_rotation_cases)
+ def test_root_rotation(self, root_versions: List[MdVersion]) -> None:
+ """Test Updater.refresh() with various sequences of root updates
+
+ Each MdVersion in the list describes root keys and signatures of a
+ remote root metadata version. As an example:
+ MdVersion([1,2,3], 2, [1,2])
+ defines a root that contains keys 1, 2 and 3 with threshold 2. The
+ metadata is signed with keys 1 and 2.
+
+ Assert that refresh() result is expected and that local root on disk is
+ the expected one after all roots have been loaded from remote using the
+ standard client update workflow.
+ """
+ self.setup_subtest()
+
+ # Publish all remote root versions defined in root_versions
+ for rootver in root_versions:
+ # clear root keys, signers
+ self.sim.root.roles[Root.type].keyids.clear()
+ self.sim.signers[Root.type].clear()
+
+ self.sim.root.roles[Root.type].threshold = rootver.threshold
+ for i in rootver.keys:
+ self.sim.root.add_key(Root.type, self.keys[i])
+ for i in rootver.sigs:
+ self.sim.add_signer(Root.type, self.signers[i])
+ self.sim.root.version += 1
+ self.sim.publish_root()
+
+ # run client workflow, assert success/failure
+ expected_error = root_versions[-1].res
+ if expected_error is None:
+ self._run_refresh()
+ expected_local_root = self.sim.signed_roots[-1]
+ else:
+ # failure expected: local root should be the root before last
+ with self.assertRaises(expected_error):
+ self._run_refresh()
+ expected_local_root = self.sim.signed_roots[-2]
+
+ # assert local root on disk is expected
+ with open(os.path.join(self.metadata_dir, "root.json"), "rb") as f:
+ self.assertEqual(f.read(), expected_local_root)
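The rule these root rotation cases pin down: a client accepts root version N+1 only if its signatures satisfy both the threshold of the currently trusted root N and the threshold declared by N+1 itself, counting each keyid at most once. Schematically, with sets of keyids standing in for actual signature verification:

    from typing import Set

    def root_update_ok(old_keys: Set[str], old_threshold: int,
                       new_keys: Set[str], new_threshold: int,
                       signing_keys: Set[str]) -> bool:
        """Accept a new root only if old and new thresholds are both met."""
        # Set intersection counts every keyid once, so duplicate
        # signatures by one key cannot satisfy a threshold
        return (len(signing_keys & old_keys) >= old_threshold
                and len(signing_keys & new_keys) >= new_threshold)

    # "1-of-1 key rotation": version 2 signed with keys 2 and 1 -> accepted
    assert root_update_ok({"1"}, 1, {"2"}, 1, {"2", "1"})
    # "fail: not signed with old key": signed with 2, 3, 4 only -> rejected
    assert not root_update_ok({"1"}, 1, {"2"}, 1, {"2", "3", "4"})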
+
+ # fmt: off
+ non_root_rotation_cases: Dict[str, MdVersion] = {
+ "1-of-1 key rotation":
+ MdVersion(keys=[2], threshold=1, sigs=[2]),
+ "1-of-1 key rotation, unused signatures":
+ MdVersion(keys=[1], threshold=1, sigs=[3, 1, 4]),
+ "1-of-1 key rotation fail: not signed with new key":
+ MdVersion(keys=[2], threshold=1, sigs=[1, 3, 4], res=UnsignedMetadataError),
+ "3-of-5, one key signature wrong: not signed with 3 expected keys":
+ MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 2, 4], res=UnsignedMetadataError),
+ "2-of-5, one key signature missing: threshold not reached":
+ MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 4], res=UnsignedMetadataError),
+ "3-of-5, sign first combo":
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
+ "3-of-5, sign second combo":
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 4, 1]),
+ "3-of-5, sign third combo":
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 1, 3]),
+ "3-of-5, sign fourth combo":
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[1, 2, 3]),
+ "3-of-5, sign fifth combo":
+ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[2, 3, 4]),
+ }
+ # fmt: on
+
+ @run_sub_tests_with_dataset(non_root_rotation_cases)
+ def test_non_root_rotations(self, md_version: MdVersion) -> None:
+ """Test Updater.refresh() with various sequences of metadata updates
+
+ The MdVersion argument describes the keys and signatures of a remote
+ metadata version. As an example:
+ MdVersion([1,2,3], 2, [1,2])
+ defines metadata that contains keys 1, 2 and 3 with threshold 2. The
+ metadata is signed with keys 1 and 2.
+
+ Assert that refresh() result is expected and that local metadata on disk
+ is the expected one after the metadata has been loaded from remote using
+ the standard client update workflow.
+ """
+ self.setup_subtest()
+ roles = ["timestamp", "snapshot", "targets"]
+ for role in roles:
+
+ # clear role keys, signers
+ self.sim.root.roles[role].keyids.clear()
+ self.sim.signers[role].clear()
+
+ self.sim.root.roles[role].threshold = md_version.threshold
+ for i in md_version.keys:
+ self.sim.root.add_key(role, self.keys[i])
+
+ for i in md_version.sigs:
+ self.sim.add_signer(role, self.signers[i])
+
+ self.sim.root.version += 1
+ self.sim.publish_root()
+
+ # run client workflow, assert success/failure
+ expected_error = md_version.res
+ if expected_error is None:
+ self._run_refresh()
+
+ # Call fetch_metadata to sign metadata with new keys
+ expected_local_md: bytes = self.sim.fetch_metadata(role)
+ # assert local metadata role is on disk as expected
+ md_path = os.path.join(self.metadata_dir, f"{role}.json")
+ with open(md_path, "rb") as f:
+ data = f.read()
+ self.assertEqual(data, expected_local_md)
+ else:
+ # failure expected
+ with self.assertRaises(expected_error):
+ self._run_refresh()
+
+
+if __name__ == "__main__":
+ if "--dump" in sys.argv:
+ TestUpdaterKeyRotations.dump_dir = tempfile.mkdtemp()
+ print(f"Repository dumps in {TestUpdaterKeyRotations.dump_dir}")
+ sys.argv.remove("--dump")
+
+ utils.configure_test_logging(sys.argv)
+ unittest.main()
diff --git a/tests/test_updater_ng.py b/tests/test_updater_ng.py
index 9286f32c71..c87a8fdc74 100644
--- a/tests/test_updater_ng.py
+++ b/tests/test_updater_ng.py
@@ -6,228 +6,327 @@
 """Test Updater class
 """
+import logging
 import os
 import shutil
-import tempfile
-import logging
 import sys
+import tempfile
 import unittest
-import tuf.unittest_toolbox as unittest_toolbox
+from typing import Callable, ClassVar, List
+from unittest.mock import MagicMock, patch
+
+from securesystemslib.interface import import_rsa_privatekey_from_file
+from securesystemslib.signer import SSlibSigner
 from tests import utils
-from tuf.api.metadata import Metadata
 from tuf import ngclient
-from securesystemslib.signer import SSlibSigner
-from securesystemslib.interface import import_rsa_privatekey_from_file
+from tuf.api import exceptions
+from tuf.api.metadata import (
+ Metadata,
+ Root,
+ Snapshot,
+ TargetFile,
+ Targets,
+ Timestamp,
+)
 logger = logging.getLogger(__name__)
-class TestUpdater(unittest_toolbox.Modified_TestCase):
-
- @classmethod
- def setUpClass(cls):
- # Create a temporary directory to store the repository, metadata, and target
- # files. 'temporary_directory' must be deleted in TearDownModule() so that
- # temporary files are always removed, even when exceptions occur.
- cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd())
-
- # Needed because in some tests simple_server.py cannot be found.
- # The reason is that the current working directory - # has been changed when executing a subprocess. - cls.SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), 'simple_server.py') - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served - # by the SimpleHTTPServer launched here. The test cases of 'test_updater.py' - # assume the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger, - server=cls.SIMPLE_SERVER_PATH) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases - shutil.rmtree(cls.temporary_directory) - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf.tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_keystore = os.path.join(original_repository_files, 'keystore') - original_client = os.path.join(original_repository_files, 'client', 'test_repository1', 'metadata', 'current') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.keystore_directory = \ - os.path.join(temporary_repository_root, 'keystore') - - self.client_directory = os.path.join(temporary_repository_root, 'client') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_keystore, self.keystore_directory) - - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - self.metadata_url = f"{url_prefix}/metadata/" - self.targets_url = f"{url_prefix}/targets/" - # Creating a repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. - self.repository_updater = ngclient.Updater(self.client_directory, - self.metadata_url, - self.targets_url) - - def tearDown(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.tearDown(self) - - # Logs stdout and stderr from the sever subprocess. 
- self.server_process_handler.flush_log()
-
- def _create_consistent_target(self, targetname: str, target_hash:str) -> None:
- """Create consistent targets copies of their non-consistent counterparts
- inside the repository directory.
-
- Args:
- targetname: A string denoting the name of the target file.
- target_hash: A string denoting the hash of the target.
-
- """
- consistent_target_name = f"{target_hash}.{targetname}"
- source_path = os.path.join(self.repository_directory, "targets", targetname)
- destination_path = os.path.join(
- self.repository_directory, "targets", consistent_target_name
- )
- shutil.copy(source_path, destination_path)
-
-
- def _make_root_file_with_consistent_snapshot_true(self) -> None:
- """Swap the existing root file inside the client directory with a new root
- file where the consistent_snapshot is set to true."""
- root_path = os.path.join(self.client_directory, "root.json")
- root = Metadata.from_file(root_path)
- root.signed.consistent_snapshot = True
- root_key_path = os.path.join(self.keystore_directory, "root_key")
- root_key_dict = import_rsa_privatekey_from_file(
+class TestUpdater(unittest.TestCase):
+ """Test the Updater class from 'tuf/ngclient/updater.py'."""
+
+ # pylint: disable=too-many-instance-attributes
+ server_process_handler: ClassVar[utils.TestServerProcess]
+
+ @classmethod
+ def setUpClass(cls) -> None:
+ cls.tmp_test_root_dir = tempfile.mkdtemp(dir=os.getcwd())
+
+ # Launch a SimpleHTTPServer
+ # Test cases will request metadata and target files that have been
+ # pre-generated in 'tuf/tests/repository_data', and are copied to
+ # CWD/tmp_test_root_dir/*
+ cls.server_process_handler = utils.TestServerProcess(log=logger)
+
+ @classmethod
+ def tearDownClass(cls) -> None:
+ # Clean up resources, flush the logged lines (if any) and remove the
+ # test dir
+ cls.server_process_handler.clean()
+ shutil.rmtree(cls.tmp_test_root_dir)
+
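utils.TestServerProcess wraps a simple HTTP file server rooted in the current working directory, which is why the repository path can be appended directly to 'localhost:<port>' in setUp below. If it were unavailable, a roughly equivalent stdlib-only fixture could look like this (a sketch; it assumes any free port is acceptable):

    import threading
    from functools import partial
    from http.server import HTTPServer, SimpleHTTPRequestHandler

    def serve_directory(directory: str) -> HTTPServer:
        """Serve 'directory' on a free port from a daemon thread."""
        handler = partial(SimpleHTTPRequestHandler, directory=directory)
        server = HTTPServer(("localhost", 0), handler)  # port 0: OS picks one
        threading.Thread(target=server.serve_forever, daemon=True).start()
        return server

    server = serve_directory(".")
    url_prefix = f"http://localhost:{server.server_port}"
    # ... point the client at url_prefix ...
    server.shutdown()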
+ def setUp(self) -> None:
+ # Create a tmp test dir inside the tmp test root dir to independently
+ # serve new repository files for each test. We delete all tmp dirs at
+ # once in tearDownClass after the server has released all resources.
+ self.tmp_test_dir = tempfile.mkdtemp(dir=self.tmp_test_root_dir)
+
+ # Copy the original repository files provided in the test folder so that
+ # any modifications are restricted to the copies.
+ # The 'repository_data' directory is expected to exist in 'tuf.tests/'.
+ original_repository_files = os.path.join(
+ utils.TESTS_DIR, "repository_data"
+ )
+
+ original_repository = os.path.join(
+ original_repository_files, "repository"
+ )
+ original_keystore = os.path.join(original_repository_files, "keystore")
+ original_client = os.path.join(
+ original_repository_files,
+ "client",
+ "test_repository1",
+ "metadata",
+ "current",
+ )
+
+ # Save references to the often-needed client repository directories.
+ # Test cases need these references to access metadata and target files.
+ self.repository_directory = os.path.join(
+ self.tmp_test_dir, "repository"
+ )
+ self.keystore_directory = os.path.join(self.tmp_test_dir, "keystore")
+ self.client_directory = os.path.join(self.tmp_test_dir, "client")
+
+ # Copy the original 'repository', 'client', and 'keystore' directories
+ # to the temporary repository the test cases can use.
+ shutil.copytree(original_repository, self.repository_directory)
+ shutil.copytree(original_client, self.client_directory)
+ shutil.copytree(original_keystore, self.keystore_directory)
+
+ # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
+ repository_basepath = self.repository_directory[len(os.getcwd()) :]
+ url_prefix = (
+ "http://"
+ + utils.TEST_HOST_ADDRESS
+ + ":"
+ + str(self.server_process_handler.port)
+ + repository_basepath.replace("\\", "/")
+ )
+
+ self.metadata_url = f"{url_prefix}/metadata/"
+ self.targets_url = f"{url_prefix}/targets/"
+ self.dl_dir = tempfile.mkdtemp(dir=self.tmp_test_dir)
+ # Creating a repository instance. The test cases will use this client
+ # updater to refresh metadata, fetch target files, etc.
+ self.updater = ngclient.Updater(
+ metadata_dir=self.client_directory,
+ metadata_base_url=self.metadata_url,
+ target_dir=self.dl_dir,
+ target_base_url=self.targets_url,
+ )
+
+ def tearDown(self) -> None:
+ # Logs stdout and stderr from the server subprocess.
+ self.server_process_handler.flush_log()
+
+ def _modify_repository_root(
+ self,
+ modification_func: Callable[[Metadata], None],
+ bump_version: bool = False,
+ ) -> None:
+ """Apply 'modification_func' to root and persist it."""
+ role_path = os.path.join(
+ self.repository_directory, "metadata", "root.json"
+ )
+ root = Metadata.from_file(role_path)
+ modification_func(root)
+ if bump_version:
+ root.signed.version += 1
+ root_key_path = os.path.join(self.keystore_directory, "root_key")
+ root_key_dict = import_rsa_privatekey_from_file(
 root_key_path, password="password"
- )
- root_signer = SSlibSigner(root_key_dict)
- root.sign(root_signer)
- # Remove the old root file and replace it with the newer root file.
- os.remove(root_path)
- root.to_file(root_path)
-
-
- def test_refresh_on_consistent_targets(self):
- # Generate a new root file where consistent_snapshot is set to true and
- # replace the old root metadata file with it.
- self._make_root_file_with_consistent_snapshot_true()
- self.repository_updater = ngclient.Updater(self.client_directory,
- self.metadata_url,
- self.targets_url)
- # All metadata is in local directory already
- self.repository_updater.refresh()
-
- # Get targetinfo for "file1.txt" listed in targets
- targetinfo1 = self.repository_updater.get_one_valid_targetinfo("file1.txt")
- # Get targetinfo for "file3.txt" listed in the delegated role1
- targetinfo3 = self.repository_updater.get_one_valid_targetinfo("file3.txt")
-
- # Create consistent targets with file path HASH.FILENAME.EXT
- target1_hash = list(targetinfo1.hashes.values())[0]
- target3_hash = list(targetinfo3.hashes.values())[0]
- self._create_consistent_target("file1.txt", target1_hash)
- self._create_consistent_target("file3.txt", target3_hash)
-
- destination_directory = self.make_temp_directory()
- updated_targets = self.repository_updater.updated_targets(
- [targetinfo1, targetinfo3], destination_directory
- )
-
- self.assertListEqual(updated_targets, [targetinfo1, targetinfo3])
- self.repository_updater.download_target(targetinfo1, destination_directory)
- updated_targets = self.repository_updater.updated_targets(
- updated_targets, destination_directory
- )
-
- self.assertListEqual(updated_targets, [targetinfo3])
-
- self.repository_updater.download_target(targetinfo3, destination_directory)
- updated_targets = self.repository_updater.updated_targets(
- updated_targets, destination_directory
- )
-
- self.assertListEqual(updated_targets, [])
-
- def test_refresh(self):
- # Test refresh without consistent targets - targets without hash prefixes.
- - # All metadata is in local directory already - self.repository_updater.refresh() - - # Get targetinfo for 'file1.txt' listed in targets - targetinfo1 = self.repository_updater.get_one_valid_targetinfo("file1.txt") - # Get targetinfo for 'file3.txt' listed in the delegated role1 - targetinfo3 = self.repository_updater.get_one_valid_targetinfo("file3.txt") - - destination_directory = self.make_temp_directory() - updated_targets = self.repository_updater.updated_targets([targetinfo1, targetinfo3], - destination_directory) - - self.assertListEqual(updated_targets, [targetinfo1, targetinfo3]) - - self.repository_updater.download_target(targetinfo1, destination_directory) - updated_targets = self.repository_updater.updated_targets(updated_targets, - destination_directory) - - self.assertListEqual(updated_targets, [targetinfo3]) - - - self.repository_updater.download_target(targetinfo3, destination_directory) - updated_targets = self.repository_updater.updated_targets(updated_targets, - destination_directory) - - self.assertListEqual(updated_targets, []) - - def test_refresh_with_only_local_root(self): - os.remove(os.path.join(self.client_directory, "timestamp.json")) - os.remove(os.path.join(self.client_directory, "snapshot.json")) - os.remove(os.path.join(self.client_directory, "targets.json")) - os.remove(os.path.join(self.client_directory, "role1.json")) - - self.repository_updater.refresh() - - # Get targetinfo for 'file3.txt' listed in the delegated role1 - targetinfo3 = self.repository_updater.get_one_valid_targetinfo('file3.txt') - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() + ) + signer = SSlibSigner(root_key_dict) + root.sign(signer) + root.to_file( + os.path.join(self.repository_directory, "metadata", "root.json") + ) + root.to_file( + os.path.join( + self.repository_directory, + "metadata", + f"{root.signed.version}.root.json", + ) + ) + + def _assert_files(self, roles: List[str]) -> None: + """Assert that local metadata files exist for 'roles'""" + expected_files = [f"{role}.json" for role in roles] + client_files = sorted(os.listdir(self.client_directory)) + self.assertEqual(client_files, expected_files) + + def test_refresh_and_download(self) -> None: + # Test refresh without consistent targets - targets without hash prefix. 
+
+ # top-level metadata is already in local cache (remove all other files)
+ os.remove(os.path.join(self.client_directory, "role1.json"))
+ os.remove(os.path.join(self.client_directory, "role2.json"))
+ os.remove(os.path.join(self.client_directory, "1.root.json"))
+
+ # top-level metadata is in local directory already
+ self.updater.refresh()
+ self._assert_files(
+ [Root.type, Snapshot.type, Targets.type, Timestamp.type]
+ )
+
+ # Get targetinfos, assert that cache does not contain files
+ info1 = self.updater.get_targetinfo("file1.txt")
+ assert isinstance(info1, TargetFile)
+ self._assert_files(
+ [Root.type, Snapshot.type, Targets.type, Timestamp.type]
+ )
+
+ # Get targetinfo for 'file3.txt' listed in the delegated role1
+ info3 = self.updater.get_targetinfo("file3.txt")
+ assert isinstance(info3, TargetFile)
+ expected_files = [
+ "role1",
+ Root.type,
+ Snapshot.type,
+ Targets.type,
+ Timestamp.type,
+ ]
+ self._assert_files(expected_files)
+ self.assertIsNone(self.updater.find_cached_target(info1))
+ self.assertIsNone(self.updater.find_cached_target(info3))
+
+ # Download files, assert that cache has correct files
+ self.updater.download_target(info1)
+ path = self.updater.find_cached_target(info1)
+ self.assertEqual(path, os.path.join(self.dl_dir, info1.path))
+ self.assertIsNone(self.updater.find_cached_target(info3))
+
+ self.updater.download_target(info3)
+ path = self.updater.find_cached_target(info1)
+ self.assertEqual(path, os.path.join(self.dl_dir, info1.path))
+ path = self.updater.find_cached_target(info3)
+ self.assertEqual(path, os.path.join(self.dl_dir, info3.path))
+
+ def test_refresh_with_only_local_root(self) -> None:
+ os.remove(os.path.join(self.client_directory, "timestamp.json"))
+ os.remove(os.path.join(self.client_directory, "snapshot.json"))
+ os.remove(os.path.join(self.client_directory, "targets.json"))
+ os.remove(os.path.join(self.client_directory, "role1.json"))
+ os.remove(os.path.join(self.client_directory, "role2.json"))
+ os.remove(os.path.join(self.client_directory, "1.root.json"))
+ self._assert_files([Root.type])
+
+ self.updater.refresh()
+ self._assert_files(
+ [Root.type, Snapshot.type, Targets.type, Timestamp.type]
+ )
+
+ # Get targetinfo for 'file3.txt' listed in the delegated role1
+ self.updater.get_targetinfo("file3.txt")
+ expected_files = [
+ "role1",
+ Root.type,
+ Snapshot.type,
+ Targets.type,
+ Timestamp.type,
+ ]
+ self._assert_files(expected_files)
+
+ def test_implicit_refresh_with_only_local_root(self) -> None:
+ os.remove(os.path.join(self.client_directory, "timestamp.json"))
+ os.remove(os.path.join(self.client_directory, "snapshot.json"))
+ os.remove(os.path.join(self.client_directory, "targets.json"))
+ os.remove(os.path.join(self.client_directory, "role1.json"))
+ os.remove(os.path.join(self.client_directory, "role2.json"))
+ os.remove(os.path.join(self.client_directory, "1.root.json"))
+ self._assert_files(["root"])
+
+ # Get targetinfo for 'file3.txt' listed in the delegated role1
+ self.updater.get_targetinfo("file3.txt")
+ expected_files = ["role1", "root", "snapshot", "targets", "timestamp"]
+ self._assert_files(expected_files)
+
+ def test_both_target_urls_not_set(self) -> None:
+ # target_base_url = None and Updater._target_base_url = None
+ updater = ngclient.Updater(
+ self.client_directory, self.metadata_url, self.dl_dir
+ )
+ info = TargetFile(1, {"sha256": ""}, "targetpath")
+ with self.assertRaises(ValueError):
+ updater.download_target(info)
+
+ def test_no_target_dir_no_filepath(self) -> None:
+ # filepath = None and Updater.target_dir = None
+ updater = ngclient.Updater(self.client_directory, self.metadata_url)
+ info = TargetFile(1, {"sha256": ""}, "targetpath")
+ with self.assertRaises(ValueError):
+ updater.find_cached_target(info)
+ with self.assertRaises(ValueError):
+ updater.download_target(info)
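As test_both_target_urls_not_set and test_no_target_dir_no_filepath show, target_dir and target_base_url are optional when constructing an Updater; a ValueError is raised only once a cache lookup or download actually needs the missing piece, and both can instead be passed per call. A usage sketch of that contract (paths and URL are placeholders; the per-call argument names follow the tests above):

    from tuf.api.metadata import TargetFile
    from tuf.ngclient import Updater

    # Metadata-only client: enough for refresh() and get_targetinfo()
    updater = Updater("/path/to/metadata", "https://example.com/metadata/")

    info = TargetFile(1, {"sha256": ""}, "targetpath")
    try:
        updater.download_target(info)  # no target dir, filepath or base URL
    except ValueError:
        # Supply the missing pieces per call instead, e.g.:
        # updater.download_target(info, "/tmp/out",
        #                         target_base_url="https://example.com/targets/")
        pass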
+
+ def test_external_targets_url(self) -> None:
+ self.updater.refresh()
+ info = self.updater.get_targetinfo("file1.txt")
+ assert isinstance(info, TargetFile)
+
+ self.updater.download_target(info, target_base_url=self.targets_url)
+
+ def test_length_hash_mismatch(self) -> None:
+ self.updater.refresh()
+ targetinfo = self.updater.get_targetinfo("file1.txt")
+ assert isinstance(targetinfo, TargetFile)
+
+ length = targetinfo.length
+ with self.assertRaises(exceptions.RepositoryError):
+ targetinfo.length = 44
+ self.updater.download_target(targetinfo)
+
+ with self.assertRaises(exceptions.RepositoryError):
+ targetinfo.length = length
+ targetinfo.hashes = {"sha256": "abcd"}
+ self.updater.download_target(targetinfo)
+
+ # pylint: disable=protected-access
+ def test_updating_root(self) -> None:
+ # Bump root version, resign and refresh
+ self._modify_repository_root(lambda root: None, bump_version=True)
+ self.updater.refresh()
+ self.assertEqual(self.updater._trusted_set.root.signed.version, 2)
+
+ def test_missing_targetinfo(self) -> None:
+ self.updater.refresh()
+
+ # Get targetinfo for non-existing file
+ self.assertIsNone(self.updater.get_targetinfo("file33.txt"))
+
+ @patch.object(os, "replace", wraps=os.replace)
+ @patch.object(os, "remove", wraps=os.remove)
+ def test_persist_metadata_fails(
+ self, wrapped_remove: MagicMock, wrapped_replace: MagicMock
+ ) -> None:
+ # Test that when the write succeeds (the file is created) but the
+ # replace fails with OSError, the file is deleted.
+ wrapped_replace.side_effect = OSError()
+ with self.assertRaises(OSError):
+ self.updater._persist_metadata("target", b"data")
+
+ wrapped_replace.assert_called_once()
+ wrapped_remove.assert_called_once()
+
+ # Assert that the temporary file created during writing is deleted,
+ # i.e. no temporary file is left in the folder.
+ for filename in os.listdir(self.updater._dir):
+ self.assertFalse(filename.startswith("tmp"))
+
+ def test_invalid_target_base_url(self) -> None:
+ info = TargetFile(1, {"sha256": ""}, "targetpath")
+ with self.assertRaises(exceptions.DownloadError):
+ self.updater.download_target(info, target_base_url="invalid_url")
+
+ def test_non_existing_target_file(self) -> None:
+ info = TargetFile(1, {"sha256": ""}, "/non_existing_file.txt")
+ # When a non-existing target file is given, download fails with
+ # "404 Client Error: File not found for url"
+ with self.assertRaises(exceptions.DownloadHTTPError):
+ self.updater.download_target(info)
+
+
+if __name__ == "__main__":
+ utils.configure_test_logging(sys.argv)
+ unittest.main()
diff --git a/tests/test_updater_root_rotation_integration.py b/tests/test_updater_root_rotation_integration.py
deleted file mode 100755
index 26faebb987..0000000000
--- a/tests/test_updater_root_rotation_integration.py
+++ /dev/null
@@ -1,684 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2016 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-
- test_updater_root_rotation_integration.py
-
-
- Evan Cordell.
-
-
- August 8, 2016.
-
-
- See LICENSE-MIT OR LICENSE for licensing information.
- - - 'test_updater_root_rotation.py' provides a collection of methods that test - root key rotation in the example client. - - - Test cases here should follow a specific order (i.e., independent methods are - tested before dependent methods). More accurately, least dependent methods - are tested before most dependent methods. There is no reason to rewrite or - construct other methods that replicate already-tested methods solely for - testing purposes. This is possible because the 'unittest.TestCase' class - guarantees the order of unit tests. The 'test_something_A' method would - be tested before 'test_something_B'. To ensure the expected order of tests, - a number is placed after 'test' and before methods name like so: - 'test_1_check_directory'. The number is a measure of dependence, where 1 is - less dependent than 2. -""" - -import os -import shutil -import tempfile -import logging -import unittest -import filecmp -import sys - -import tuf -import tuf.log -import tuf.keydb -import tuf.roledb -import tuf.exceptions -import tuf.repository_tool as repo_tool -import tuf.unittest_toolbox as unittest_toolbox -import tuf.client.updater as updater -import tuf.settings - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) -repo_tool.disable_console_log_messages() - - -class TestUpdater(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). Test - # cases will request metadata and target files that have been pre-generated - # in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of 'test_updater.py' - # assume the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf.tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_keystore = os.path.join(original_repository_files, 'keystore') - original_client = os.path.join(original_repository_files, 'client') - - # Save references to the often-needed client repository directories. 
- # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.keystore_directory = \ - os.path.join(temporary_repository_root, 'keystore') - self.client_directory = os.path.join(temporary_repository_root, 'client') - self.client_metadata = os.path.join(self.client_directory, - self.repository_name, 'metadata') - self.client_metadata_current = os.path.join(self.client_metadata, 'current') - self.client_metadata_previous = os.path.join(self.client_metadata, 'previous') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_keystore, self.keystore_directory) - - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repository_directory' with the temporary client - # directory copied from the original repository files. - tuf.settings.repositories_directory = self.client_directory - - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Creating a repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - - # Metadata role keys are needed by the test cases to make changes to the - # repository (e.g., adding a new target file to 'targets.json' and then - # requesting a refresh()). - self.role_keys = _load_role_keys(self.keystore_directory) - - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Logs stdout and stderr from the sever subprocess. - self.server_process_handler.flush_log() - - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - # UNIT TESTS. - def test_root_rotation(self): - repository = repo_tool.load_repository(self.repository_directory) - repository.root.threshold = 2 - - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - - # Errors, not enough signing keys to satisfy root's threshold. - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - - repository.root.add_verification_key(self.role_keys['role1']['public']) - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.root.load_signing_key(self.role_keys['role1']['private']) - repository.writeall() - - repository.root.add_verification_key(self.role_keys['snapshot']['public']) - repository.root.load_signing_key(self.role_keys['snapshot']['private']) - repository.root.threshold = 3 - repository.writeall() - - # Move the staged metadata to the "live" metadata. 
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - self.repository_updater.refresh() - - - - def test_verify_root_with_current_keyids_and_threshold(self): - """ - Each root file is signed by the current root threshold of keys as well - as the previous root threshold of keys. Test that a root file which is - not 'self-signed' with the current root threshold of keys causes the - update to fail - """ - # Load repository with root.json == 1.root.json (available on client) - # Signing key: "root", Threshold: 1 - repository = repo_tool.load_repository(self.repository_directory) - - # Rotate keys and update root: 1.root.json --> 2.root.json - # Signing key: "root" (previous) and "root2" (current) - # Threshold (for both): 1 - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.root.add_verification_key(self.role_keys['root2']['public']) - repository.root.load_signing_key(self.role_keys['root2']['private']) - # Remove the previous "root" key from the list of current - # verification keys - repository.root.remove_verification_key(self.role_keys['root']['public']) - repository.writeall() - - # Move staged metadata to "live" metadata - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Intercept 2.root.json and tamper with "root2" (current) key signature - root2_path_live = os.path.join( - self.repository_directory, 'metadata', '2.root.json') - root2 = securesystemslib.util.load_json_file(root2_path_live) - - for idx, sig in enumerate(root2['signatures']): - if sig['keyid'] == self.role_keys['root2']['public']['keyid']: - sig_len = len(root2['signatures'][idx]['sig']) - root2['signatures'][idx]['sig'] = "deadbeef".ljust(sig_len, '0') - - roo2_fobj = tempfile.TemporaryFile() - roo2_fobj.write(tuf.repository_lib._get_written_metadata(root2)) - securesystemslib.util.persist_temp_file(roo2_fobj, root2_path_live) - - # Update 1.root.json -> 2.root.json - # Signature verification with current keys should fail because we replaced - with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm: - self.repository_updater.refresh() - - for mirror_url, mirror_error in cm.exception.mirror_errors.items(): - self.assertTrue(mirror_url.endswith('/2.root.json')) - self.assertTrue(isinstance(mirror_error, - securesystemslib.exceptions.BadSignatureError)) - - # Assert that the current 'root.json' on the client side is the verified one - self.assertTrue(filecmp.cmp( - os.path.join(self.repository_directory, 'metadata', '1.root.json'), - os.path.join(self.client_metadata_current, 'root.json'))) - - - - - - def test_verify_root_with_duplicate_current_keyids(self): - """ - Each root file is signed by the current root threshold of keys as well - as the previous root threshold of keys. In each case, a keyid must only - count once towards the threshold. Test that the new root signatures - specific signature verification implemented in _verify_root_self_signed() - only counts one signature per keyid towards the threshold. 
- """ - # Load repository with root.json == 1.root.json (available on client) - # Signing key: "root", Threshold: 1 - repository = repo_tool.load_repository(self.repository_directory) - - # Add an additional signing key and bump the threshold to 2 - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.root.add_verification_key(self.role_keys['root2']['public']) - repository.root.load_signing_key(self.role_keys['root2']['private']) - repository.root.threshold = 2 - repository.writeall() - - # Move staged metadata to "live" metadata - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Modify 2.root.json and list two signatures with the same keyid - root2_path_live = os.path.join( - self.repository_directory, 'metadata', '2.root.json') - root2 = securesystemslib.util.load_json_file(root2_path_live) - - signatures = [] - signatures.append(root2['signatures'][0]) - signatures.append(root2['signatures'][0]) - - root2['signatures'] = signatures - - root2_fobj = tempfile.TemporaryFile() - root2_fobj.write(tuf.repository_lib._get_written_metadata(root2)) - securesystemslib.util.persist_temp_file(root2_fobj, root2_path_live) - - # Update 1.root.json -> 2.root.json - # Signature verification with new keys should fail because the threshold - # can only be met by two signatures with the same keyid - with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm: - self.repository_updater.refresh() - - for mirror_url, mirror_error in cm.exception.mirror_errors.items(): - self.assertTrue(mirror_url.endswith('/2.root.json')) - self.assertTrue(isinstance(mirror_error, - securesystemslib.exceptions.BadSignatureError)) - - # Assert that the current 'root.json' on the client side is the verified one - self.assertTrue(filecmp.cmp( - os.path.join(self.repository_directory, 'metadata', '1.root.json'), - os.path.join(self.client_metadata_current, 'root.json'))) - - - - - - def test_root_rotation_full(self): - """Test that a client whose root is outdated by multiple versions and who - has none of the latest nor next-to-latest root keys can still update and - does so by incrementally verifying all roots until the most recent one. """ - # Load initial repository with 1.root.json == root.json, signed by "root" - # key. This is the root.json that is already on the client. 
- repository = repo_tool.load_repository(self.repository_directory) - - # 1st rotation: 1.root.json --> 2.root.json - # 2.root.json will be signed by previous "root" key and by new "root2" key - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.root.add_verification_key(self.role_keys['root2']['public']) - repository.root.load_signing_key(self.role_keys['root2']['private']) - repository.writeall() - - # 2nd rotation: 2.root.json --> 3.root.json - # 3.root.json will be signed by previous "root2" key and by new "root3" key - repository.root.unload_signing_key(self.role_keys['root']['private']) - repository.root.remove_verification_key(self.role_keys['root']['public']) - repository.root.add_verification_key(self.role_keys['root3']['public']) - repository.root.load_signing_key(self.role_keys['root3']['private']) - repository.writeall() - - # Move staged metadata to "live" metadata - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Update on client 1.root.json --> 2.root.json --> 3.root.json - self.repository_updater.refresh() - - # Assert that client updated to the latest root from the repository - self.assertTrue(filecmp.cmp( - os.path.join(self.repository_directory, 'metadata', '3.root.json'), - os.path.join(self.client_metadata_current, 'root.json'))) - - - - def test_root_rotation_max(self): - """Test that client does not rotate beyond a configured upper bound, i.e. - `current_version + MAX_NUMBER_ROOT_ROTATIONS`. """ - # NOTE: The nature of below root changes is irrelevant. Here we only want - # the client to update but not beyond a configured upper bound. - - # 1.root.json --> 2.root.json (add root2 and root3 keys) - repository = repo_tool.load_repository(self.repository_directory) - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.root.add_verification_key(self.role_keys['root2']['public']) - repository.root.load_signing_key(self.role_keys['root2']['private']) - repository.root.add_verification_key(self.role_keys['root3']['public']) - repository.root.load_signing_key(self.role_keys['root3']['private']) - repository.writeall() - - # 2.root.json --> 3.root.json (change threshold) - repository.root.threshold = 2 - repository.writeall() - - # 3.root.json --> 4.root.json (change threshold again) - repository.root.threshold = 3 - repository.writeall() - - # Move staged metadata to "live" metadata - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Assert that repo indeed has "4.root.json" and that it's the latest root - self.assertTrue(filecmp.cmp( - os.path.join(self.repository_directory, 'metadata', '4.root.json'), - os.path.join(self.repository_directory, 'metadata', 'root.json'))) - - # Lower max root rotation cap so that client stops updating early - max_rotation_backup = tuf.settings.MAX_NUMBER_ROOT_ROTATIONS - tuf.settings.MAX_NUMBER_ROOT_ROTATIONS = 2 - - # Update on client 1.root.json --> 2.root.json --> 3.root.json, - # but stop before updating to 4.root.json - self.repository_updater.refresh() - - # Assert that the client indeed only updated until 3.root.json - self.assertTrue(filecmp.cmp( - os.path.join(self.repository_directory, 'metadata', '3.root.json'), - os.path.join(self.client_metadata_current, 
'root.json'))) - - # reset - tuf.settings.MAX_NUMBER_ROOT_ROTATIONS = max_rotation_backup - - - - def test_root_rotation_missing_keys(self): - repository = repo_tool.load_repository(self.repository_directory) - - # A partially written root.json (threshold = 2, and signed with only 1 key) - # causes an invalid root chain later. - repository.root.threshold = 2 - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - - repository.write('root') - repository.write('snapshot') - repository.write('timestamp') - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Create a new, valid root.json. - # Still not valid, because it is not written with a threshold of 2 - # previous keys - repository.root.add_verification_key(self.role_keys['role1']['public']) - repository.root.load_signing_key(self.role_keys['role1']['private']) - - repository.writeall() - - repository.root.add_verification_key(self.role_keys['snapshot']['public']) - repository.root.load_signing_key(self.role_keys['snapshot']['private']) - repository.root.threshold = 3 - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm: - self.repository_updater.refresh() - - for mirror_url, mirror_error in cm.exception.mirror_errors.items(): - self.assertTrue(mirror_url.endswith('/2.root.json')) - self.assertTrue(isinstance(mirror_error, - securesystemslib.exceptions.BadSignatureError)) - - # Assert that the current 'root.json' on the client side is the verified one - self.assertTrue(filecmp.cmp( - os.path.join(self.repository_directory, 'metadata', '1.root.json'), - os.path.join(self.client_metadata_current, 'root.json'))) - - - - - def test_root_rotation_unmet_last_version_threshold(self): - """Test that client detects a root.json version that is not signed - by a previous threshold of signatures """ - - repository = repo_tool.load_repository(self.repository_directory) - - # Add verification keys - repository.root.add_verification_key(self.role_keys['root']['public']) - repository.root.add_verification_key(self.role_keys['role1']['public']) - - repository.targets.add_verification_key(self.role_keys['targets']['public']) - repository.snapshot.add_verification_key(self.role_keys['snapshot']['public']) - repository.timestamp.add_verification_key(self.role_keys['timestamp']['public']) - - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - - # Add signing keys - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.root.load_signing_key(self.role_keys['role1']['private']) - - # Set root threshold - repository.root.threshold = 2 - repository.writeall() - - # Unload Root's previous signing keys to ensure that these keys are not - # used by mistake. 
- repository.root.unload_signing_key(self.role_keys['role1']['private']) - repository.root.unload_signing_key(self.role_keys['root']['private']) - - # Add new verification key - repository.root.add_verification_key(self.role_keys['snapshot']['public']) - - # Remove one of the original signing keys - repository.root.remove_verification_key(self.role_keys['role1']['public']) - - # Set the threshold for the new Root file, but note that the previous - # threshold of 2 must still be met. - repository.root.threshold = 1 - - repository.root.load_signing_key(self.role_keys['role1']['private']) - repository.root.load_signing_key(self.role_keys['snapshot']['private']) - - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - - # We use write() rather than writeall() because the latter should fail due - # to the missing self.role_keys['root'] signature. - repository.write('root', increment_version_number=True) - repository.write('snapshot', increment_version_number=True) - repository.write('timestamp', increment_version_number=True) - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # The following refresh should fail because root must be signed by the - # previous self.role_keys['root'] key, which wasn't loaded. - with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm: - self.repository_updater.refresh() - - for mirror_url, mirror_error in cm.exception.mirror_errors.items(): - self.assertTrue(mirror_url.endswith('/3.root.json')) - self.assertTrue(isinstance(mirror_error, - securesystemslib.exceptions.BadSignatureError)) - - # Assert that the current 'root.json' on the client side is the verified one - self.assertTrue(filecmp.cmp( - os.path.join(self.repository_directory, 'metadata', '2.root.json'), - os.path.join(self.client_metadata_current, 'root.json'))) - - - - def test_root_rotation_unmet_new_threshold(self): - """Test that client detects a root.json version that is not signed - by a current threshold of signatures """ - repository = repo_tool.load_repository(self.repository_directory) - - # Create a new, valid root.json. - repository.root.threshold = 2 - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.root.add_verification_key(self.role_keys['root2']['public']) - repository.root.load_signing_key(self.role_keys['root2']['private']) - - repository.writeall() - - # Increase the threshold and add a new verification key without - # actually loading the signing key - repository.root.threshold = 3 - repository.root.add_verification_key(self.role_keys['root3']['public']) - - # writeall fails as expected since the third signature is missing - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - # write an invalid '3.root.json' as partially signed - repository.write('root') - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - - # The following refresh should fail because root must be signed by the - # current self.role_keys['root3'] key, which wasn't loaded. 
- with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm: - self.repository_updater.refresh() - - for mirror_url, mirror_error in cm.exception.mirror_errors.items(): - self.assertTrue(mirror_url.endswith('/3.root.json')) - self.assertTrue(isinstance(mirror_error, - securesystemslib.exceptions.BadSignatureError)) - - # Assert that the current 'root.json' on the client side is the verified one - self.assertTrue(filecmp.cmp( - os.path.join(self.repository_directory, 'metadata', '2.root.json'), - os.path.join(self.client_metadata_current, 'root.json'))) - - - - def test_root_rotation_discard_untrusted_version(self): - """Test that client discards root.json version that failed the - signature verification """ - repository = repo_tool.load_repository(self.repository_directory) - - # Rotate the root key without signing with the previous version key 'root' - repository.root.remove_verification_key(self.role_keys['root']['public']) - repository.root.add_verification_key(self.role_keys['root2']['public']) - repository.root.load_signing_key(self.role_keys['root2']['private']) - - # 2.root.json - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Refresh on the client side should fail because 2.root.json is not signed - # with a threshold of prevous keys - with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm: - self.repository_updater.refresh() - - for mirror_url, mirror_error in cm.exception.mirror_errors.items(): - self.assertTrue(mirror_url.endswith('/2.root.json')) - self.assertTrue(isinstance(mirror_error, - securesystemslib.exceptions.BadSignatureError)) - - # Assert that the current 'root.json' on the client side is the trusted one - # and 2.root.json is discarded - self.assertTrue(filecmp.cmp( - os.path.join(self.repository_directory, 'metadata', '1.root.json'), - os.path.join(self.client_metadata_current, 'root.json'))) - - - - -def _load_role_keys(keystore_directory): - - # Populating 'self.role_keys' by importing the required public and private - # keys of 'tuf/tests/repository_data/'. The role keys are needed when - # modifying the remote repository used by the test cases in this unit test. - - # The pre-generated key files in 'repository_data/keystore' are all encrypted - # with a 'password' passphrase. - EXPECTED_KEYFILE_PASSWORD = 'password' - - # Store and return the cryptography keys of the top-level roles, including 1 - # delegated role. - role_keys = {} - - root_key_file = os.path.join(keystore_directory, 'root_key') - root2_key_file = os.path.join(keystore_directory, 'root_key2') - root3_key_file = os.path.join(keystore_directory, 'root_key3') - targets_key_file = os.path.join(keystore_directory, 'targets_key') - snapshot_key_file = os.path.join(keystore_directory, 'snapshot_key') - timestamp_key_file = os.path.join(keystore_directory, 'timestamp_key') - delegation_key_file = os.path.join(keystore_directory, 'delegation_key') - - role_keys = {'root': {}, 'root2': {}, 'root3': {}, 'targets': {}, 'snapshot': - {}, 'timestamp': {}, 'role1': {}} - - # Import the top-level and delegated role public keys. 
- role_keys['root']['public'] = \ - repo_tool.import_rsa_publickey_from_file(root_key_file+'.pub') - role_keys['root2']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(root2_key_file+'.pub') - role_keys['root3']['public'] = \ - repo_tool.import_ecdsa_publickey_from_file(root3_key_file+'.pub') - role_keys['targets']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(targets_key_file+'.pub') - role_keys['snapshot']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(snapshot_key_file+'.pub') - role_keys['timestamp']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(timestamp_key_file+'.pub') - role_keys['role1']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(delegation_key_file+'.pub') - - # Import the private keys of the top-level and delegated roles. - role_keys['root']['private'] = \ - repo_tool.import_rsa_privatekey_from_file(root_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['root2']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(root2_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['root3']['private'] = \ - repo_tool.import_ecdsa_privatekey_from_file(root3_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['targets']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(targets_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['snapshot']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['timestamp']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['role1']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(delegation_key_file, - EXPECTED_KEYFILE_PASSWORD) - - return role_keys - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_updater_top_level_update.py b/tests/test_updater_top_level_update.py new file mode 100644 index 0000000000..c92ee57e47 --- /dev/null +++ b/tests/test_updater_top_level_update.py @@ -0,0 +1,815 @@ +#!/usr/bin/env python + +# Copyright 2021, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +"""Test ngclient Updater top-level metadata update workflow""" + +import builtins +import datetime +import os +import sys +import tempfile +import unittest +from typing import Iterable, Optional +from unittest.mock import MagicMock, Mock, call, patch + +from tests import utils +from tests.repository_simulator import RepositorySimulator +from tuf.api.exceptions import ( + BadVersionNumberError, + DownloadLengthMismatchError, + ExpiredMetadataError, + LengthOrHashMismatchError, + UnsignedMetadataError, +) +from tuf.api.metadata import ( + SPECIFICATION_VERSION, + TOP_LEVEL_ROLE_NAMES, + DelegatedRole, + Metadata, + Root, + Snapshot, + Targets, + Timestamp, +) +from tuf.ngclient import Updater + + +# pylint: disable=too-many-public-methods +class TestRefresh(unittest.TestCase): + """Test update of top-level metadata following + 'Detailed client workflow' in the specification.""" + + # set dump_dir to trigger repository state dumps + dump_dir: Optional[str] = None + + past_datetime = datetime.datetime.utcnow().replace( + microsecond=0 + ) - datetime.timedelta(days=5) + + def setUp(self) -> None: + # pylint: disable=consider-using-with + self.temp_dir = tempfile.TemporaryDirectory() + self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") + self.targets_dir = os.path.join(self.temp_dir.name, "targets") + 
os.mkdir(self.metadata_dir)
+        os.mkdir(self.targets_dir)
+
+        self.sim = RepositorySimulator()
+
+        # bootstrap client with initial root metadata
+        with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f:
+            f.write(self.sim.signed_roots[0])
+
+        if self.dump_dir is not None:
+            # create test-specific dump directory
+            name = self.id().split(".")[-1]
+            self.sim.dump_dir = os.path.join(self.dump_dir, name)
+            os.mkdir(self.sim.dump_dir)
+
+    def tearDown(self) -> None:
+        self.temp_dir.cleanup()
+
+    def _run_refresh(self) -> Updater:
+        """Create a new Updater instance and refresh"""
+        if self.dump_dir is not None:
+            self.sim.write()
+
+        updater = Updater(
+            self.metadata_dir,
+            "https://example.com/metadata/",
+            self.targets_dir,
+            "https://example.com/targets/",
+            self.sim,
+        )
+        updater.refresh()
+        return updater
+
+    def _init_updater(self) -> Updater:
+        """Create a new Updater instance"""
+        if self.dump_dir is not None:
+            self.sim.write()
+
+        return Updater(
+            self.metadata_dir,
+            "https://example.com/metadata/",
+            self.targets_dir,
+            "https://example.com/targets/",
+            self.sim,
+        )
+
+    def _assert_files_exist(self, roles: Iterable[str]) -> None:
+        """Assert that local metadata files exist for 'roles'"""
+        expected_files = sorted([f"{role}.json" for role in roles])
+        local_metadata_files = sorted(os.listdir(self.metadata_dir))
+        self.assertListEqual(local_metadata_files, expected_files)
+
+    def _assert_content_equals(
+        self, role: str, version: Optional[int] = None
+    ) -> None:
+        """Assert that local file content is the expected one"""
+        expected_content = self.sim.fetch_metadata(role, version)
+        with open(os.path.join(self.metadata_dir, f"{role}.json"), "rb") as f:
+            self.assertEqual(f.read(), expected_content)
+
+    def _assert_version_equals(self, role: str, expected_version: int) -> None:
+        """Assert that local metadata version is the expected one"""
+        md = Metadata.from_file(os.path.join(self.metadata_dir, f"{role}.json"))
+        self.assertEqual(md.signed.version, expected_version)
+
+    def test_first_time_refresh(self) -> None:
+        # Metadata dir contains only the mandatory initial root.json
+        self._assert_files_exist([Root.type])
+
+        # Add one more root version to repository so that
+        # refresh() updates from local trusted root (v1) to
+        # remote root (v2)
+        self.sim.root.version += 1
+        self.sim.publish_root()
+
+        self._run_refresh()
+
+        self._assert_files_exist(TOP_LEVEL_ROLE_NAMES)
+        for role in TOP_LEVEL_ROLE_NAMES:
+            version = 2 if role == Root.type else None
+            self._assert_content_equals(role, version)
+
+    def test_trusted_root_missing(self) -> None:
+        os.remove(os.path.join(self.metadata_dir, "root.json"))
+        with self.assertRaises(OSError):
+            self._run_refresh()
+
+        # Metadata dir is empty
+        self.assertFalse(os.listdir(self.metadata_dir))
+
+    def test_trusted_root_expired(self) -> None:
+        # Create an expired root version
+        self.sim.root.expires = self.past_datetime
+        self.sim.root.version += 1
+        self.sim.publish_root()
+
+        # Update to latest root which is expired but still
+        # saved as a local root.
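+        # (refresh() below must raise ExpiredMetadataError, yet root v2 must
+        # still end up persisted on disk, as asserted after the exception)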
+ updater = self._init_updater() + with self.assertRaises(ExpiredMetadataError): + updater.refresh() + + self._assert_files_exist([Root.type]) + self._assert_content_equals(Root.type, 2) + + # Local root metadata can be loaded even if expired + updater = self._init_updater() + + # Create a non-expired root version and refresh + self.sim.root.expires = self.sim.safe_expiry + self.sim.root.version += 1 + self.sim.publish_root() + updater.refresh() + + # Root is successfully updated to latest version + self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) + self._assert_content_equals(Root.type, 3) + + def test_trusted_root_unsigned(self) -> None: + # Local trusted root is not signed + root_path = os.path.join(self.metadata_dir, "root.json") + md_root = Metadata.from_file(root_path) + md_root.signatures.clear() + md_root.to_file(root_path) + + with self.assertRaises(UnsignedMetadataError): + self._run_refresh() + + # The update failed, no changes in metadata + self._assert_files_exist([Root.type]) + md_root_after = Metadata.from_file(root_path) + self.assertEqual(md_root.to_bytes(), md_root_after.to_bytes()) + + def test_max_root_rotations(self) -> None: + # Root must stop looking for new versions after Y number of + # intermediate files were downloaded. + updater = self._init_updater() + updater.config.max_root_rotations = 3 + + # Create some number of roots greater than 'max_root_rotations' + while self.sim.root.version < updater.config.max_root_rotations + 3: + self.sim.root.version += 1 + self.sim.publish_root() + + md_root = Metadata.from_file( + os.path.join(self.metadata_dir, "root.json") + ) + initial_root_version = md_root.signed.version + + updater.refresh() + + # Assert that root version was increased with no more + # than 'max_root_rotations' + self._assert_version_equals( + Root.type, initial_root_version + updater.config.max_root_rotations + ) + + def test_intermediate_root_incorrectly_signed(self) -> None: + # Check for an arbitrary software attack + + # Intermediate root v2 is unsigned + self.sim.root.version += 1 + root_signers = self.sim.signers[Root.type].copy() + self.sim.signers[Root.type].clear() + self.sim.publish_root() + + # Final root v3 is correctly signed + self.sim.root.version += 1 + self.sim.signers[Root.type] = root_signers + self.sim.publish_root() + + # Incorrectly signed intermediate root is detected + with self.assertRaises(UnsignedMetadataError): + self._run_refresh() + + # The update failed, latest root version is v1 + self._assert_files_exist([Root.type]) + self._assert_content_equals(Root.type, 1) + + def test_intermediate_root_expired(self) -> None: + # The expiration of the new (intermediate) root metadata file + # does not matter yet + + # Intermediate root v2 is expired + self.sim.root.expires = self.past_datetime + self.sim.root.version += 1 + self.sim.publish_root() + + # Final root v3 is up to date + self.sim.root.expires = self.sim.safe_expiry + self.sim.root.version += 1 + self.sim.publish_root() + + self._run_refresh() + + # Successfully updated to root v3 + self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) + self._assert_content_equals(Root.type, 3) + + def test_final_root_incorrectly_signed(self) -> None: + # Check for an arbitrary software attack + self.sim.root.version += 1 # root v2 + self.sim.signers[Root.type].clear() + self.sim.publish_root() + + with self.assertRaises(UnsignedMetadataError): + self._run_refresh() + + # The update failed, latest root version is v1 + self._assert_files_exist([Root.type]) + self._assert_content_equals(Root.type, 
1)
+
+    def test_new_root_same_version(self) -> None:
+        # Check for a rollback attack
+        # Repository serves a root file with the same version as previous
+        self.sim.publish_root()
+        with self.assertRaises(BadVersionNumberError):
+            self._run_refresh()
+
+        # The update failed, latest root version is v1
+        self._assert_files_exist([Root.type])
+        self._assert_content_equals(Root.type, 1)
+
+    def test_new_root_nonconsecutive_version(self) -> None:
+        # Repository serves non-consecutive root version
+        self.sim.root.version += 2
+        self.sim.publish_root()
+        with self.assertRaises(BadVersionNumberError):
+            self._run_refresh()
+
+        # The update failed, latest root version is v1
+        self._assert_files_exist([Root.type])
+        self._assert_content_equals(Root.type, 1)
+
+    def test_final_root_expired(self) -> None:
+        # Check for a freeze attack
+        # Final root is expired
+        self.sim.root.expires = self.past_datetime
+        self.sim.root.version += 1
+        self.sim.publish_root()
+
+        with self.assertRaises(ExpiredMetadataError):
+            self._run_refresh()
+
+        # The update failed but final root is persisted on the file system
+        self._assert_files_exist([Root.type])
+        self._assert_content_equals(Root.type, 2)
+
+    def test_new_timestamp_unsigned(self) -> None:
+        # Check for an arbitrary software attack
+        self.sim.signers[Timestamp.type].clear()
+        with self.assertRaises(UnsignedMetadataError):
+            self._run_refresh()
+
+        self._assert_files_exist([Root.type])
+
+    @patch.object(datetime, "datetime", wraps=datetime.datetime)
+    def test_expired_timestamp_version_rollback(self, mock_time: Mock) -> None:
+        """Verifies that local timestamp is used in rollback checks even if it is expired.
+ + The snapshot updates and rollback protection checks are performed + with the following timing: + - Timestamp v1 expiry set to day 7 + - Repository bumps snapshot to v3 on day 0 + - First updater refresh performed on day 0 + - Timestamp v2 expiry set to day 21 + - Second updater refresh performed on day 18: + assert that rollback protection is done with expired timestamp v1""" + + now = datetime.datetime.utcnow() + self.sim.timestamp.expires = now + datetime.timedelta(days=7) + + # Bump the snapshot version number to 3 + self.sim.update_snapshot() + self.sim.update_snapshot() + + # Make a successful update of valid metadata which stores it in cache + self._run_refresh() + + self.sim.snapshot.version = 1 + # Snapshot version number is set to 2, which is still less than 3 + self.sim.update_snapshot() + self.sim.timestamp.expires = now + datetime.timedelta(days=21) + + mock_time.utcnow.return_value = ( + datetime.datetime.utcnow() + datetime.timedelta(days=18) + ) + with patch("datetime.datetime", mock_time): + # Assert that rollback protection is done even if + # local timestamp has expired + with self.assertRaises(BadVersionNumberError): + self._run_refresh() + + self._assert_version_equals(Timestamp.type, 3) + + def test_new_timestamp_version_rollback(self) -> None: + # Check for a rollback attack + self.sim.timestamp.version = 2 + self._run_refresh() + + self.sim.timestamp.version = 1 + with self.assertRaises(BadVersionNumberError): + self._run_refresh() + + self._assert_version_equals(Timestamp.type, 2) + + def test_new_timestamp_snapshot_rollback(self) -> None: + # Check for a rollback attack. + self.sim.snapshot.version = 2 + self.sim.update_timestamp() # timestamp v2 + self._run_refresh() + + # Snapshot meta version is smaller than previous + self.sim.timestamp.snapshot_meta.version = 1 + self.sim.timestamp.version += 1 # timestamp v3 + + with self.assertRaises(BadVersionNumberError): + self._run_refresh() + + self._assert_version_equals(Timestamp.type, 2) + + def test_new_timestamp_expired(self) -> None: + # Check for a freeze attack + self.sim.timestamp.expires = self.past_datetime + self.sim.update_timestamp() + + with self.assertRaises(ExpiredMetadataError): + self._run_refresh() + + self._assert_files_exist([Root.type]) + + def test_new_timestamp_fast_forward_recovery(self) -> None: + """Test timestamp fast-forward recovery using key rotation. 
+
+        The timestamp recovery is made by the following steps
+         - Remove the timestamp key
+         - Create and add a new key for timestamp
+         - Bump and publish root
+         - Rollback the timestamp version
+        """
+
+        # attacker updates to a higher version
+        self.sim.timestamp.version = 99999
+
+        # client refreshes the metadata and sees the new timestamp version
+        self._run_refresh()
+        self._assert_version_equals(Timestamp.type, 99999)
+
+        # repository rotates timestamp keys, rolls back timestamp version
+        self.sim.rotate_keys(Timestamp.type)
+        self.sim.root.version += 1
+        self.sim.publish_root()
+        self.sim.timestamp.version = 1
+
+        # client refreshes the metadata and sees the initial timestamp version
+        self._run_refresh()
+        self._assert_version_equals(Timestamp.type, 1)
+
+    def test_new_snapshot_hash_mismatch(self) -> None:
+        # Check against timestamp role’s snapshot hash
+
+        # Update timestamp with snapshot's hashes
+        self.sim.compute_metafile_hashes_length = True
+        self.sim.update_timestamp()  # timestamp v2
+        self._run_refresh()
+
+        # Modify snapshot contents without updating
+        # timestamp's snapshot hash
+        self.sim.snapshot.expires += datetime.timedelta(days=1)
+        self.sim.snapshot.version += 1  # snapshot v2
+        self.sim.timestamp.snapshot_meta.version = self.sim.snapshot.version
+        self.sim.timestamp.version += 1  # timestamp v3
+
+        # Hash mismatch error
+        with self.assertRaises(LengthOrHashMismatchError):
+            self._run_refresh()
+
+        self._assert_version_equals(Timestamp.type, 3)
+        self._assert_version_equals(Snapshot.type, 1)
+
+    def test_new_snapshot_unsigned(self) -> None:
+        # Check for an arbitrary software attack
+        self.sim.signers[Snapshot.type].clear()
+        with self.assertRaises(UnsignedMetadataError):
+            self._run_refresh()
+
+        self._assert_files_exist([Root.type, Timestamp.type])
+
+    def test_new_snapshot_version_mismatch(self) -> None:
+        # Check against timestamp role’s snapshot version
+
+        # Increase snapshot version without updating timestamp
+        self.sim.snapshot.version += 1
+        with self.assertRaises(BadVersionNumberError):
+            self._run_refresh()
+
+        self._assert_files_exist([Root.type, Timestamp.type])
+
+    def test_new_snapshot_version_rollback(self) -> None:
+        # Check for a rollback attack
+        self.sim.snapshot.version = 2
+        self.sim.update_timestamp()
+        self._run_refresh()
+
+        self.sim.snapshot.version = 1
+        self.sim.update_timestamp()
+
+        with self.assertRaises(BadVersionNumberError):
+            self._run_refresh()
+
+        self._assert_version_equals(Snapshot.type, 2)
+
+    def test_new_snapshot_fast_forward_recovery(self) -> None:
+        """Test snapshot fast-forward recovery using key rotation.
+
+        The snapshot recovery requires the snapshot and timestamp key rotation.
+        It is made by the following steps:
+         - Remove the snapshot and timestamp keys
+         - Create and add a new key for snapshot and timestamp
+         - Rollback snapshot version
+         - Bump and publish root
+         - Bump the timestamp
+        """
+
+        # attacker updates to a higher version (bumping timestamp is required)
+        self.sim.snapshot.version = 99999
+        self.sim.update_timestamp()
+
+        # client refreshes the metadata and sees the new snapshot version
+        self._run_refresh()
+        self._assert_version_equals(Snapshot.type, 99999)
+
+        # repository rotates snapshot & timestamp keys, rolls back snapshot
+        self.sim.rotate_keys(Snapshot.type)
+        self.sim.rotate_keys(Timestamp.type)
+        self.sim.root.version += 1
+        self.sim.publish_root()
+
+        self.sim.snapshot.version = 1
+        self.sim.update_timestamp()
+
+        # client refreshes the metadata and sees the initial snapshot version
+        self._run_refresh()
+        self._assert_version_equals(Snapshot.type, 1)
+
+    def test_new_snapshot_expired(self) -> None:
+        # Check for a freeze attack
+        self.sim.snapshot.expires = self.past_datetime
+        self.sim.update_snapshot()
+
+        with self.assertRaises(ExpiredMetadataError):
+            self._run_refresh()
+
+        self._assert_files_exist([Root.type, Timestamp.type])
+
+    def test_new_targets_hash_mismatch(self) -> None:
+        # Check against snapshot role’s targets hashes
+
+        # Update snapshot with target's hashes
+        self.sim.compute_metafile_hashes_length = True
+        self.sim.update_snapshot()
+        self._run_refresh()
+
+        # Modify targets contents without updating
+        # snapshot's targets hashes
+        self.sim.targets.version += 1
+        self.sim.snapshot.meta[
+            "targets.json"
+        ].version = self.sim.targets.version
+        self.sim.snapshot.version += 1
+        self.sim.update_timestamp()
+
+        with self.assertRaises(LengthOrHashMismatchError):
+            self._run_refresh()
+
+        self._assert_version_equals(Snapshot.type, 3)
+        self._assert_version_equals(Targets.type, 1)
+
+    def test_new_targets_unsigned(self) -> None:
+        # Check for an arbitrary software attack
+        self.sim.signers[Targets.type].clear()
+        with self.assertRaises(UnsignedMetadataError):
+            self._run_refresh()
+
+        self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type])
+
+    def test_new_targets_version_mismatch(self) -> None:
+        # Check against snapshot role’s targets version
+
+        # Increase targets version without updating snapshot
+        self.sim.targets.version += 1
+        with self.assertRaises(BadVersionNumberError):
+            self._run_refresh()
+
+        self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type])
+
+    def test_new_targets_expired(self) -> None:
+        # Check for a freeze attack.
+        self.sim.targets.expires = self.past_datetime
+        self.sim.update_snapshot()
+
+        with self.assertRaises(ExpiredMetadataError):
+            self._run_refresh()
+
+        self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type])
+
+    def test_compute_metafile_hashes_length(self) -> None:
+        self.sim.compute_metafile_hashes_length = True
+        self.sim.update_snapshot()
+        self._run_refresh()
+        self._assert_version_equals(Timestamp.type, 2)
+        self._assert_version_equals(Snapshot.type, 2)
+
+        self.sim.compute_metafile_hashes_length = False
+        self.sim.update_snapshot()
+        self._run_refresh()
+
+        self._assert_version_equals(Timestamp.type, 3)
+        self._assert_version_equals(Snapshot.type, 3)
+
+    def test_new_targets_fast_forward_recovery(self) -> None:
+        """Test targets fast-forward recovery using key rotation.
+
+        The targets recovery is made by issuing new Snapshot keys, by the
+        following steps:
+         - Remove the snapshot key
+         - Create and add a new key for snapshot
+         - Bump and publish root
+         - Rollback the target version
+        """
+        # attacker updates to a higher version
+        self.sim.targets.version = 99999
+        self.sim.update_snapshot()
+
+        # client refreshes the metadata and sees the new targets version
+        self._run_refresh()
+        self._assert_version_equals(Targets.type, 99999)
+
+        # repository rotates snapshot keys, rolls back targets version
+        self.sim.rotate_keys(Snapshot.type)
+        self.sim.root.version += 1
+        self.sim.publish_root()
+
+        self.sim.targets.version = 1
+        self.sim.update_snapshot()
+
+        # client refreshes the metadata and sees the initial targets version
+        self._run_refresh()
+        self._assert_version_equals(Targets.type, 1)
+
+    @patch.object(builtins, "open", wraps=builtins.open)
+    def test_not_loading_targets_twice(self, wrapped_open: MagicMock) -> None:
+        # Do not load targets roles more than once when traversing
+        # the delegations tree
+
+        # Add new delegated targets, update the snapshot
+        spec_version = ".".join(SPECIFICATION_VERSION)
+        targets = Targets(1, spec_version, self.sim.safe_expiry, {}, None)
+        role = DelegatedRole("role1", [], 1, False, ["*"], None)
+        self.sim.add_delegation("targets", role, targets)
+        self.sim.update_snapshot()
+
+        # Run refresh, top-level roles are loaded
+        updater = self._run_refresh()
+        # Clean up calls to open during refresh()
+        wrapped_open.reset_mock()
+
+        # First time looking for "somepath", only 'role1' must be loaded
+        updater.get_targetinfo("somepath")
+        wrapped_open.assert_called_once_with(
+            os.path.join(self.metadata_dir, "role1.json"), "rb"
+        )
+        wrapped_open.reset_mock()
+        # Second call to get_targetinfo, all metadata is already loaded
+        updater.get_targetinfo("somepath")
+        wrapped_open.assert_not_called()
+
+    def test_snapshot_rollback_with_local_snapshot_hash_mismatch(self) -> None:
+        # Test triggering snapshot rollback check on a newly downloaded snapshot
+        # when the local snapshot is loaded even when there is a hash mismatch
+        # with timestamp.snapshot_meta.
+
+        # By raising this flag on timestamp update the simulator would:
+        # 1) compute the hash of the new modified version of snapshot
+        # 2) assign the hash to timestamp.snapshot_meta
+        # The purpose is to create a hash mismatch between timestamp.meta and
+        # the local snapshot, but to have hash match between timestamp.meta and
+        # the next snapshot version.
+        self.sim.compute_metafile_hashes_length = True
+
+        # Initialize all metadata and assign targets version higher than 1.
+        self.sim.targets.version = 2
+        self.sim.update_snapshot()
+        self._run_refresh()
+
+        # The new targets must have a lower version than the local trusted one.
+        self.sim.targets.version = 1
+        self.sim.update_snapshot()
+
+        # During the snapshot update, the local snapshot will be loaded even if
+        # there is a hash mismatch with timestamp.snapshot_meta, because it will
+        # be considered as trusted.
+        # Should fail as a new version of snapshot will be fetched which lowers
+        # the snapshot.meta["targets.json"] version by 1 and throws an error.
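+        # (the expected failure is the version check, not the hash check:
+        # BadVersionNumberError is asserted below)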
+ with self.assertRaises(BadVersionNumberError): + self._run_refresh() + + @patch.object(builtins, "open", wraps=builtins.open) + def test_load_metadata_from_cache(self, wrapped_open: MagicMock) -> None: + + # Add new delegated targets + spec_version = ".".join(SPECIFICATION_VERSION) + targets = Targets(1, spec_version, self.sim.safe_expiry, {}, None) + role = DelegatedRole("role1", [], 1, False, ["*"], None) + self.sim.add_delegation("targets", role, targets) + self.sim.update_snapshot() + + # Make a successful update of valid metadata which stores it in cache + updater = self._run_refresh() + updater.get_targetinfo("non_existent_target") + + # Clean up calls to open during refresh() + wrapped_open.reset_mock() + # Clean up fetch tracker metadata + self.sim.fetch_tracker.metadata.clear() + + # Create a new updater and perform a second update while + # the metadata is already stored in cache (metadata dir) + updater = Updater( + self.metadata_dir, + "https://example.com/metadata/", + self.targets_dir, + "https://example.com/targets/", + self.sim, + ) + updater.get_targetinfo("non_existent_target") + + # Test that metadata is loaded from cache and not downloaded + wrapped_open.assert_has_calls( + [ + call(os.path.join(self.metadata_dir, "root.json"), "rb"), + call(os.path.join(self.metadata_dir, "timestamp.json"), "rb"), + call(os.path.join(self.metadata_dir, "snapshot.json"), "rb"), + call(os.path.join(self.metadata_dir, "targets.json"), "rb"), + call(os.path.join(self.metadata_dir, "role1.json"), "rb"), + ] + ) + + expected_calls = [("root", 2), ("timestamp", None)] + self.assertListEqual(self.sim.fetch_tracker.metadata, expected_calls) + + @patch.object(datetime, "datetime", wraps=datetime.datetime) + def test_expired_metadata(self, mock_time: Mock) -> None: + """Verifies that expired local timestamp/snapshot can be used for + updating from remote. 
+
+        The updates and verifications are performed with the following timing:
+         - Timestamp v1 expiry set to day 7
+         - First updater refresh performed on day 0
+         - Repository bumps snapshot and targets to v2 on day 0
+         - Timestamp v2 expiry set to day 21
+         - Second updater refresh performed on day 18,
+           it is successful and timestamp/snapshot final versions are v2"""
+
+        now = datetime.datetime.utcnow()
+        self.sim.timestamp.expires = now + datetime.timedelta(days=7)
+
+        # Make a successful update of valid metadata which stores it in cache
+        self._run_refresh()
+
+        self.sim.targets.version += 1
+        self.sim.update_snapshot()
+        self.sim.timestamp.expires = now + datetime.timedelta(days=21)
+
+        # Mocking time so that local timestamp has expired
+        # but the new timestamp has not
+        mock_time.utcnow.return_value = (
+            datetime.datetime.utcnow() + datetime.timedelta(days=18)
+        )
+        with patch("datetime.datetime", mock_time):
+            self._run_refresh()
+
+        # Assert that the final version of timestamp/snapshot is version 2
+        # which means a successful refresh is performed
+        # with expired local metadata
+        for role in ["timestamp", "snapshot", "targets"]:
+            md = Metadata.from_file(
+                os.path.join(self.metadata_dir, f"{role}.json")
+            )
+            self.assertEqual(md.signed.version, 2)
+
+    def test_max_metadata_lengths(self) -> None:
+        """Test that the client's configured max metadata lengths are respected"""
+
+        # client has root v1 already: create a new one available for download
+        self.sim.root.version += 1
+        self.sim.publish_root()
+
+        config_vars = [
+            "root_max_length",
+            "timestamp_max_length",
+            "snapshot_max_length",
+            "targets_max_length",
+        ]
+        # make sure going over any length limit raises DownloadLengthMismatchError
+        for var_name in config_vars:
+            updater = self._init_updater()
+            setattr(updater.config, var_name, 100)
+            with self.assertRaises(DownloadLengthMismatchError):
+                updater.refresh()
+
+        # All good with normal length limits
+        updater = self._init_updater()
+        updater.refresh()
+
+
+if __name__ == "__main__":
+    if "--dump" in sys.argv:
+        TestRefresh.dump_dir = tempfile.mkdtemp()
+        print(f"Repository Simulator dumps in {TestRefresh.dump_dir}")
+        sys.argv.remove("--dump")
+
+    utils.configure_test_logging(sys.argv)
+    unittest.main()
diff --git a/tests/test_updater_validation.py b/tests/test_updater_validation.py
new file mode 100644
index 0000000000..3ce7d4f76e
--- /dev/null
+++ b/tests/test_updater_validation.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+# Copyright 2022, New York University and the TUF contributors
+# SPDX-License-Identifier: MIT OR Apache-2.0
+
+"""Test ngclient Updater validations.
+""" + +import os +import sys +import tempfile +import unittest + +from tests import utils +from tests.repository_simulator import RepositorySimulator +from tuf.ngclient import Updater + + +class TestUpdater(unittest.TestCase): + """Test ngclient Updater input validation.""" + + def setUp(self) -> None: + # pylint: disable-next=consider-using-with + self.temp_dir = tempfile.TemporaryDirectory() + self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") + self.targets_dir = os.path.join(self.temp_dir.name, "targets") + os.mkdir(self.metadata_dir) + os.mkdir(self.targets_dir) + + # Setup the repository, bootstrap client root.json + self.sim = RepositorySimulator() + with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: + f.write(self.sim.signed_roots[0]) + + def tearDown(self) -> None: + self.temp_dir.cleanup() + + def _new_updater(self) -> Updater: + return Updater( + self.metadata_dir, + "https://example.com/metadata/", + self.targets_dir, + "https://example.com/targets/", + fetcher=self.sim, + ) + + def test_local_target_storage_fail(self) -> None: + self.sim.add_target("targets", b"content", "targetpath") + self.sim.targets.version += 1 + self.sim.update_snapshot() + + updater = self._new_updater() + target_info = updater.get_targetinfo("targetpath") + assert target_info is not None + with self.assertRaises(FileNotFoundError): + updater.download_target(target_info, filepath="") + + def test_non_existing_metadata_dir(self) -> None: + with self.assertRaises(FileNotFoundError): + # Initialize Updater with non-existing metadata_dir + Updater( + "non_existing_metadata_dir", + "https://example.com/metadata/", + fetcher=self.sim, + ) + + +if __name__ == "__main__": + utils.configure_test_logging(sys.argv) + unittest.main() diff --git a/tests/test_utils.py b/tests/test_utils.py index 63589fb516..2fefeedbdc 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -20,104 +20,62 @@ Provide tests for some of the functions in utils.py module. """ -import os import logging -import unittest import socket import sys - -import tuf.unittest_toolbox as unittest_toolbox +import unittest from tests import utils logger = logging.getLogger(__name__) -class TestServerProcess(unittest_toolbox.Modified_TestCase): - - def tearDown(self): - # Make sure we are calling clean on existing attribute. - if hasattr(self, 'server_process_handler'): - self.server_process_handler.clean() - - def can_connect(self): +def can_connect(port: int) -> bool: + """Check if a socket can connect on the given port""" try: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.connect(('localhost', self.server_process_handler.port)) - return True - except: - return False + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect(("localhost", port)) + return True + # pylint: disable=broad-except + except Exception: + return False finally: - # The process will always enter in finally even we return. 
-    if sock:
-      sock.close()
-
-
-  def test_simple_server_startup(self):
-    # Test normal case
-    self.server_process_handler = utils.TestServerProcess(log=logger)
-
-    # Make sure we can connect to the server
-    self.assertTrue(self.can_connect())
-
-
-  def test_simple_https_server_startup(self):
-    # Test normal case
-    good_cert_path = os.path.join('ssl_certs', 'ssl_cert.crt')
-    self.server_process_handler = utils.TestServerProcess(log=logger,
-        server='simple_https_server.py', extra_cmd_args=[good_cert_path])
-
-    # Make sure we can connect to the server
-    self.assertTrue(self.can_connect())
-    self.server_process_handler.clean()
-
-    # Test when no cert file is provided
-    self.server_process_handler = utils.TestServerProcess(log=logger,
-        server='simple_https_server.py')
-
-    # Make sure we can connect to the server
-    self.assertTrue(self.can_connect())
-    self.server_process_handler.clean()
-
-    # Test with a non existing cert file.
-    non_existing_cert_path = os.path.join('ssl_certs', 'non_existing.crt')
-    self.server_process_handler = utils.TestServerProcess(log=logger,
-        server='simple_https_server.py',
-        extra_cmd_args=[non_existing_cert_path])
-
-    # Make sure we can connect to the server
-    self.assertTrue(self.can_connect())
-
+        # The process will always enter the finally block, even after return.
+        if sock:
+            sock.close()

-  def test_slow_retrieval_server_startup(self):
-    # Test normal case
-    self.server_process_handler = utils.TestServerProcess(log=logger,
-        server='slow_retrieval_server.py')

-    # Make sure we can connect to the server
-    self.assertTrue(self.can_connect())

+class TestServerProcess(unittest.TestCase):
+    """Test functionality provided in TestServerProcess from tests/utils.py."""
+
+    def test_simple_server_startup(self) -> None:
+        # Test normal case
+        server_process_handler = utils.TestServerProcess(log=logger)

-  def test_cleanup(self):
-    # Test normal case
-    self.server_process_handler = utils.TestServerProcess(log=logger,
-        server='simple_server.py')
+        # Make sure we can connect to the server
+        self.assertTrue(can_connect(server_process_handler.port))
+        server_process_handler.clean()

-    self.server_process_handler.clean()
+    def test_cleanup(self) -> None:
+        # Test normal case
+        server_process_handler = utils.TestServerProcess(
+            log=logger, server="simple_server.py"
+        )

-    # Check if the process has successfully been killed.
-    self.assertFalse(self.server_process_handler.is_process_running())
+        server_process_handler.clean()

+        # Check if the process has successfully been killed.
+        self.assertFalse(server_process_handler.is_process_running())

-  def test_server_exit_before_timeout(self):
-    self.assertRaises(utils.TestServerProcessError, utils.TestServerProcess,
-        logger, server='non_existing_server.py')
+    def test_server_exit_before_timeout(self) -> None:
+        with self.assertRaises(utils.TestServerProcessError):
+            utils.TestServerProcess(logger, server="non_existing_server.py")

-    # Test starting a server which immediately exits."
-    self.assertRaises(utils.TestServerProcessError, utils.TestServerProcess,
-        logger, server='fast_server_exit.py')
+        # Test starting a server which immediately exits.
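+        # (TestServerProcess is expected to raise already in its constructor)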
+        with self.assertRaises(utils.TestServerProcessError):
+            utils.TestServerProcess(logger, server="fast_server_exit.py")

-if __name__ == '__main__':
-  utils.configure_test_logging(sys.argv)
-  unittest.main()
+
+if __name__ == "__main__":
+    utils.configure_test_logging(sys.argv)
+    unittest.main()
diff --git a/tests/utils.py b/tests/utils.py
index 6a3ee66b1d..6d82b46089 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -21,41 +21,71 @@
 """

 import argparse
-from contextlib import contextmanager
 import errno
 import logging
+import os
+import queue
 import socket
-import sys
-import time
 import subprocess
+import sys
 import threading
+import time
+import unittest
 import warnings
-import queue
-
-import tuf.log
+from contextlib import contextmanager
+from typing import IO, Any, Callable, Dict, Iterator, List, Optional

 logger = logging.getLogger(__name__)

+# May be used to reliably read other files in tests dir regardless of cwd
+TESTS_DIR = os.path.dirname(os.path.realpath(__file__))
+
 # Used when forming URLs on the client side
-TEST_HOST_ADDRESS = '127.0.0.1'
+TEST_HOST_ADDRESS = "127.0.0.1"

+# DataSet is only here so type hints can be used.
+DataSet = Dict[str, Any]

-class TestServerProcessError(Exception):
+# Test runner decorator: Runs the test as a set of N SubTests
+# (where N is the number of items in dataset), feeding the actual test
+# function one test case at a time
+def run_sub_tests_with_dataset(
+    dataset: DataSet,
+) -> Callable[[Callable], Callable]:
+    """Decorator starting a unittest.TestCase.subTest() for each of the
+    cases in dataset"""
+
+    def real_decorator(
+        function: Callable[[unittest.TestCase, Any], None]
+    ) -> Callable[[unittest.TestCase], None]:
+        def wrapper(test_cls: unittest.TestCase) -> None:
+            for case, data in dataset.items():
+                with test_cls.subTest(case=case):
+                    # Save case name for future reference
+                    test_cls.case_name = case.replace(" ", "_")
+                    function(test_cls, data)

-  def __init__(self, value="TestServerProcess"):
-    self.value = value
+        return wrapper

-  def __str__(self):
-    return repr(self.value)
+    return real_decorator
+
+
+class TestServerProcessError(Exception):
+    def __init__(self, value: str = "TestServerProcess") -> None:
+        super().__init__()
+        self.value = value
+
+    def __str__(self) -> str:
+        return repr(self.value)


 @contextmanager
-def ignore_deprecation_warnings(module):
-  with warnings.catch_warnings():
-    warnings.filterwarnings('ignore',
-        category=DeprecationWarning,
-        module=module)
-    yield
+def ignore_deprecation_warnings(module: str) -> Iterator[None]:
+    with warnings.catch_warnings():
+        warnings.filterwarnings(
+            "ignore", category=DeprecationWarning, module=module
+        )
+        yield


 # Wait until host:port accepts connections.
@@ -64,247 +94,273 @@ def ignore_deprecation_warnings(module): # but the current blocking connect() seems to work fast on Linux and seems # to at least work on Windows (ECONNREFUSED unfortunately has a 2 second # timeout on Windows) -def wait_for_server(host, server, port, timeout=10): - start = time.time() - remaining_timeout = timeout - succeeded = False - while not succeeded and remaining_timeout > 0: - try: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.settimeout(remaining_timeout) - sock.connect((host, port)) - succeeded = True - except socket.timeout as e: - pass - except IOError as e: - # ECONNREFUSED is expected while the server is not started - if e.errno not in [errno.ECONNREFUSED]: - logger.warning("Unexpected error while waiting for server: " + str(e)) - # Avoid pegging a core just for this - time.sleep(0.01) - finally: - if sock: - sock.close() - sock = None - remaining_timeout = timeout - (time.time() - start) - - if not succeeded: - raise TimeoutError("Could not connect to the " + server \ - + " on port " + str(port) + "!") - - -def configure_test_logging(argv): - # parse arguments but only handle '-v': argv may contain - # other things meant for unittest argument parser - parser = argparse.ArgumentParser(add_help=False) - parser.add_argument('-v', '--verbose', action='count', default=0) - args, _ = parser.parse_known_args(argv) - - if args.verbose <= 1: - # 0 and 1 both mean ERROR: this way '-v' makes unittest print test - # names without increasing log level - loglevel = logging.ERROR - elif args.verbose == 2: - loglevel = logging.WARNING - elif args.verbose == 3: - loglevel = logging.INFO - else: - loglevel = logging.DEBUG - - logging.basicConfig(level=loglevel) - tuf.log.set_log_level(loglevel) - - -class TestServerProcess(): - """ - - Creates a child process with the subprocess.Popen object and - uses a thread-safe Queue structure for logging. - - - log: - Logger which will be used for logging. - - server: - Path to the server to run in the subprocess. - Default is "simpler_server.py". - - timeout: - Time in seconds in which the server should start or otherwise - TimeoutError error will be raised. - Default is 10. - - popen_cwd: - Current working directory used when instancing a - subprocess.Popen object. - Default is "." - - extra_cmd_args: - List of additional arguments for the command - which will start the subprocess. - More precisely "python -u ". - When no list is provided, an empty list ("[]") will be assigned to it. - """ - - - def __init__(self, log, server='simple_server.py', - timeout=10, popen_cwd=".", extra_cmd_args=None): - - self.server = server - self.__logger = log - # Stores popped messages from the queue. - self.__logged_messages = [] - if extra_cmd_args is None: - extra_cmd_args = [] - - try: - self._start_server(timeout, extra_cmd_args, popen_cwd) - wait_for_server('localhost', self.server, self.port, timeout) - except Exception as e: - # Clean the resources and log the server errors if any exists. - self.clean() - raise e - - - - def _start_server(self, timeout, extra_cmd_args, popen_cwd): - """ - Start the server subprocess and a thread - responsible to redirect stdout/stderr to the Queue. - Waits for the port message maximum timeout seconds. 
- """ - - self._start_process(extra_cmd_args, popen_cwd) - self._start_redirect_thread() - - self._wait_for_port(timeout) - - self.__logger.info(self.server + ' serving on ' + str(self.port)) - - - - def _start_process(self, extra_cmd_args, popen_cwd): - """Starts the process running the server.""" - - # The "-u" option forces stdin, stdout and stderr to be unbuffered. - command = [sys.executable, '-u', self.server] + extra_cmd_args - - # Reusing one subprocess in multiple tests, but split up the logs for each. - self.__server_process = subprocess.Popen(command, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=popen_cwd) - - - - def _start_redirect_thread(self): - """Starts a thread responsible to redirect stdout/stderr to the Queue.""" - - # Run log_queue_worker() in a thread. - # The thread will exit when the child process dies. - self._log_queue = queue.Queue() - log_thread = threading.Thread(target=self._log_queue_worker, - args=(self.__server_process.stdout, self._log_queue)) - - # "daemon = True" means the thread won't interfere with the process exit. - log_thread.daemon = True - log_thread.start() - - - @staticmethod - def _log_queue_worker(stream, line_queue): +def wait_for_server( + host: str, server: str, port: int, timeout: int = 10 +) -> None: + """Wait for server start until timeout is reached or server has started""" + start = time.time() + remaining_timeout = timeout + succeeded = False + while not succeeded and remaining_timeout > 0: + try: + sock: Optional[socket.socket] = socket.socket( + socket.AF_INET, socket.SOCK_STREAM + ) + assert sock is not None + sock.settimeout(remaining_timeout) + sock.connect((host, port)) + succeeded = True + except socket.timeout: + pass + except IOError as e: + # ECONNREFUSED is expected while the server is not started + if e.errno not in [errno.ECONNREFUSED]: + logger.warning( + "Unexpected error while waiting for server: %s", str(e) + ) + # Avoid pegging a core just for this + time.sleep(0.01) + finally: + if sock: + sock.close() + sock = None + remaining_timeout = int(timeout - (time.time() - start)) + + if not succeeded: + raise TimeoutError( + "Could not connect to the " + server + " on port " + str(port) + "!" + ) + + +def configure_test_logging(argv: List[str]) -> None: + """Configure logger level for a certain test file""" + # parse arguments but only handle '-v': argv may contain + # other things meant for unittest argument parser + parser = argparse.ArgumentParser(add_help=False) + parser.add_argument("-v", "--verbose", action="count", default=0) + args, _ = parser.parse_known_args(argv) + + if args.verbose <= 1: + # 0 and 1 both mean ERROR: this way '-v' makes unittest print test + # names without increasing log level + loglevel = logging.ERROR + elif args.verbose == 2: + loglevel = logging.WARNING + elif args.verbose == 3: + loglevel = logging.INFO + else: + loglevel = logging.DEBUG + + logging.basicConfig(level=loglevel) + + +def cleanup_dir(path: str) -> None: + """Delete all files inside a directory""" + for filepath in [ + os.path.join(path, filename) for filename in os.listdir(path) + ]: + os.remove(filepath) + + +class TestServerProcess: + """Helper class used to create a child process with the subprocess.Popen + object and use a thread-safe Queue structure for logging. + + Args: + log: Logger which will be used for logging. + server: Path to the server to run in the subprocess. + timeout: Time in seconds in which the server should start or otherwise + TimeoutError error will be raised. 
+        popen_cwd: Current working directory used when instancing a
+            subprocess.Popen object.
+        extra_cmd_args: Additional arguments for the command which will start
+            the subprocess. More precisely:
+            "python -u <path_to_server> <extra_cmd_args>".
+            If no list is provided, an empty list ("[]") will be assigned to it.
    """
-    Worker function to run in a seprate thread.
-    Reads from 'stream', puts lines in a Queue (Queue is thread-safe).
-    """
-
-    while True:
-      # readline() is a blocking operation.
-      # decode to push a string in the queue instead of 8-bit bytes.
-      log_line = stream.readline().decode('utf-8')
-      line_queue.put(log_line)
-
-      if len(log_line) == 0:
-        # This is the end of the stream meaning the server process has exited.
-        stream.close()
-        break
-
-
-  def _wait_for_port(self, timeout):
-    """
-    Validates the first item from the Queue against the port message.
-    If validation is successful, self.port is set.
-    Raises TestServerProcessError if the process has exited or
-    TimeoutError if no message was found within timeout seconds.
-    """
-
-    # We have hardcoded the message we expect on a successful server startup.
-    # This message should be the first message sent by the server!
-    expected_msg = 'bind succeeded, server port is: '
-    try:
-      line = self._log_queue.get(timeout=timeout)
-      if len(line) == 0:
-        # The process has exited.
-        raise TestServerProcessError(self.server + ' exited unexpectedly ' \
-            + 'with code ' + str(self.__server_process.poll()) + '!')
-
-      elif line.startswith(expected_msg):
-        self.port = int(line[len(expected_msg):])
-      else:
-        # An exception or some other message is printed from the server.
-        self.__logged_messages.append(line)
-        # Check if more lines are logged.
+    def __init__(
+        self,
+        log: logging.Logger,
+        server: str = os.path.join(TESTS_DIR, "simple_server.py"),
+        timeout: int = 10,
+        popen_cwd: str = ".",
+        extra_cmd_args: Optional[List[str]] = None,
+    ):
+
+        self.server = server
+        self.__logger = log
+        # Stores popped messages from the queue.
+        self.__logged_messages: List[str] = []
+        self.__server_process: Optional[subprocess.Popen] = None
+        self._log_queue: Optional[queue.Queue] = None
+        self.port = -1
+        if extra_cmd_args is None:
+            extra_cmd_args = []
+
+        try:
+            self._start_server(timeout, extra_cmd_args, popen_cwd)
+            wait_for_server("localhost", self.server, self.port, timeout)
+        except Exception as e:
+            # Clean the resources and log the server errors if any exist.
+            self.clean()
+            raise e
+
+    def _start_server(
+        self, timeout: int, extra_cmd_args: List[str], popen_cwd: str
+    ) -> None:
+        """
+        Start the server subprocess and a thread
+        responsible for redirecting stdout/stderr to the Queue.
+        Waits for the port message maximum timeout seconds.
+        """
+
+        self._start_process(extra_cmd_args, popen_cwd)
+        self._start_redirect_thread()
+
+        self._wait_for_port(timeout)
+
+        self.__logger.info(self.server + " serving on " + str(self.port))
+
+    def _start_process(self, extra_cmd_args: List[str], popen_cwd: str) -> None:
+        """Starts the process running the server."""
+
+        # The "-u" option forces stdin, stdout and stderr to be unbuffered.
+        command = [sys.executable, "-u", self.server] + extra_cmd_args
+
+        # Reusing one subprocess in multiple tests, but split up the logs
+        # for each.
+        # pylint: disable=consider-using-with
+        self.__server_process = subprocess.Popen(
+            command,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            cwd=popen_cwd,
+        )
+
+    def _start_redirect_thread(self) -> None:
+        """Starts a thread redirecting the stdout/stderr to the Queue."""
+
+        assert isinstance(self.__server_process, subprocess.Popen)
+        # Run log_queue_worker() in a thread.
+        # The thread will exit when the child process dies.
+        self._log_queue = queue.Queue()
+        log_thread = threading.Thread(
+            target=self._log_queue_worker,
+            args=(self.__server_process.stdout, self._log_queue),
+        )
+
+        # "daemon = True" means the thread won't interfere with the
+        # process exit.
+        log_thread.daemon = True
+        log_thread.start()
+
+    @staticmethod
+    def _log_queue_worker(stream: IO, line_queue: queue.Queue) -> None:
+        """
+        Worker function to run in a separate thread.
+        Reads from 'stream', puts lines in a Queue (Queue is thread-safe).
+        """
+
+        while True:
+            # readline() is a blocking operation.
+            # decode to push a string in the queue instead of 8-bit bytes.
+            log_line = stream.readline().decode("utf-8")
+            line_queue.put(log_line)
+
+            if len(log_line) == 0:
+                # This is the end of the stream meaning the server process
+                # has exited.
+                stream.close()
+                break
+
+    def _wait_for_port(self, timeout: int) -> None:
+        """
+        Validates the first item from the Queue against the port message.
+        If validation is successful, self.port is set.
+        Raises TestServerProcessError if the process has exited or
+        TimeoutError if no message was found within timeout seconds.
+        """
+
+        assert isinstance(self.__server_process, subprocess.Popen)
+        assert isinstance(self._log_queue, queue.Queue)
+        # We have hardcoded the message we expect on a successful server
+        # startup. This message should be the first message sent by the server!
+        expected_msg = "bind succeeded, server port is: "
+        try:
+            line = self._log_queue.get(timeout=timeout)
+            if len(line) == 0:
+                # The process has exited.
+                raise TestServerProcessError(
+                    self.server
+                    + " exited unexpectedly "
+                    + "with code "
+                    + str(self.__server_process.poll())
+                    + "!"
+                )
+
+            if line.startswith(expected_msg):
+                self.port = int(line[len(expected_msg) :])
+            else:
+                # An exception or some other message is printed from the server.
+                self.__logged_messages.append(line)
+                # Check if more lines are logged.
+                self.flush_log()
+                raise TestServerProcessError(
+                    self.server
+                    + " did not print port "
+                    + "message as first stdout line as expected!"
+                )
+        except queue.Empty as e:
+            raise TimeoutError(
+                "Failure during " + self.server + " startup!"
+            ) from e
+
+    def _kill_server_process(self) -> None:
+        """Kills the server subprocess if it's running."""
+
+        assert isinstance(self.__server_process, subprocess.Popen)
+        if self.is_process_running():
+            self.__logger.info(
+                "Server process "
+                + str(self.__server_process.pid)
+                + " terminated."
+            )
+            self.__server_process.kill()
+            self.__server_process.wait()
+
+    def flush_log(self) -> None:
+        """Flushes the log lines from the logging queue."""
+
+        assert isinstance(self._log_queue, queue.Queue)
+        while True:
+            # Get lines from log_queue
+            try:
+                line = self._log_queue.get(block=False)
+                if len(line) > 0:
+                    self.__logged_messages.append(line)
+            except queue.Empty:
+                # No more lines are logged in the queue.
+                break
+
+        if len(self.__logged_messages) > 0:
+            title = "Test server (" + self.server + ") output:\n"
+            message = [title] + self.__logged_messages
+            self.__logger.info("| ".join(message))
+            self.__logged_messages = []
+
+    def clean(self) -> None:
+        """
+        Kills the subprocess and closes the TempFile.
+        Calls flush_log to check for logged information, but not yet flushed.
+        """
+
+        # If there is anything logged, flush it before closing the resources.
         self.flush_log()
-      raise TestServerProcessError(self.server + ' did not print port ' \
-          + 'message as first stdout line as expected!')
-    except queue.Empty:
-      raise TimeoutError('Failure during ' + self.server + ' startup!')
-
-
-
-  def _kill_server_process(self):
-    """Kills the server subprocess if it's running."""
-
-    if self.is_process_running():
-      self.__logger.info('Server process ' + str(self.__server_process.pid) +
-          ' terminated.')
-      self.__server_process.kill()
-      self.__server_process.wait()
-
-
-
-  def flush_log(self):
-    """Flushes the log lines from the logging queue."""
-
-    while True:
-      # Get lines from log_queue
-      try:
-        line = self._log_queue.get(block=False)
-        if len(line) > 0:
-          self.__logged_messages.append(line)
-      except queue.Empty:
-        # No more lines are logged in the queue.
-        break
-
-    if len(self.__logged_messages) > 0:
-      title = "Test server (" + self.server + ") output:\n"
-      message = [title] + self.__logged_messages
-      self.__logger.info('| '.join(message))
-      self.__logged_messages = []
-
-
-
-  def clean(self):
-    """
-    Kills the subprocess and closes the TempFile.
-    Calls flush_log to check for logged information, but not yet flushed.
-    """
-
-    # If there is anything logged, flush it before closing the resourses.
-    self.flush_log()
-
-    self._kill_server_process()
-
+        self._kill_server_process()

-  def is_process_running(self):
-    return True if self.__server_process.poll() is None else False
+    def is_process_running(self) -> bool:
+        assert isinstance(self.__server_process, subprocess.Popen)
+        # pylint: disable=simplifiable-if-expression
+        return True if self.__server_process.poll() is None else False
diff --git a/tox.ini b/tox.ini
index 6c359e69a3..d61df9390e 100644
--- a/tox.ini
+++ b/tox.ini
@@ -4,7 +4,8 @@
 # and then run "tox" from this directory.

 [tox]
-envlist = lint,py{36,37,38,39}
+isolated_build=true
+envlist = lint,docs,py
 skipsdist = true

 [testenv]
@@ -16,7 +17,7 @@ changedir = tests
 commands =
     python3 --version
    python3 -m coverage run aggregate_tests.py
-    python3 -m coverage report -m --fail-under 97 --omit "{toxinidir}/tuf/ngclient/*"
+    python3 -m coverage report -m --fail-under 97

 deps =
    -r{toxinidir}/requirements-test.txt
@@ -38,18 +39,13 @@ commands =

 [testenv:lint]
 changedir = {toxinidir}
+lint_dirs = tuf examples tests verify_release
 commands =
-    # Use different configs for new (tuf/api/*) and legacy code
-    # TODO: configure black and isort args in pyproject.toml (see #1161)
-    black --check --diff --line-length 80 tuf/api tuf/ngclient
-    isort --check --diff --line-length 80 --profile black -p tuf tuf/api tuf/ngclient
-    pylint -j 0 tuf/api tuf/ngclient --rcfile=tuf/api/pylintrc
+    black --check --diff {[testenv:lint]lint_dirs}
+    isort --check --diff {[testenv:lint]lint_dirs}
+    pylint -j 0 --rcfile=pyproject.toml {[testenv:lint]lint_dirs}

-    # NOTE: Contrary to what the pylint docs suggest, ignoring full paths does
-    # work, unfortunately each subdirectory has to be ignored explicitly.
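Stepping back to tests/utils.py before the tox.ini cleanup continues below: a typical lifecycle for the reworked `TestServerProcess` helper, sketched under the assumption that a test exercises client code against the spawned HTTP server and always releases it. Constructor arguments, `port`, and `clean()` come from the diff; the `from tests import utils` import path is an assumption:

```python
import logging

from tests import utils  # assumed import path, as above

logger = logging.getLogger(__name__)

# Spawns tests/simple_server.py (the constructor default) and blocks until
# the child prints "bind succeeded, server port is: <port>" or the timeout
# (10 seconds by default) expires, raising TimeoutError.
server_process_handler = utils.TestServerProcess(log=logger)
try:
    # The port is parsed from the child's first stdout line.
    url = f"http://{utils.TEST_HOST_ADDRESS}:{server_process_handler.port}/"
    ...  # exercise the client code under test against url
finally:
    # Log any buffered child output, then kill and reap the subprocess.
    server_process_handler.clean()
```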
- pylint -j 0 tuf --ignore=tuf/api,tuf/api/serialization,tuf/ngclient,tuf/ngclient/_internal - - mypy + mypy {[testenv:lint]lint_dirs} bandit -r tuf @@ -59,4 +55,4 @@ deps = changedir = {toxinidir} commands = - sphinx-build -b html docs docs/build/html + sphinx-build -b html docs docs/build/html -W diff --git a/tuf/ATTACKS.md b/tuf/ATTACKS.md deleted file mode 100644 index 56042516b8..0000000000 --- a/tuf/ATTACKS.md +++ /dev/null @@ -1,323 +0,0 @@ -# Demonstrate protection against malicious updates - -## Table of Contents ## -- [Blocking Malicious Updates](#blocking-malicious-updates) - - [Arbitrary Package Attack](#arbitrary-package-attack) - - [Rollback Attack](#rollback-attack) - - [Indefinite Freeze Attack](#indefinite-freeze-attack) - - [Endless Data Attack](#endless-data-attack) - - [Compromised Key Attack](#compromised-key-attack) - - [Slow Retrieval Attack](#slow-retrieval-attack) -- [Conclusion](#conclusion) - -## Blocking Malicious Updates ## -TUF protects against a number of attacks, some of which include rollback, -arbitrary package, and mix and match attacks. We begin this document on -blocking malicious updates by demonstrating how the client rejects a target -file downloaded from the software repository that doesn't match what is listed -in TUF metadata. - -The following demonstration requires and operates on the repository created in -the [repository management -tutorial](https://github.com/theupdateframework/python-tuf/blob/develop/tuf/README.md). - -### Arbitrary Package Attack ### -In an arbitrary package attack, an attacker installs anything they want on the -client system. That is, an attacker can provide arbitrary files in response to -download requests and the files will not be detected as illegitimate. We -simulate an arbitrary package attack by creating a "malicious" target file -that our client attempts to fetch. - -```Bash -$ mv 'repository/targets/file2.txt' 'repository/targets/file2.txt.backup' -$ echo 'bad_target' > 'repository/targets/file2.txt' -``` - -We next reset our local timestamp (so that a new update is prompted), and -the target files previously downloaded by the client. -```Bash -$ rm -rf "client/targets/" "client/metadata/current/timestamp.json" -``` - -The client now performs an update and should detect the invalid target file... -Note: The following command should be executed in the "client/" directory. -```Bash -$ python3 basic_client.py --repo http://localhost:8001 -Error: No working mirror was found: - localhost:8001: BadHashError() -``` - -The log file (tuf.log) saved to the current working directory contains more -information on the update procedure and the cause of the BadHashError. - -```Bash -... - -BadHashError: Observed -hash ('f569179171c86aa9ed5e8b1d6c94dfd516123189568d239ed57d818946aaabe7') != -expected hash (u'67ee5478eaadb034ba59944eb977797b49ca6aa8d3574587f36ebcbeeb65f70e') -[2016-10-20 19:45:16,079 UTC] [tuf.client.updater] [ERROR] [_get_file:1415@updater.py] -Failed to update /file2.txt from all mirrors: {u'http://localhost:8001/targets/file2.txt': BadHashError()} -``` - -Note: The "malicious" target file should be removed and the original file2.txt -restored, otherwise the following examples will fail with BadHashError -exceptions: - -```Bash -$ mv 'repository/targets/file2.txt.backup' 'repository/targets/file2.txt' -``` - -### Indefinite Freeze Attack ### -In an indefinite freeze attack, an attacker continues to present a software -update system with the same files the client has already seen. 
The result is -that the client does not know that new files are available. Although the -client would be unable to prevent an attacker or compromised repository from -feeding it stale metadata, it can at least detect when an attacker is doing so -indefinitely. The signed metadata used by TUF contains an "expires" field that -indicates when metadata should no longer be trusted. - -In the following simulation, the client first tries to perform an update. - -```Bash -$ python3 basic_client.py --repo http://localhost:8001 -``` - -According to the logger (`tuf.log` file in the current working directory), -everything appears to be up-to-date. The remote server should also show that -the client retrieved only the timestamp.json file. Let's suppose now that an -attacker continues to feed our client the same stale metadata. If we were to -move the time to a future date that would cause metadata to expire, the TUF -framework should raise an exception or error to indicate that the metadata -should no longer be trusted. - -```Bash -$ sudo date -s '2080-12-25 12:34:56' -Wed Dec 25 12:34:56 EST 2080 - -$ python3 basic_client.py --repo http://localhost:8001 -Error: No working mirror was found: - u'localhost:8001': ExpiredMetadataError(u"Metadata u'root' expired on Tue Jan 1 00:00:00 2030 (UTC).",) -``` - -Note: Reset the date to continue with the rest of the attacks. - - -### Rollback Attack ### -In a rollback attack, an attacker presents a software update system with older -files than those the client has already seen, causing the client to use files -older than those the client knows about. We begin this example by saving the -current version of the Timestamp file available on the repository. This saved -file will later be served to the client to see if it is rejected. The client -should not accept versions of metadata that is older than previously trusted. - -Navigate to the directory containing the server's files and save the current -timestamp.json to a temporary location: -```Bash -$ cp repository/metadata/timestamp.json /tmp -``` - -We should next generate a new Timestamp file on the repository side. -```Bash -$ python3 ->>> from tuf.repository_tool import * ->>> repository = load_repository('repository') ->>> repository.timestamp.version -1 ->>> repository.timestamp.version = 2 ->>> repository.dirty_roles() -Dirty roles: [u'timestamp'] ->>> private_timestamp_key = import_rsa_privatekey_from_file("keystore/timestamp_key") -Enter a password for the encrypted RSA file (/path/to/keystore/timestamp_key): ->>> repository.timestamp.load_signing_key(private_timestamp_key) ->>> repository.write('timestamp') - -$ cp repository/metadata.staged/* repository/metadata -``` - -Now start the HTTP server from the directory containing the 'repository' -subdirectory. -```Bash -$ python3 -m SimpleHTTPServer 8001 -``` - -And perform an update so that the client retrieves the updated timestamp.json. -```Bash -$ python3 basic_client.py --repo http://localhost:8001 -``` - -Finally, move the previous timestamp.json file to the current live repository -and have the client try to download the outdated version. The client should -reject it! -```Bash -$ cp /tmp/timestamp.json repository/metadata/ -$ cd repository; python3 -m SimpleHTTPServer 8001 -``` - -On the client side, perform an update... 
-```Bash -$ python3 basic_client.py --repo http://localhost:8001 -Error: No working mirror was found: - u'localhost:8001': ReplayedMetadataError() -``` - -The tuf.log file contains more information about the ReplayedMetadataError -exception and update process. Please reset timestamp.json to the latest -version, which can be found in the 'repository/metadata.staged' subdirectory. - -```Bash -$ cp repository/metadata.staged/timestamp.json repository/metadata -``` - - -### Endless Data Attack ### -In an endless data attack, an attacker responds to a file download request with -an endless stream of data, causing harm to clients (e.g., a disk partition -filling up or memory exhaustion). In this simulated attack, we append extra -data to one of the target files available on the software repository. The -client should only download the exact number of bytes it expects for a -requested target file (according to what is listed in trusted TUF metadata). - -```Bash -$ cp repository/targets/file1.txt /tmp -$ python3 -c "print 'a' * 1000" >> repository/targets/file1.txt -``` - -Now delete the local metadata and target files on the client side so -that remote metadata and target files are downloaded again. -```Bash -$ rm -rf client/targets/ -$ rm client/metadata/current/snapshot.json* client/metadata/current/timestamp.json* -``` - -Lastly, perform an update to verify that the file1.txt is downloaded up to the -expected size, and no more. The target file available on the software -repository does contain more data than expected, though. - -```Bash -$ python3 basic_client.py --repo http://localhost:8001 -``` - -At this point, part of the "file1.txt" file should have been fetched. That is, -up to 31 bytes of it should have been downloaded, and the rest of the maliciously -appended data ignored. If we inspect the logger, we'd discover the following: - -```Bash -[2016-10-06 21:37:39,092 UTC] [tuf.download] [INFO] [_download_file:235@download.py] -Downloading: u'http://localhost:8001/targets/file1.txt' - -[2016-10-06 21:37:39,145 UTC] [tuf.download] [INFO] [_check_downloaded_length:610@download.py] -Downloaded 31 bytes out of the expected 31 bytes. - -[2016-10-06 21:37:39,145 UTC] [tuf.client.updater] [INFO] [_get_file:1372@updater.py] -Not decompressing http://localhost:8001/targets/file1.txt - -[2016-10-06 21:37:39,145 UTC] [tuf.client.updater] [INFO] [_check_hashes:778@updater.py] -The file's sha256 hash is correct: 65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da -``` - -Indeed, the sha256 sum of the first 31 bytes of the "file1.txt" available -on the repository should match to what is trusted. The client did not -downloaded the appended data. - -Note: Restore file1.txt - -```Bash -$ cp /tmp/file1.txt repository/targets/ -``` - - -### Compromised Key Attack ### -An attacker who compromise less than a given threshold of keys is limited in -scope. This includes relying on a single online key (such as only being -protected by SSL) or a single offline key (such as most software update systems -use to sign files). In this example, we attempt to sign a role file with -less-than-a-threshold number of keys. A single key (suppose this is a -compromised key) is used to demonstrate that roles must be signed with the -total number of keys required for the role. In order to compromise a role, an -attacker would have to compromise a threshold of keys. This approach of -requiring a threshold number of signatures provides compromise resilience. 
- -Let's attempt to sign a new snapshot file with a less-than-threshold number of -keys. The client should reject the partially signed snapshot file served by -the repository (or imagine that it is a compromised software repository). - -```Bash -$ python3 ->>> from tuf.repository_tool import * ->>> repository = load_repository('repository') ->>> version = repository.root.version ->>> repository.root.version = version + 1 ->>> private_root_key = import_rsa_privatekey_from_file("keystore/root_key", password="password") ->>> repository.root.load_signing_key(private_root_key) ->>> private_root_key2 = import_rsa_privatekey_from_file("keystore/root_key2", password="password") ->>> repository.root.load_signing_key(private_root_key2) - ->>> repository.snapshot.version = 8 ->>> repository.snapshot.threshold = 2 ->>> private_snapshot_key = import_rsa_privatekey_from_file("keystore/snapshot_key", password="password") ->>> repository.snapshot.load_signing_key(private_snapshot_key) - ->>> repository.timestamp.version = 8 ->>> private_timestamp_key = import_rsa_privatekey_from_file("keystore/timestamp_key", password="password") ->>> repository.timestamp.load_signing_key(private_timestamp_key) - ->>> repository.write('root') ->>> repository.write('snapshot') ->>> repository.write('timestamp') - -$ cp repository/metadata.staged/* repository/metadata -``` - -The client now attempts to refresh the top-level metadata and the -partially written snapshot.json, which should be rejected. - -```Bash -$ python3 basic_client.py --repo http://localhost:8001 -Error: No working mirror was found: - u'localhost:8001': BadSignatureError() -``` - - -### Slow Retrieval Attack ### -In a slow retrieval attack, an attacker responds to clients with a very slow -stream of data that essentially results in the client never continuing the -update process. In this example, we simulate a slow retrieval attack by -spawning a server that serves data at a slow rate to our update client data. -TUF should not be vulnerable to this attack, and the framework should raise an -exception or error when it detects that a malicious server is serving it data -at a slow enough rate. - -We first spawn the server that slowly streams data to the client. The -'slow_retrieval_server.py' module (can be found in the tests/ directory of the -source code) should be copied over to the server's 'repository/' directory from -which to launch it. - -```Bash -# Before launching the slow retrieval server, copy 'slow_retrieval_server.py' -# to the 'repository/' directory and run it from that directory as follows: -$ python3 slow_retrieval_server.py 8002 mode_2 -``` - -The client may now make a request to the slow retrieval server on port 8002. -However, before doing so, we'll reduce (for the purposes of this demo) the -minimum average download rate allowed and download chunk size. Open the -'settings.py' module and set MIN_AVERAGE_DOWNLOAD_SPEED = 5 and CHUNK_SIZE = 1. -This should make it so that the client detects the slow retrieval server's -delayed streaming. - -```Bash -$ python3 basic_client.py --verbose 1 --repo http://localhost:8002 -Error: No working mirror was found: - u'localhost:8002': SlowRetrievalError() -``` - -The framework should detect the slow retrieval attack and raise a -SlowRetrievalError exception to the client application. - - -## Conclusion ## -These are just some of the attacks that TUF provides protection against. 
For -more attacks and updater weaknesses, please see the -[Security](https://github.com/theupdateframework/python-tuf/blob/develop/docs/SECURITY.md) -page. diff --git a/tuf/README-developer-tools.md b/tuf/README-developer-tools.md deleted file mode 100644 index 1b593400a5..0000000000 --- a/tuf/README-developer-tools.md +++ /dev/null @@ -1,342 +0,0 @@ -# The Update Framework Developer Tool: How to Update your Project Securely on a TUF Repository - -## Table of Contents -- [Overview](#overview) -- [Creating a Simple Project](#creating_a_simple_project) - - [Generating a Key](#generating_a_key) - - [The Project Class](#the_project_class) - - [Signing and Writing the Metadata](#signing_and_writing_the_metadata) -- [Loading an Existing Project](#loading_an_existing_project) -- [Delegations](#delegations) -- [Managing Keys](#managing_keys) -- [Managing Targets](#managing_targets) - - -## Overview -The Update Framework (TUF) is a Python-based security system for software -updates. In order to prevent your users from downloading vulnerable or malicious -code disguised as updates to your software, TUF requires that each update you -release include certain metadata verifying your authorship of the files. - -The TUF developer tools are a Python Library that enables you to create and -maintain the required metadata for files hosted on a TUF Repository. (We call -these files “targets,” to distinguish them from the metadata associated with -them. Both of these together comprise a complete “project”.) You will use these -tools to generate the keys and metadata you need to claim and secure your files -on the repository, and to update the metadata and sign it with those keys -whenever you upload a new version of those files. - -This document will teach you how to use these tools in two parts. The first -part walks through the creation of a minimal-complexity TUF project, which is -all you need to get started, and can be expanded later. The second part details -the full functionality of the tools, which offer a finer degree of control in -securing your project. - - -## Creating a Simple Project -This section walks through the creation of a small example project with just -one target. Once created, this project will be fully functional, and can be -modified as needed. - - -### Generating a Key -First, we will need to generate a key to sign the metadata. Keys are generated -in pairs: one public and the other private. The private key is -password-protected and is used to sign metadata. The public key can be shared -freely, and is used to verify signatures made by the private key. You will need -to share your public key with the repository hosting your project so they can -verify your metadata is signed by the right person. - -The generate\_and\_write\_rsa\_keypair function will create two key files named -"path/to/key.pub", which is the public key and "path/to/key", which -is the private key. - -``` ->>> from tuf.developer_tool import * ->>> generate_and_write_rsa_keypair_with_prompt(filepath="path/to/key") -enter password to encrypt private key file 'path/to/key' -(leave empty if key should not be encrypted): -Confirm: ->>> -``` - -We can also use the bits parameter to set a different key length (the default -is 3072). We can also `generate_and_write_rsa_keypair` with a `password` -parameter if a prompt is not desired. - -In this example we will be using rsa keys, but ed25519 keys are also supported. - -Now we have a key for our project, we can proceed to create our project. 
- - -### The Project Class -The TUF developer tool is built around the Project class, which is used to -organize groups of targets associated with a single set of metadata. A single -Project instance is used to keep track of all the target files and metadata -files in one project. The Project also keeps track of the keys and signatures, -so that it can update all the metadata with the correct changes and signatures -on a single command. - -Before creating a project, you must know where it will be located in the TUF -Repository. In the following example, we will create a project to be hosted as -"repo/unclaimed/example_project" within the repository, and store a local copy -of the metadata at "path/to/metadata". The project will comprise a single -target file, "local/path/to/example\_project/target\_1" locally, and we will -secure it with the key generated above. - -First, we must import the generated keys. We can do that by issuing the -following command: - -``` ->>> public_key = import_rsa_publickey_from_file("path/to/keys.pub") -``` - -After importing the key, we can generate a new project with the following -command: - -``` ->>> project = create_new_project(project_name="example_project", -... metadata_directory="local/path/to/metadata/", -... targets_directory="local/path/to/example_project", -... location_in_repository="repo/unclaimed", key=public_key) -``` - -Let's list the arguments and make sense out of this rather long function call: - -- create a project named example_project: the name of the metadata file will match this name -- the metadata will be located in "local/path/to/metadata", this means all of the generated files -for this project will be located here -- the targets are located in local/path/to/example project. If your targets are located in some other -place, you can point the targets directory there. Files must reside under the path local/path/to/example_project or else it won't be possible to add them. -- location\_in\_repository points to repo/unclaimed, this will be prepended to the paths in the generated metadata so the signatures all match. - -Now the project is in memory and we can do different operations on it such as -adding and removing targets, delegating files, changing signatures and keys, -etc. For the moment we are interested in adding our one and only target inside -the project. - -To add a target, we issue the following method: - -``` ->>> project.add_target("local/path/to/example_project/target_1") -``` - -Note that the file "target\_1" should be located in -"local/path/to/example\_project", or this method will throw an -error. - -At this point, the metadata is not valid. We have assigned a key to the -project, but we have not *signed* it with that key. Signing is the process of -generating a signature with our private key so it can be verified with the -public key by the server (upon uploading) and by the clients (when updating). - - -### Signing and Writing the Metadata ### -In order to sign the metadata, we need to import the private key corresponding -to the public key we added to the project. One the key is loaded to the project, -it will automatically be used to sign the metadata whenever it is written. 
- -``` ->>> private_key = import_rsa_privatekey_from_file("path/to/key") -Enter password for the RSA key: ->>> project.load_signing_key(private_key) ->>> project.write() -``` - -When all changes to the project have been written, the metadata is ready to be -uploaded to the repository, and it is safe to exit the Python interpreter, or -to delete the Project instance. - -The project can be loaded later to update changes to the project. The metadata -contains checksums that have to match the actual files or else it won't be -accepted by the upstream repository. - -At this point, if you have followed all the steps in this document so far -(substituting appropriate names and filepaths) you will have created a basic -TUF project, which can be expanded as needed. The simplest way to get your -project secured is to add all your files using add\_target() (or see [Managing -Keys](#managing_keys) on how to add whole directories). If your project has -several contributors, you may want to consider adding -[delegations](#delegations) to your project. - - -## Loading an Existing Project -To make changes to existing metadata, we will need the Project again. We can -restore it with the load_project() function. - -``` ->>> from tuf.developer_tool import * ->>> project = load_project("local/path/to/metadata") -``` -Each time the project is loaded anew, the necessary private keys must also be -loaded in order to sign metadata. - -``` ->>> private_key = import_rsa_privatekey_from_file("path/to/key") -Enter a password for the RSA key: ->>> project.load_signing_key(private_key) ->>> project.write() -``` - -If your project does not use any delegations, the five commands above are all -you need to update your project's metadata. - - -## Delegations - -The project we created above is secured entirely by one key. If you want to -allow someone else to update part of your project independently, you will need -to delegate a new role for them. For example, we can do the following: - -``` ->>> other_key = import_rsa_publickey_from_file(“another_public_key.pub”) ->>> targets = ['local/path/to/newtarget'] ->>> project.delegate(“newrole”, [other_key], targets) -``` - -The new role is now an attribute of the Project instance, and contains the same -methods as Project. For example, we can add targets in the same way as before: - -``` ->>> project(“newrole”).add_target(“delegated_1”) -``` - -Recall that we input the other person’s key as part of a list. That list can -contain any number of public keys. We can also add keys to the role after -creating it using the [add\_verification\_key()](#adding_a_key_to_a_delegation) -method. - -### Delegated Paths - -By default, a delegated role is permitted to add and modify targets anywhere in -the Project's targets directory. We can delegate trust of paths to a role to -limit this permission. - -``` ->>> project.add_paths(["delegated/filepath"], "newrole") -``` - -This will prevent the delegated role from signing targets whose local filepaths -do not begin with "delegated/filepath". We can delegate several filepaths to a -role by adding them to the list in the first parameter, or by invoking the -method again. A role with multiple delegated paths can add targets to any of -them. - -Note that this method is invoked from the parent role (in this case, the Project) -and takes the delegated role name as an argument. - -### Nested Delegations - -It is possible for a delegated role to have delegations of its own. 
We can do -this by calling delegate() on a delegated role: - -``` ->>> project("newrole").delegate(“nestedrole”, [key], targets) -``` - -Nested delegations function no differently than first-order delegations. to -demonstrate, adding a target to nested delegation looks like this: - -``` ->>> project("newrole")("nestedrole").add_target("foo") -``` - -### Revoking Delegations -Delegations can be revoked, removing the delegated role from the project. - -``` ->>> project.revoke("newrole") -``` - - -## Managing Keys -This section describes the key-related functions and parameters not covered in -the [Creating a Simple Project](#creating_a_simple_project) section. - -### Additional Parameters for Key Generation -When generating keys, it is possible to specify the length of the key in bits -and its password as parameters: - -``` ->>> generate_and_write_rsa_keypair(password="pw", filepath="path/to/key", bits=2048) -``` -The bits parameter defaults to 3072, and values below 2048 will raise an error. -The password parameter is only intended to be used in scripts. - - -### Adding a Key to a Delegation -New verifications keys can be added to an existing delegation using -add\_verification\_key(): - -``` ->>> project("rolename").add_verification_key(pubkey) -``` - -A delegation can have several verification keys at once. By default, a -delegated role with multiple keys can be written using any one of their -corresponding signing keys. To modify this behavior, you can change the -delegated role's [threshold](#delegation_thrsholds). - -### Removing a Key from a Delegation -Verification keys can also be removed, like this: - -``` ->>> project("rolename").remove_verification_key(pubkey) -``` - -Remember that a project can only have one key, so this method will return an -error if there is already a key assigned to it. In order to replace a key we -must first delete the existing one and then add the new one. It is possible to -omit the key parameter in the create\_new\_project() function, and add the key -later. - -### Changing the Project Key -Each Project instance can only have one verification key. This key can be -replaced by removing it and adding a new key, in that order. - -``` ->>> project.remove_verification_key(oldkey) ->>> project.add_verification_key(new) -``` - - -### Delegation Thresholds - -Every delegated role has a threshold, which determines how many of its signing -keys need to be loaded to write the role. The threshold defaults to 1, and -should not exceed the number of verification keys assigned to the role. The -threshold can be accessed as a property of a delegated role. - -``` ->>> project("rolename").threshold = 2 -``` - -The above line will set the "rolename" role's threshold to 2. - - -## Managing Targets -There are supporting functions of the targets library to make the project -maintenance easier. These functions are described in this section. - -### Adding Targets by Directory -This function is especially useful when creating a new project to add all the -files contained in the targets directory. The following code block illustrates -the usage of this function: - -``` ->>> list_of_targets = \ -... project.get_filepaths_in_directory(“path/within/targets/folder”, -... recursive_walk=False, follow_links=False) ->>> project.add_targets(list_of_targets) -``` - -### Deleting Targets from a Project -It is possible that we want to delete existing targets inside our project. 
To -stop the developer tool from tracking this file we can issue the following -command: - -``` ->>> project.remove_target(“target_1”) -``` - -Now the target file won't be part of the metadata. diff --git a/tuf/README.md b/tuf/README.md deleted file mode 100644 index dbc53b61b5..0000000000 --- a/tuf/README.md +++ /dev/null @@ -1,5 +0,0 @@ -[Quickstart](../docs/QUICKSTART.md) - -[CLI](../docs/CLI.md) - -[Tutorial](../docs/TUTORIAL.md) diff --git a/tuf/__init__.py b/tuf/__init__.py index 27498a6d94..ee21e1e23f 100755 --- a/tuf/__init__.py +++ b/tuf/__init__.py @@ -1,13 +1,8 @@ -# This value is used in the requests user agent. -# setup.py has it hard-coded separately. -# Currently, when the version is changed, it must be set in both locations. -# TODO: Single-source the version number. -__version__ = "0.17.0" +# Copyright New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +"""TUF +""" -# This reference implementation produces metadata intended to conform to -# version 1.0.0 of the TUF specification, and is expected to consume metadata -# conforming to version 1.0.0 of the TUF specification. -# All downloaded metadata must be equal to our supported major version of 1. -# For example, "1.4.3" and "1.0.0" are supported. "2.0.0" is not supported. -# See https://github.com/theupdateframework/specification -SPECIFICATION_VERSION = '1.0.0' +# This value is used in the requests user agent. +__version__ = "1.1.0" diff --git a/tuf/api/exceptions.py b/tuf/api/exceptions.py new file mode 100644 index 0000000000..18fe43711d --- /dev/null +++ b/tuf/api/exceptions.py @@ -0,0 +1,66 @@ +# Copyright New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +""" +Define TUF exceptions used inside the new modern implementation. +The names chosen for TUF Exception classes should end in 'Error' except where +there is a good reason not to, and provide that reason in those cases. +""" + + +#### Repository errors #### + +# pylint: disable=unused-import +from securesystemslib.exceptions import StorageError + + +class RepositoryError(Exception): + """An error with a repository's state, such as a missing file. + It covers all exceptions that come from the repository side when + looking from the perspective of users of metadata API or ngclient.""" + + +class UnsignedMetadataError(RepositoryError): + """An error about metadata object with insufficient threshold of + signatures.""" + + +class BadVersionNumberError(RepositoryError): + """An error for metadata that contains an invalid version number.""" + + +class ExpiredMetadataError(RepositoryError): + """Indicate that a TUF Metadata file has expired.""" + + +class LengthOrHashMismatchError(RepositoryError): + """An error while checking the length and hash values of an object.""" + + +#### Download Errors #### + + +class DownloadError(Exception): + """An error occurred while attempting to download a file.""" + + +class DownloadLengthMismatchError(DownloadError): + """Indicate that a mismatch of lengths was seen while downloading a file.""" + + +class SlowRetrievalError(DownloadError): + """Indicate that downloading a file took an unreasonably long time.""" + + +class DownloadHTTPError(DownloadError): + """ + Returned by FetcherInterface implementations for HTTP errors. 
+
+    Args:
+        message: The HTTP error message
+        status_code: The HTTP status code
+    """
+
+    def __init__(self, message: str, status_code: int):
+        super().__init__(message)
+        self.status_code = status_code
diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py
index fb4a1daf2c..7bdccb9594 100644
--- a/tuf/api/metadata.py
+++ b/tuf/api/metadata.py
@@ -1,27 +1,38 @@
 # Copyright New York University and the TUF contributors
 # SPDX-License-Identifier: MIT OR Apache-2.0

-"""TUF role metadata model.
+"""
+The low-level Metadata API in ``tuf.api.metadata`` module contains:
+
+* Safe de/serialization of metadata to and from files.
+* Access to and modification of signed metadata content.
+* Signing metadata and verifying signatures.
+
+Metadata API implements functionality at the metadata file level; it does
+not provide TUF repository or client functionality on its own (but can be used
+to implement them).
+
+The API design is based on the file format defined in the `TUF specification
+<https://theupdateframework.github.io/specification/latest/>`_ and the object
+attributes generally follow the JSON format used in the specification.

-This module provides container classes for TUF role metadata, including methods
-to read and write from and to file, perform TUF-compliant metadata updates, and
-create and verify signatures.
+The above principle means that a ``Metadata`` object represents a single
+metadata file, and has a ``signed`` attribute that is an instance of one of the
+four top level signed classes (``Root``, ``Timestamp``, ``Snapshot`` and ``Targets``).
+To make Python type annotations useful ``Metadata`` can be type constrained: e.g. the
+signed attribute of ``Metadata[Root]`` is known to be ``Root``.

-The metadata model supports any custom serialization format, defaulting to JSON
-as wireline format and Canonical JSON for reproducible signature creation and
-verification.
-Custom serializers must implement the abstract serialization interface defined
-in 'tuf.api.serialization', and may use the [to|from]_dict convenience methods
-available in the class model.
+Currently Metadata API supports JSON as the file format.

+A basic example of repository implementation using the Metadata API is
+available in `examples/repo_example
+<https://github.com/theupdateframework/python-tuf/tree/develop/examples/repo_example>`_.
 """

 import abc
 import fnmatch
 import io
 import logging
 import tempfile
-from collections import OrderedDict
-from datetime import datetime, timedelta
+from datetime import datetime
 from typing import (
     IO,
     Any,
@@ -45,20 +56,27 @@
 from securesystemslib.storage import FilesystemBackend, StorageBackendInterface
 from securesystemslib.util import persist_temp_file

-from tuf import exceptions
+from tuf.api import exceptions
 from tuf.api.serialization import (
     MetadataDeserializer,
     MetadataSerializer,
+    SerializationError,
     SignedSerializer,
 )

+_ROOT = "root"
+_SNAPSHOT = "snapshot"
+_TARGETS = "targets"
+_TIMESTAMP = "timestamp"
+
 # pylint: disable=too-many-lines
 logger = logging.getLogger(__name__)

 # We aim to support SPECIFICATION_VERSION and require the input metadata
 # files to have the same major version (the first number) as ours.
-SPECIFICATION_VERSION = ["1", "0", "19"]
+SPECIFICATION_VERSION = ["1", "0", "29"]
+TOP_LEVEL_ROLE_NAMES = {_ROOT, _TIMESTAMP, _SNAPSHOT, _TARGETS}

 # T is a Generic type constraint for Metadata.signed
 T = TypeVar("T", "Root", "Timestamp", "Snapshot", "Targets")
@@ -70,9 +88,10 @@ class Metadata(Generic[T]):

     Provides methods to convert to and from dictionary, read and write to and
     from file and to create and verify metadata signatures.
- Metadata[T] is a generic container type where T can be any one type of - [Root, Timestamp, Snapshot, Targets]. The purpose of this is to allow - static type checking of the signed attribute in code using Metadata:: + ``Metadata[T]`` is a generic container type where T can be any one type of + [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this + is to allow static type checking of the signed attribute in code using + Metadata:: root_md = Metadata[Root].from_file("root.json") # root_md type is now Metadata[Root]. This means signed and its @@ -82,54 +101,89 @@ class Metadata(Generic[T]): Using a type constraint is not required but not doing so means T is not a specific type so static typing cannot happen. Note that the type constraint - "[Root]" is not validated at runtime (as pure annotations are not available + ``[Root]`` is not validated at runtime (as pure annotations are not available then). - Attributes: - signed: A subclass of Signed, which has the actual metadata payload, - i.e. one of Targets, Snapshot, Timestamp or Root. - signatures: An ordered dictionary of keyids to Signature objects, each - signing the canonical serialized representation of 'signed'. + New Metadata instances can be created from scratch with:: + + one_day = datetime.utcnow() + timedelta(days=1) + timestamp = Metadata(Timestamp(expires=one_day)) + + Apart from ``expires`` all of the arguments to the inner constructors have + reasonable default values for new metadata. + + *All parameters named below are not just constructor arguments but also + instance attributes.* + + Args: + signed: Actual metadata payload, i.e. one of ``Targets``, + ``Snapshot``, ``Timestamp`` or ``Root``. + signatures: Ordered dictionary of keyids to ``Signature`` objects, each + signing the canonical serialized representation of ``signed``. + Default is an empty dictionary. + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API. These fields are NOT signed and it's preferable + if unrecognized fields are added to the Signed derivative classes. """ - def __init__(self, signed: T, signatures: "OrderedDict[str, Signature]"): + def __init__( + self, + signed: T, + signatures: Optional[Dict[str, Signature]] = None, + unrecognized_fields: Optional[Dict[str, Any]] = None, + ): self.signed: T = signed - self.signatures = signatures + self.signatures = signatures if signatures is not None else {} + if unrecognized_fields is None: + unrecognized_fields = {} + + self.unrecognized_fields = unrecognized_fields + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Metadata): + return False + + return ( + self.signatures == other.signatures + # Order of the signatures matters (see issue #1788). + and list(self.signatures.items()) == list(other.signatures.items()) + and self.signed == other.signed + and self.unrecognized_fields == other.unrecognized_fields + ) @classmethod def from_dict(cls, metadata: Dict[str, Any]) -> "Metadata[T]": - """Creates Metadata object from its dict representation. + """Creates ``Metadata`` object from its json/dict representation. - Arguments: + Args: metadata: TUF metadata in dict representation. Raises: - KeyError: The metadata dict format is invalid. - ValueError: The metadata has an unrecognized signed._type field. + ValueError, KeyError, TypeError: Invalid arguments. Side Effect: Destroys the metadata dict passed by reference. Returns: - A TUF Metadata object. + TUF ``Metadata`` object. 
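The generic-container idiom the docstring above describes, condensed into a short runnable sketch. Both usage patterns (`Metadata[Root].from_file(...)` and `Metadata(Timestamp(expires=...))`) come straight from the docstrings; the file names are placeholders:

```python
from datetime import datetime, timedelta

from tuf.api.metadata import Metadata, Root, Timestamp

# Load existing metadata with a type constraint: the static checker then
# knows root_md.signed is a Root.
root_md = Metadata[Root].from_file("root.json")  # placeholder path
assert root_md.signed.version >= 1

# Create fresh timestamp metadata from scratch; apart from the expiry,
# every constructor argument has a reasonable default.
one_day = datetime.utcnow() + timedelta(days=1)
timestamp_md = Metadata(Timestamp(expires=one_day))
timestamp_md.to_file("timestamp.json")  # still unsigned: call sign() first in practice
```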
""" # Dispatch to contained metadata class on metadata _type field. _type = metadata["signed"]["_type"] - if _type == "targets": + if _type == _TARGETS: inner_cls: Type[Signed] = Targets - elif _type == "snapshot": + elif _type == _SNAPSHOT: inner_cls = Snapshot - elif _type == "timestamp": + elif _type == _TIMESTAMP: inner_cls = Timestamp - elif _type == "root": + elif _type == _ROOT: inner_cls = Root else: raise ValueError(f'unrecognized metadata type "{_type}"') # Make sure signatures are unique - signatures: "OrderedDict[str, Signature]" = OrderedDict() + signatures: Dict[str, Signature] = {} for sig_dict in metadata.pop("signatures"): sig = Signature.from_dict(sig_dict) if sig.keyid in signatures: @@ -142,6 +196,8 @@ def from_dict(cls, metadata: Dict[str, Any]) -> "Metadata[T]": # Specific type T is not known at static type check time: use cast signed=cast(T, inner_cls.from_dict(metadata.pop("signed"))), signatures=signatures, + # All fields left in the metadata dict are unrecognized. + unrecognized_fields=metadata, ) @classmethod @@ -153,22 +209,21 @@ def from_file( ) -> "Metadata[T]": """Loads TUF metadata from file storage. - Arguments: - filename: The path to read the file from. - deserializer: A MetadataDeserializer subclass instance that + Args: + filename: Path to read the file from. + deserializer: ``MetadataDeserializer`` subclass instance that implements the desired wireline format deserialization. Per - default a JSONDeserializer is used. - storage_backend: An object that implements - securesystemslib.storage.StorageBackendInterface. Per default - a (local) FilesystemBackend is used. - + default a ``JSONDeserializer`` is used. + storage_backend: Object that implements + ``securesystemslib.storage.StorageBackendInterface``. + Default is ``FilesystemBackend`` (i.e. a local file). Raises: - securesystemslib.exceptions.StorageError: The file cannot be read. + exceptions.StorageError: The file cannot be read. tuf.api.serialization.DeserializationError: The file cannot be deserialized. Returns: - A TUF Metadata object. + TUF ``Metadata`` object. """ if storage_backend is None: @@ -185,17 +240,17 @@ def from_bytes( ) -> "Metadata[T]": """Loads TUF metadata from raw data. - Arguments: - data: metadata content as bytes. - deserializer: Optional; A MetadataDeserializer instance that - implements deserialization. Default is JSONDeserializer. + Args: + data: Metadata content. + deserializer: ``MetadataDeserializer`` implementation to use. + Default is ``JSONDeserializer``. Raises: tuf.api.serialization.DeserializationError: The file cannot be deserialized. Returns: - A TUF Metadata object. + TUF ``Metadata`` object. """ if deserializer is None: @@ -212,9 +267,16 @@ def to_bytes( ) -> bytes: """Return the serialized TUF file format as bytes. - Arguments: - serializer: A MetadataSerializer instance that implements the - desired serialization format. Default is JSONSerializer. + Note that if bytes are first deserialized into ``Metadata`` and then + serialized with ``to_bytes()``, the two are not required to be + identical even though the signatures are guaranteed to stay valid. If + byte-for-byte equivalence is required (which is the case when content + hashes are used in other metadata), the original content should be used + instead of re-serializing. + + Args: + serializer: ``MetadataSerializer`` instance that implements the + desired serialization format. Default is ``JSONSerializer``. 
Raises: tuf.api.serialization.SerializationError: @@ -235,7 +297,11 @@ def to_dict(self) -> Dict[str, Any]: signatures = [sig.to_dict() for sig in self.signatures.values()] - return {"signatures": signatures, "signed": self.signed.to_dict()} + return { + "signatures": signatures, + "signed": self.signed.to_dict(), + **self.unrecognized_fields, + } def to_file( self, @@ -245,18 +311,24 @@ def to_file( ) -> None: """Writes TUF metadata to file storage. - Arguments: - filename: The path to write the file to. - serializer: A MetadataSerializer instance that implements the - desired serialization format. Default is JSONSerializer. - storage_backend: A StorageBackendInterface implementation. Default - is FilesystemBackend (i.e. a local file). + Note that if a file is first deserialized into ``Metadata`` and then + serialized with ``to_file()``, the two files are not required to be + identical even though the signatures are guaranteed to stay valid. If + byte-for-byte equivalence is required (which is the case when file + hashes are used in other metadata), the original file should be used + instead of re-serializing. + + Args: + filename: Path to write the file to. + serializer: ``MetadataSerializer`` instance that implements the + desired serialization format. Default is ``JSONSerializer``. + storage_backend: ``StorageBackendInterface`` implementation. Default + is ``FilesystemBackend`` (i.e. a local file). Raises: tuf.api.serialization.SerializationError: The metadata object cannot be serialized. - securesystemslib.exceptions.StorageError: - The file cannot be written. + exceptions.StorageError: The file cannot be written. """ bytes_data = self.to_bytes(serializer) @@ -272,25 +344,26 @@ def sign( append: bool = False, signed_serializer: Optional[SignedSerializer] = None, ) -> Signature: - """Creates signature over 'signed' and assigns it to 'signatures'. + """Creates signature over ``signed`` and assigns it to ``signatures``. - Arguments: - signer: A securesystemslib.signer.Signer implementation. - append: A boolean indicating if the signature should be appended to + Args: + signer: A ``securesystemslib.signer.Signer`` object that provides a private + key and signing implementation to generate the signature. A standard + implementation is available in ``securesystemslib.signer.SSlibSigner``. + append: ``True`` if the signature should be appended to the list of signatures or replace any existing signatures. The default behavior is to replace signatures. - signed_serializer: A SignedSerializer that implements the desired - serialization format. Default is CanonicalJSONSerializer. + signed_serializer: ``SignedSerializer`` that implements the desired + serialization format. Default is ``CanonicalJSONSerializer``. Raises: tuf.api.serialization.SerializationError: - 'signed' cannot be serialized. - securesystemslib.exceptions.CryptoError, \ - securesystemslib.exceptions.UnsupportedAlgorithmError: - Signing errors. + ``signed`` cannot be serialized. + exceptions.UnsignedMetadataError: Signing errors. Returns: - Securesystemslib Signature object that was added into signatures. + ``securesystemslib.signer.Signature`` object that was added into + signatures. 
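A short signing round-trip using the ``securesystemslib.signer.SSlibSigner`` implementation the docstring points to. The key is a freshly generated throwaway for illustration and the path is a placeholder; real deployments load existing keys:

```python
from securesystemslib.keys import generate_ed25519_key
from securesystemslib.signer import SSlibSigner

from tuf.api.metadata import Metadata, Timestamp

key_dict = generate_ed25519_key()  # throwaway key, illustration only
signer = SSlibSigner(key_dict)

md = Metadata[Timestamp].from_file("timestamp.json")  # placeholder path
sig = md.sign(signer)          # replaces existing signatures by default
md.sign(signer, append=True)   # append=True keeps signatures from other keys
print(sig.keyid)
md.to_file("timestamp.json")
```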
""" if signed_serializer is None: @@ -300,7 +373,14 @@ def sign( signed_serializer = CanonicalJSONSerializer() - signature = signer.sign(signed_serializer.serialize(self.signed)) + bytes_data = signed_serializer.serialize(self.signed) + + try: + signature = signer.sign(bytes_data) + except Exception as e: + raise exceptions.UnsignedMetadataError( + "Problem signing the metadata" + ) from e if not append: self.signatures.clear() @@ -315,18 +395,20 @@ def verify_delegate( delegated_metadata: "Metadata", signed_serializer: Optional[SignedSerializer] = None, ) -> None: - """Verifies that 'delegated_metadata' is signed with the required - threshold of keys for the delegated role 'delegated_role'. + """Verifies that ``delegated_metadata`` is signed with the required + threshold of keys for the delegated role ``delegated_role``. Args: delegated_role: Name of the delegated role to verify - delegated_metadata: The Metadata object for the delegated role - signed_serializer: Optional; serializer used for delegate - serialization. Default is CanonicalJSONSerializer. + delegated_metadata: ``Metadata`` object for the delegated role + signed_serializer: Serializer used for delegate + serialization. Default is ``CanonicalJSONSerializer``. Raises: - UnsignedMetadataError: 'delegate' was not signed with required - threshold of keys for 'role_name' + UnsignedMetadataError: ``delegated_role`` was not signed with + required threshold of keys for ``role_name``. + ValueError: no delegation was found for ``delegated_role``. + TypeError: called this function on non-delegating metadata class. """ # Find the keys and role in delegator metadata @@ -339,10 +421,7 @@ def verify_delegate( raise ValueError(f"No delegation found for {delegated_role}") keys = self.signed.delegations.keys - roles = self.signed.delegations.roles - # Assume role names are unique in delegations.roles: #1426 - # Find first role in roles with matching name (or None if no match) - role = next((r for r in roles if r.name == delegated_role), None) + role = self.signed.delegations.roles.get(delegated_role) else: raise TypeError("Call is valid only on delegator metadata") @@ -363,65 +442,102 @@ def verify_delegate( raise exceptions.UnsignedMetadataError( f"{delegated_role} was signed by {len(signing_keys)}/" f"{role.threshold} keys", - delegated_metadata.signed, ) class Signed(metaclass=abc.ABCMeta): """A base class for the signed part of TUF metadata. - Objects with base class Signed are usually included in a Metadata object + Objects with base class Signed are usually included in a ``Metadata`` object on the signed attribute. This class provides attributes and methods that are common for all TUF metadata types (roles). - Attributes: - _type: The metadata type string. Also available without underscore. - version: The metadata version number. - spec_version: The TUF specification version number (semver) the - metadata format adheres to. - expires: The metadata expiration datetime object. - unrecognized_fields: Dictionary of all unrecognized fields. + *All parameters named below are not just constructor arguments but also + instance attributes.* + + Args: + version: Metadata version number. If None, then 1 is assigned. + spec_version: Supported TUF specification version. If None, then the + version currently supported by the library is assigned. + expires: Metadata expiry date. If None, then current date and time is + assigned. 
+ unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + + Raises: + ValueError: Invalid arguments. """ - # Signed implementations are expected to override this - _signed_type: ClassVar[str] = "signed" + # type is required for static reference without changing the API + type: ClassVar[str] = "signed" # _type and type are identical: 1st replicates file format, 2nd passes lint @property def _type(self) -> str: - return self._signed_type + return self.type @property - def type(self) -> str: - return self._signed_type + def expires(self) -> datetime: + """The metadata expiry date:: + + # Use 'datetime' module to e.g. expire in seven days from now + obj.expires = utcnow() + timedelta(days=7) + """ + return self._expires + + @expires.setter + def expires(self, value: datetime) -> None: + self._expires = value.replace(microsecond=0) # NOTE: Signed is a stupid name, because this might not be signed yet, but # we keep it to match spec terminology (I often refer to this as "payload", # or "inner metadata") def __init__( self, - version: int, - spec_version: str, - expires: datetime, - unrecognized_fields: Optional[Mapping[str, Any]] = None, - ) -> None: + version: Optional[int], + spec_version: Optional[str], + expires: Optional[datetime], + unrecognized_fields: Optional[Dict[str, Any]], + ): + if spec_version is None: + spec_version = ".".join(SPECIFICATION_VERSION) + # Accept semver (X.Y.Z) but also X.Y for legacy compatibility spec_list = spec_version.split(".") - if ( - len(spec_list) != 3 - or not all(el.isdigit() for el in spec_list) - or spec_list[0] != SPECIFICATION_VERSION[0] + if len(spec_list) not in [2, 3] or not all( + el.isdigit() for el in spec_list ): - raise ValueError( - f"Unsupported spec_version, got {spec_list}, " - f"supported {'.'.join(SPECIFICATION_VERSION)}" - ) + raise ValueError(f"Failed to parse spec_version {spec_version}") + + # major version must match + if spec_list[0] != SPECIFICATION_VERSION[0]: + raise ValueError(f"Unsupported spec_version {spec_version}") + self.spec_version = spec_version - self.expires = expires - if version <= 0: + self.expires = expires or datetime.utcnow() + + if version is None: + version = 1 + elif version <= 0: raise ValueError(f"version must be > 0, got {version}") self.version = version - self.unrecognized_fields: Mapping[str, Any] = unrecognized_fields or {} + + if unrecognized_fields is None: + unrecognized_fields = {} + + self.unrecognized_fields = unrecognized_fields + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Signed): + return False + + return ( + self.type == other.type + and self.version == other.version + and self.spec_version == other.spec_version + and self.expires == other.expires + and self.unrecognized_fields == other.unrecognized_fields + ) @abc.abstractmethod def to_dict(self) -> Dict[str, Any]: @@ -431,23 +547,23 @@ def to_dict(self) -> Dict[str, Any]: @classmethod @abc.abstractmethod def from_dict(cls, signed_dict: Dict[str, Any]) -> "Signed": - """Deserialization helper, creates object from dict representation""" + """Deserialization helper, creates object from json/dict representation""" raise NotImplementedError @classmethod def _common_fields_from_dict( cls, signed_dict: Dict[str, Any] ) -> Tuple[int, str, datetime]: - """Returns common fields of 'Signed' instances from the passed dict + """Returns common fields of ``Signed`` instances from the passed dict representation, and returns an ordered list to be passed as leading positional arguments to a 
subclass constructor. - See '{Root, Timestamp, Snapshot, Targets}.from_dict' methods for usage. + See ``{Root, Timestamp, Snapshot, Targets}.from_dict`` methods for usage. """ _type = signed_dict.pop("_type") - if _type != cls._signed_type: - raise ValueError(f"Expected type {cls._signed_type}, got {_type}") + if _type != cls.type: + raise ValueError(f"Expected type {cls.type}, got {_type}") version = signed_dict.pop("version") spec_version = signed_dict.pop("spec_version") @@ -456,12 +572,13 @@ def _common_fields_from_dict( # what the constructor expects and what we store. The inverse operation # is implemented in '_common_fields_to_dict'. expires = datetime.strptime(expires_str, "%Y-%m-%dT%H:%M:%SZ") + return version, spec_version, expires def _common_fields_to_dict(self) -> Dict[str, Any]: - """Returns dict representation of common fields of 'Signed' instances. + """Returns dict representation of common fields of ``Signed`` instances. - See '{Root, Timestamp, Snapshot, Targets}.to_dict' methods for usage. + See ``{Root, Timestamp, Snapshot, Targets}.to_dict`` methods for usage. """ return { @@ -476,44 +593,40 @@ def is_expired(self, reference_time: Optional[datetime] = None) -> bool: """Checks metadata expiration against a reference time. Args: - reference_time: Optional; The time to check expiration date against. - A naive datetime in UTC expected. - If not provided, checks against the current UTC date and time. + reference_time: Time to check expiration date against. A naive + datetime in UTC expected. Default is current UTC date and time. Returns: - True if expiration time is less than the reference time. + ``True`` if expiration time is less than the reference time. """ if reference_time is None: reference_time = datetime.utcnow() return reference_time >= self.expires - # Modification. - def bump_expiration(self, delta: timedelta = timedelta(days=1)) -> None: - """Increments the expires attribute by the passed timedelta.""" - self.expires += delta - - def bump_version(self) -> None: - """Increments the metadata version number by 1.""" - self.version += 1 - class Key: """A container class representing the public portion of a Key. - Please note that "Key" instances are not semanticly validated during - initialization: this only happens at signature verification time. + Supported key content (type, scheme and keyval) is defined in + ``securesystemslib``. + + *All parameters named below are not just constructor arguments but also + instance attributes.* - Attributes: - keyid: An identifier string that must uniquely identify a key within - the metadata it is used in. This implementation does not verify - that keyid is the hash of a specific representation of the key. - keytype: A string denoting a public key signature system, - such as "rsa", "ed25519", "ecdsa" and "ecdsa-sha2-nistp256". - scheme: A string denoting a corresponding signature scheme. For example: + Args: + keyid: Key identifier that is unique within the metadata it is used in. + Keyid is not verified to be the hash of a specific representation + of the key. + keytype: Key type, e.g. "rsa", "ed25519" or "ecdsa-sha2-nistp256". + scheme: Signature scheme. For example: "rsassa-pss-sha256", "ed25519", and "ecdsa-sha2-nistp256". - keyval: A dictionary containing the public portion of the key. - unrecognized_fields: Dictionary of all unrecognized fields.
+ keyval: Opaque key content + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + + Raises: + TypeError: Invalid type for an argument. """ def __init__( @@ -522,21 +635,40 @@ def __init__( keytype: str, scheme: str, keyval: Dict[str, str], - unrecognized_fields: Optional[Mapping[str, Any]] = None, - ) -> None: + unrecognized_fields: Optional[Dict[str, Any]] = None, + ): if not all( isinstance(at, str) for at in [keyid, keytype, scheme] - ) or not isinstance(keyval, Dict): + ) or not isinstance(keyval, dict): raise TypeError("Unexpected Key attributes types!") self.keyid = keyid self.keytype = keytype self.scheme = scheme self.keyval = keyval - self.unrecognized_fields: Mapping[str, Any] = unrecognized_fields or {} + if unrecognized_fields is None: + unrecognized_fields = {} + + self.unrecognized_fields = unrecognized_fields + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Key): + return False + + return ( + self.keyid == other.keyid + and self.keytype == other.keytype + and self.scheme == other.scheme + and self.keyval == other.keyval + and self.unrecognized_fields == other.unrecognized_fields + ) @classmethod def from_dict(cls, keyid: str, key_dict: Dict[str, Any]) -> "Key": - """Creates Key object from its dict representation.""" + """Creates ``Key`` object from its json/dict representation. + + Raises: + KeyError, TypeError: Invalid arguments. + """ keytype = key_dict.pop("keytype") scheme = key_dict.pop("scheme") keyval = key_dict.pop("keyval") @@ -553,7 +685,7 @@ def to_dict(self) -> Dict[str, Any]: } def to_securesystemslib_key(self) -> Dict[str, Any]: - """Returns a Securesystemslib compatible representation of self.""" + """Returns a ``Securesystemslib`` compatible representation of self.""" return { "keyid": self.keyid, "keytype": self.keytype, @@ -563,15 +695,27 @@ def to_securesystemslib_key(self) -> Dict[str, Any]: @classmethod def from_securesystemslib_key(cls, key_dict: Dict[str, Any]) -> "Key": - """ - Creates a Key object from a securesystemlib key dict representation + """Creates a ``Key`` object from a securesystemslib key json/dict representation removing the private key from keyval. + + Args: + key_dict: Key in securesystemslib dict representation. + + Raises: + ValueError: ``key_dict`` value is not following the securesystemslib + format. """ - key_meta = sslib_keys.format_keyval_to_metadata( - key_dict["keytype"], - key_dict["scheme"], - key_dict["keyval"], - ) + try: + key_meta = sslib_keys.format_keyval_to_metadata( + key_dict["keytype"], + key_dict["scheme"], + key_dict["keyval"], + ) + except sslib_exceptions.FormatError as e: + raise ValueError( + "key_dict value is not following the securesystemslib format" + ) from e + return cls( key_dict["keyid"], key_meta["keytype"], @@ -584,13 +728,13 @@ def verify_signature( metadata: Metadata, signed_serializer: Optional[SignedSerializer] = None, ) -> None: - """Verifies that the 'metadata.signatures' contains a signature made - with this key, correctly signing 'metadata.signed'. + """Verifies that ``metadata.signatures`` contains a signature made + with this key, correctly signing ``metadata.signed``. - Arguments: + Args: metadata: Metadata to verify - signed_serializer: Optional; SignedSerializer to serialize - 'metadata.signed' with. Default is CanonicalJSONSerializer. + signed_serializer: ``SignedSerializer`` to serialize + ``metadata.signed`` with. Default is ``CanonicalJSONSerializer``.
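A sketch of the conversion and verification helpers above (key generation and filename are assumptions):

```Python
from securesystemslib.keys import generate_ed25519_key
from securesystemslib.signer import SSlibSigner

from tuf.api.metadata import Key, Metadata

key_dict = generate_ed25519_key()
key = Key.from_securesystemslib_key(key_dict)
assert "private" not in key.keyval  # only the public portion is kept

md = Metadata.from_file("timestamp.json")
md.sign(SSlibSigner(key_dict))
key.verify_signature(md)  # passes; raises UnsignedMetadataError otherwise
```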
Raises: UnsignedMetadataError: The signature could not be verified for a @@ -600,8 +744,7 @@ def verify_signature( signature = metadata.signatures[self.keyid] except KeyError: raise exceptions.UnsignedMetadataError( - f"no signature for key {self.keyid} found in metadata", - metadata.signed, + f"No signature for key {self.keyid} found in metadata" ) from None if signed_serializer is None: @@ -617,17 +760,18 @@ def verify_signature( signed_serializer.serialize(metadata.signed), ): raise exceptions.UnsignedMetadataError( - f"Failed to verify {self.keyid} signature", - metadata.signed, + f"Failed to verify {self.keyid} signature" ) except ( sslib_exceptions.CryptoError, sslib_exceptions.FormatError, sslib_exceptions.UnsupportedAlgorithmError, + SerializationError, ) as e: + # Log the precise failure reason before raising the generic error + logger.info("Key %s failed to verify sig: %s", self.keyid, str(e)) raise exceptions.UnsignedMetadataError( - f"Failed to verify {self.keyid} signature", - metadata.signed, + f"Failed to verify {self.keyid} signature" ) from e @@ -637,33 +781,53 @@ class Role: Role defines how many keys are required to successfully sign the roles metadata, and which keys are accepted. - Attributes: - keyids: A set of strings representing signing keys for this role. + *All parameters named below are not just constructor arguments but also + instance attributes.* + + Args: + keyids: Roles signing key identifiers. threshold: Number of keys required to sign this role's metadata. - unrecognized_fields: Dictionary of all unrecognized fields. + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + + Raises: + ValueError: Invalid arguments. """ def __init__( self, keyids: List[str], threshold: int, - unrecognized_fields: Optional[Mapping[str, Any]] = None, - ) -> None: - keyids_set = set(keyids) - if len(keyids_set) != len(keyids): - raise ValueError( - f"keyids should be a list of unique strings," - f" instead got {keyids}" - ) + unrecognized_fields: Optional[Dict[str, Any]] = None, + ): + if len(set(keyids)) != len(keyids): + raise ValueError(f"Nonunique keyids: {keyids}") if threshold < 1: raise ValueError("threshold should be at least 1!") - self.keyids = keyids_set + self.keyids = keyids self.threshold = threshold - self.unrecognized_fields: Mapping[str, Any] = unrecognized_fields or {} + if unrecognized_fields is None: + unrecognized_fields = {} + + self.unrecognized_fields = unrecognized_fields + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Role): + return False + + return ( + self.keyids == other.keyids + and self.threshold == other.threshold + and self.unrecognized_fields == other.unrecognized_fields + ) @classmethod def from_dict(cls, role_dict: Dict[str, Any]) -> "Role": - """Creates Role object from its dict representation.""" + """Creates ``Role`` object from its json/dict representation. + + Raises: + ValueError, KeyError: Invalid arguments. + """ keyids = role_dict.pop("keyids") threshold = role_dict.pop("threshold") # All fields left in the role_dict are unrecognized. @@ -672,7 +836,7 @@ def from_dict(cls, role_dict: Dict[str, Any]) -> "Role": def to_dict(self) -> Dict[str, Any]: """Returns the dictionary representation of self.""" return { - "keyids": sorted(self.keyids), + "keyids": self.keyids, "threshold": self.threshold, **self.unrecognized_fields, } @@ -681,36 +845,68 @@ def to_dict(self) -> Dict[str, Any]: class Root(Signed): """A container for the signed part of root metadata.
- Attributes: - consistent_snapshot: An optional boolean indicating whether the - repository supports consistent snapshots. - keys: Dictionary of keyids to Keys. Defines the keys used in 'roles'. + Parameters listed below are also instance attributes. + + Args: + version: Metadata version number. Default is 1. + spec_version: Supported TUF specification version. Default is the + version currently supported by the library. + expires: Metadata expiry date. Default is current date and time. + keys: Dictionary of keyids to Keys. Defines the keys used in ``roles``. + Default is empty dictionary. roles: Dictionary of role names to Roles. Defines which keys are - required to sign the metadata for a specific role. + required to sign the metadata for a specific role. Default is + a dictionary of top level roles without keys and threshold of 1. + consistent_snapshot: ``True`` if repository supports consistent snapshots. + Default is True. + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + + Raises: + ValueError: Invalid arguments. """ - _signed_type = "root" + type = _ROOT - # TODO: determine an appropriate value for max-args # pylint: disable=too-many-arguments def __init__( self, - version: int, - spec_version: str, - expires: datetime, - keys: Dict[str, Key], - roles: Dict[str, Role], - consistent_snapshot: Optional[bool] = None, - unrecognized_fields: Optional[Mapping[str, Any]] = None, - ) -> None: + version: Optional[int] = None, + spec_version: Optional[str] = None, + expires: Optional[datetime] = None, + keys: Optional[Dict[str, Key]] = None, + roles: Optional[Mapping[str, Role]] = None, + consistent_snapshot: Optional[bool] = True, + unrecognized_fields: Optional[Dict[str, Any]] = None, + ): super().__init__(version, spec_version, expires, unrecognized_fields) self.consistent_snapshot = consistent_snapshot - self.keys = keys + self.keys = keys if keys is not None else {} + + if roles is None: + roles = {r: Role([], 1) for r in TOP_LEVEL_ROLE_NAMES} + elif set(roles) != TOP_LEVEL_ROLE_NAMES: + raise ValueError("Role names must be the top-level metadata roles") self.roles = roles + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Root): + return False + + return ( + super().__eq__(other) + and self.keys == other.keys + and self.roles == other.roles + and self.consistent_snapshot == other.consistent_snapshot + ) + @classmethod def from_dict(cls, signed_dict: Dict[str, Any]) -> "Root": - """Creates Root object from its dict representation.""" + """Creates ``Root`` object from its json/dict representation. + + Raises: + ValueError, KeyError, TypeError: Invalid arguments. + """ common_args = cls._common_fields_from_dict(signed_dict) consistent_snapshot = signed_dict.pop("consistent_snapshot", None) keys = signed_dict.pop("keys") @@ -742,18 +938,37 @@ def to_dict(self) -> Dict[str, Any]: ) return root_dict - # Update key for a role. def add_key(self, role: str, key: Key) -> None: - """Adds new signing key for delegated role 'role'.""" - self.roles[role].keyids.add(key.keyid) + """Adds new signing key for delegated role ``role``. + + Args: + role: Name of the role, for which ``key`` is added. + key: Signing key to be added for ``role``. + + Raises: + ValueError: If ``role`` doesn't exist. 
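Building on the ``Key`` sketch above, key management on ``Root`` could look like this (filename assumed):

```Python
from securesystemslib.keys import generate_ed25519_key

from tuf.api.metadata import Key, Metadata, Root

root_md: Metadata[Root] = Metadata.from_file("root.json")
new_key = Key.from_securesystemslib_key(generate_ed25519_key())

root_md.signed.add_key("snapshot", new_key)  # ValueError for unknown roles
root_md.signed.remove_key("snapshot", new_key.keyid)
```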
+ """ + if role not in self.roles: + raise ValueError(f"Role {role} doesn't exist") + if key.keyid not in self.roles[role].keyids: + self.roles[role].keyids.append(key.keyid) self.keys[key.keyid] = key def remove_key(self, role: str, keyid: str) -> None: - """Removes key from 'role' and updates the key store. + """Removes key from ``role`` and updates the key store. + + Args: + role: Name of the role, for which a signing key is removed. + keyid: Identifier of the key to be removed for ``role``. Raises: - KeyError: If 'role' does not include the key + ValueError: If ``role`` doesn't exist or if ``role`` doesn't include + the key. """ + if role not in self.roles: + raise ValueError(f"Role {role} doesn't exist") + if keyid not in self.roles[role].keyids: + raise ValueError(f"Key with id {keyid} is not used by {role}") self.roles[role].keyids.remove(keyid) for keyinfo in self.roles.values(): if keyid in keyinfo.keyids: @@ -763,7 +978,7 @@ def remove_key(self, role: str, keyid: str) -> None: class BaseFile: - """A base class of MetaFile and TargetFile. + """A base class of ``MetaFile`` and ``TargetFile``. Encapsulates common static methods for length and hash verification. """ @@ -772,7 +987,7 @@ class BaseFile: def _verify_hashes( data: Union[bytes, IO[bytes]], expected_hashes: Dict[str, str] ) -> None: - """Verifies that the hash of 'data' matches 'expected_hashes'""" + """Verifies that the hash of ``data`` matches ``expected_hashes``""" is_bytes = isinstance(data, bytes) for algo, exp_hash in expected_hashes.items(): try: @@ -793,7 +1008,7 @@ def _verify_hashes( observed_hash = digest_object.hexdigest() if observed_hash != exp_hash: raise exceptions.LengthOrHashMismatchError( - f"Observed hash {observed_hash} does not match" + f"Observed hash {observed_hash} does not match " f"expected hash {exp_hash}" ) @@ -801,7 +1016,7 @@ def _verify_hashes( def _verify_length( data: Union[bytes, IO[bytes]], expected_length: int ) -> None: - """Verifies that the length of 'data' matches 'expected_length'""" + """Verifies that the length of ``data`` matches ``expected_length``""" if isinstance(data, bytes): observed_length = len(data) else: @@ -811,7 +1026,7 @@ def _verify_length( if observed_length != expected_length: raise exceptions.LengthOrHashMismatchError( - f"Observed length {observed_length} does not match" + f"Observed length {observed_length} does not match " f"expected length {expected_length}" ) @@ -832,11 +1047,19 @@ def _validate_length(length: int) -> None: class MetaFile(BaseFile): """A container with information about a particular metadata file. - Attributes: - version: An integer indicating the version of the metadata file. - length: An optional integer indicating the length of the metadata file. - hashes: An optional dictionary of hash algorithm names to hash values. - unrecognized_fields: Dictionary of all unrecognized fields. + *All parameters named below are not just constructor arguments but also + instance attributes.* + + Args: + version: Version of the metadata file. + length: Length of the metadata file in bytes. + hashes: Dictionary of hash algorithm names to hashes of the metadata + file content. + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + + Raises: + ValueError, TypeError: Invalid arguments. 
""" def __init__( @@ -844,8 +1067,8 @@ def __init__( version: int, length: Optional[int] = None, hashes: Optional[Dict[str, str]] = None, - unrecognized_fields: Optional[Mapping[str, Any]] = None, - ) -> None: + unrecognized_fields: Optional[Dict[str, Any]] = None, + ): if version <= 0: raise ValueError(f"Metafile version must be > 0, got {version}") @@ -857,11 +1080,29 @@ def __init__( self.version = version self.length = length self.hashes = hashes - self.unrecognized_fields: Mapping[str, Any] = unrecognized_fields or {} + if unrecognized_fields is None: + unrecognized_fields = {} + + self.unrecognized_fields = unrecognized_fields + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, MetaFile): + return False + + return ( + self.version == other.version + and self.length == other.length + and self.hashes == other.hashes + and self.unrecognized_fields == other.unrecognized_fields + ) @classmethod def from_dict(cls, meta_dict: Dict[str, Any]) -> "MetaFile": - """Creates MetaFile object from its dict representation.""" + """Creates ``MetaFile`` object from its json/dict representation. + + Raises: + ValueError, KeyError: Invalid arguments. + """ version = meta_dict.pop("version") length = meta_dict.pop("length", None) hashes = meta_dict.pop("hashes", None) @@ -885,7 +1126,7 @@ def to_dict(self) -> Dict[str, Any]: return res_dict def verify_length_and_hashes(self, data: Union[bytes, IO[bytes]]) -> None: - """Verifies that the length and hashes of "data" match expected values. + """Verifies that the length and hashes of ``data`` match expected values. Args: data: File object or its content in bytes. @@ -904,74 +1145,115 @@ def verify_length_and_hashes(self, data: Union[bytes, IO[bytes]]) -> None: class Timestamp(Signed): """A container for the signed part of timestamp metadata. - Timestamp contains information about the snapshot Metadata file. + TUF file format uses a dictionary to contain the snapshot information: + this is not the case with ``Timestamp.snapshot_meta`` which is a ``MetaFile``. + + *All parameters named below are not just constructor arguments but also + instance attributes.* - Attributes: - meta: A dictionary of filenames to MetaFiles. The only valid key value - is the snapshot filename, as defined by the specification. + Args: + version: Metadata version number. Default is 1. + spec_version: Supported TUF specification version. Default is the + version currently supported by the library. + expires: Metadata expiry date. Default is current date and time. + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + snapshot_meta: Meta information for snapshot metadata. Default is a + MetaFile with version 1. + + Raises: + ValueError: Invalid arguments. 
""" - _signed_type = "timestamp" + type = _TIMESTAMP def __init__( self, - version: int, - spec_version: str, - expires: datetime, - meta: Dict[str, MetaFile], - unrecognized_fields: Optional[Mapping[str, Any]] = None, - ) -> None: + version: Optional[int] = None, + spec_version: Optional[str] = None, + expires: Optional[datetime] = None, + snapshot_meta: Optional[MetaFile] = None, + unrecognized_fields: Optional[Dict[str, Any]] = None, + ): super().__init__(version, spec_version, expires, unrecognized_fields) - self.meta = meta + self.snapshot_meta = snapshot_meta or MetaFile(1) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Timestamp): + return False + + return ( + super().__eq__(other) and self.snapshot_meta == other.snapshot_meta + ) @classmethod def from_dict(cls, signed_dict: Dict[str, Any]) -> "Timestamp": - """Creates Timestamp object from its dict representation.""" + """Creates ``Timestamp`` object from its json/dict representation. + + Raises: + ValueError, KeyError: Invalid arguments. + """ common_args = cls._common_fields_from_dict(signed_dict) meta_dict = signed_dict.pop("meta") - meta = {"snapshot.json": MetaFile.from_dict(meta_dict["snapshot.json"])} + snapshot_meta = MetaFile.from_dict(meta_dict["snapshot.json"]) # All fields left in the timestamp_dict are unrecognized. - return cls(*common_args, meta, signed_dict) + return cls(*common_args, snapshot_meta, signed_dict) def to_dict(self) -> Dict[str, Any]: """Returns the dict representation of self.""" res_dict = self._common_fields_to_dict() - res_dict["meta"] = { - "snapshot.json": self.meta["snapshot.json"].to_dict() - } + res_dict["meta"] = {"snapshot.json": self.snapshot_meta.to_dict()} return res_dict - # Modification. - def update(self, snapshot_meta: MetaFile) -> None: - """Assigns passed info about snapshot metadata to meta dict.""" - self.meta["snapshot.json"] = snapshot_meta - class Snapshot(Signed): """A container for the signed part of snapshot metadata. Snapshot contains information about all target Metadata files. - Attributes: - meta: A dictionary of target metadata filenames to MetaFile objects. + *All parameters named below are not just constructor arguments but also + instance attributes.* + + Args: + version: Metadata version number. Default is 1. + spec_version: Supported TUF specification version. Default is the + version currently supported by the library. + expires: Metadata expiry date. Default is current date and time. + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + meta: Dictionary of targets filenames to ``MetaFile`` objects. Default + is a dictionary with a Metafile for "snapshot.json" version 1. + + Raises: + ValueError: Invalid arguments. 
""" - _signed_type = "snapshot" + type = _SNAPSHOT def __init__( self, - version: int, - spec_version: str, - expires: datetime, - meta: Dict[str, MetaFile], - unrecognized_fields: Optional[Mapping[str, Any]] = None, - ) -> None: + version: Optional[int] = None, + spec_version: Optional[str] = None, + expires: Optional[datetime] = None, + meta: Optional[Dict[str, MetaFile]] = None, + unrecognized_fields: Optional[Dict[str, Any]] = None, + ): super().__init__(version, spec_version, expires, unrecognized_fields) - self.meta = meta + self.meta = meta if meta is not None else {"targets.json": MetaFile(1)} + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Snapshot): + return False + + return super().__eq__(other) and self.meta == other.meta @classmethod def from_dict(cls, signed_dict: Dict[str, Any]) -> "Snapshot": - """Creates Snapshot object from its dict representation.""" + """Creates ``Snapshot`` object from its json/dict representation. + + Raises: + ValueError, KeyError: Invalid arguments. + """ common_args = cls._common_fields_from_dict(signed_dict) meta_dicts = signed_dict.pop("meta") meta = {} @@ -990,32 +1272,34 @@ def to_dict(self) -> Dict[str, Any]: snapshot_dict["meta"] = meta_dict return snapshot_dict - # Modification. - def update(self, rolename: str, role_info: MetaFile) -> None: - """Assigns passed (delegated) targets role info to meta dict.""" - metadata_fn = f"{rolename}.json" - self.meta[metadata_fn] = role_info - class DelegatedRole(Role): """A container with information about a delegated role. A delegation can happen in two ways: - - paths is set: delegates targets matching any path pattern in paths - - path_hash_prefixes is set: delegates targets whose target path hash - starts with any of the prefixes in path_hash_prefixes + - ``paths`` is set: delegates targets matching any path pattern in ``paths`` + - ``path_hash_prefixes`` is set: delegates targets whose target path hash + starts with any of the prefixes in ``path_hash_prefixes`` + + ``paths`` and ``path_hash_prefixes`` are mutually exclusive: both cannot be + set, at least one of them must be set. - paths and path_hash_prefixes are mutually exclusive: both cannot be set, - at least one of them must be set. + *All parameters named below are not just constructor arguments but also + instance attributes.* - Attributes: - name: A string giving the name of the delegated role. - terminating: A boolean indicating whether subsequent delegations - should be considered during a target lookup. - paths: An optional list of path pattern strings. See note above. - path_hash_prefixes: An optional list of hash prefixes. See note above. - unrecognized_fields: Dictionary of all unrecognized fields. + Args: + name: Delegated role name. + keyids: Delegated role signing key identifiers. + threshold: Number of keys required to sign this role's metadata. + terminating: ``True`` if this delegation terminates a target lookup. + paths: Path patterns. See note above. + path_hash_prefixes: Hash prefixes. See note above. + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + + Raises: + ValueError: Invalid arguments. 
""" def __init__( @@ -1026,8 +1310,8 @@ def __init__( terminating: bool, paths: Optional[List[str]] = None, path_hash_prefixes: Optional[List[str]] = None, - unrecognized_fields: Optional[Mapping[str, Any]] = None, - ) -> None: + unrecognized_fields: Optional[Dict[str, Any]] = None, + ): super().__init__(keyids, threshold, unrecognized_fields) self.name = name self.terminating = terminating @@ -1037,12 +1321,35 @@ def __init__( if paths is None and path_hash_prefixes is None: raise ValueError("One of paths or path_hash_prefixes must be set") + if paths is not None and any(not isinstance(p, str) for p in paths): + raise ValueError("Paths must be strings") + if path_hash_prefixes is not None and any( + not isinstance(p, str) for p in path_hash_prefixes + ): + raise ValueError("Path_hash_prefixes must be strings") + self.paths = paths self.path_hash_prefixes = path_hash_prefixes + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DelegatedRole): + return False + + return ( + super().__eq__(other) + and self.name == other.name + and self.terminating == other.terminating + and self.paths == other.paths + and self.path_hash_prefixes == other.path_hash_prefixes + ) + @classmethod def from_dict(cls, role_dict: Dict[str, Any]) -> "DelegatedRole": - """Creates DelegatedRole object from its dict representation.""" + """Creates ``DelegatedRole`` object from its json/dict representation. + + Raises: + ValueError, KeyError: Invalid arguments. + """ name = role_dict.pop("name") keyids = role_dict.pop("keyids") threshold = role_dict.pop("threshold") @@ -1076,8 +1383,8 @@ def to_dict(self) -> Dict[str, Any]: @staticmethod def _is_target_in_pathpattern(targetpath: str, pathpattern: str) -> bool: - """Determines whether "targetname" matches the "pathpattern".""" - # We need to make sure that targetname and pathpattern are pointing to + """Determines whether ``targetpath`` matches the ``pathpattern``.""" + # We need to make sure that targetpath and pathpattern are pointing to # the same directory as fnmatch doesn't threat "/" as a special symbol. target_parts = targetpath.split("/") pattern_parts = pathpattern.split("/") @@ -1093,14 +1400,18 @@ def _is_target_in_pathpattern(targetpath: str, pathpattern: str) -> bool: return True def is_delegated_path(self, target_filepath: str) -> bool: - """Determines whether the given 'target_filepath' is in one of - the paths that DelegatedRole is trusted to provide. + """Determines whether the given ``target_filepath`` is in one of + the paths that ``DelegatedRole`` is trusted to provide. - The target_filepath and the DelegatedRole paths are expected to be in - their canonical forms, so e.g. "a/b" instead of "a//b" . Only "/" is + The ``target_filepath`` and the ``DelegatedRole`` paths are expected to be + in their canonical forms, so e.g. "a/b" instead of "a//b" . Only "/" is supported as target path separator. Leading separators are not handled as special cases (see `TUF specification on targetpath `_). + + Args: + target_filepath: URL path to a target file, relative to a base + targets URL. """ if self.path_hash_prefixes is not None: @@ -1127,43 +1438,79 @@ def is_delegated_path(self, target_filepath: str) -> bool: class Delegations: """A container object storing information about all delegations. - Attributes: - keys: Dictionary of keyids to Keys. Defines the keys used in 'roles'. - roles: List of DelegatedRoles that define which keys are required to - sign the metadata for a specific role. 
The roles order also - defines the order that role delegations are considered in. - unrecognized_fields: Dictionary of all unrecognized fields. + *All parameters named below are not just constructor arguments but also + instance attributes.* + + Args: + keys: Dictionary of keyids to Keys. Defines the keys used in ``roles``. + roles: Ordered dictionary of role names to DelegatedRoles instances. It + defines which keys are required to sign the metadata for a specific + role. The roles order also defines the order that role delegations + are considered during target searches. + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + + Raises: + ValueError: Invalid arguments. """ def __init__( self, keys: Dict[str, Key], - roles: List[DelegatedRole], - unrecognized_fields: Optional[Mapping[str, Any]] = None, - ) -> None: + roles: Dict[str, DelegatedRole], + unrecognized_fields: Optional[Dict[str, Any]] = None, + ): self.keys = keys + + for role in roles: + if not role or role in TOP_LEVEL_ROLE_NAMES: + raise ValueError( + "Delegated roles cannot be empty or use top-level role names" + ) + self.roles = roles - self.unrecognized_fields = unrecognized_fields or {} + if unrecognized_fields is None: + unrecognized_fields = {} + + self.unrecognized_fields = unrecognized_fields + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Delegations): + return False + + return ( + self.keys == other.keys + # Order of the delegated roles matters (see issue #1788). + and list(self.roles.items()) == list(other.roles.items()) + and self.unrecognized_fields == other.unrecognized_fields + ) @classmethod def from_dict(cls, delegations_dict: Dict[str, Any]) -> "Delegations": - """Creates Delegations object from its dict representation.""" + """Creates ``Delegations`` object from its json/dict representation. + + Raises: + ValueError, KeyError, TypeError: Invalid arguments. + """ keys = delegations_dict.pop("keys") keys_res = {} for keyid, key_dict in keys.items(): keys_res[keyid] = Key.from_dict(keyid, key_dict) roles = delegations_dict.pop("roles") - roles_res = [] + roles_res: Dict[str, DelegatedRole] = {} for role_dict in roles: new_role = DelegatedRole.from_dict(role_dict) - roles_res.append(new_role) + if new_role.name in roles_res: + raise ValueError(f"Duplicate role {new_role.name}") + roles_res[new_role.name] = new_role # All fields left in the delegations_dict are unrecognized. return cls(keys_res, roles_res, delegations_dict) def to_dict(self) -> Dict[str, Any]: """Returns the dict representation of self.""" keys = {keyid: key.to_dict() for keyid, key in self.keys.items()} - roles = [role_obj.to_dict() for role_obj in self.roles] + roles = [role_obj.to_dict() for role_obj in self.roles.values()] return { "keys": keys, "roles": roles, @@ -1174,12 +1521,19 @@ def to_dict(self) -> Dict[str, Any]: class TargetFile(BaseFile): """A container with information about a particular target file. - Attributes: - length: An integer indicating the length of the target file. - hashes: A dictionary of hash algorithm names to hash values. - path: A string denoting the path to a target file relative to a base - URL of targets. - unrecognized_fields: Dictionary of all unrecognized fields. + *All parameters named below are not just constructor arguments but also + instance attributes.* + + Args: + length: Length of the target file in bytes. + hashes: Dictionary of hash algorithm names to hashes of the target + file content.
+ path: URL path to a target file, relative to a base targets URL. + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + + Raises: + ValueError, TypeError: Invalid arguments. """ def __init__( @@ -1187,8 +1541,8 @@ def __init__( length: int, hashes: Dict[str, str], path: str, - unrecognized_fields: Optional[Mapping[str, Any]] = None, - ) -> None: + unrecognized_fields: Optional[Dict[str, Any]] = None, + ): self._validate_length(length) self._validate_hashes(hashes) @@ -1196,15 +1550,35 @@ def __init__( self.length = length self.hashes = hashes self.path = path - self.unrecognized_fields = unrecognized_fields or {} + if unrecognized_fields is None: + unrecognized_fields = {} + + self.unrecognized_fields = unrecognized_fields @property def custom(self) -> Any: - return self.unrecognized_fields.get("custom", None) + """Can be used to provide implementation specific data related to the + target. python-tuf does not use or validate this data.""" + return self.unrecognized_fields.get("custom") + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, TargetFile): + return False + + return ( + self.length == other.length + and self.hashes == other.hashes + and self.path == other.path + and self.unrecognized_fields == other.unrecognized_fields + ) @classmethod def from_dict(cls, target_dict: Dict[str, Any], path: str) -> "TargetFile": - """Creates TargetFile object from its dict representation.""" + """Creates ``TargetFile`` object from its json/dict representation. + + Raises: + ValueError, KeyError, TypeError: Invalid arguments. + """ length = target_dict.pop("length") hashes = target_dict.pop("hashes") @@ -1219,11 +1593,84 @@ def to_dict(self) -> Dict[str, Any]: **self.unrecognized_fields, } + @classmethod + def from_file( + cls, + target_file_path: str, + local_path: str, + hash_algorithms: Optional[List[str]] = None, + ) -> "TargetFile": + """Creates ``TargetFile`` object from a file. + + Args: + target_file_path: URL path to a target file, relative to a base + targets URL. + local_path: Local path to target file content. + hash_algorithms: Hash algorithms to calculate hashes with. If not + specified the securesystemslib default hash algorithm is used. + Raises: + FileNotFoundError: The file doesn't exist. + ValueError: The hash algorithms list contains an unsupported + algorithm. + """ + with open(local_path, "rb") as file: + return cls.from_data(target_file_path, file, hash_algorithms) + + @classmethod + def from_data( + cls, + target_file_path: str, + data: Union[bytes, IO[bytes]], + hash_algorithms: Optional[List[str]] = None, + ) -> "TargetFile": + """Creates ``TargetFile`` object from bytes. + + Args: + target_file_path: URL path to a target file, relative to a base + targets URL. + data: Target file content. + hash_algorithms: Hash algorithms to create the hashes with. If not + specified the securesystemslib default hash algorithm is used. + + Raises: + ValueError: The hash algorithms list contains an unsupported + algorithm. 
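Given the two constructors above, creating target metadata from content could look like this (path and content are assumptions):

```Python
from tuf.api.metadata import TargetFile

content = b"hello world"
target = TargetFile.from_data("files/hello.txt", content, ["sha256", "sha512"])

# Passes here; raises LengthOrHashMismatchError on any mismatch
target.verify_length_and_hashes(content)
print(target.length, target.path)
```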
+ """ + if isinstance(data, bytes): + length = len(data) + else: + data.seek(0, io.SEEK_END) + length = data.tell() + + hashes = {} + + if hash_algorithms is None: + hash_algorithms = [sslib_hash.DEFAULT_HASH_ALGORITHM] + + for algorithm in hash_algorithms: + try: + if isinstance(data, bytes): + digest_object = sslib_hash.digest(algorithm) + digest_object.update(data) + else: + digest_object = sslib_hash.digest_fileobject( + data, algorithm + ) + except ( + sslib_exceptions.UnsupportedAlgorithmError, + sslib_exceptions.FormatError, + ) as e: + raise ValueError(f"Unsupported algorithm '{algorithm}'") from e + + hashes[algorithm] = digest_object.hexdigest() + + return cls(length, hashes, target_file_path) + def verify_length_and_hashes(self, data: Union[bytes, IO[bytes]]) -> None: - """Verifies that length and hashes of "data" match expected values. + """Verifies that length and hashes of ``data`` match expected values. Args: - data: File object or its content in bytes. + data: Target file object or its content in bytes. Raises: LengthOrHashMismatchError: Calculated length or hashes do not @@ -1239,34 +1686,60 @@ class Targets(Signed): Targets contains verifying information about target files and also delegates responsibility to other Targets roles. - Attributes: - targets: A dictionary of target filenames to TargetFiles - delegations: An optional Delegations that defines how this Targets - further delegates responsibility to other Targets Metadata files. + *All parameters named below are not just constructor arguments but also + instance attributes.* + + Args: + version: Metadata version number. Default is 1. + spec_version: Supported TUF specification version. Default is the + version currently supported by the library. + expires: Metadata expiry date. Default is current date and time. + targets: Dictionary of target filenames to TargetFiles. Default is an + empty dictionary. + delegations: Defines how this Targets delegates responsibility to other + Targets Metadata files. Default is None. + unrecognized_fields: Dictionary of all attributes that are not managed + by TUF Metadata API + + Raises: + ValueError: Invalid arguments. """ - _signed_type = "targets" + type = _TARGETS - # TODO: determine an appropriate value for max-args # pylint: disable=too-many-arguments def __init__( self, - version: int, - spec_version: str, - expires: datetime, - targets: Dict[str, TargetFile], + version: Optional[int] = None, + spec_version: Optional[str] = None, + expires: Optional[datetime] = None, + targets: Optional[Dict[str, TargetFile]] = None, delegations: Optional[Delegations] = None, - unrecognized_fields: Optional[Mapping[str, Any]] = None, + unrecognized_fields: Optional[Dict[str, Any]] = None, ) -> None: super().__init__(version, spec_version, expires, unrecognized_fields) - self.targets = targets + self.targets = targets if targets is not None else {} self.delegations = delegations + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Targets): + return False + + return ( + super().__eq__(other) + and self.targets == other.targets + and self.delegations == other.delegations + ) + @classmethod def from_dict(cls, signed_dict: Dict[str, Any]) -> "Targets": - """Creates Targets object from its dict representation.""" + """Creates ``Targets`` object from its json/dict representation. + + Raises: + ValueError, KeyError, TypeError: Invalid arguments. 
+ """ common_args = cls._common_fields_from_dict(signed_dict) - targets = signed_dict.pop("targets") + targets = signed_dict.pop(_TARGETS) try: delegations_dict = signed_dict.pop("delegations") except KeyError: @@ -1287,12 +1760,47 @@ def to_dict(self) -> Dict[str, Any]: targets = {} for target_path, target_file_obj in self.targets.items(): targets[target_path] = target_file_obj.to_dict() - targets_dict["targets"] = targets + targets_dict[_TARGETS] = targets if self.delegations is not None: targets_dict["delegations"] = self.delegations.to_dict() return targets_dict - # Modification. - def update(self, fileinfo: TargetFile) -> None: - """Assigns passed target file info to meta dict.""" - self.targets[fileinfo.path] = fileinfo + def add_key(self, role: str, key: Key) -> None: + """Adds new signing key for delegated role ``role``. + + Args: + role: Name of the role, for which ``key`` is added. + key: Signing key to be added for ``role``. + + Raises: + ValueError: If there are no delegated roles or if ``role`` is not + delegated by this Target. + """ + if self.delegations is None or role not in self.delegations.roles: + raise ValueError(f"Delegated role {role} doesn't exist") + if key.keyid not in self.delegations.roles[role].keyids: + self.delegations.roles[role].keyids.append(key.keyid) + self.delegations.keys[key.keyid] = key + + def remove_key(self, role: str, keyid: str) -> None: + """Removes key from delegated role ``role`` and updates the delegations + key store. + + Args: + role: Name of the role, for which a signing key is removed. + keyid: Identifier of the key to be removed for ``role``. + + Raises: + ValueError: If there are no delegated roles or if ``role`` is not + delegated by this ``Target`` or if key is not used by ``role``. + """ + if self.delegations is None or role not in self.delegations.roles: + raise ValueError(f"Delegated role {role} doesn't exist") + if keyid not in self.delegations.roles[role].keyids: + raise ValueError(f"Key with id {keyid} is not used by {role}") + self.delegations.roles[role].keyids.remove(keyid) + for keyinfo in self.delegations.roles.values(): + if keyid in keyinfo.keyids: + return + + del self.delegations.keys[keyid] diff --git a/tuf/api/pylintrc b/tuf/api/pylintrc deleted file mode 100644 index 01139a8804..0000000000 --- a/tuf/api/pylintrc +++ /dev/null @@ -1,53 +0,0 @@ -# Minimal pylint configuration file for Secure Systems Lab Python Style Guide: -# https://github.com/secure-systems-lab/code-style-guidelines -# -# Based on Google Python Style Guide pylintrc and pylint defaults: -# https://google.github.io/styleguide/pylintrc -# http://pylint.pycqa.org/en/latest/technical_reference/features.html - -[MESSAGES CONTROL] -# Disable the message, report, category or checker with the given id(s). -# NOTE: To keep this config as short as possible we only disable checks that -# are currently in conflict with our code. If new code displeases the linter -# (for good reasons) consider updating this config file, or disable checks with -# 'pylint: disable=XYZ' comments. -disable=fixme, - too-few-public-methods, - too-many-arguments, - -[BASIC] -good-names=i,j,k,v,e,f,fn,fp,_type -# Regexes for allowed names are copied from the Google pylintrc -# NOTE: Pylint captures regex name groups such as 'snake_case' or 'camel_case'. -# If there are multiple groups it enfoces the prevalent naming style inside -# each modules. Names in the exempt capturing group are ignored. 
-function-rgx=^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$ -method-rgx=(?x)^(?:(?P<exempt>_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$ -argument-rgx=^[a-z][a-z0-9_]*$ -attr-rgx=^_{0,2}[a-z][a-z0-9_]*$ -class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ -class-rgx=^_?[A-Z][a-zA-Z0-9]*$ -const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ -inlinevar-rgx=^[a-z][a-z0-9_]*$ -module-rgx=^(_?[a-z][a-z0-9_]*|__init__)$ -no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$ -variable-rgx=^[a-z][a-z0-9_]*$ -docstring-min-length=10 - -[FORMAT] -ignore-long-lines=(?x)( - ^\s*(\#\ )?<?https?://\S+>?$| - ^\s*(from\s+\S+\s+)?import\s+.+$) -indent-string=" " -indent-after-paren=4 -max-line-length=80 -single-line-if-stmt=yes - -[LOGGING] -logging-format-style=old - -[MISCELLANEOUS] -notes=TODO - -[STRING] -check-quote-consistency=yes diff --git a/tuf/api/serialization/__init__.py b/tuf/api/serialization/__init__.py index 4ec0a4aef1..7aef8b9884 100644 --- a/tuf/api/serialization/__init__.py +++ b/tuf/api/serialization/__init__.py @@ -1,33 +1,34 @@ # Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 -"""TUF role metadata de/serialization. - -This sub-package provides abstract base classes and concrete implementations to -serialize and deserialize TUF role metadata and metadata parts. +"""``tuf.api.serialization`` module provides abstract base classes and concrete +implementations to serialize and deserialize TUF metadata. Any custom de/serialization implementations should inherit from the abstract -base classes defined in this __init__.py module. +base classes defined in this module. The implementations can use the +``to_dict()``/``from_dict()`` implementations available in the Metadata +API objects. - Metadata de/serializers are used to convert to and from wireline formats. - Signed serializers are used to canonicalize data for cryptographic signatures generation and verification. - """ + import abc from typing import TYPE_CHECKING +from tuf.api.exceptions import RepositoryError + if TYPE_CHECKING: # pylint: disable=cyclic-import from tuf.api.metadata import Metadata, Signed -# TODO: Should these be in tuf.exceptions or inherit from tuf.exceptions.Error?
-class SerializationError(Exception): +class SerializationError(RepositoryError): """Error during serialization.""" -class DeserializationError(Exception): +class DeserializationError(RepositoryError): """Error during deserialization.""" @@ -36,7 +37,7 @@ class MetadataDeserializer(metaclass=abc.ABCMeta): @abc.abstractmethod def deserialize(self, raw_data: bytes) -> "Metadata": - """Deserialize passed bytes to Metadata object.""" + """Deserialize bytes to Metadata object.""" raise NotImplementedError @@ -45,7 +46,7 @@ class MetadataSerializer(metaclass=abc.ABCMeta): @abc.abstractmethod def serialize(self, metadata_obj: "Metadata") -> bytes: - """Serialize passed Metadata object to bytes.""" + """Serialize Metadata object to bytes.""" raise NotImplementedError @@ -54,5 +55,5 @@ class SignedSerializer(metaclass=abc.ABCMeta): @abc.abstractmethod def serialize(self, signed_obj: "Signed") -> bytes: - """Serialize passed Signed object to bytes.""" + """Serialize Signed object to bytes.""" raise NotImplementedError diff --git a/tuf/api/serialization/json.py b/tuf/api/serialization/json.py index 43814993b9..3355511a66 100644 --- a/tuf/api/serialization/json.py +++ b/tuf/api/serialization/json.py @@ -1,15 +1,15 @@ # Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 -"""TUF role metadata JSON serialization and deserialization. - -This module provides concrete implementations to serialize and deserialize TUF -role metadata to and from the JSON wireline format for transportation, and -to serialize the 'signed' part of TUF role metadata to the OLPC Canonical JSON -format for signature generation and verification. - +"""``tuf.api.serialization.json`` module provides concrete implementations to +serialize and deserialize TUF role metadata to and from the JSON wireline +format for transportation, and to serialize the 'signed' part of TUF role +metadata to the OLPC Canonical JSON format for signature generation and +verification. """ + import json +from typing import Optional from securesystemslib.formats import encode_canonical @@ -37,7 +37,7 @@ def deserialize(self, raw_data: bytes) -> Metadata: metadata_obj = Metadata.from_dict(json_dict) except Exception as e: - raise DeserializationError from e + raise DeserializationError("Failed to deserialize JSON") from e return metadata_obj @@ -45,17 +45,22 @@ def deserialize(self, raw_data: bytes) -> Metadata: class JSONSerializer(MetadataSerializer): """Provides Metadata to JSON serialize method. - Attributes: + Args: compact: A boolean indicating if the JSON bytes generated in - 'serialize' should be compact by excluding whitespace. + 'serialize' should be compact by excluding whitespace. + validate: Check that the metadata object can be deserialized again + without change of contents and thus find common mistakes. + This validation might slow down serialization significantly. 
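A usage sketch for the serializer options above (filename assumed):

```Python
from tuf.api.metadata import Metadata
from tuf.api.serialization.json import JSONSerializer

md = Metadata.from_file("root.json")

# compact=True drops whitespace; validate=True round-trips the output
# through JSONDeserializer and fails if the contents change
md.to_file("root.json", JSONSerializer(compact=True, validate=True))
```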
""" - def __init__(self, compact: bool = False) -> None: + def __init__(self, compact: bool = False, validate: Optional[bool] = False): self.compact = compact + self.validate = validate def serialize(self, metadata_obj: Metadata) -> bytes: """Serialize Metadata object into utf-8 encoded JSON bytes.""" + try: indent = None if self.compact else 1 separators = (",", ":") if self.compact else (",", ": ") @@ -66,8 +71,18 @@ def serialize(self, metadata_obj: Metadata) -> bytes: sort_keys=True, ).encode("utf-8") + if self.validate: + try: + new_md_obj = JSONDeserializer().deserialize(json_bytes) + if metadata_obj != new_md_obj: + raise ValueError( + "Metadata changes if you serialize and deserialize." + ) + except Exception as e: + raise ValueError("Metadata cannot be validated!") from e + except Exception as e: - raise SerializationError from e + raise SerializationError("Failed to serialize JSON") from e return json_bytes diff --git a/tuf/client/README.md b/tuf/client/README.md deleted file mode 100644 index 29b838bc4d..0000000000 --- a/tuf/client/README.md +++ /dev/null @@ -1,151 +0,0 @@ -# updater.py -**updater.py** is intended as the only TUF module that software update -systems need to utilize for a low-level integration. It provides a single -class representing an updater that includes methods to download, install, and -verify metadata or target files in a secure manner. Importing -**tuf.client.updater** and instantiating its main class is all that is -required by the client prior to a TUF update request. The importation and -instantiation steps allow TUF to load all of the required metadata files -and set the repository mirror information. - -The **tuf.repository_tool** module can be used to create a TUF repository. See -[tuf/README](../README.md) for more information on creating TUF repositories. - - -## Overview of the Update Process - -1. The software update system instructs TUF to check for updates. - -2. TUF downloads and verifies timestamp.json. - -3. If timestamp.json indicates that snapshot.json has changed, TUF downloads and -verifies snapshot.json. - -4. TUF determines which metadata files listed in snapshot.json differ from those -described in the last snapshot.json that TUF has seen. If root.json has changed, -the update process starts over using the new root.json. - -5. TUF provides the software update system with a list of available files -according to targets.json. - -6. The software update system instructs TUF to download a specific target -file. - -7. TUF downloads and verifies the file and then makes the file available to -the software update system. - - -If at any point in the above procedure there is a problem (i.e., if unexpired, -signed, valid metadata cannot be retrieved from the repository), the Root file -is downloaded and the process is retried once more (and only once to avoid an -infinite loop). Optionally, the software update system using the framework -can decide how to proceed rather than automatically downloading a new Root file. - - -## Example Client -### Refresh TUF Metadata -```Python -# The client first imports the 'updater.py' module, the only module the -# client is required to import. The client will utilize a single class -# from this module. -import tuf.client.updater -import tuf.settings - -# The only other module the client interacts with is 'settings'. The -# client accesses this module solely to set the repository directory. -# This directory will hold the files downloaded from a remote repository. 
-tuf.settings.repositories_directory = 'path/to/local_repository' - -# Next, the client creates a dictionary object containing the repository -# mirrors. The client may download content from any one of these mirrors. -# In the example below, a single mirror named 'mirror1' is defined. The -# mirror is located at 'http://localhost:8001', and all of the metadata -# and targets files can be found in the 'metadata' and 'targets' directory, -# respectively. If the client wishes to only download target files from -# specific directories on the mirror, the 'confined_target_dirs' field -# should be set. In this example, the client hasn't set confined_target_dirs, -# which is interpreted as no confinement. In other words, the client can download -# targets from any directory or subdirectories. If the client had chosen -# 'targets1/', they would have been confined to the '/targets/targets1/' -# directory on the 'http://localhost:8001' mirror. -repository_mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - -# The updater may now be instantiated. The Updater class of 'updater.py' -# is called with two arguments. The first argument assigns a name to this -# particular updater and the second argument the repository mirrors defined -# above. -updater = tuf.client.updater.Updater('updater', repository_mirrors) - -# The client calls the refresh() method to ensure it has the latest -# copies of the top-level metadata files (i.e., Root, Targets, Snapshot, -# Timestamp). -updater.refresh() -``` - - -### Download Specific Target File -```Python -# Example demonstrating an update that downloads a specific target. - -# Refresh the metadata of the top-level roles (i.e., Root, Targets, Snapshot, Timestamp). -updater.refresh() - -# get_one_valid_targetinfo() updates role metadata when required. In other -# words, if the client doesn't possess the metadata that lists 'LICENSE.txt', -# get_one_valid_targetinfo() will try to fetch / update it. -target = updater.get_one_valid_targetinfo('LICENSE.txt') -updated_target = updater.updated_targets([target], destination_directory) - -for target in updated_target: - updater.download_target(target, destination_directory) - # Client code here may also reference target information (including 'custom') - # by directly accessing the dictionary entries of the target. The 'custom' - # entry is additional file information explicitly set by the remote repository. - target_path = target['filepath'] - target_length = target['fileinfo']['length'] - target_hashes = target['fileinfo']['hashes'] - target_custom_data = target['fileinfo']['custom'] - - # Remove any files from the destination directory that are no longer being - # tracked. For example, a target file from a previous snapshot that has since - # been removed on the remote repository. - updater.remove_obsolete_targets(destination_directory) -``` - -### A Simple Integration Example with client.py -``` Bash -# Assume a simple TUF repository has been setup with 'repo.py'. -$ client.py --repo http://localhost:8001 - -# Metadata and target files are silently updated. An exception is only raised if an error, -# or attack, is detected. Inspect 'tuf.log' for the outcome of the update process. 
- -$ cat tuf.log -[2013-12-16 16:17:05,267 UTC] [tuf.download] [INFO][_download_file:726@download.py] -Downloading: http://localhost:8001/metadata/timestamp.json - -[2013-12-16 16:17:05,269 UTC] [tuf.download] [WARNING][_check_content_length:589@download.py] -reported_length (545) < required_length (2048) - -[2013-12-16 16:17:05,269 UTC] [tuf.download] [WARNING][_check_downloaded_length:656@download.py] -Downloaded 545 bytes, but expected 2048 bytes. There is a difference of 1503 bytes! - -[2013-12-16 16:17:05,611 UTC] [tuf.download] [INFO][_download_file:726@download.py] -Downloading: http://localhost:8001/metadata/snapshot.json - -[2013-12-16 16:17:05,612 UTC] [tuf.client.updater] [INFO][_check_hashes:636@updater.py] -The file's sha256 hash is correct: 782675fadd650eeb2926d33c401b5896caacf4fd6766498baf2bce2f3b739db4 - -[2013-12-16 16:17:05,951 UTC] [tuf.download] [INFO][_download_file:726@download.py] -Downloading: http://localhost:8001/metadata/targets.json - -[2013-12-16 16:17:05,952 UTC] [tuf.client.updater] [INFO][_check_hashes:636@updater.py] -The file's sha256 hash is correct: a5019c28a1595c43a14cad2b6252c4d1db472dd6412a9204181ad6d61b1dd69a - -[2013-12-16 16:17:06,299 UTC] [tuf.download] [INFO][_download_file:726@download.py] -Downloading: http://localhost:8001/targets/file1.txt - -[2013-12-16 16:17:06,303 UTC] [tuf.client.updater] [INFO][_check_hashes:636@updater.py] -The file's sha256 hash is correct: ecdc5536f73bdae8816f0ea40726ef5e9b810d914493075903bb90623d97b1d8 diff --git a/tuf/client/fetcher.py b/tuf/client/fetcher.py deleted file mode 100644 index 8768bdd4b9..0000000000 --- a/tuf/client/fetcher.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2021, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -"""Provides an interface for network IO abstraction. -""" - -# Imports -import abc - -# Classes -class FetcherInterface(): - """Defines an interface for abstract network download. - - By providing a concrete implementation of the abstract interface, - users of the framework can plug-in their preferred/customized - network stack. - """ - - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def fetch(self, url, required_length): - """Fetches the contents of HTTP/HTTPS url from a remote server. - - Ensures the length of the downloaded data is up to 'required_length'. - - Arguments: - url: A URL string that represents a file location. - required_length: An integer value representing the file length in bytes. - - Raises: - tuf.exceptions.SlowRetrievalError: A timeout occurs while receiving data. - tuf.exceptions.FetcherHTTPError: An HTTP error code is received. - - Returns: - A bytes iterator - """ - raise NotImplementedError # pragma: no cover diff --git a/tuf/client/updater.py b/tuf/client/updater.py deleted file mode 100755 index ffb38dcb30..0000000000 --- a/tuf/client/updater.py +++ /dev/null @@ -1,3186 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - updater.py - - - Geremy Condra - Vladimir Diaz - - - July 2012. Based on a previous version of this module. (VLAD) - - - See LICENSE-MIT OR LICENSE for licensing information. - - - 'updater.py' is intended to be the only TUF module that software update - systems need to utilize. It provides a single class representing an - updater that includes methods to download, install, and verify - metadata/target files in a secure manner. 
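An aside before updater.py's long module docstring continues: the FetcherInterface deleted above anticipated concrete implementations along the following lines. This is only a sketch using `requests`; it is not the bundled `tuf.requests_fetcher.RequestsFetcher`, and the exact constructor arguments of the `tuf.exceptions` classes are assumptions.

```Python
# Sketch of a user-supplied FetcherInterface implementation; exception
# constructor signatures are assumed, not verified against tuf.exceptions.
import requests

from tuf import exceptions
from tuf.client.fetcher import FetcherInterface


class SimpleRequestsFetcher(FetcherInterface):
    """Fetch up to 'required_length' bytes of 'url' over HTTP/HTTPS."""

    def fetch(self, url, required_length):
        try:
            response = requests.get(url, stream=True, timeout=30)
            response.raise_for_status()
        except requests.exceptions.Timeout as e:
            raise exceptions.SlowRetrievalError() from e
        except requests.exceptions.HTTPError as e:
            raise exceptions.FetcherHTTPError(str(e), response.status_code) from e

        def chunks():
            # Yield data until 'required_length' bytes have been returned.
            received = 0
            for data in response.iter_content(chunk_size=8192):
                data = data[: required_length - received]
                if not data:
                    break
                received += len(data)
                yield data

        return chunks()
```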
Importing 'updater.py' and - instantiating its main class is all that is required by the client prior - to a TUF update request. The importation and instantiation steps allow - TUF to load all of the required metadata files and set the repository mirror - information. - - An overview of the update process: - - 1. The software update system instructs TUF to check for updates. - - 2. TUF downloads and verifies timestamp.json. - - 3. If timestamp.json indicates that snapshot.json has changed, TUF downloads - and verifies snapshot.json. - - 4. TUF determines which metadata files listed in snapshot.json differ from - those described in the last snapshot.json that TUF has seen. If root.json - has changed, the update process starts over using the new root.json. - - 5. TUF provides the software update system with a list of available files - according to targets.json. - - 6. The software update system instructs TUF to download a specific target - file. - - 7. TUF downloads and verifies the file and then makes the file available to - the software update system. - - - - # The client first imports the 'updater.py' module, the only module the - # client is required to import. The client will utilize a single class - # from this module. - from tuf.client.updater import Updater - - # The only other module the client interacts with is 'tuf.settings'. The - # client accesses this module solely to set the repository directory. - # This directory will hold the files downloaded from a remote repository. - from tuf import settings - settings.repositories_directory = 'local-repository' - - # Next, the client creates a dictionary object containing the repository - # mirrors. The client may download content from any one of these mirrors. - # In the example below, a single mirror named 'mirror1' is defined. The - # mirror is located at 'http://localhost:8001', and all of the metadata - # and targets files can be found in the 'metadata' and 'targets' directory, - # respectively. If the client wishes to only download target files from - # specific directories on the mirror, the 'confined_target_dirs' field - # should be set. In this example, the client hasn't set confined_target_dirs, - # which is interpreted as no confinement. - # In other words, the client can download - # targets from any directory or subdirectories. If the client had chosen - # 'targets1/', they would have been confined to the '/targets/targets1/' - # directory on the 'http://localhost:8001' mirror. - repository_mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # The updater may now be instantiated. The Updater class of 'updater.py' - # is called with two arguments. The first argument assigns a name to this - # particular updater and the second argument the repository mirrors defined - # above. - updater = Updater('updater', repository_mirrors) - - # The client next calls the refresh() method to ensure it has the latest - # copies of the metadata files. - updater.refresh() - - # get_one_valid_targetinfo() updates role metadata when required. In other - # words, if the client doesn't possess the metadata that lists 'LICENSE.txt', - # get_one_valid_targetinfo() will try to fetch / update it. - target = updater.get_one_valid_targetinfo('LICENSE.txt') - - # Determine if 'target' has changed since the client's last refresh(). A - # target is considered updated if it does not exist in - # 'destination_directory' (current directory) or the target located there has - # changed. 
- destination_directory = '.' - updated_target = updater.updated_targets([target], destination_directory) - - for target in updated_target: - updater.download_target(target, destination_directory) - # Client code here may also reference target information (including - # 'custom') by directly accessing the dictionary entries of the target. - # The 'custom' entry is additional file information explicitly set by the - # remote repository. - target_path = target['filepath'] - target_length = target['fileinfo']['length'] - target_hashes = target['fileinfo']['hashes'] - target_custom_data = target['fileinfo']['custom'] -""" - -import errno -import logging -import os -import shutil -import time -import fnmatch -import copy -import warnings -import io - -from securesystemslib import exceptions as sslib_exceptions -from securesystemslib import formats as sslib_formats -from securesystemslib import hash as sslib_hash -from securesystemslib import keys as sslib_keys -from securesystemslib import util as sslib_util - -import tuf -from tuf import download -from tuf import exceptions -from tuf import formats -from tuf import keydb -from tuf import log # pylint: disable=unused-import -from tuf import mirrors -from tuf import roledb -from tuf import settings -from tuf import sig -from tuf import requests_fetcher - -# The Timestamp role does not have signed metadata about it; otherwise we -# would need an infinite regress of metadata. Therefore, we use some -# default, but sane, upper file length for its metadata. -DEFAULT_TIMESTAMP_UPPERLENGTH = settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH - -# The Root role may be updated without knowing its version number if -# top-level metadata cannot be safely downloaded (e.g., keys may have been -# revoked, thus requiring a new Root file that includes the updated keys) -# and 'unsafely_update_root_if_necessary' is True. -# We use some default, but sane, upper file length for its metadata. -DEFAULT_ROOT_UPPERLENGTH = settings.DEFAULT_ROOT_REQUIRED_LENGTH - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - - -class MultiRepoUpdater(object): - """ - - Provide a way for clients to request a target file from multiple - repositories. Which repositories to query is determined by the map - file (i.e,. map.json). - - See TAP 4 for more information on the map file and how to request updates - from multiple repositories. TAP 4 describes how users may specify that a - particular threshold of repositories be used for some targets, while a - different threshold of repositories be used for others. - - - map_file: - The path of the map file. The map file is needed to determine which - repositories to query given a target file. - - - securesystemslib.exceptions.FormatError, if the map file is improperly - formatted. - - tuf.exceptions.Error, if the map file cannot be loaded. - - - None. - - - None. - """ - - def __init__(self, map_file): - # Is 'map_file' a path? If not, raise - # 'securesystemslib.exceptions.FormatError'. The actual content of the map - # file is validated later on in this method. - sslib_formats.PATH_SCHEMA.check_match(map_file) - - # A dictionary mapping repositories to TUF updaters. - self.repository_names_to_updaters = {} - - try: - # The map file dictionary that associates targets with repositories. 
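For reference, a map file of the kind loaded just below has the following shape; the repository names, URLs, patterns, and thresholds are invented, mirroring the docstring's own examples.

```Python
# Illustrative map.json content per TAP 4; all values are examples.
example_map_file = {
    "repositories": {
        "Django": ["https://djangoproject.com/"],
        "PyPI": ["https://pypi.python.org/"],
    },
    "mapping": [
        {
            "paths": ["django/*"],
            "repositories": ["Django"],
            "terminating": False,
            "threshold": 1,
        },
        {
            "paths": ["*"],
            "repositories": ["Django", "PyPI"],
            "terminating": True,
            "threshold": 2,
        },
    ],
}
```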
- self.map_file = sslib_util.load_json_file(map_file) - - except (sslib_exceptions.Error) as e: - raise exceptions.Error('Cannot load the map file: ' + str(e)) - - # Raise securesystemslib.exceptions.FormatError if the map file is - # improperly formatted. - formats.MAPFILE_SCHEMA.check_match(self.map_file) - - # Save the "repositories" entry of the map file, with the following - # example format: - # - # "repositories": { - # "Django": ["https://djangoproject.com/"], - # "PyPI": ["https://pypi.python.org/"] - # } - self.repository_names_to_mirrors = self.map_file['repositories'] - - - - def get_valid_targetinfo(self, target_filename, match_custom_field=True): - """ - - Get valid targetinfo, if any, for the given 'target_filename'. The map - file controls the targetinfo returned (see TAP 4). Return a dict of the - form {updater1: targetinfo, updater2: targetinfo, ...}, where the dict - keys are updater objects, and the dict values the matching targetinfo for - 'target_filename'. - - - target_filename: - The relative path of the target file to update. - - match_custom_field: - Boolean that indicates whether the optional custom field in targetinfo - should match across the targetinfo provided by the threshold of - repositories. - - - tuf.exceptions.FormatError, if the argument is improperly formatted. - - tuf.exceptions.Error, if the required local metadata directory or the - Root file does not exist. - - tuf.exceptions.UnknownTargetError, if the repositories in the map file do - not agree on the target, or none of them have signed for the target. - - - None. - - - A dict of the form: {updater1: targetinfo, updater2: targetinfo, ...}. - The targetinfo (conformant with tuf.formats.TARGETINFO_SCHEMA) is for - 'target_filename'. - """ - - # Is the argument properly formatted? If not, raise - # 'tuf.exceptions.FormatError'. - formats.RELPATH_SCHEMA.check_match(target_filename) - - # TAP 4 requires that the following attributes be present in mappings: - # "paths", "repositories", "terminating", and "threshold". - formats.MAPPING_SCHEMA.check_match(self.map_file['mapping']) - - # Set the top-level directory containing the metadata for each repository. - repositories_directory = settings.repositories_directory - - # Verify that the required local directories exist for each repository. - self._verify_metadata_directories(repositories_directory) - - # Iterate mappings. - # [{"paths": [], "repositories": [], "terminating": Boolean, "threshold": - # NUM}, ...] - for mapping in self.map_file['mapping']: - - logger.debug('Interrogating mappings..' + repr(mapping)) - if not self._target_matches_path_pattern( - target_filename, mapping['paths']): - # The mapping is irrelevant to the target file. Try the next one, if - # any. - continue - - # The mapping is relevant to the target... - else: - # Do the repositories in the mapping provide a threshold of matching - # targetinfo? - valid_targetinfo = self._matching_targetinfo(target_filename, - mapping, match_custom_field) - - if valid_targetinfo: - return valid_targetinfo - - else: - # If we are here, it means either (1) the mapping is irrelevant to - # the target, (2) the targets were missing from all repositories in - # this mapping, or (3) the targets on all repositories did not match. - # Whatever the case may be, are we allowed to continue to the next - # mapping? Let's check the terminating entry! - if not mapping['terminating']: - logger.debug('The mapping was irrelevant to the target, and' - ' "terminating" was set to False. 
Trying the next mapping...') - continue - - else: - raise exceptions.UnknownTargetError('The repositories in the' - ' mapping do not agree on the target, or none of them have' - ' signed for the target, and "terminating" was set to True.') - - # If we are here, it means either there were no mappings, or none of the - # mappings provided the target. - logger.debug('Did not find valid targetinfo for ' + repr(target_filename)) - raise exceptions.UnknownTargetError('The repositories in the map' - ' file do not agree on the target, or none of them have signed' - ' for the target.') - - - - - - def _verify_metadata_directories(self, repositories_directory): - # Iterate 'self.repository_names_to_mirrors' and verify that the expected - # local files and directories exist. TAP 4 requires a separate local - # directory for each repository. - for repository_name in self.repository_names_to_mirrors: - - logger.debug('Interrogating repository: ' + repr(repository_name)) - # Each repository must cache its metadata in a separate location. - repository_directory = os.path.join(repositories_directory, - repository_name) - - if not os.path.isdir(repository_directory): - raise exceptions.Error('The metadata directory' - ' for ' + repr(repository_name) + ' must exist' - ' at ' + repr(repository_directory)) - - else: - logger.debug('Found local directory for ' + repr(repository_name)) - - # The latest known root metadata file must also exist on disk. - root_file = os.path.join( - repository_directory, 'metadata', 'current', 'root.json') - - if not os.path.isfile(root_file): - raise exceptions.Error( - 'The Root file must exist at ' + repr(root_file)) - - else: - logger.debug('Found local Root file at ' + repr(root_file)) - - - - - - def _matching_targetinfo( - self, target_filename, mapping, match_custom_field=True): - valid_targetinfo = {} - - # Retrieve the targetinfo from each repository using the underlying - # Updater() instance. - for repository_name in mapping['repositories']: - logger.debug('Retrieving targetinfo for ' + repr(target_filename) + - ' from repository...') - - try: - targetinfo, updater = self._update_from_repository( - repository_name, target_filename) - - except (exceptions.UnknownTargetError, exceptions.Error): - continue - - valid_targetinfo[updater] = targetinfo - - matching_targetinfo = {} - logger.debug('Verifying that a threshold of targetinfo are equal...') - - # Iterate 'valid_targetinfo', looking for a threshold number of matches - # for 'targetinfo'. The first targetinfo to reach the required threshold - # is returned. For example, suppose the following list of targetinfo and - # a threshold of 2: - # [A, B, C, B, A, C] - # In this case, targetinfo B is returned. - for valid_updater, compared_targetinfo in valid_targetinfo.items(): - - if not self._targetinfo_match( - targetinfo, compared_targetinfo, match_custom_field): - continue - - else: - - matching_targetinfo[valid_updater] = targetinfo - - if not len(matching_targetinfo) >= mapping['threshold']: - continue - - else: - logger.debug('Found a threshold of matching targetinfo!') - # We now have a targetinfo (that matches across a threshold of - # repositories as instructed by the map file), along with the - # updaters that sign for it. 
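The threshold rule sketched in the comment above, reduced to a stand-alone function:

```Python
# With threshold 2 and targetinfo [A, B, C, B, A, C], B is the first value
# to accumulate the required number of matches.
def first_to_threshold(values, threshold):
    counts = {}
    for value in values:
        counts[value] = counts.get(value, 0) + 1
        if counts[value] >= threshold:
            return value
    return None

print(first_to_threshold(["A", "B", "C", "B", "A", "C"], 2))  # B
```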
- logger.debug( - 'Returning updaters for targetinfo: ' + repr(targetinfo)) - - return matching_targetinfo - - return None - - - - - - def _targetinfo_match(self, targetinfo1, targetinfo2, match_custom_field=True): - if match_custom_field: - return (targetinfo1 == targetinfo2) - - else: - targetinfo1_without_custom = copy.deepcopy(targetinfo1) - targetinfo2_without_custom = copy.deepcopy(targetinfo2) - targetinfo1_without_custom['fileinfo'].pop('custom', None) - targetinfo2_without_custom['fileinfo'].pop('custom', None) - - return (targetinfo1_without_custom == targetinfo2_without_custom) - - - - - - def _target_matches_path_pattern(self, target_filename, path_patterns): - for path_pattern in path_patterns: - logger.debug('Interrogating pattern ' + repr(path_pattern) + 'for' - ' target: ' + repr(target_filename)) - - # Example: "foo.tgz" should match with "/*.tgz". Make sure to strip any - # leading path separators so that a match is made if a repo maintainer - # uses a leading separator with a delegated glob pattern, but a client - # doesn't include one when a target file is requested. - if fnmatch.fnmatch(target_filename.lstrip(os.sep), path_pattern.lstrip(os.sep)): - logger.debug('Found a match for ' + repr(target_filename)) - return True - - else: - logger.debug('Continue searching for relevant paths.') - continue - - # If we are here, then none of the paths are relevant to the target. - logger.debug('None of the paths are relevant.') - return False - - - - - - - def get_updater(self, repository_name): - """ - - Get the updater instance corresponding to 'repository_name'. - - - repository_name: - The name of the repository as it appears in the map file. For example, - "Django" and "PyPI" in the "repositories" entry of the map file. - - "repositories": { - "Django": ["https://djangoproject.com/"], - "PyPI": ["https://pypi.python.org/"] - } - - - tuf.exceptions.FormatError, if any of the arguments are improperly - formatted. - - - None. - - - Returns the Updater() instance for 'repository_name'. If the instance - does not exist, return None. - """ - - # Are the arguments properly formatted? If not, raise - # 'tuf.exceptions.FormatError'. - formats.NAME_SCHEMA.check_match(repository_name) - - updater = self.repository_names_to_updaters.get(repository_name) - - if not updater: - - if repository_name not in self.repository_names_to_mirrors: - return None - - else: - # Create repository mirrors object needed by the - # tuf.client.updater.Updater(). Each 'repository_name' can have more - # than one mirror. - repo_mirrors = {} - - for url in self.repository_names_to_mirrors[repository_name]: - repo_mirrors[url] = { - 'url_prefix': url, - 'metadata_path': 'metadata', - 'targets_path': 'targets'} - - try: - # NOTE: State (e.g., keys) should NOT be shared across different - # updater instances. - logger.debug('Adding updater for ' + repr(repository_name)) - updater = Updater(repository_name, repo_mirrors) - - except Exception: - return None - - else: - self.repository_names_to_updaters[repository_name] = updater - - else: - logger.debug('Found an updater for ' + repr(repository_name)) - - # Ensure the updater's metadata is the latest before returning it. - updater.refresh() - return updater - - - - - - def _update_from_repository(self, repository_name, target_filename): - - updater = self.get_updater(repository_name) - - if not updater: - raise exceptions.Error( - 'Cannot load updater for ' + repr(repository_name)) - - else: - # Get one valid target info from the Updater object. 
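Stepping back to _target_matches_path_pattern above: its leading-separator handling can be checked directly with the standard library.

```Python
# 'foo.tgz' matches '/*.tgz' once leading separators are stripped; note that
# fnmatch itself does not treat path separators specially.
import fnmatch
import os

assert fnmatch.fnmatch("foo.tgz".lstrip(os.sep), "/*.tgz".lstrip(os.sep))
assert not fnmatch.fnmatch("foo.txt".lstrip(os.sep), "/*.tgz".lstrip(os.sep))
```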
- # 'tuf.exceptions.UnknownTargetError' raised by get_one_valid_targetinfo - # if a valid target cannot be found. - return updater.get_one_valid_targetinfo(target_filename), updater - - - - - -class Updater(object): - """ - - Provide a class that can download target files securely. The updater - keeps track of currently and previously trusted metadata, target files - available to the client, target file attributes such as file size and - hashes, key and role information, metadata signatures, and the ability - to determine when the download of a file should be permitted. - - - self.metadata: - Dictionary holding the currently and previously trusted metadata. - - Example: {'current': {'root': ROOT_SCHEMA, - 'targets':TARGETS_SCHEMA, ...}, - 'previous': {'root': ROOT_SCHEMA, - 'targets':TARGETS_SCHEMA, ...}} - - self.metadata_directory: - The directory where trusted metadata is stored. - - self.versioninfo: - A cache of version numbers for the roles available on the repository. - - Example: {'targets.json': {'version': 128}, ...} - - self.mirrors: - The repository mirrors from which metadata and targets are available. - Conformant to 'tuf.formats.MIRRORDICT_SCHEMA'. - - self.repository_name: - The name of the updater instance. - - - refresh(): - This method downloads, verifies, and loads metadata for the top-level - roles in a specific order (i.e., root -> timestamp -> snapshot -> targets) - The expiration time for downloaded metadata is also verified. - - The metadata for delegated roles are not refreshed by this method, but by - the method that returns targetinfo (i.e., get_one_valid_targetinfo()). - The refresh() method should be called by the client before any target - requests. - - get_one_valid_targetinfo(file_path): - Returns the target information for a specific file identified by its file - path. This target method also downloads the metadata of updated targets. - - updated_targets(targets, destination_directory): - After the client has retrieved the target information for those targets - they are interested in updating, they would call this method to determine - which targets have changed from those saved locally on disk. All the - targets that have changed are returns in a list. From this list, they - can request a download by calling 'download_target()'. - - download_target(target, destination_directory): - This method performs the actual download of the specified target. The - file is saved to the 'destination_directory' argument. - - remove_obsolete_targets(destination_directory): - Any files located in 'destination_directory' that were previously - served by the repository but have since been removed, can be deleted - from disk by the client by calling this method. - - Note: The methods listed above are public and intended for the software - updater integrating TUF with this module. All other methods that may begin - with a single leading underscore are non-public and only used internally. - updater.py is not subclassed in TUF, nor is it designed to be subclassed, - so double leading underscores is not used. - http://www.python.org/dev/peps/pep-0008/#method-names-and-instance-variables - """ - - def __init__(self, repository_name, repository_mirrors, fetcher=None): - """ - - Constructor. Instantiating an updater object causes all the metadata - files for the top-level roles to be read from disk, including the key and - role information for the delegated targets of 'targets'. The actual - metadata for delegated roles is not loaded in __init__. 
The metadata for - these delegated roles, including nested delegated roles, are loaded, - updated, and saved to the 'self.metadata' store, as needed, by - get_one_valid_targetinfo(). - - The initial set of metadata files are provided by the software update - system utilizing TUF. - - In order to use an updater, the following directories must already - exist locally: - - {tuf.settings.repositories_directory}/{repository_name}/metadata/current - {tuf.settings.repositories_directory}/{repository_name}/metadata/previous - - and, at a minimum, the root metadata file must exist: - - {tuf.settings.repositories_directory}/{repository_name}/metadata/current/root.json - - - repository_name: - The name of the repository. - - repository_mirrors: - A dictionary holding repository mirror information, conformant to - 'tuf.formats.MIRRORDICT_SCHEMA'. This dictionary holds - information such as the directory containing the metadata and target - files, the server's URL prefix, and the target content directories the - client should be confined to. - - repository_mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata', - 'targets_path': 'targets', - 'confined_target_dirs': ['']}} - - fetcher: - A concrete 'FetcherInterface' implementation. Performs the network - related download operations. If an external implementation is not - provided, tuf.fetcher.RequestsFetcher is used. - - - securesystemslib.exceptions.FormatError: - If the arguments are improperly formatted. - - tuf.exceptions.RepositoryError: - If there is an error with the updater's repository files, such - as a missing 'root.json' file. - - - Th metadata files (e.g., 'root.json', 'targets.json') for the top- level - roles are read from disk and stored in dictionaries. In addition, the - key and roledb modules are populated with 'repository_name' entries. - - - None. - """ - - # Do the arguments have the correct format? - # These checks ensure the arguments have the appropriate - # number of objects and object types and that all dict - # keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mistmatch. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - formats.MIRRORDICT_SCHEMA.check_match(repository_mirrors) - - # Save the validated arguments. - self.repository_name = repository_name - self.mirrors = repository_mirrors - - # Initialize Updater with an externally provided 'fetcher' implementing - # the network download. By default tuf.fetcher.RequestsFetcher is used. - if fetcher is None: - self.fetcher = requests_fetcher.RequestsFetcher() - else: - self.fetcher = fetcher - - # Store the trusted metadata read from disk. - self.metadata = {} - - # Store the currently trusted/verified metadata. - self.metadata['current'] = {} - - # Store the previously trusted/verified metadata. - self.metadata['previous'] = {} - - # Store the version numbers of roles available on the repository. The dict - # keys are paths, and the dict values versioninfo data. This information - # can help determine whether a metadata file has changed and needs to be - # re-downloaded. - self.versioninfo = {} - - # Store the file information of the root and snapshot roles. The dict keys - # are paths, the dict values fileinfo data. This information can help - # determine whether a metadata file has changed and so needs to be - # re-downloaded. - self.fileinfo = {} - - # Store the location of the client's metadata directory. 
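For context, preparing the on-disk layout that this constructor requires might look as follows; all paths are examples, and the trusted root.json must be obtained out of band.

```Python
# Bootstrap the metadata directories the legacy Updater checks for.
import os
import shutil

from tuf import settings

settings.repositories_directory = "client"  # example location
repo_dir = os.path.join(settings.repositories_directory, "updater")

for metadata_set in ("current", "previous"):
    os.makedirs(os.path.join(repo_dir, "metadata", metadata_set), exist_ok=True)

# 'root.json' here stands for a root file distributed with the application.
shutil.copy("root.json", os.path.join(repo_dir, "metadata", "current", "root.json"))
```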
- self.metadata_directory = {} - - # Store the 'consistent_snapshot' of the Root role. This setting - # determines if metadata and target files downloaded from remote - # repositories include the digest. - self.consistent_snapshot = False - - # Ensure the repository metadata directory has been set. - if settings.repositories_directory is None: - raise exceptions.RepositoryError('The TUF update client' - ' module must specify the directory containing the local repository' - ' files. "tuf.settings.repositories_directory" MUST be set.') - - # Set the path for the current set of metadata files. - repositories_directory = settings.repositories_directory - repository_directory = os.path.join(repositories_directory, self.repository_name) - - # raise MissingLocalRepository if the repo does not exist at all. - if not os.path.exists(repository_directory): - raise exceptions.MissingLocalRepositoryError('Local repository ' + - repr(repository_directory) + ' does not exist.') - - current_path = os.path.join(repository_directory, 'metadata', 'current') - - # Ensure the current path is valid/exists before saving it. - if not os.path.exists(current_path): - raise exceptions.RepositoryError('Missing' - ' ' + repr(current_path) + '. This path must exist and, at a minimum,' - ' contain the Root metadata file.') - - self.metadata_directory['current'] = current_path - - # Set the path for the previous set of metadata files. - previous_path = os.path.join(repository_directory, 'metadata', 'previous') - - # Ensure the previous path is valid/exists. - if not os.path.exists(previous_path): - raise exceptions.RepositoryError('Missing ' + repr(previous_path) + '.' - ' This path MUST exist.') - - self.metadata_directory['previous'] = previous_path - - # Load current and previous metadata. - for metadata_set in ['current', 'previous']: - for metadata_role in roledb.TOP_LEVEL_ROLES: - self._load_metadata_from_file(metadata_set, metadata_role) - - # Raise an exception if the repository is missing the required 'root' - # metadata. - if 'root' not in self.metadata['current']: - raise exceptions.RepositoryError('No root of trust!' - ' Could not find the "root.json" file.') - - - - - - def __str__(self): - """ - The string representation of an Updater object. - """ - - return self.repository_name - - - - - - def _load_metadata_from_file(self, metadata_set, metadata_role): - """ - - Non-public method that loads current or previous metadata if there is a - local file. If the expected file belonging to 'metadata_role' (e.g., - 'root.json') cannot be loaded, raise an exception. The extracted metadata - object loaded from file is saved to the metadata store (i.e., - self.metadata). - - - metadata_set: - The string 'current' or 'previous', depending on whether one wants to - load the currently or previously trusted metadata file. - - metadata_role: - The name of the metadata. This is a role name and should - not end in '.json'. Examples: 'root', 'targets', 'unclaimed'. - - - securesystemslib.exceptions.FormatError: - If the role object loaded for 'metadata_role' is improperly formatted. - - securesystemslib.exceptions.Error: - If there was an error importing a delegated role of 'metadata_role' - or the 'metadata_set' is not one currently supported. - - - If the metadata is loaded successfully, it is saved to the metadata - store. If 'metadata_role' is 'root', the role and key databases - are reloaded. If 'metadata_role' is a target metadata, all its - delegated roles are refreshed. - - - None. 
- """ - - # Ensure we have a valid metadata set. - if metadata_set not in ['current', 'previous']: - raise sslib_exceptions.Error( - 'Invalid metadata set: ' + repr(metadata_set)) - - # Save and construct the full metadata path. - metadata_directory = self.metadata_directory[metadata_set] - metadata_filename = metadata_role + '.json' - metadata_filepath = os.path.join(metadata_directory, metadata_filename) - - # Ensure the metadata path is valid/exists, else ignore the call. - if os.path.exists(metadata_filepath): - # Load the file. The loaded object should conform to - # 'tuf.formats.SIGNABLE_SCHEMA'. - try: - metadata_signable = sslib_util.load_json_file( - metadata_filepath) - - # Although the metadata file may exist locally, it may not - # be a valid json file. On the next refresh cycle, it will be - # updated as required. If Root if cannot be loaded from disk - # successfully, an exception should be raised by the caller. - except sslib_exceptions.Error: - return - - formats.check_signable_object_format(metadata_signable) - - # Extract the 'signed' role object from 'metadata_signable'. - metadata_object = metadata_signable['signed'] - - # Save the metadata object to the metadata store. - self.metadata[metadata_set][metadata_role] = metadata_object - - # If 'metadata_role' is 'root' or targets metadata, the key and role - # databases must be rebuilt. If 'root', ensure self.consistent_snaptshots - # is updated. - if metadata_set == 'current': - if metadata_role == 'root': - self._rebuild_key_and_role_db() - self.consistent_snapshot = metadata_object['consistent_snapshot'] - - elif metadata_object['_type'] == 'targets': - # TODO: Should we also remove the keys of the delegated roles? - self._import_delegations(metadata_role) - - - - - - def _rebuild_key_and_role_db(self): - """ - - Non-public method that rebuilds the key and role databases from the - currently trusted 'root' metadata object extracted from 'root.json'. - This private method is called when a new/updated 'root' metadata file is - loaded or when updater.refresh() is called. This method will only store - the role information of the top-level roles (i.e., 'root', 'targets', - 'snapshot', 'timestamp'). - - - None. - - - securesystemslib.exceptions.FormatError: - If the 'root' metadata is improperly formatted. - - securesystemslib.exceptions.Error: - If there is an error loading a role contained in the 'root' - metadata. - - - The key and role databases are reloaded for the top-level roles. - - - None. - """ - - # Clobbering this means all delegated metadata files are rendered outdated - # and will need to be reloaded. However, reloading the delegated metadata - # files is avoided here because fetching target information with - # get_one_valid_targetinfo() always causes a refresh of these files. The - # metadata files for delegated roles are also not loaded when the - # repository is first instantiated. Due to this setup, reloading delegated - # roles is not required here. - keydb.create_keydb_from_root_metadata(self.metadata['current']['root'], - self.repository_name) - - roledb.create_roledb_from_root_metadata(self.metadata['current']['root'], - self.repository_name) - - - - - - def _import_delegations(self, parent_role): - """ - - Non-public method that imports all the roles delegated by 'parent_role'. - - - parent_role: - The role whose delegations will be imported. - - - securesystemslib.exceptions.FormatError: - If a key attribute of a delegated role's signing key is - improperly formatted. 
- - securesystemslib.exceptions.Error: - If the signing key of a delegated role cannot not be loaded. - - - The key and role databases are modified to include the newly loaded roles - delegated by 'parent_role'. - - - None. - """ - - current_parent_metadata = self.metadata['current'][parent_role] - - if 'delegations' not in current_parent_metadata: - return - - # This could be quite slow with a large number of delegations. - keys_info = current_parent_metadata['delegations'].get('keys', {}) - roles_info = current_parent_metadata['delegations'].get('roles', []) - - logger.debug('Adding roles delegated from ' + repr(parent_role) + '.') - - # Iterate the keys of the delegated roles of 'parent_role' and load them. - for keyid, keyinfo in keys_info.items(): - if keyinfo['keytype'] in ['rsa', 'ed25519', 'ecdsa', 'ecdsa-sha2-nistp256']: - - # We specify the keyid to ensure that it's the correct keyid - # for the key. - try: - key, _ = sslib_keys.format_metadata_to_key(keyinfo, keyid) - - keydb.add_key(key, repository_name=self.repository_name) - - except exceptions.KeyAlreadyExistsError: - pass - - except (sslib_exceptions.FormatError, sslib_exceptions.Error): - logger.warning('Invalid key: ' + repr(keyid) + '. Aborting role ' + - 'delegation for parent role \'' + parent_role + '\'.') - raise - - else: - logger.warning('Invalid key type for ' + repr(keyid) + '.') - continue - - # Add the roles to the role database. - for roleinfo in roles_info: - try: - # NOTE: roledb.add_role will take care of the case where rolename - # is None. - rolename = roleinfo.get('name') - logger.debug('Adding delegated role: ' + str(rolename) + '.') - roledb.add_role(rolename, roleinfo, self.repository_name) - - except exceptions.RoleAlreadyExistsError: - logger.warning('Role already exists: ' + rolename) - - except Exception: - logger.warning('Failed to add delegated role: ' + repr(rolename) + '.') - raise - - - - - - def refresh(self, unsafely_update_root_if_necessary=True): - """ - - Update the latest copies of the metadata for the top-level roles. The - update request process follows a specific order to ensure the metadata - files are securely updated: - root (if necessary) -> timestamp -> snapshot -> targets. - - Delegated metadata is not refreshed by this method. After this method is - called, the use of get_one_valid_targetinfo() will update delegated - metadata, when required. Calling refresh() ensures that top-level - metadata is up-to-date, so that the target methods can refer to the - latest available content. Thus, refresh() should always be called by the - client before any requests of target file information. - - The expiration time for downloaded metadata is also verified, including - local metadata that the repository claims is up to date. - - If the refresh fails for any reason, then unless - 'unsafely_update_root_if_necessary' is set, refresh will be retried once - after first attempting to update the root metadata file. Only after this - check will the exceptions listed here potentially be raised. - - - unsafely_update_root_if_necessary: - Boolean that indicates whether to unsafely update the Root metadata if - any of the top-level metadata cannot be downloaded successfully. The - Root role is unsafely updated if its current version number is unknown. - - - tuf.exceptions.NoWorkingMirrorError: - If the metadata for any of the top-level roles cannot be updated. - - tuf.exceptions.ExpiredMetadataError: - If any of the top-level metadata is expired and no new version was - found. 
- - - Updates the metadata files of the top-level roles with the latest - information. - - - None. - """ - - # Do the arguments have the correct format? - # This check ensures the arguments have the appropriate - # number of objects and object types, and that all dict - # keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if the check fail. - sslib_formats.BOOLEAN_SCHEMA.check_match( - unsafely_update_root_if_necessary) - - # Update the top-level metadata. The _update_metadata_if_changed() and - # _update_metadata() calls below do NOT perform an update if there - # is insufficient trusted signatures for the specified metadata. - # Raise 'tuf.exceptions.NoWorkingMirrorError' if an update fails. - root_metadata = self.metadata['current']['root'] - - try: - self._ensure_not_expired(root_metadata, 'root') - - except exceptions.ExpiredMetadataError: - # Raise 'tuf.exceptions.NoWorkingMirrorError' if a valid (not - # expired, properly signed, and valid metadata) 'root.json' cannot be - # installed. - if unsafely_update_root_if_necessary: - logger.info('Expired Root metadata was loaded from disk.' - ' Try to update it now.' ) - - # The caller explicitly requested not to unsafely fetch an expired Root. - else: - logger.info('An expired Root metadata was loaded and must be updated.') - raise - - # Update the root metadata and verify it by building a chain of trusted root - # keys from the current trusted root metadata file - self._update_root_metadata(root_metadata) - - # Ensure that the role and key information of the top-level roles is the - # latest. We do this whether or not Root needed to be updated, in order to - # ensure that, e.g., the entries in roledb for top-level roles are - # populated with expected keyid info so that roles can be validated. In - # certain circumstances, top-level metadata might be missing because it was - # marked obsolete and deleted after a failed attempt, and thus we should - # refresh them here as a protective measure. See Issue #736. - self._rebuild_key_and_role_db() - self.consistent_snapshot = \ - self.metadata['current']['root']['consistent_snapshot'] - - # Use default but sane information for timestamp metadata, and do not - # require strict checks on its required length. - self._update_metadata('timestamp', DEFAULT_TIMESTAMP_UPPERLENGTH) - - self._update_metadata_if_changed('snapshot', - referenced_metadata='timestamp') - self._update_metadata_if_changed('targets') - - - - def _update_root_metadata(self, current_root_metadata): - """ - - The root file must be signed by the current root threshold and keys as - well as the previous root threshold and keys. The update process for root - files means that each intermediate root file must be downloaded, to build - a chain of trusted root keys from keys already trusted by the client: - - 1.root -> 2.root -> 3.root - - 3.root must be signed by the threshold and keys of 2.root, and 2.root - must be signed by the threshold and keys of 1.root. - - - current_root_metadata: - The currently held version of root. - - - Updates the root metadata files with the latest information. - - - None. - """ - - def neither_403_nor_404(mirror_error): - if isinstance(mirror_error, tuf.exceptions.FetcherHTTPError): - if mirror_error.status_code in {403, 404}: - return False - return True - - # Temporarily set consistent snapshot. Will be updated to whatever is set - # in the latest root.json after running through the intermediates with - # _update_metadata(). 
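The intermediate-root walk described above, in toy form; the constant and version numbers are invented stand-ins.

```Python
# Try root versions N+1, N+2, ... until a fetch fails with HTTP 403/404.
MAX_NUMBER_ROOT_ROTATIONS = 32  # stand-in for settings.MAX_NUMBER_ROOT_ROTATIONS
trusted_version = 1             # version of the locally trusted root.json

lower_bound = trusted_version + 1
upper_bound = lower_bound + MAX_NUMBER_ROOT_ROTATIONS

for next_version in range(lower_bound, upper_bound):
    filename = "%d.root.json" % next_version
    # A real client downloads 'filename' and verifies it against both the
    # previously trusted and the newly listed root keys before continuing.
    print("would fetch and verify", filename)
```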
- self.consistent_snapshot = True - - # Following the spec, try downloading the N+1th root for a certain maximum - # number of times. - lower_bound = current_root_metadata['version'] + 1 - upper_bound = lower_bound + settings.MAX_NUMBER_ROOT_ROTATIONS - - # Try downloading the next root. - for next_version in range(lower_bound, upper_bound): - try: - # Thoroughly verify it. - self._update_metadata('root', DEFAULT_ROOT_UPPERLENGTH, - version=next_version) - # When we run into HTTP 403/404 error from ALL mirrors, break out of - # loop, because the next root metadata file is most likely missing. - except exceptions.NoWorkingMirrorError as exception: - for mirror_error in exception.mirror_errors.values(): - # Otherwise, reraise the error, because it is not a simple HTTP - # error. - if neither_403_nor_404(mirror_error): - logger.info('Misc error for root version ' + str(next_version)) - raise - else: - logger.debug('HTTP error for root version ' + str(next_version)) - # If we are here, then we ran into only 403 / 404 errors, which are - # good reasons to suspect that the next root metadata file does not - # exist. - break - - # Ensure that the role and key information of the top-level roles is the - # latest. We do this whether or not Root needed to be updated, in order - # to ensure that, e.g., the entries in roledb for top-level roles are - # populated with expected keyid info so that roles can be validated. In - # certain circumstances, top-level metadata might be missing because it - # was marked obsolete and deleted after a failed attempt, and thus we - # should refresh them here as a protective measure. See Issue #736. - self._rebuild_key_and_role_db() - - # Set our consistent snapshot property to what the latest root has said. - self.consistent_snapshot = \ - self.metadata['current']['root']['consistent_snapshot'] - - - - def _check_hashes(self, file_object, trusted_hashes): - """ - - Non-public method that verifies multiple secure hashes of 'file_object'. - - - file_object: - A file object. - - trusted_hashes: - A dictionary with hash-algorithm names as keys and hashes as dict values. - The hashes should be in the hexdigest format. Should be Conformant to - 'securesystemslib.formats.HASHDICT_SCHEMA'. - - - securesystemslib.exceptions.BadHashError, if the hashes don't match. - - - Hash digest object is created using the 'securesystemslib.hash' module. - Position within file_object is changed. - - - None. - """ - - # Verify each hash, raise an exception if any hash fails to verify - for algorithm, trusted_hash in trusted_hashes.items(): - digest_object = sslib_hash.digest_fileobject(file_object, - algorithm) - computed_hash = digest_object.hexdigest() - - if trusted_hash != computed_hash: - raise sslib_exceptions.BadHashError(trusted_hash, - computed_hash) - - else: - logger.info('Verified ' + algorithm + ' hash: ' + trusted_hash) - - - - - - def _check_file_length(self, file_object, trusted_file_length): - """ - - Non-public method that ensures the length of 'file_object' is strictly - equal to 'trusted_file_length'. This is a deliberately redundant - implementation designed to complement - download._check_downloaded_length(). - - - file_object: - A file object. - - trusted_file_length: - A non-negative integer that is the trusted length of the file. - - - tuf.exceptions.DownloadLengthMismatchError, if the lengths do not match. - - - Reads the contents of 'file_object' and logs a message if 'file_object' - matches the trusted length. - Position within file_object is changed. - - - None. 
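The hash check above, reduced to standard-library form for illustration; the module itself uses securesystemslib.hash rather than hashlib.

```Python
# Stand-alone equivalent of the _check_hashes loop.
import hashlib
import io

def check_hashes(file_object, trusted_hashes):
    for algorithm, trusted_hash in trusted_hashes.items():
        file_object.seek(0)
        computed_hash = hashlib.new(algorithm, file_object.read()).hexdigest()
        if computed_hash != trusted_hash:
            raise ValueError(algorithm + " hash mismatch")

data = io.BytesIO(b"hello")
check_hashes(data, {"sha256": hashlib.sha256(b"hello").hexdigest()})
```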
- """ - - file_object.seek(0, io.SEEK_END) - observed_length = file_object.tell() - - # Return and log a message if the length 'file_object' is equal to - # 'trusted_file_length', otherwise raise an exception. A hard check - # ensures that a downloaded file strictly matches a known, or trusted, - # file length. - if observed_length != trusted_file_length: - raise exceptions.DownloadLengthMismatchError(trusted_file_length, - observed_length) - - else: - logger.debug('Observed length (' + str(observed_length) +\ - ') == trusted length (' + str(trusted_file_length) + ')') - - - - - - def _get_target_file(self, target_filepath, file_length, file_hashes, - prefix_filename_with_hash): - """ - - Non-public method that safely (i.e., the file length and hash are - strictly equal to the trusted) downloads a target file up to a certain - length, and checks its hashes thereafter. - - - target_filepath: - The target filepath (relative to the repository targets directory) - obtained from TUF targets metadata. - - file_length: - The expected compressed length of the target file. If the file is not - compressed, then it will simply be its uncompressed length. - - file_hashes: - The expected hashes of the target file. - - prefix_filename_with_hash: - Whether to prefix the targets file names with their hash when using - consistent snapshot. - This should be set to False when the served target filenames are not - prefixed with hashes (in this case the server uses other means - to ensure snapshot consistency). - - - tuf.exceptions.NoWorkingMirrorError: - The target could not be fetched. This is raised only when all known - mirrors failed to provide a valid copy of the desired target file. - - - The target file is downloaded from all known repository mirrors in the - worst case. If a valid copy of the target file is found, it is stored in - a temporary file and returned. - - - A file object containing the target. - """ - - if self.consistent_snapshot and prefix_filename_with_hash: - # Note: values() does not return a list in Python 3. Use list() - # on values() for Python 2+3 compatibility. - target_digest = list(file_hashes.values()).pop() - dirname, basename = os.path.split(target_filepath) - target_filepath = os.path.join(dirname, target_digest + '.' + basename) - - file_mirrors = mirrors.get_list_of_mirrors('target', target_filepath, - self.mirrors) - - # file_mirror (URL): error (Exception) - file_mirror_errors = {} - file_object = None - - for file_mirror in file_mirrors: - try: - file_object = download.safe_download(file_mirror, - file_length, self.fetcher) - - # Verify 'file_object' against the expected length and hashes. 
- self._check_file_length(file_object, file_length) - self._check_hashes(file_object, file_hashes) - # If the file verifies, we don't need to try more mirrors - return file_object - - except Exception as exception: - # Remember the error from this mirror, close tempfile if one was opened - logger.debug('Update failed from ' + file_mirror + '.') - file_mirror_errors[file_mirror] = exception - if file_object is not None: - file_object.close() - file_object = None - - logger.debug('Failed to update ' + repr(target_filepath) + ' from' - ' all mirrors: ' + repr(file_mirror_errors)) - raise exceptions.NoWorkingMirrorError(file_mirror_errors) - - - - - - def _verify_root_self_signed(self, signable): - """ - Verify the root metadata in signable is signed by a threshold of keys, - where the threshold and valid keys are defined by itself - """ - threshold = signable['signed']['roles']['root']['threshold'] - keyids = signable['signed']['roles']['root']['keyids'] - keys = signable['signed']['keys'] - signatures = signable['signatures'] - signed = sslib_formats.encode_canonical( - signable['signed']).encode('utf-8') - verified_sig_keyids = set() - - for signature in signatures: - keyid = signature['keyid'] - - # At this point we are verifying that the root metadata is signed by a - # threshold of keys listed in the current root role, therefore skip - # keys with a keyid that is not listed in the current root role. - if keyid not in keyids: - continue - - key = keys[keyid] - # The ANYKEY_SCHEMA check in verify_signature expects the keydict to - # include a keyid - key['keyid'] = keyid - valid_sig = sslib_keys.verify_signature(key, signature, signed) - - if valid_sig: - verified_sig_keyids.add(keyid) - - if len(verified_sig_keyids) >= threshold: - return True - return False - - - - - - def _verify_metadata_file(self, metadata_file_object, - metadata_role): - """ - - Non-public method that verifies a metadata file. An exception is - raised if 'metadata_file_object is invalid. There is no - return value. - - - metadata_file_object: - A file object containing the metadata file. - - metadata_role: - The role name of the metadata (e.g., 'root', 'targets', - 'unclaimed'). - - - securesystemslib.exceptions.FormatError: - In case the metadata file is valid JSON, but not valid TUF metadata. - - tuf.exceptions.InvalidMetadataJSONError: - In case the metadata file is not valid JSON. - - tuf.exceptions.ReplayedMetadataError: - In case the downloaded metadata file is older than the current one. - - tuf.exceptions.RepositoryError: - In case the repository is somehow inconsistent; e.g. a parent has not - delegated to a child (contrary to expectations). - - tuf.SignatureError: - In case the metadata file does not have a valid signature. - - - The content of 'metadata_file_object' is read and loaded, the current - position within the file is changed. - - - None. - """ - - metadata_file_object.seek(0) - metadata = metadata_file_object.read().decode('utf-8') - - try: - metadata_signable = sslib_util.load_json_string(metadata) - - except Exception as exception: - raise exceptions.InvalidMetadataJSONError(exception) - - else: - # Ensure the loaded 'metadata_signable' is properly formatted. Raise - # 'securesystemslib.exceptions.FormatError' if not. - formats.check_signable_object_format(metadata_signable) - - # Is 'metadata_signable' expired? 
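_verify_root_self_signed signs and verifies over canonical JSON precisely because the encoding is deterministic:

```Python
# Canonical JSON normalizes key order, so equal dicts encode identically.
from securesystemslib.formats import encode_canonical

assert encode_canonical({"b": 2, "a": 1}) == encode_canonical({"a": 1, "b": 2})
```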
- self._ensure_not_expired(metadata_signable['signed'], metadata_role) - - # We previously verified version numbers in this function, but have since - # moved version number verification to the functions that retrieve - # metadata. - - # Verify the signature on the downloaded metadata object. - valid = sig.verify(metadata_signable, metadata_role, - self.repository_name) - - if not valid: - raise sslib_exceptions.BadSignatureError(metadata_role) - - # For root metadata, verify the downloaded root metadata object with the - # new threshold of new signatures contained within the downloaded root - # metadata object - # NOTE: we perform the checks on root metadata here because this enables - # us to perform the check before the tempfile is persisted. Furthermore, - # by checking here we can easily perform the check for each download - # mirror. Whereas if we check after _verify_metadata_file we may be - # persisting invalid files and we cannot try copies of the file from other - # mirrors. - if valid and metadata_role == 'root': - valid = self._verify_root_self_signed(metadata_signable) - if not valid: - raise sslib_exceptions.BadSignatureError(metadata_role) - - - - - - def _get_metadata_file(self, metadata_role, remote_filename, - upperbound_filelength, expected_version): - """ - - Non-public method that tries downloading, up to a certain length, a - metadata file from a list of known mirrors. As soon as the first valid - copy of the file is found, the downloaded file is returned and the - remaining mirrors are skipped. - - - metadata_role: - The role name of the metadata (e.g., 'root', 'targets', 'unclaimed'). - - remote_filename: - The relative file path (on the remove repository) of 'metadata_role'. - - upperbound_filelength: - The expected length, or upper bound, of the metadata file to be - downloaded. - - expected_version: - The expected and required version number of the 'metadata_role' file - downloaded. 'expected_version' is an integer. - - - tuf.exceptions.NoWorkingMirrorError: - The metadata could not be fetched. This is raised only when all known - mirrors failed to provide a valid copy of the desired metadata file. - - - The file is downloaded from all known repository mirrors in the worst - case. If a valid copy of the file is found, it is stored in a temporary - file and returned. - - - A file object containing the metadata. - """ - - file_mirrors = mirrors.get_list_of_mirrors('meta', remote_filename, - self.mirrors) - - # file_mirror (URL): error (Exception) - file_mirror_errors = {} - file_object = None - - for file_mirror in file_mirrors: - try: - file_object = download.unsafe_download(file_mirror, - upperbound_filelength, self.fetcher) - file_object.seek(0) - - # Verify 'file_object' according to the callable function. - # 'file_object' is also verified if decompressed above (i.e., the - # uncompressed version). - metadata_signable = \ - sslib_util.load_json_string(file_object.read().decode('utf-8')) - - # Determine if the specification version number is supported. It is - # assumed that "spec_version" is in (major.minor.fix) format, (for - # example: "1.4.3") and that releases with the same major version - # number maintain backwards compatibility. Consequently, if the major - # version number of new metadata equals our expected major version - # number, the new metadata is safe to parse. 
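The compatibility rule just described, as a stand-alone check; the code's own version constant is stubbed with an example value.

```Python
# Major spec versions must match; a differing minor version only warns.
CODE_SPEC_VERSION = "1.0.0"  # stand-in for tuf.SPECIFICATION_VERSION

def spec_version_ok(metadata_spec_version):
    meta_major, meta_minor = (
        int(part) for part in metadata_spec_version.split(".")[:2]
    )
    code_major, code_minor = (
        int(part) for part in CODE_SPEC_VERSION.split(".")[:2]
    )
    if meta_major != code_major:
        return False
    if meta_minor != code_minor:
        print("minor spec_version differs; the update continues")
    return True

assert spec_version_ok("1.4.3")
assert not spec_version_ok("2.0.0")
```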
- try: - metadata_spec_version = metadata_signable['signed']['spec_version'] - metadata_spec_version_split = metadata_spec_version.split('.') - metadata_spec_major_version = int(metadata_spec_version_split[0]) - metadata_spec_minor_version = int(metadata_spec_version_split[1]) - - code_spec_version_split = tuf.SPECIFICATION_VERSION.split('.') - code_spec_major_version = int(code_spec_version_split[0]) - code_spec_minor_version = int(code_spec_version_split[1]) - - if metadata_spec_major_version != code_spec_major_version: - raise exceptions.UnsupportedSpecificationError( - 'Downloaded metadata that specifies an unsupported ' - 'spec_version. This code supports major version number: ' + - repr(code_spec_major_version) + '; however, the obtained ' - 'metadata lists version number: ' + str(metadata_spec_version)) - - #report to user if minor versions do not match, continue with update - if metadata_spec_minor_version != code_spec_minor_version: - logger.info("Downloaded metadata that specifies a different minor " + - "spec_version. This code has version " + - str(tuf.SPECIFICATION_VERSION) + - " and the metadata lists version number " + - str(metadata_spec_version) + - ". The update will continue as the major versions match.") - - except (ValueError, TypeError) as error: - raise sslib_exceptions.FormatError('Improperly' - ' formatted spec_version, which must be in major.minor.fix format') from error - - # If the version number is unspecified, ensure that the version number - # downloaded is greater than the currently trusted version number for - # 'metadata_role'. - version_downloaded = metadata_signable['signed']['version'] - - if expected_version is not None: - # Verify that the downloaded version matches the version expected by - # the caller. - if version_downloaded != expected_version: - raise exceptions.BadVersionNumberError('Downloaded' - ' version number: ' + repr(version_downloaded) + '. Version' - ' number MUST be: ' + repr(expected_version)) - - # The caller does not know which version to download. Verify that the - # downloaded version is at least greater than the one locally - # available. - else: - # Verify that the version number of the locally stored - # 'timestamp.json', if available, is less than what was downloaded. - # Otherwise, accept the new timestamp with version number - # 'version_downloaded'. - - try: - current_version = \ - self.metadata['current'][metadata_role]['version'] - - if version_downloaded < current_version: - raise exceptions.ReplayedMetadataError(metadata_role, - version_downloaded, current_version) - - except KeyError: - logger.info(metadata_role + ' not available locally.') - - self._verify_metadata_file(file_object, metadata_role) - - except Exception as exception: - # Remember the error from this mirror, and "reset" the target file. - logger.debug('Update failed from ' + file_mirror + '.') - file_mirror_errors[file_mirror] = exception - if file_object: - file_object.close() - file_object = None - - else: - break - - if file_object: - return file_object - - else: - logger.debug('Failed to update ' + repr(remote_filename) + ' from all' - ' mirrors: ' + repr(file_mirror_errors)) - raise exceptions.NoWorkingMirrorError(file_mirror_errors) - - - - - - def _update_metadata(self, metadata_role, upperbound_filelength, version=None): - """ - - Non-public method that downloads, verifies, and 'installs' the metadata - belonging to 'metadata_role'. 
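# Version acceptance in the block above, condensed: a caller-supplied
# expected version must match exactly; with no expectation, anything older
# than the locally trusted version is rejected as a replay (rollback).
def version_is_acceptable(downloaded, expected=None, trusted_local=None):
    if expected is not None:
        return downloaded == expected  # the referencing metadata named an exact version
    if trusted_local is not None and downloaded < trusted_local:
        return False  # replayed (rolled-back) metadata
    return True  # no local copy yet, or downloaded >= trusted_local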
Calling this method implies that the - 'metadata_role' on the repository is newer than the client's, and thus - needs to be re-downloaded. The current and previous metadata stores are - updated if the newly downloaded metadata is successfully downloaded and - verified. This method also assumes that the store of top-level metadata - is the latest and exists. - - - metadata_role: - The name of the metadata. This is a role name and should not end - in '.json'. Examples: 'root', 'targets', 'targets/linux/x86'. - - upperbound_filelength: - The expected length, or upper bound, of the metadata file to be - downloaded. - - version: - The expected and required version number of the 'metadata_role' file - downloaded. 'expected_version' is an integer. - - - tuf.exceptions.NoWorkingMirrorError: - The metadata cannot be updated. This is not specific to a single - failure but rather indicates that all possible ways to update the - metadata have been tried and failed. - - - The metadata file belonging to 'metadata_role' is downloaded from a - repository mirror. If the metadata is valid, it is stored in the - metadata store. - - - None. - """ - - # Construct the metadata filename as expected by the download/mirror - # modules. - metadata_filename = metadata_role + '.json' - - # Attempt a file download from each mirror until the file is downloaded and - # verified. If the signature of the downloaded file is valid, proceed, - # otherwise log a warning and try the next mirror. 'metadata_file_object' - # is the file-like object returned by 'download.py'. 'metadata_signable' - # is the object extracted from 'metadata_file_object'. Metadata saved to - # files are regarded as 'signable' objects, conformant to - # 'tuf.formats.SIGNABLE_SCHEMA'. - # - # Some metadata (presently timestamp) will be downloaded "unsafely", in the - # sense that we can only estimate its true length and know nothing about - # its version. This is because not all metadata will have other metadata - # for it; otherwise we will have an infinite regress of metadata signing - # for each other. In this case, we will download the metadata up to the - # best length we can get for it, not request a specific version, but - # perform the rest of the checks (e.g., signature verification). - - remote_filename = metadata_filename - filename_version = '' - - if self.consistent_snapshot and version: - filename_version = version - dirname, basename = os.path.split(remote_filename) - remote_filename = os.path.join( - dirname, str(filename_version) + '.' + basename) - - metadata_file_object = \ - self._get_metadata_file(metadata_role, remote_filename, - upperbound_filelength, version) - - # The metadata has been verified. Move the metadata file into place. - # First, move the 'current' metadata file to the 'previous' directory - # if it exists. - current_filepath = os.path.join(self.metadata_directory['current'], - metadata_filename) - current_filepath = os.path.abspath(current_filepath) - sslib_util.ensure_parent_dir(current_filepath) - - previous_filepath = os.path.join(self.metadata_directory['previous'], - metadata_filename) - previous_filepath = os.path.abspath(previous_filepath) - - if os.path.exists(current_filepath): - # Previous metadata might not exist, say when delegations are added. - sslib_util.ensure_parent_dir(previous_filepath) - shutil.move(current_filepath, previous_filepath) - - # Next, move the verified updated metadata file to the 'current' directory. 
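# The consistent-snapshot path rewrite above as one helper: version 42 of
# 'metadata/root.json' is requested as 'metadata/42.root.json'. posixpath is
# used here so the example is deterministic; the deleted code uses os.path.
import posixpath

def versioned_remote_filename(remote_filename, version):
    dirname, basename = posixpath.split(remote_filename)
    return posixpath.join(dirname, str(version) + '.' + basename)

assert versioned_remote_filename('metadata/root.json', 42) == 'metadata/42.root.json'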
- metadata_file_object.seek(0) - metadata_signable = \ - sslib_util.load_json_string(metadata_file_object.read().decode('utf-8')) - - sslib_util.persist_temp_file(metadata_file_object, current_filepath) - - # Extract the metadata object so we can store it to the metadata store. - # 'current_metadata_object' set to 'None' if there is not an object - # stored for 'metadata_role'. - updated_metadata_object = metadata_signable['signed'] - current_metadata_object = self.metadata['current'].get(metadata_role) - - # Finally, update the metadata and fileinfo stores, and rebuild the - # key and role info for the top-level roles if 'metadata_role' is root. - # Rebuilding the key and role info is required if the newly-installed - # root metadata has revoked keys or updated any top-level role information. - logger.debug('Updated ' + repr(current_filepath) + '.') - self.metadata['previous'][metadata_role] = current_metadata_object - self.metadata['current'][metadata_role] = updated_metadata_object - self._update_versioninfo(metadata_filename) - - - - - - def _update_metadata_if_changed(self, metadata_role, - referenced_metadata='snapshot'): - """ - - Non-public method that updates the metadata for 'metadata_role' if it has - changed. All top-level roles other than the 'timestamp' and 'root' - roles are updated by this method. The 'timestamp' role is always - downloaded from a mirror without first checking if it has been updated; - it is updated in refresh() by calling _update_metadata('timestamp'). - The 'root' role is always updated first and verified based on the trusted - root metadata file the client already has a copy of; it is updated in - refresh() by calling _update_root_metadata(). - This method is also called for delegated role metadata, which are - referenced by 'snapshot'. - - If the metadata needs to be updated but an update cannot be obtained, - this method will delete the file. - - Due to the way in which metadata files are updated, it is expected that - 'referenced_metadata' is not out of date and trusted. The refresh() - method updates the top-level roles in 'root -> timestamp -> snapshot -> - targets' order. For delegated metadata, the parent role is - updated before the delegated role. Taking into account that - 'referenced_metadata' is updated and verified before 'metadata_role', - this method determines if 'metadata_role' has changed by checking - the 'meta' field of the newly updated 'referenced_metadata'. - - - metadata_role: - The name of the metadata. This is a role name and should not end - in '.json'. Examples: 'root', 'targets', 'unclaimed'. - - referenced_metadata: - This is the metadata that provides the role information for - 'metadata_role'. For the top-level roles, the 'snapshot' role - is the referenced metadata for the 'root', and 'targets' roles. - The 'timestamp' metadata is always downloaded regardless. In - other words, it is updated by calling _update_metadata('timestamp') - and not by this method. The referenced metadata for 'snapshot' - is 'timestamp'. See refresh(). - - - tuf.exceptions.ExpiredMetadataError: - If local metadata is expired and newer metadata is not available. - - tuf.exceptions.NoWorkingMirrorError: - If 'metadata_role' could not be downloaded after determining that it - had changed. - - tuf.exceptions.RepositoryError: - If the referenced metadata is missing. 
- - - If it is determined that 'metadata_role' has been updated, the metadata - store (i.e., self.metadata) is updated with the new metadata and the - affected stores modified (i.e., the previous metadata store is updated). - If the metadata is 'targets' or a delegated targets role, the role - database is updated with the new information, including its delegated - roles. - - - None. - """ - - metadata_filename = metadata_role + '.json' - expected_versioninfo = None - - # Ensure the referenced metadata has been loaded. The 'root' role may be - # updated without having 'snapshot' available. - if referenced_metadata not in self.metadata['current']: - raise exceptions.RepositoryError('Cannot update' - ' ' + repr(metadata_role) + ' because ' + referenced_metadata + ' is' - ' missing.') - - # The referenced metadata has been loaded. Extract the new versioninfo for - # 'metadata_role' from it. - else: - logger.debug(repr(metadata_role) + ' referenced in ' + - repr(referenced_metadata)+ '. ' + repr(metadata_role) + - ' may be updated.') - - # Simply return if the metadata for 'metadata_role' has not been updated, - # according to the uncompressed metadata provided by the referenced - # metadata. The metadata is considered updated if its version number is - # strictly greater than its currently trusted version number. - expected_versioninfo = self.metadata['current'][referenced_metadata] \ - ['meta'][metadata_filename] - - if not self._versioninfo_has_been_updated(metadata_filename, - expected_versioninfo): - logger.info(repr(metadata_filename) + ' up-to-date.') - - # Since we have not downloaded a new version of this metadata, we should - # check to see if our local version is stale and notify the user if so. - # This raises tuf.exceptions.ExpiredMetadataError if the metadata we have - # is expired. Resolves issue #322. - self._ensure_not_expired(self.metadata['current'][metadata_role], - metadata_role) - - # TODO: If metadata role is snapshot, we should verify that snapshot's - # hash matches what's listed in timestamp.json per step 3.1 of the - # detailed workflows in the specification - - return - - logger.debug('Metadata ' + repr(metadata_filename) + ' has changed.') - - # The file lengths of metadata are unknown, only their version numbers are - # known. Set an upper limit for the length of the downloaded file for each - # expected role. Note: The Timestamp role is not updated via this - # function. - if metadata_role == 'snapshot': - upperbound_filelength = settings.DEFAULT_SNAPSHOT_REQUIRED_LENGTH - - # The metadata is considered Targets (or delegated Targets metadata). - else: - upperbound_filelength = settings.DEFAULT_TARGETS_REQUIRED_LENGTH - - try: - self._update_metadata(metadata_role, upperbound_filelength, - expected_versioninfo['version']) - - except Exception: - # The current metadata we have is not current but we couldn't get new - # metadata. We shouldn't use the old metadata anymore. This will get rid - # of in-memory knowledge of the role and delegated roles, but will leave - # delegated metadata files as current files on disk. - # - # TODO: Should we get rid of the delegated metadata files? We shouldn't - # need to, but we need to check the trust implications of the current - # implementation. 
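# The freshness gate described above, in isolation: a role is re-downloaded
# only when the version listed by its referencing metadata is strictly
# greater than the trusted local version; a missing local copy always
# triggers a fetch.
def needs_update(local_versioninfo, referenced_versioninfo):
    if local_versioninfo is None:
        return True  # nothing trusted locally yet
    return referenced_versioninfo['version'] > local_versioninfo['version']

assert needs_update({'version': 3}, {'version': 3}) is False  # up to date
assert needs_update({'version': 3}, {'version': 4}) is True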
- self._delete_metadata(metadata_role) - logger.warning('Metadata for ' + repr(metadata_role) + ' cannot' - ' be updated.') - raise - - else: - # We need to import the delegated roles of 'metadata_role', since its - # list of delegations might have changed from what was previously - # loaded. - # TODO: Should we remove the keys of the delegated roles? - self._import_delegations(metadata_role) - - - - - - def _versioninfo_has_been_updated(self, metadata_filename, new_versioninfo): - """ - - Non-public method that determines whether the current versioninfo of - 'metadata_filename' is less than 'new_versioninfo' (i.e., the version - number has been incremented). The 'new_versioninfo' argument should be - extracted from the latest copy of the metadata that references - 'metadata_filename'. Example: 'root.json' would be referenced by - 'snapshot.json'. - - 'new_versioninfo' should only be 'None' if this is for updating - 'root.json' without having 'snapshot.json' available. - - - metadata_filename: - The metadata filename for the role. For the 'root' role, - 'metadata_filename' would be 'root.json'. - - new_versioninfo: - A dict object representing the new file information for - 'metadata_filename'. 'new_versioninfo' may be 'None' when - updating 'root' without having 'snapshot' available. This - dict conforms to 'tuf.formats.VERSIONINFO_SCHEMA' and has - the form: - - {'version': 288} - - - None. - - - If there is no versioninfo currently loaded for 'metadata_filename', try - to load it. - - - Boolean. True if the versioninfo has changed, False otherwise. - """ - - # If there is no versioninfo currently stored for 'metadata_filename', - # try to load the file, calculate the versioninfo, and store it. - if metadata_filename not in self.versioninfo: - self._update_versioninfo(metadata_filename) - - # Return True if there is no versioninfo for 'metadata_filename': - # 'metadata_filename' is not in the 'self.versioninfo' store - # and it doesn't exist in the 'current' metadata location. - if self.versioninfo[metadata_filename] is None: - return True - - current_versioninfo = self.versioninfo[metadata_filename] - - logger.debug('New version for ' + repr(metadata_filename) + - ': ' + repr(new_versioninfo['version']) + '. Old version: ' + - repr(current_versioninfo['version'])) - - if new_versioninfo['version'] > current_versioninfo['version']: - return True - - else: - return False - - - - - - def _update_versioninfo(self, metadata_filename): - """ - - Non-public method that updates the 'self.versioninfo' entry for the - metadata belonging to 'metadata_filename'. If the current metadata for - 'metadata_filename' cannot be loaded, set its 'versioninfo' to 'None' to - signal that it is not in 'self.versioninfo' AND it also doesn't exist - locally. - - - metadata_filename: - The metadata filename for the role. For the 'root' role, - 'metadata_filename' would be 'root.json'. - - - None. - - - The version number of 'metadata_filename' is calculated and stored in its - corresponding entry in 'self.versioninfo'. - - - None. - """ - - # In case we delayed loading the metadata and didn't do it in - # __init__ (such as with delegated metadata), then get the version - # info now. - - # Save the path to the current metadata file for 'metadata_filename'. - current_filepath = os.path.join(self.metadata_directory['current'], - metadata_filename) - - # If the path is invalid, simply return and leave versioninfo unset.
- if not os.path.exists(current_filepath): - self.versioninfo[metadata_filename] = None - return - - # Extract the version information from the trusted snapshot role and save - # it to the 'self.versioninfo' store. - if metadata_filename == 'timestamp.json': - trusted_versioninfo = \ - self.metadata['current']['timestamp']['version'] - - # When updating snapshot.json, the client either (1) has a copy of - # snapshot.json, or (2) is in the process of obtaining it by first - # downloading timestamp.json. Note: Clients are allowed to have only - # root.json initially, and perform a refresh of top-level metadata to - # obtain the remaining roles. - elif metadata_filename == 'snapshot.json': - - # Use the version number of the currently trusted snapshot.json itself, - # because the version number specified in timestamp.json may be greater - # than the version in the client's copy of snapshot.json. - try: - timestamp_version_number = self.metadata['current']['snapshot']['version'] - trusted_versioninfo = formats.make_versioninfo( - timestamp_version_number) - - except KeyError: - trusted_versioninfo = \ - self.metadata['current']['timestamp']['meta']['snapshot.json'] - - else: - - try: - # The metadata file names in 'self.metadata' exclude the role - # extension. Strip the '.json' extension when checking if - # 'metadata_filename' currently exists. - targets_version_number = \ - self.metadata['current'][metadata_filename[:-len('.json')]]['version'] - trusted_versioninfo = \ - formats.make_versioninfo(targets_version_number) - - except KeyError: - trusted_versioninfo = \ - self.metadata['current']['snapshot']['meta'][metadata_filename] - - self.versioninfo[metadata_filename] = trusted_versioninfo - - - - - - def _fileinfo_has_changed(self, metadata_filename, new_fileinfo): - """ - - Non-public method that determines whether the current fileinfo of - 'metadata_filename' differs from 'new_fileinfo'. The 'new_fileinfo' - argument should be extracted from the latest copy of the metadata that - references 'metadata_filename'. Example: 'root.json' would be referenced - by 'snapshot.json'. - - 'new_fileinfo' should only be 'None' if this is for updating 'root.json' - without having 'snapshot.json' available. - - - metadata_filename: - The metadata filename for the role. For the 'root' role, - 'metadata_filename' would be 'root.json'. - - new_fileinfo: - A dict object representing the new file information for - 'metadata_filename'. 'new_fileinfo' may be 'None' when - updating 'root' without having 'snapshot' available. This - dict conforms to 'tuf.formats.TARGETS_FILEINFO_SCHEMA' and has - the form: - - {'length': 23423, - 'hashes': {'sha256': adfbc32343..}} - - - None. - - - If there is no fileinfo currently loaded for 'metadata_filename', - try to load it. - - - Boolean. True if the fileinfo has changed, False otherwise. - """ - - # If there is no fileinfo currently stored for 'metadata_filename', - # try to load the file, calculate the fileinfo, and store it. - if metadata_filename not in self.fileinfo: - self._update_fileinfo(metadata_filename) - - # Return True if there is no fileinfo for 'metadata_filename': - # 'metadata_filename' is not in the 'self.fileinfo' store - # and it doesn't exist in the 'current' metadata location. - if self.fileinfo[metadata_filename] is None: - return True - - current_fileinfo = self.fileinfo[metadata_filename] - - if current_fileinfo['length'] != new_fileinfo['length']: - return True - -
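# A self-contained condensation of the comparison performed by
# _fileinfo_has_changed() above and continued below: any single matching
# algorithm/digest pair means "unchanged", so metadata may add or drop hash
# algorithms without forcing re-downloads.
def fileinfo_changed(current, new):
    if current is None:
        return True  # no local fileinfo recorded
    if current['length'] != new['length']:
        return True
    for algorithm, digest in new['hashes'].items():
        if current['hashes'].get(algorithm) == digest:
            return False  # one matching digest is enough
    return True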
# Now compare hashes. Note that the reason we can't just do a simple - # equality check on the fileinfo dicts is that we want to support the - # case where the hash algorithms listed in the metadata have changed - # without that forcing every file to be considered out of date, and the - # case where not all listed hash algorithms can be calculated on the - # specific client. - for algorithm, hash_value in new_fileinfo['hashes'].items(): - # We're only looking for a single match. This isn't a security - # check, we just want to prevent unnecessary downloads. - if algorithm in current_fileinfo['hashes']: - if hash_value == current_fileinfo['hashes'][algorithm]: - return False - - return True - - - - - - def _update_fileinfo(self, metadata_filename): - """ - - Non-public method that updates the 'self.fileinfo' entry for the metadata - belonging to 'metadata_filename'. If the 'current' metadata for - 'metadata_filename' cannot be loaded, set its 'fileinfo' to 'None' to - signal that it is not in 'self.fileinfo' AND it also doesn't exist - locally. - - - metadata_filename: - The metadata filename for the role. For the 'root' role, - 'metadata_filename' would be 'root.json'. - - - None. - - - The file details of 'metadata_filename' are calculated and - stored in 'self.fileinfo'. - - - None. - """ - - # In case we delayed loading the metadata and didn't do it in - # __init__ (such as with delegated metadata), then get the file - # info now. - - # Save the path to the current metadata file for 'metadata_filename'. - current_filepath = os.path.join(self.metadata_directory['current'], - metadata_filename) - - # If the path is invalid, simply return and leave fileinfo unset. - if not os.path.exists(current_filepath): - self.fileinfo[metadata_filename] = None - return - - # Extract the file information from the actual file and save it - # to the fileinfo store. - file_length, hashes = sslib_util.get_file_details(current_filepath) - metadata_fileinfo = formats.make_targets_fileinfo(file_length, hashes) - self.fileinfo[metadata_filename] = metadata_fileinfo - - - - - - - - def _move_current_to_previous(self, metadata_role): - """ - - Non-public method that moves the current metadata file for 'metadata_role' - to the previous directory. - - - metadata_role: - The name of the metadata. This is a role name and should not end - in '.json'. Examples: 'root', 'targets', 'targets/linux/x86'. - - - None. - - - The metadata file for 'metadata_role' is removed from 'current' - and moved to the 'previous' directory. - - - None. - """ - - # Get the 'current' and 'previous' full file paths for 'metadata_role' - metadata_filepath = metadata_role + '.json' - previous_filepath = os.path.join(self.metadata_directory['previous'], - metadata_filepath) - current_filepath = os.path.join(self.metadata_directory['current'], - metadata_filepath) - - # Remove the previous path if it exists. - if os.path.exists(previous_filepath): - os.remove(previous_filepath) - - # Move the current path to the previous path. - if os.path.exists(current_filepath): - sslib_util.ensure_parent_dir(previous_filepath) - os.rename(current_filepath, previous_filepath) - - - - - - def _delete_metadata(self, metadata_role): - """ - - Non-public method that removes all (current) knowledge of 'metadata_role'. - The metadata belonging to 'metadata_role' is removed from the current - 'self.metadata' store and from the role database. The 'root.json' role - file is never removed. - - - metadata_role: - The name of the metadata.
This is a role name and should not end - in '.json'. Examples: 'root', 'targets', 'targets/linux/x86'. - - - None. - - - The role database is modified and the metadata for 'metadata_role' - removed from the 'self.metadata' store. - - - None. - """ - - # The root metadata role is never deleted without a replacement. - if metadata_role == 'root': - return - - # Get rid of the current metadata file. - self._move_current_to_previous(metadata_role) - - # Remove knowledge of the role. - if metadata_role in self.metadata['current']: - del self.metadata['current'][metadata_role] - roledb.remove_role(metadata_role, self.repository_name) - - - - - - def _ensure_not_expired(self, metadata_object, metadata_rolename): - """ - - Non-public method that raises an exception if the specified metadata - has expired. - - - metadata_object: - The metadata to be checked for expiration, a - 'tuf.formats.ANYROLE_SCHEMA' object. - - metadata_rolename: - The name of the metadata. This is a role name and should not end - in '.json'. Examples: 'root', 'targets', 'targets/linux/x86'. - - - tuf.exceptions.ExpiredMetadataError: - If 'metadata_rolename' has expired. - securesystemslib.exceptions.FormatError: - If the expiration cannot be parsed correctly. - - None. - - - None. - """ - - # Extract the expiration time. Convert it to a unix timestamp and compare it - # against the current time.time() (also in Unix/POSIX time format, although - # with microseconds attached). - expires_datetime = formats.expiry_string_to_datetime( - metadata_object['expires']) - expires_timestamp = formats.datetime_to_unix_timestamp(expires_datetime) - - current_time = int(time.time()) - if expires_timestamp <= current_time: - message = 'Metadata '+repr(metadata_rolename)+' expired on ' + \ - expires_datetime.ctime() + ' (UTC).' - raise exceptions.ExpiredMetadataError(message) - - - - - - def all_targets(self): - """ - - - NOTE: This function is deprecated. Its behavior with regard to which - delegating Targets roles are trusted to determine how to validate a - delegated Targets role is NOT WELL DEFINED. Please transition to use of - get_one_valid_targetinfo()! - - Get a list of the target information for all the trusted targets on the - repository. This list also includes all the targets of delegated roles. - Targets of the list returned are ordered according to the trusted order of - the delegated roles, where parent roles come before children. The list - conforms to 'tuf.formats.TARGETINFOS_SCHEMA' and has the form: - - [{'filepath': 'a/b/c.txt', - 'fileinfo': {'length': 13323, - 'hashes': {'sha256': dbfac345..}} - ...] - - - None. - - - tuf.exceptions.RepositoryError: - If the metadata for the 'targets' role is missing from - the 'snapshot' metadata. - - tuf.exceptions.UnknownRoleError: - If one of the roles could not be found in the role database. - - - The metadata for target roles is updated and stored. - - - A list of targets, conformant to - 'tuf.formats.TARGETINFOS_SCHEMA'. - """ - - warnings.warn( - 'Support for all_targets() will be removed in a future release.' - ' get_one_valid_targetinfo() should be used instead.', - DeprecationWarning) - - # Load the most up-to-date targets of the 'targets' role and all - # delegated roles. - self._refresh_targets_metadata(refresh_all_delegated_roles=True) - - # Fetch the targets for the 'targets' role. - all_targets = self._targets_of_role('targets', skip_refresh=True) - - # Fetch the targets of the delegated roles. get_rolenames returns - # all roles available on the repository.
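# The expiry comparison in _ensure_not_expired() above, using only the
# standard library (the module itself converts through tuf.formats):
import time
from datetime import datetime, timezone

def is_expired(expires_iso):  # e.g. '2030-01-01T00:00:00Z'
    expires = datetime.strptime(expires_iso, '%Y-%m-%dT%H:%M:%SZ')
    return expires.replace(tzinfo=timezone.utc).timestamp() <= time.time()

assert is_expired('1970-01-02T00:00:00Z') is True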
- delegated_targets = [] - for role in roledb.get_rolenames(self.repository_name): - if role in roledb.TOP_LEVEL_ROLES: - continue - - else: - delegated_targets.extend(self._targets_of_role(role, skip_refresh=True)) - - all_targets.extend(delegated_targets) - - return all_targets - - - - - - def _refresh_targets_metadata(self, rolename='targets', - refresh_all_delegated_roles=False): - """ - - Non-public method that refreshes the targets metadata of 'rolename'. If - 'refresh_all_delegated_roles' is True, include all the delegations that - follow 'rolename'. The metadata for the 'targets' role is updated in - refresh() by the _update_metadata_if_changed('targets') call, not here. - Delegated roles are not loaded when the repository is first initialized. - They are loaded from disk, updated if they have changed, and stored to - the 'self.metadata' store by this method. This method is called by - get_one_valid_targetinfo(). - - - rolename: - This is a delegated role name and should not end in '.json'. Example: - 'unclaimed'. - - refresh_all_delegated_roles: - Boolean indicating if all the delegated roles available in the - repository (via snapshot.json) should be refreshed. - - - tuf.exceptions.ExpiredMetadataError: - If local metadata is expired and newer metadata is not available. - - tuf.exceptions.RepositoryError: - If the metadata file for the 'targets' role is missing from the - 'snapshot' metadata. - - - The metadata for the delegated roles are loaded and updated if they - have changed. Delegated metadata is removed from the role database if - it has expired. - - - None. - """ - - roles_to_update = [] - - if rolename + '.json' in self.metadata['current']['snapshot']['meta']: - roles_to_update.append(rolename) - - if refresh_all_delegated_roles: - - for role in self.metadata['current']['snapshot']['meta'].keys(): - # snapshot.json keeps track of root.json, targets.json, and delegated - # roles (e.g., django.json, unclaimed.json). Remove the 'targets' role - # because it gets updated when the targets.json file is updated in - # _update_metadata_if_changed('targets') and root. - if role.endswith('.json'): - role = role[:-len('.json')] - if role not in ['root', 'targets', rolename]: - roles_to_update.append(role) - - else: - continue - - # If there is nothing to refresh, we are done. - if not roles_to_update: - return - - logger.debug('Roles to update: ' + repr(roles_to_update) + '.') - - # Iterate 'roles_to_update', and load and update its metadata file if it - # has changed. - for rolename in roles_to_update: - self._load_metadata_from_file('previous', rolename) - self._load_metadata_from_file('current', rolename) - - self._update_metadata_if_changed(rolename) - - - - - - def _targets_of_role(self, rolename, targets=None, skip_refresh=False): - """ - - Non-public method that returns the target information of all the targets - of 'rolename'. The returned information is a list conformant to - 'tuf.formats.TARGETINFOS_SCHEMA', and has the form: - - [{'filepath': 'a/b/c.txt', - 'fileinfo': {'length': 13323, - 'hashes': {'sha256': dbfac345..}} - ...] - - - rolename: - This is a role name and should not end in '.json'. Examples: 'targets', - 'unclaimed'. - - targets: - A list of targets containing target information, conformant to - 'tuf.formats.TARGETINFOS_SCHEMA'. - - skip_refresh: - A boolean indicating if the target metadata for 'rolename' - should be refreshed. - - - tuf.exceptions.UnknownRoleError: - If 'rolename' is not found in the role database. 
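# Deriving the delegated roles to refresh from snapshot's 'meta' dict, as in
# _refresh_targets_metadata() above: every '*.json' entry except root,
# targets, and the role already queued.
def delegated_roles_to_update(snapshot_meta, rolename):
    roles = []
    for name in snapshot_meta:
        if not name.endswith('.json'):
            continue
        role = name[:-len('.json')]
        if role not in ('root', 'targets', rolename):
            roles.append(role)
    return roles

assert delegated_roles_to_update(
    {'root.json': {}, 'targets.json': {}, 'django.json': {}}, 'unclaimed') == ['django']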
- - - The metadata for 'rolename' is refreshed if 'skip_refresh' is False. - - - A list of dict objects containing the target information of all the - targets of 'rolename'. Conformant to - 'tuf.formats.TARGETINFOS_SCHEMA'. - """ - - if targets is None: - targets = [] - - targets_of_role = list(targets) - logger.debug('Getting targets of role: ' + repr(rolename) + '.') - - if not roledb.role_exists(rolename, self.repository_name): - raise exceptions.UnknownRoleError(rolename) - - # We do not need to worry about the target paths being trusted because - # this is enforced before any new metadata is accepted. - if not skip_refresh: - self._refresh_targets_metadata(rolename) - - # Do we have metadata for 'rolename'? - if rolename not in self.metadata['current']: - logger.debug('No metadata for ' + repr(rolename) + '.' - ' Unable to determine targets.') - return [] - - # Get the targets specified by the role itself. - for filepath, fileinfo in self.metadata['current'][rolename].get('targets', []).items(): - new_target = {} - new_target['filepath'] = filepath - new_target['fileinfo'] = fileinfo - - targets_of_role.append(new_target) - - return targets_of_role - - - - - - def targets_of_role(self, rolename='targets'): - """ - - - NOTE: This function is deprecated. Use with rolename 'targets' is secure - and the behavior well-defined, but use with any delegated targets role is - not. Please transition use for delegated targets roles to - get_one_valid_targetinfo(). More information is below. - - Return a list of trusted targets directly specified by 'rolename'. - The returned information is a list conformant to - 'tuf.formats.TARGETINFOS_SCHEMA', and has the form: - - [{'filepath': 'a/b/c.txt', - 'fileinfo': {'length': 13323, - 'hashes': {'sha256': dbfac345..}} - ...] - - The metadata of 'rolename' is updated if out of date, including the - metadata of its parent roles (i.e., the minimum roles needed to set the - chain of trust). - - - rolename: - The name of the role whose list of targets are wanted. - The name of the role should start with 'targets'. - - - securesystemslib.exceptions.FormatError: - If 'rolename' is improperly formatted. - - tuf.exceptions.RepositoryError: - If the metadata of 'rolename' cannot be updated. - - tuf.exceptions.UnknownRoleError: - If 'rolename' is not found in the role database. - - - The metadata of updated delegated roles are downloaded and stored. - - - A list of targets, conformant to - 'tuf.formats.TARGETINFOS_SCHEMA'. - """ - - warnings.warn( - 'Support for targets_of_role() will be removed in a future release.' - ' get_one_valid_targetinfo() should be used instead.', - DeprecationWarning) - - # Does 'rolename' have the correct format? - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - formats.RELPATH_SCHEMA.check_match(rolename) - - # If we've been given a delegated targets role, we don't know how to - # validate it without knowing what the delegating role is -- there could - # be several roles that delegate to the given role. Behavior of this - # function for roles other than Targets is not well defined as a result. - # This function is deprecated, but: - # - Usage of this function or a future successor makes sense when the - # role of interest is Targets, since we always know exactly how to - # validate Targets (We use root.). - # - Until it's removed (hopefully soon), we'll try to provide what it has - # always provided. To do this, we fetch and "validate" all delegated - # roles listed by snapshot. 
For delegated roles only, the order of the - # validation impacts the security of the validation -- the most- - # recently-validated role delegating to a role you are currently - # validating determines the expected keyids and threshold of the role - # you are currently validating. That is NOT GOOD. Again, please switch - # to get_one_valid_targetinfo, which is well-defined and secure. - if rolename != 'targets': - self._refresh_targets_metadata(refresh_all_delegated_roles=True) - - - if not roledb.role_exists(rolename, self.repository_name): - raise exceptions.UnknownRoleError(rolename) - - return self._targets_of_role(rolename, skip_refresh=True) - - - - - - def get_one_valid_targetinfo(self, target_filepath): - """ - - Return the target information for 'target_filepath', and update its - corresponding metadata, if necessary. 'target_filepath' must match - exactly as it appears in metadata, and should not contain URL encoding - escapes. - - - target_filepath: - The path to the target file on the repository. This will be relative to - the 'targets' (or equivalent) directory on a given mirror. - - - tuf.exceptions.ExpiredMetadataError: - If local metadata is expired and newer metadata is not available. - - securesystemslib.exceptions.FormatError: - If 'target_filepath' is improperly formatted. - - tuf.exceptions.UnknownTargetError: - If 'target_filepath' was not found. - - Any other unforeseen runtime exception. - - - The metadata for updated delegated roles are downloaded and stored. - - - The target information for 'target_filepath', conformant to - 'tuf.formats.TARGETINFO_SCHEMA'. - """ - - # Does 'target_filepath' have the correct format? - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - formats.RELPATH_SCHEMA.check_match(target_filepath) - - target_filepath = target_filepath.replace('\\', '/') - - if target_filepath.startswith('/'): - raise exceptions.FormatError('The requested target file cannot' - ' contain a leading path separator: ' + repr(target_filepath)) - - # Get target by looking at roles in order of priority tags. - target = self._preorder_depth_first_walk(target_filepath) - - # Raise an exception if the target information could not be retrieved. - if target is None: - raise exceptions.UnknownTargetError(repr(target_filepath) + ' not' - ' found.') - - # Otherwise, return the found target. - else: - return target - - - - - - def _preorder_depth_first_walk(self, target_filepath): - """ - - Non-public method that interrogates the tree of target delegations in - order of appearance (which implicitly order trustworthiness), and returns - the matching target found in the most trusted role. - - - target_filepath: - The path to the target file on the repository. This will be relative to - the 'targets' (or equivalent) directory on a given mirror. - - - tuf.exceptions.ExpiredMetadataError: - If local metadata is expired and newer metadata is not available. - - securesystemslib.exceptions.FormatError: - If 'target_filepath' is improperly formatted. - - tuf.exceptions.RepositoryError: - If 'target_filepath' is not found. - - - The metadata for updated delegated roles are downloaded and stored. - - - The target information for 'target_filepath', conformant to - 'tuf.formats.TARGETINFO_SCHEMA'. - """ - - target = None - current_metadata = self.metadata['current'] - role_names = ['targets'] - visited_role_names = set() - number_of_delegations = settings.MAX_NUMBER_OF_DELEGATIONS - - # Ensure the client has the most up-to-date version of 'targets.json'. 
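# A condensed sketch of the preorder depth-first walk implemented below:
# roles sit on a stack, visited roles are skipped to break cycles, and a
# fixed delegation budget bounds the traversal. 'lookup' and 'children'
# stand in for the metadata accesses done by the real method; terminating
# delegations, which clear the stack in the real code, are omitted here.
def find_target(target_filepath, lookup, children, max_delegations=32):
    stack, visited = ['targets'], set()
    while stack and max_delegations > 0:
        role = stack.pop()
        if role in visited:
            continue
        visited.add(role)
        max_delegations -= 1
        target = lookup(role, target_filepath)
        if target is not None:
            return target  # the most-trusted match wins
        # Push children reversed so the first-listed delegation is tried first.
        stack.extend(reversed(children(role, target_filepath)))
    return None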
- # Raise 'tuf.exceptions.NoWorkingMirrorError' if the changed metadata - # cannot be successfully downloaded and 'tuf.exceptions.RepositoryError' if - # the referenced metadata is missing. Target methods such as this one are - # called after the top-level metadata have been refreshed (i.e., - # updater.refresh()). - self._update_metadata_if_changed('targets') - - # Preorder depth-first traversal of the graph of target delegations. - while target is None and number_of_delegations > 0 and len(role_names) > 0: - - # Pop the role name from the top of the stack. - role_name = role_names.pop(-1) - - # Skip any visited current role to prevent cycles. - if role_name in visited_role_names: - logger.debug('Skipping visited current role ' + repr(role_name)) - continue - - # The metadata for 'role_name' must be downloaded/updated before its - # targets, delegations, and child roles can be inspected. - # self.metadata['current'][role_name] is currently missing. - # _refresh_targets_metadata() does not refresh 'targets.json', it - # expects _update_metadata_if_changed() to have already refreshed it, - # which this function has checked above. - self._refresh_targets_metadata(role_name, - refresh_all_delegated_roles=False) - - role_metadata = current_metadata[role_name] - targets = role_metadata['targets'] - delegations = role_metadata.get('delegations', {}) - child_roles = delegations.get('roles', []) - target = self._get_target_from_targets_role(role_name, targets, - target_filepath) - # After the preorder check, add the current role to the set of visited - # roles. - visited_role_names.add(role_name) - - # And decrement the remaining delegation budget. - number_of_delegations -= 1 - - if target is None: - - child_roles_to_visit = [] - # NOTE: This may be a slow operation if there are many delegated roles. - for child_role in child_roles: - child_role_name = self._visit_child_role(child_role, target_filepath) - if child_role['terminating'] and child_role_name is not None: - logger.debug('Adding child role ' + repr(child_role_name)) - logger.debug('Not backtracking to other roles.') - role_names = [] - child_roles_to_visit.append(child_role_name) - break - - elif child_role_name is None: - logger.debug('Skipping child role ' + repr(child_role_name)) - - else: - logger.debug('Adding child role ' + repr(child_role_name)) - child_roles_to_visit.append(child_role_name) - - # Push 'child_roles_to_visit' in reverse order of appearance onto - # 'role_names'. Roles are popped from the end of the 'role_names' - # list. - child_roles_to_visit.reverse() - role_names.extend(child_roles_to_visit) - - else: - logger.debug('Found target in current role ' + repr(role_name)) - - if target is None and number_of_delegations == 0 and len(role_names) > 0: - logger.debug(repr(len(role_names)) + ' roles left to visit, ' + - 'but allowed to visit at most ' + - repr(settings.MAX_NUMBER_OF_DELEGATIONS) + ' delegations.') - - return target - - - - - - def _get_target_from_targets_role(self, role_name, targets, target_filepath): - """ - - Non-public method that determines whether the targets role with the given - 'role_name' has the target with the name 'target_filepath'. - - - role_name: - The name of the targets role that we are inspecting. - - targets: - The targets of the Targets role with the name 'role_name'. - - target_filepath: - The path to the target file on the repository. This will be relative to - the 'targets' (or equivalent) directory on a given mirror. - - - None. - - - None.
- - - The target information for 'target_filepath', conformant to - 'tuf.formats.TARGETINFO_SCHEMA'. - """ - - # Does the current role name have our target? - logger.debug('Asking role ' + repr(role_name) + ' about' - ' target ' + repr(target_filepath)) - - target = targets.get(target_filepath) - - if target: - logger.debug('Found target ' + target_filepath + ' in role ' + role_name) - return {'filepath': target_filepath, 'fileinfo': target} - - else: - logger.debug( - 'Target file ' + target_filepath + ' not found in role ' + role_name) - return None - - - - - - def _visit_child_role(self, child_role, target_filepath): - """ - - Non-public method that determines whether the given 'target_filepath' - is an allowed path of 'child_role'. - - Ensure that we explore only delegated roles trusted with the target. The - metadata for 'child_role' should have been refreshed prior to this point, - however, the paths/targets that 'child_role' signs for have not been - verified (as intended). The paths/targets that 'child_role' is allowed - to specify in its metadata depends on the delegating role, and thus is - left to the caller to verify. We verify here that 'target_filepath' - is an allowed path according to the delegated 'child_role'. - - TODO: Should the TUF spec restrict the repository to one particular - algorithm? Should we allow the repository to specify in the role - dictionary the algorithm used for these generated hashed paths? - - - child_role: - The delegation targets role object of 'child_role', containing its - paths, path_hash_prefixes, keys, and so on. - - target_filepath: - The path to the target file on the repository. This will be relative to - the 'targets' (or equivalent) directory on a given mirror. - - - None. - - - None. - - - If 'child_role' has been delegated the target with the name - 'target_filepath', then we return the role name of 'child_role'. - - Otherwise, we return None. - """ - - child_role_name = child_role['name'] - child_role_paths = child_role.get('paths') - child_role_path_hash_prefixes = child_role.get('path_hash_prefixes') - - if child_role_path_hash_prefixes is not None: - target_filepath_hash = self._get_target_hash(target_filepath) - for child_role_path_hash_prefix in child_role_path_hash_prefixes: - if target_filepath_hash.startswith(child_role_path_hash_prefix): - return child_role_name - - else: - continue - - elif child_role_paths is not None: - # Is 'child_role_name' allowed to sign for 'target_filepath'? - for child_role_path in child_role_paths: - # A child role path may be an explicit path or glob pattern (Unix - # shell-style wildcards). The child role 'child_role_name' is returned - # if 'target_filepath' is equal to or matches 'child_role_path'. - # Explicit filepaths are also considered matches. A repo maintainer - # might delegate a glob pattern with a leading path separator, while - # the client requests a matching target without a leading path - # separator - make sure to strip any leading path separators so that a - # match is made. Example: "foo.tgz" should match with "/*.tgz". 
- if fnmatch.fnmatch(target_filepath.lstrip(os.sep), child_role_path.lstrip(os.sep)): - logger.debug('Child role ' + repr(child_role_name) + ' is allowed to' - ' sign for ' + repr(target_filepath)) - - return child_role_name - - else: - logger.debug( - 'The given target path ' + repr(target_filepath) + ' does not' - ' match the trusted path or glob pattern: ' + repr(child_role_path)) - continue - - else: - # 'role_name' should have been validated when it was downloaded. - # The 'paths' or 'path_hash_prefixes' fields should not be missing, - # so we raise a format error here in case they are both missing. - raise sslib_exceptions.FormatError(repr(child_role_name) + ' ' - 'has neither a "paths" nor "path_hash_prefixes". At least' - ' one of these attributes must be present.') - - return None - - - - def _get_target_hash(self, target_filepath, hash_function='sha256'): - """ - - Non-public method that computes the hash of 'target_filepath'. This is - useful in conjunction with the "path_hash_prefixes" attribute in a - delegated targets role, which tells us which paths it is implicitly - responsible for. - - - target_filepath: - The path to the target file on the repository. This will be relative to - the 'targets' (or equivalent) directory on a given mirror. - - hash_function: - The algorithm used by the repository to generate the hashes of the - target filepaths. The repository may optionally organize targets into - hashed bins to ease target delegations and role metadata management. - The use of consistent hashing allows for a uniform distribution of - targets into bins. - - - None. - - - None. - - - The hash of 'target_filepath'. - """ - - # Calculate the hash of the filepath to determine which bin to find the - # target. The client currently assumes the repository (i.e., repository - # tool) uses 'hash_function' to generate hashes and UTF-8. - digest_object = sslib_hash.digest(hash_function) - encoded_target_filepath = target_filepath.encode('utf-8') - digest_object.update(encoded_target_filepath) - target_filepath_hash = digest_object.hexdigest() - - return target_filepath_hash - - - - - - def remove_obsolete_targets(self, destination_directory): - """ - - Remove any files that are in 'previous' but not 'current'. This makes it - so if you remove a file from a repository, it actually goes away. The - targets for the 'targets' role and all delegated roles are checked. - - - destination_directory: - The directory containing the target files tracked by TUF. - - - securesystemslib.exceptions.FormatError: - If 'destination_directory' is improperly formatted. - - tuf.exceptions.RepositoryError: - If an error occurred removing any files. - - - Target files are removed from disk. - - - None. - """ - - # Does 'destination_directory' have the correct format? - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATH_SCHEMA.check_match(destination_directory) - - # Iterate the rolenames and verify whether the 'previous' directory - # contains a target no longer found in 'current'. - for role in roledb.get_rolenames(self.repository_name): - if role.startswith('targets'): - if role in self.metadata['previous'] and self.metadata['previous'][role] != None: - for target in self.metadata['previous'][role]['targets']: - if target not in self.metadata['current'][role]['targets']: - # 'target' is only in 'previous', so remove it. - logger.warning('Removing obsolete file: ' + repr(target) + '.') - - # Remove the file if it hasn't been removed already. 
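# Hashed-bin delegation, as checked in _visit_child_role() and computed in
# _get_target_hash() above: hash the target path and match the hex digest
# against each 'path_hash_prefixes' entry.
import hashlib

def in_hashed_bin(target_filepath, path_hash_prefixes):
    digest = hashlib.sha256(target_filepath.encode('utf-8')).hexdigest()
    return any(digest.startswith(prefix) for prefix in path_hash_prefixes)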
- destination = \ - os.path.join(destination_directory, target.lstrip(os.sep)) - try: - os.remove(destination) - - except OSError as e: - # If 'filename' already removed, just log it. - if e.errno == errno.ENOENT: - logger.info('File ' + repr(destination) + ' was already' - ' removed.') - - else: - logger.warning('Failed to remove obsolete target: ' + str(e)) - - else: - logger.debug('Skipping: ' + repr(target) + '. It is still' - ' a current target.') - else: - logger.debug('Skipping: ' + repr(role) + '. Not in the previous' - ' metadata') - - - - - - def updated_targets(self, targets, destination_directory): - """ - - Checks files in the provided directory against the provided file metadata. - - Filters the provided target info, returning a subset: only the metadata - for targets for which the target file either does not exist in the - provided directory, or for which the target file in the provided directory - does not match the provided metadata. - - A principal use of this function is to determine which target files need - to be downloaded. If the caller first makes get_one_valid_targetinfo() - calls to obtain up-to-date, valid metadata for targets, the caller can - then call updated_targets() to determine if that metadata does not match - what exists already on disk (in the provided directory). The returned - values can then be used in download_file() calls to update the files that - didn't exist or didn't match. - - The returned information is a list conformant to - 'tuf.formats.TARGETINFOS_SCHEMA' and has the form: - - [{'filepath': 'a/b/c.txt', - 'fileinfo': {'length': 13323, - 'hashes': {'sha256': dbfac345..}} - ...] - - - targets: - Metadata about the expected state of target files, against which local - files will be checked. This should be a list of target info - dictionaries; i.e. 'targets' must be conformant to - tuf.formats.TARGETINFOS_SCHEMA. - - destination_directory: - The directory containing the target files. - - - securesystemslib.exceptions.FormatError: - If the arguments are improperly formatted. - - - The files in 'targets' are read and their hashes computed. - - - A list of target info dictionaries. The list conforms to - 'tuf.formats.TARGETINFOS_SCHEMA'. - This is a strict subset of the argument 'targets'. - """ - - # Do the arguments have the correct format? - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - formats.TARGETINFOS_SCHEMA.check_match(targets) - sslib_formats.PATH_SCHEMA.check_match(destination_directory) - - # Keep track of the target objects and filepaths of updated targets. - # Return 'updated_targets' and use 'updated_targetpaths' to avoid - # duplicates. - updated_targets = [] - updated_targetpaths = [] - - for target in targets: - # Prepend 'destination_directory' to the target's relative filepath (as - # stored in metadata). Verify the hash of 'target_filepath' against - # each hash listed for its fileinfo. Note: join() discards - # 'destination_directory' if 'filepath' contains a leading path separator - # (i.e., is treated as an absolute path). - filepath = target['filepath'] - if filepath[0] == '/': - filepath = filepath[1:] - target_filepath = os.path.join(destination_directory, filepath) - - if target_filepath in updated_targetpaths: - continue - - # Try one of the algorithm/digest combos for a mismatch. We break - # as soon as we find a mismatch.
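# Why the code above strips a leading '/': join() discards every earlier
# component once it sees an absolute path, which would silently escape
# destination_directory. Demonstrated with posixpath for determinism:
import posixpath

assert posixpath.join('/dest', '/a/b.txt') == '/a/b.txt'      # directory discarded
assert posixpath.join('/dest', 'a/b.txt') == '/dest/a/b.txt'  # intended layout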
- for algorithm, digest in target['fileinfo']['hashes'].items(): - digest_object = None - try: - digest_object = sslib_hash.digest_filename(target_filepath, - algorithm=algorithm) - - # This exception would occur if the target does not exist locally. - except sslib_exceptions.StorageError: - updated_targets.append(target) - updated_targetpaths.append(target_filepath) - break - - # The file does exist locally, check if its hash differs. - if digest_object.hexdigest() != digest: - updated_targets.append(target) - updated_targetpaths.append(target_filepath) - break - - return updated_targets - - - - - - def download_target(self, target, destination_directory, - prefix_filename_with_hash=True): - """ - - Download 'target' and verify it is trusted. - - This will only store the file at 'destination_directory' if the - downloaded file matches the description of the file in the trusted - metadata. - - - target: - The target to be downloaded. Conformant to - 'tuf.formats.TARGETINFO_SCHEMA'. - - destination_directory: - The directory to save the downloaded target file. - - prefix_filename_with_hash: - Whether to prefix the targets file names with their hash when using - consistent snapshot. - This should be set to False when the served target filenames are not - prefixed with hashes (in this case the server uses other means - to ensure snapshot consistency). - Default is True. - - - securesystemslib.exceptions.FormatError: - If 'target' is not properly formatted. - - tuf.exceptions.NoWorkingMirrorError: - If a target could not be downloaded from any of the mirrors. - - Although expected to be rare, there might be OSError exceptions (except - errno.EEXIST) raised when creating the destination directory (if it - doesn't exist). - - - A target file is saved to the local system. - - - None. - """ - - # Do the arguments have the correct format? - # This check ensures the arguments have the appropriate - # number of objects and object types, and that all dict - # keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if the check fail. - formats.TARGETINFO_SCHEMA.check_match(target) - sslib_formats.PATH_SCHEMA.check_match(destination_directory) - - # Extract the target file information. - target_filepath = target['filepath'] - trusted_length = target['fileinfo']['length'] - trusted_hashes = target['fileinfo']['hashes'] - - # Build absolute 'destination' file path. - # Note: join() discards 'destination_directory' if 'target_path' contains - # a leading path separator (i.e., is treated as an absolute path). - destination = os.path.join(destination_directory, - target_filepath.lstrip(os.sep)) - destination = os.path.abspath(destination) - target_dirpath = os.path.dirname(destination) - - # When attempting to create the leaf directory of 'target_dirpath', ignore - # any exceptions raised if the root directory already exists. All other - # exceptions potentially thrown by os.makedirs() are re-raised. - # Note: os.makedirs can raise OSError if the leaf directory already exists - # or cannot be created. - try: - os.makedirs(target_dirpath) - - except OSError as e: - if e.errno == errno.EEXIST: - pass - - else: - raise - - # '_get_target_file()' checks every mirror and returns the first target - # that passes verification. 
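# The try-every-mirror shape referenced in the comment above, condensed:
# remember each mirror's exception and fail only after all mirrors fail.
# 'fetch' and 'verify' stand in for the download and verification steps,
# and RuntimeError for tuf.exceptions.NoWorkingMirrorError.
def first_valid(mirror_urls, fetch, verify):
    errors = {}
    for url in mirror_urls:
        try:
            obj = fetch(url)
            verify(obj)
            return obj  # first valid copy wins; remaining mirrors are skipped
        except Exception as error:
            errors[url] = error
    raise RuntimeError('no working mirror: ' + repr(errors))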
- target_file_object = self._get_target_file(target_filepath, trusted_length, - trusted_hashes, prefix_filename_with_hash) - - sslib_util.persist_temp_file(target_file_object, destination) diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py deleted file mode 100755 index 82d936c072..0000000000 --- a/tuf/developer_tool.py +++ /dev/null @@ -1,1023 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - developer_tool.py - - - Santiago Torres - Zane Fisher - - Based on the work done for 'repository_tool.py' by Vladimir Diaz. - - - January 22, 2014. - - - See LICENCE-MIT OR LICENCE for licensing information. - - - See 'tuf/README-developer-tools.md' for a complete guide on using - 'developer_tool.py'. -""" - -import os -import errno -import logging -import shutil -import tempfile -import json - - -import securesystemslib # pylint: disable=unused-import -from securesystemslib import exceptions as sslib_exceptions -from securesystemslib import formats as sslib_formats -from securesystemslib import storage as sslib_storage -from securesystemslib import util as sslib_util - -from tuf import exceptions -from tuf import formats -from tuf import keydb -from tuf import log # pylint: disable=unused-import -from tuf import repository_lib as repo_lib -from tuf import roledb -from tuf import sig - -from tuf.repository_tool import Targets -from tuf.repository_lib import _check_role_keys -from tuf.repository_lib import _metadata_is_partially_loaded - - -# Copy API -# pylint: disable=unused-import - -# Copy generic repository API functions to be used via `developer_tool` -from tuf.repository_lib import ( - generate_targets_metadata, - create_tuf_client_directory, - disable_console_log_messages) - -# Copy key-related API functions to be used via `developer_tool` -from tuf.repository_lib import ( - import_rsa_privatekey_from_file) - -from securesystemslib.keys import ( - format_keyval_to_metadata, - format_metadata_to_key) - -from securesystemslib.interface import ( - generate_and_write_rsa_keypair, - generate_and_write_rsa_keypair_with_prompt, - generate_and_write_unencrypted_rsa_keypair, - generate_and_write_ecdsa_keypair, - generate_and_write_ecdsa_keypair_with_prompt, - generate_and_write_unencrypted_ecdsa_keypair, - generate_and_write_ed25519_keypair, - generate_and_write_ed25519_keypair_with_prompt, - generate_and_write_unencrypted_ed25519_keypair, - import_rsa_publickey_from_file, - import_ed25519_publickey_from_file, - import_ed25519_privatekey_from_file) - - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - -# The extension of TUF metadata. -from tuf.repository_lib import METADATA_EXTENSION as METADATA_EXTENSION - -# Project configuration filename. This file is intended to hold all of the -# supporting information about the project that's not contained in a usual -# TUF metadata file. 'project.cfg' consists of the following fields: -# -# targets_location: the location of the targets folder. -# -# prefix: the directory location to prepend to the metadata so it -# matches the metadata signed in the repository. -# -# metadata_location: the location of the metadata files. -# -# threshold: the threshold for this project object, it is fixed to -# one in the current version. -# -# public_keys: a list of the public keys used to verify the metadata -# in this project. 
-# -# layout_type: a field describing the directory layout: -# -# repo-like: matches the layout of the repository tool. -# the targets and metadata folders are -# located under a common directory for the -# project. -# -# flat: the targets directory and the -# metadata directory are located in different -# paths. -# -# project_name: The name of the current project, this value is used to -# match the resulting filename with the one in upstream. -PROJECT_FILENAME = 'project.cfg' - -# The targets and metadata directory names. Metadata files are written -# to the staged metadata directory instead of the "live" one. -from tuf.repository_tool import METADATA_DIRECTORY_NAME -from tuf.repository_tool import TARGETS_DIRECTORY_NAME - - -class Project(Targets): - """ - - Simplify the publishing process of third-party projects by handling all of - the bookkeeping, signature handling, and integrity checks of delegated TUF - metadata. 'repository_tool.py' is responsible for publishing and - maintaining metadata of the top-level roles, and 'developer_tool.py' is - used by projects that have been delegated responsibility for a delegated - projects role. Metadata created by this module may then be added to other - metadata available in a TUF repository. - - Project() is the representation of a project's metadata file(s), with the - ability to modify this data in an OOP manner. Project owners do not have to - manually verify that metadata files are properly formatted or that they - contain valid data. - - - project_name: - The name of the metadata file as it should be named in the upstream - repository. - - metadata_directory: - The metadata sub-directory contains the metadata file(s) of this project, - including any of its delegated roles. - - targets_directory: - The targets sub-directory contains the project's target files that are - downloaded by clients and are referenced in its metadata. The hashes and - file lengths are listed in Metadata files so that they are securely - downloaded. Metadata files are similarly referenced in the top-level - metadata. - - file_prefix: - The path string that will be prepended to the generated metadata - (e.g., targets/foo -> targets/prefix/foo) so that it matches the actual - targets location in the upstream repository. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - Creates a project Targets role object, with the same object attributes of - the top-level targets role. - - - None. - """ - - def __init__(self, project_name, metadata_directory, targets_directory, - file_prefix, repository_name='default'): - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly - # formatted. 
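# A short sketch of what these securesystemslib schema checks do:
# check_match() returns None when the value conforms and raises FormatError
# otherwise. The sample values are made up.
from securesystemslib import exceptions as sslib_exceptions
from securesystemslib import formats as sslib_formats

sslib_formats.NAME_SCHEMA.check_match('django')  # conforms; passes silently

try:
  sslib_formats.NAME_SCHEMA.check_match(42)      # not a string
except sslib_exceptions.FormatError as error:
  print('rejected:', error)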
- sslib_formats.NAME_SCHEMA.check_match(project_name) - sslib_formats.PATH_SCHEMA.check_match(metadata_directory) - sslib_formats.PATH_SCHEMA.check_match(targets_directory) - sslib_formats.ANY_STRING_SCHEMA.check_match(file_prefix) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - self.metadata_directory = metadata_directory - self.targets_directory = targets_directory - self.project_name = project_name - self.prefix = file_prefix - self.repository_name = repository_name - - # Layout type defaults to "flat" unless explicitly specified in - # create_new_project(). - self.layout_type = 'flat' - - # Set the top-level Targets object. Set the rolename to be the project's - # name. - super(Project, self).__init__(self.targets_directory, project_name) - - - - - - def write(self, write_partial=False): - """ - - Write all the JSON Metadata objects to their corresponding files. - write() raises an exception if any of the role metadata to be written to - disk is invalid, such as an insufficient threshold of signatures, missing - private keys, etc. - - - write_partial: - A boolean indicating whether partial metadata should be written to - disk. Partial metadata may be written to allow multiple maintainters - to independently sign and update role metadata. write() raises an - exception if a metadata role cannot be written due to not having enough - signatures. - - - securesystemslib.exceptions.Error, if any of the project roles do not - have a minimum threshold of signatures. - - - Creates metadata files in the project's metadata directory. - - - None. - """ - - # Does 'write_partial' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - sslib_formats.BOOLEAN_SCHEMA.check_match(write_partial) - - # At this point the keydb and roledb stores must be fully - # populated, otherwise write() throwns a 'tuf.Repository' exception if - # any of the project roles are missing signatures, keys, etc. - - # Write the metadata files of all the delegated roles of the project. - delegated_rolenames = roledb.get_delegated_rolenames(self.project_name, - self.repository_name) - - for delegated_rolename in delegated_rolenames: - delegated_filename = os.path.join(self.metadata_directory, - delegated_rolename + METADATA_EXTENSION) - - # Ensure the parent directories of 'metadata_filepath' exist, otherwise an - # IO exception is raised if 'metadata_filepath' is written to a - # sub-directory. - sslib_util.ensure_parent_dir(delegated_filename) - - _generate_and_write_metadata(delegated_rolename, delegated_filename, - write_partial, self.targets_directory, prefix=self.prefix, - repository_name=self.repository_name) - - - # Generate the 'project_name' metadata file. 
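# For orientation, the filenames computed in write() follow the pattern
# <metadata_directory>/<rolename> + METADATA_EXTENSION, where the extension
# is '.json'. A sketch with invented values:
import os

metadata_directory = '/tmp/project/metadata'    # hypothetical path
for rolename in ['django', 'django-unstable']:  # hypothetical role names
  print(os.path.join(metadata_directory, rolename + '.json'))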
- targets_filename = self.project_name + METADATA_EXTENSION - targets_filename = os.path.join(self.metadata_directory, targets_filename) - junk, targets_filename = _generate_and_write_metadata(self.project_name, - targets_filename, write_partial, self.targets_directory, - prefix=self.prefix, repository_name=self.repository_name) - - # Save configuration information that is not stored in the project's - # metadata - _save_project_configuration(self.metadata_directory, - self.targets_directory, self.keys, self.prefix, self.threshold, - self.layout_type, self.project_name) - - - - - - def add_verification_key(self, key, expires=None): - """ - - Function as a thin wrapper call for the project._targets call - with the same name. This wrapper is only for usability purposes. - - - key: - The role key to be added, conformant to - 'securesystemslib.formats.ANYKEY_SCHEMA'. Adding a public key to a - role means that its corresponding private key must generate and add - its signture to the role. - - - securesystemslib.exceptions.FormatError, if the 'key' argument is - improperly formatted. - - securesystemslib.exceptions.Error, if the project already contains a key. - - - The role's entries in 'keydb' and 'roledb' are updated. - - - None - """ - - # Verify that this role does not already contain a key. The parent project - # role is restricted to one key. Any of its delegated roles may have - # more than one key. - # TODO: Add condition check for the requirement stated above. - if len(self.keys) > 0: - raise sslib_exceptions.Error("This project already contains a key.") - - super(Project, self).add_verification_key(key, expires) - - - - - - def status(self): - """ - - Determine the status of the project, including its delegated roles. - status() checks if each role provides sufficient public keys, signatures, - and that a valid metadata file is generated if write() were to be called. - Metadata files are temporarily written to check that proper metadata files - is written, where file hashes and lengths are calculated and referenced - by the project. status() does not do a simple check for number of - threshold keys and signatures. - - - None. - - - securesystemslib.exceptions.Error, if the project, or any of its - delegated roles, do not have a minimum threshold of signatures. - - - Generates and writes temporary metadata files. - - - None. - """ - - temp_project_directory = None - - try: - temp_project_directory = tempfile.mkdtemp() - - metadata_directory = os.path.join(temp_project_directory, 'metadata') - targets_directory = self.targets_directory - - os.makedirs(metadata_directory) - - # TODO: We should do the schema check. - filenames = {} - filenames['targets'] = os.path.join(metadata_directory, self.project_name) - - # Delegated roles. 
- delegated_roles = roledb.get_delegated_rolenames(self.project_name, - self.repository_name) - insufficient_keys = [] - insufficient_signatures = [] - - for delegated_role in delegated_roles: - try: - _check_role_keys(delegated_role, self.repository_name) - - except exceptions.InsufficientKeysError: - insufficient_keys.append(delegated_role) - continue - - try: - signable = _generate_and_write_metadata(delegated_role, - filenames['targets'], False, targets_directory, False, - repository_name=self.repository_name) - self._log_status(delegated_role, signable[0], self.repository_name) - - except sslib_exceptions.Error: - insufficient_signatures.append(delegated_role) - - if len(insufficient_keys): - message = 'Delegated roles with insufficient keys: ' +\ - repr(insufficient_keys) - logger.info(message) - return - - if len(insufficient_signatures): - message = 'Delegated roles with insufficient signatures: ' +\ - repr(insufficient_signatures) - logger.info(message) - return - - # Targets role. - try: - _check_role_keys(self.rolename, self.repository_name) - - except exceptions.InsufficientKeysError as e: - logger.info(str(e)) - return - - try: - signable, junk = _generate_and_write_metadata(self.project_name, - filenames['targets'], False, targets_directory, metadata_directory, - self.repository_name) - self._log_status(self.project_name, signable, self.repository_name) - - except exceptions.UnsignedMetadataError as e: - # This error is raised if the metadata has insufficient signatures to - # meet the threshold. - self._log_status(self.project_name, e.signable, self.repository_name) - return - - finally: - shutil.rmtree(temp_project_directory, ignore_errors=True) - - - - - - def _log_status(self, rolename, signable, repository_name): - """ - Non-public function prints the number of (good/threshold) signatures of - 'rolename'. - """ - - status = sig.get_signature_status(signable, rolename, repository_name) - - message = repr(rolename) + ' role contains ' +\ - repr(len(status['good_sigs'])) + ' / ' + repr(status['threshold']) +\ - ' signatures.' - logger.info(message) - - - - - -def _generate_and_write_metadata(rolename, metadata_filename, write_partial, - targets_directory, prefix='', repository_name='default'): - """ - Non-public function that can generate and write the metadata of the - specified 'rolename'. It also increments version numbers if: - - 1. write_partial==True and the metadata is the first to be written. - - 2. write_partial=False (i.e., write()), the metadata was not loaded as - partially written, and a write_partial is not needed. - """ - - metadata = None - - # Retrieve the roleinfo of 'rolename' to extract the needed metadata - # attributes, such as version number, expiration, etc. - roleinfo = roledb.get_roleinfo(rolename, repository_name) - - metadata = generate_targets_metadata(targets_directory, roleinfo['paths'], - roleinfo['version'], roleinfo['expires'], roleinfo['delegations'], - False) - - # Prepend the prefix to the project's filepath to avoid signature errors in - # upstream. 
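# Worked example of the prefixing step below, with invented values: a target
# signed locally as 'foo.txt' must appear in the upstream metadata under the
# project's repository location, e.g. 'targets/unclaimed/foo.txt'.
import os

prefix = 'targets/unclaimed'  # hypothetical upstream location
targets = {'foo.txt': {'length': 31, 'hashes': {'sha256': 'abc123'}}}

prefixed = {}
for element in list(targets):
  junk, relative_target = os.path.split(element)
  prefixed[os.path.join(prefix, relative_target)] = targets[element]

assert list(prefixed) == ['targets/unclaimed/foo.txt']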
- for element in list(metadata['targets']): - junk, relative_target = os.path.split(element) - prefixed_path = os.path.join(prefix, relative_target) - metadata['targets'][prefixed_path] = metadata['targets'][element] - if prefix != '': - del(metadata['targets'][element]) - - signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename, repository_name) - - # Check if the version number of 'rolename' may be automatically incremented, - # depending on whether if partial metadata is loaded or if the metadata is - # written with write() / write_partial(). - # Increment the version number if this is the first partial write. - if write_partial: - temp_signable = repo_lib.sign_metadata(metadata, [], metadata_filename, - repository_name) - temp_signable['signatures'].extend(roleinfo['signatures']) - status = sig.get_signature_status(temp_signable, rolename, - repository_name) - if len(status['good_sigs']) == 0: - metadata['version'] = metadata['version'] + 1 - signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename, repository_name) - - # non-partial write() - else: - if sig.verify(signable, rolename, repository_name): - metadata['version'] = metadata['version'] + 1 - signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename, repository_name) - - # Write the metadata to file if contains a threshold of signatures. - signable['signatures'].extend(roleinfo['signatures']) - - if sig.verify(signable, rolename, repository_name) or write_partial: - repo_lib._remove_invalid_and_duplicate_signatures(signable, repository_name) - storage_backend = sslib_storage.FilesystemBackend() - filename = repo_lib.write_metadata_file(signable, metadata_filename, - metadata['version'], False, storage_backend) - - # 'signable' contains an invalid threshold of signatures. - else: - message = 'Not enough signatures for ' + repr(metadata_filename) - raise sslib_exceptions.Error(message, signable) - - return signable, filename - - - - -def create_new_project(project_name, metadata_directory, - location_in_repository = '', targets_directory=None, key=None, - repository_name='default'): - """ - - Create a new project object, instantiate barebones metadata for the - targets, and return a blank project object. On disk, create_new_project() - only creates the directories needed to hold the metadata and targets files. - The project object returned can be directly modified to meet the designer's - criteria and then written using the method project.write(). - - The project name provided is the one that will be added to the resulting - metadata file as it should be named in upstream. - - - project_name: - The name of the project as it should be called in upstream. For example, - targets/unclaimed/django should have its project_name set to "django" - - metadata_directory: - The directory that will eventually hold the metadata and target files of - the project. - - location_in_repository: - An optional argument to hold the "prefix" or the expected location for - the project files in the "upstream" repository. This value is only - used to sign metadata in a way that it matches the future location - of the files. - - For example, targets/unclaimed/django should have its project name set to - "targets/unclaimed" - - targets_directory: - An optional argument to point the targets directory somewhere else than - the metadata directory if, for example, a project structure already - exists and the user does not want to move it. 
- - key: - The public key to verify the project's metadata. Projects can only - handle one key with a threshold of one. If a project were to modify it's - key it should be removed and updated. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted or if the public key is not a valid one (if it's not none.) - - OSError, if the filepaths provided do not have write permissions. - - - The 'metadata_directory' and 'targets_directory' directories are created - if they do not exist. - - - A 'tuf.developer_tool.Project' object. - """ - - # Does 'metadata_directory' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATH_SCHEMA.check_match(metadata_directory) - - # Do the same for the location in the repo and the project name, we must - # ensure they are valid pathnames. - sslib_formats.NAME_SCHEMA.check_match(project_name) - sslib_formats.ANY_STRING_SCHEMA.check_match(location_in_repository) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # for the targets directory we do the same, but first, let's find out what - # layout the user needs, layout_type is a variable that is usually set to - # 1, which means "flat" (i.e. the cfg file is where the metadata folder is - # located), with a two, the cfg file goes to the "metadata" folder, and a - # new metadata folder is created inside the tree, to separate targets and - # metadata. - layout_type = 'flat' - if targets_directory is None: - targets_directory = os.path.join(metadata_directory, TARGETS_DIRECTORY_NAME) - metadata_directory = \ - os.path.join(metadata_directory, METADATA_DIRECTORY_NAME) - layout_type = 'repo-like' - - if targets_directory is not None: - sslib_formats.PATH_SCHEMA.check_match(targets_directory) - - if key is not None: - sslib_formats.KEY_SCHEMA.check_match(key) - - # Set the metadata and targets directories. These directories - # are created if they do not exist. - metadata_directory = os.path.abspath(metadata_directory) - targets_directory = os.path.abspath(targets_directory) - - # Try to create the metadata directory that will hold all of the metadata - # files, such as 'root.txt' and 'release.txt'. - try: - message = 'Creating ' + repr(metadata_directory) - logger.info(message) - os.makedirs(metadata_directory) - - # 'OSError' raised if the leaf directory already exists or cannot be created. - # Check for case where 'repository_directory' has already been created. - except OSError as e: - if e.errno == errno.EEXIST: - # Should check if we have write permissions here. - pass - - # Testing of non-errno.EEXIST exceptions have been verified on all - # supported # OSs. An unexpected exception (the '/' directory exists, - # rather than disallowed path) is possible on Travis, so the '#pragma: no - # branch' below is included to prevent coverage failure. - else: #pragma: no branch - raise - - # Try to create the targets directory that will hold all of the target files. 
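# The try/except os.makedirs() pattern used in this module predates Python 3's
# exist_ok flag; a sketch of the modern equivalent, with an invented path:
import os

os.makedirs('/tmp/project/targets', exist_ok=True)  # no error if it already exists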
- try: - message = 'Creating ' + repr(targets_directory) - logger.info(message) - os.mkdir(targets_directory) - - except OSError as e: - if e.errno == errno.EEXIST: - pass - - else: - raise - - # Create the bare bones project object, where project role contains default - # values (e.g., threshold of 1, expires 1 year into the future, etc.) - project = Project(project_name, metadata_directory, targets_directory, - location_in_repository, repository_name) - - # Add 'key' to the project. - # TODO: Add check for expected number of keys for the project (must be 1) and - # its delegated roles (may be greater than one.) - if key is not None: - project.add_verification_key(key) - - # Save the layout information. - project.layout_type = layout_type - - return project - - - - - - -def _save_project_configuration(metadata_directory, targets_directory, - public_keys, prefix, threshold, layout_type, project_name): - """ - - Persist the project's information to a file. The saved project information - can later be loaded with Project.load_project(). - - - metadata_directory: - Where the project's metadata is located. - - targets_directory: - The location of the target files for this project. - - public_keys: - A list containing the public keys for the project role. - - prefix: - The project's prefix (if any.) - - threshold: - The threshold value for the project role. - - layout_type: - The layout type being used by the project, "flat" stands for separated - targets and metadata directories, "repo-like" emulates the layout used - by the repository tools - - project_name: - The name given to the project, this sets the metadata filename so it - matches the one stored in upstream. - - - securesystemslib.exceptions.FormatError are also expected if any of the arguments are malformed. - - OSError may rise if the metadata_directory/project.cfg file exists and - is non-writeable - - - A 'project.cfg' configuration file is created or overwritten. - - - None. - """ - - # Schema check for the arguments. - sslib_formats.PATH_SCHEMA.check_match(metadata_directory) - sslib_formats.PATH_SCHEMA.check_match(prefix) - sslib_formats.PATH_SCHEMA.check_match(targets_directory) - formats.RELPATH_SCHEMA.check_match(project_name) - - cfg_file_directory = metadata_directory - - # Check whether the layout type is 'flat' or 'repo-like'. - # If it is, the .cfg file should be saved in the previous directory. - if layout_type == 'repo-like': - cfg_file_directory = os.path.dirname(metadata_directory) - junk, targets_directory = os.path.split(targets_directory) - - junk, metadata_directory = os.path.split(metadata_directory) - - # Can the file be opened? - project_filename = os.path.join(cfg_file_directory, PROJECT_FILENAME) - - # Build the fields of the configuration file. - project_config = {} - project_config['prefix'] = prefix - project_config['public_keys'] = {} - project_config['metadata_location'] = metadata_directory - project_config['targets_location'] = targets_directory - project_config['threshold'] = threshold - project_config['layout_type'] = layout_type - project_config['project_name'] = project_name - - # Build a dictionary containing the actual keys. - for key in public_keys: - key_info = keydb.get_key(key) - key_metadata = format_keyval_to_metadata(key_info['keytype'], - key_info['scheme'], key_info['keyval']) - project_config['public_keys'][key] = key_metadata - - # Save the actual file. 
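# For orientation, the project.cfg written below might look like this once
# serialized; the keyid, key value, and names are invented:
example_project_config = {
  'prefix': 'targets/unclaimed',
  'public_keys': {
    '1a2b3c': {'keytype': 'ed25519', 'scheme': 'ed25519',
               'keyval': {'public': 'abcdef0123456789'}}},
  'metadata_location': 'metadata',
  'targets_location': 'targets',
  'threshold': 1,
  'layout_type': 'repo-like',
  'project_name': 'django',
}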
- with open(project_filename, 'wt', encoding='utf8') as fp: - json.dump(project_config, fp) - - - - - -def load_project(project_directory, prefix='', new_targets_location=None, - repository_name='default'): - """ - - Return a Project object initialized with the contents of the metadata - files loaded from 'project_directory'. - - - project_directory: - The path to the project's metadata and configuration file. - - prefix: - The prefix for the metadata, if defined. It will replace the current - prefix, by first removing the existing one (saved). - - new_targets_location: - For flat project configurations, project owner might want to reload the - project with a new location for the target files. This overwrites the - previous path to search for the target files. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if 'project_directory' or any of - the metadata files are improperly formatted. - - - All the metadata files found in the project are loaded and their contents - stored in a libtuf.Repository object. - - - A tuf.developer_tool.Project object. - """ - - # Does 'repository_directory' have the correct format? - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATH_SCHEMA.check_match(project_directory) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Do the same for the prefix - sslib_formats.ANY_STRING_SCHEMA.check_match(prefix) - - # Clear the role and key databases since we are loading in a new project. - roledb.clear_roledb(clear_all=True) - keydb.clear_keydb(clear_all=True) - - # Locate metadata filepaths and targets filepath. - project_directory = os.path.abspath(project_directory) - - # Load the cfg file and the project. - config_filename = os.path.join(project_directory, PROJECT_FILENAME) - - project_configuration = sslib_util.load_json_file(config_filename) - formats.PROJECT_CFG_SCHEMA.check_match(project_configuration) - - targets_directory = os.path.join(project_directory, - project_configuration['targets_location']) - - if project_configuration['layout_type'] == 'flat': - project_directory, junk = os.path.split(project_directory) - targets_directory = project_configuration['targets_location'] - - if new_targets_location is not None: - targets_directory = new_targets_location - - metadata_directory = os.path.join(project_directory, - project_configuration['metadata_location']) - - new_prefix = None - - if prefix != '': - new_prefix = prefix - - prefix = project_configuration['prefix'] - - # Load the project's filename. - project_name = project_configuration['project_name'] - project_filename = project_name + METADATA_EXTENSION - - # Create a blank project on the target directory. - project = Project(project_name, metadata_directory, targets_directory, prefix, - repository_name) - - project.threshold = project_configuration['threshold'] - project.prefix = project_configuration['prefix'] - project.layout_type = project_configuration['layout_type'] - - # Traverse the public keys and add them to the project. - keydict = project_configuration['public_keys'] - - for keyid in keydict: - key, junk = format_metadata_to_key(keydict[keyid]) - project.add_verification_key(key) - - # Load the project's metadata. 
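# Stepping back, a usage sketch of the round trip this module supports, per
# the docstrings above (the directory and prefix are invented):
from tuf.developer_tool import create_new_project

project = create_new_project('django', '/tmp/django-project',
    location_in_repository='targets/unclaimed')
print(project.layout_type)  # 'repo-like': no separate targets_directory given

# After adding a verification key, signing, and project.write(), the project
# can later be re-initialized from disk:
#   project = load_project('/tmp/django-project')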
- targets_metadata_path = os.path.join(project_directory, metadata_directory, - project_filename) - signable = sslib_util.load_json_file(targets_metadata_path) - try: - formats.check_signable_object_format(signable) - except exceptions.UnsignedMetadataError: - # Downgrade the error to a warning because a use case exists where - # metadata may be generated unsigned on one machine and signed on another. - logger.warning('Unsigned metadata object: ' + repr(signable)) - targets_metadata = signable['signed'] - - # Remove the prefix from the metadata. - targets_metadata = _strip_prefix_from_targets_metadata(targets_metadata, - prefix) - for signature in signable['signatures']: - project.add_signature(signature) - - # Update roledb.py containing the loaded project attributes. - roleinfo = roledb.get_roleinfo(project_name, repository_name) - roleinfo['signatures'].extend(signable['signatures']) - roleinfo['version'] = targets_metadata['version'] - roleinfo['paths'] = targets_metadata['targets'] - roleinfo['delegations'] = targets_metadata['delegations'] - roleinfo['partial_loaded'] = False - - # Check if the loaded metadata was partially written and update the - # flag in 'roledb.py'. - if _metadata_is_partially_loaded(project_name, signable, - repository_name=repository_name): - roleinfo['partial_loaded'] = True - - roledb.update_roleinfo(project_name, roleinfo, mark_role_as_dirty=False, - repository_name=repository_name) - - for key_metadata in targets_metadata['delegations']['keys'].values(): - key_object, junk = format_metadata_to_key(key_metadata) - keydb.add_key(key_object, repository_name=repository_name) - - for role in targets_metadata['delegations']['roles']: - rolename = role['name'] - roleinfo = {'name': role['name'], 'keyids': role['keyids'], - 'threshold': role['threshold'], - 'signing_keyids': [], 'signatures': [], 'partial_loaded':False, - 'delegations': {'keys':{}, 'roles':[]} - } - roledb.add_role(rolename, roleinfo, repository_name=repository_name) - - # Load the delegated metadata and generate their fileinfo. - targets_objects = {} - loaded_metadata = [project_name] - targets_objects[project_name] = project - metadata_directory = os.path.join(project_directory, metadata_directory) - - if os.path.exists(metadata_directory) and \ - os.path.isdir(metadata_directory): - for metadata_role in os.listdir(metadata_directory): - metadata_path = os.path.join(metadata_directory, metadata_role) - metadata_name = \ - metadata_path[len(metadata_directory):].lstrip(os.path.sep) - - # Strip the extension. The roledb does not include an appended '.json' - # extension for each role. - if metadata_name.endswith(METADATA_EXTENSION): - extension_length = len(METADATA_EXTENSION) - metadata_name = metadata_name[:-extension_length] - - else: - continue - - if metadata_name in loaded_metadata: - continue - - signable = None - signable = sslib_util.load_json_file(metadata_path) - - # Strip the prefix from the local working copy, it will be added again - # when the targets metadata is written to disk. 
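# The prefix stripping performed by _strip_prefix_from_targets_metadata()
# reduces to os.path.relpath, e.g. (values invented):
import os

assert os.path.relpath('targets/unclaimed/foo.txt', 'targets/unclaimed') == 'foo.txt'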
- metadata_object = signable['signed'] - metadata_object = _strip_prefix_from_targets_metadata(metadata_object, - prefix) - - roleinfo = roledb.get_roleinfo(metadata_name, repository_name) - roleinfo['signatures'].extend(signable['signatures']) - roleinfo['version'] = metadata_object['version'] - roleinfo['expires'] = metadata_object['expires'] - roleinfo['paths'] = {} - - for filepath, fileinfo in metadata_object['targets'].items(): - roleinfo['paths'].update({filepath: fileinfo.get('custom', {})}) - roleinfo['delegations'] = metadata_object['delegations'] - roleinfo['partial_loaded'] = False - - # If the metadata was partially loaded, update the roleinfo flag. - if _metadata_is_partially_loaded(metadata_name, signable, - repository_name=repository_name): - roleinfo['partial_loaded'] = True - - - roledb.update_roleinfo(metadata_name, roleinfo, - mark_role_as_dirty=False, repository_name=repository_name) - - # Append to list of elements to avoid reloading repeated metadata. - loaded_metadata.append(metadata_name) - - # Generate the Targets objects of the delegated roles. - new_targets_object = Targets(targets_directory, metadata_name, roleinfo, - repository_name=repository_name) - targets_object = targets_objects[project_name] - - targets_object._delegated_roles[metadata_name] = new_targets_object - - # Add the keys specified in the delegations field of the Targets role. - for key_metadata in metadata_object['delegations']['keys'].values(): - key_object, junk = format_metadata_to_key(key_metadata) - - try: - keydb.add_key(key_object, repository_name=repository_name) - - except exceptions.KeyAlreadyExistsError: - pass - - for role in metadata_object['delegations']['roles']: - rolename = role['name'] - roleinfo = {'name': role['name'], 'keyids': role['keyids'], - 'threshold': role['threshold'], - 'signing_keyids': [], 'signatures': [], - 'partial_loaded': False, - 'delegations': {'keys': {}, - 'roles': []}} - roledb.add_role(rolename, roleinfo, repository_name=repository_name) - - if new_prefix: - project.prefix = new_prefix - - return project - - - - - -def _strip_prefix_from_targets_metadata(targets_metadata, prefix): - """ - Non-public method that removes the prefix from each of the target paths in - 'targets_metadata' so they can be used again in compliance with the local - copies. The prefix is needed in metadata to match the layout of the remote - repository. - """ - - unprefixed_targets_metadata = {} - - for targets in targets_metadata['targets'].keys(): - unprefixed_target = os.path.relpath(targets, prefix) - unprefixed_targets_metadata[unprefixed_target] = \ - targets_metadata['targets'][targets] - targets_metadata['targets'] = unprefixed_targets_metadata - - return targets_metadata - - - - - -if __name__ == '__main__': - # The interactive sessions of the documentation strings can - # be tested by running 'developer_tool.py' as a standalone module: - # $ python3 developer_tool.py - import doctest - doctest.testmod() diff --git a/tuf/download.py b/tuf/download.py deleted file mode 100755 index af12af614b..0000000000 --- a/tuf/download.py +++ /dev/null @@ -1,314 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - download.py - - - February 21, 2012. Based on previous version by Geremy Condra. - - - Konstantin Andrianov - Vladimir Diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Download metadata and target files and check their validity. 
The hash and - length of a downloaded file has to match the hash and length supplied by the - metadata of that file. -""" - -import logging -import timeit -import tempfile -from urllib import parse - -import securesystemslib # pylint: disable=unused-import -from securesystemslib import formats as sslib_formats - -from tuf import exceptions -from tuf import formats -from tuf import settings - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - - -def safe_download(url, required_length, fetcher): - """ - - Given the 'url' and 'required_length' of the desired file, open a connection - to 'url', download it, and return the contents of the file. Also ensure - the length of the downloaded file matches 'required_length' exactly. - download.unsafe_download() may be called if an upper download limit is - preferred. - - - url: - A URL string that represents the location of the file. - - required_length: - An integer value representing the length of the file. This is an exact - limit. - - fetcher: - An object implementing FetcherInterface that performs the network IO - operations. - - - A file object is created on disk to store the contents of 'url'. - - - tuf.ssl_commons.exceptions.DownloadLengthMismatchError, if there was a - mismatch of observed vs expected lengths while downloading the file. - - securesystemslib.exceptions.FormatError, if any of the arguments are - improperly formatted. - - Any other unforeseen runtime exception. - - - A file object that points to the contents of 'url'. - """ - - # Do all of the arguments have the appropriate format? - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.URL_SCHEMA.check_match(url) - formats.LENGTH_SCHEMA.check_match(required_length) - - return _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=True) - - - - - -def unsafe_download(url, required_length, fetcher): - """ - - Given the 'url' and 'required_length' of the desired file, open a connection - to 'url', download it, and return the contents of the file. Also ensure - the length of the downloaded file is up to 'required_length', and no larger. - download.safe_download() may be called if an exact download limit is - preferred. - - - url: - A URL string that represents the location of the file. - - required_length: - An integer value representing the length of the file. This is an upper - limit. - - fetcher: - An object implementing FetcherInterface that performs the network IO - operations. - - - A file object is created on disk to store the contents of 'url'. - - - tuf.ssl_commons.exceptions.DownloadLengthMismatchError, if there was a - mismatch of observed vs expected lengths while downloading the file. - - securesystemslib.exceptions.FormatError, if any of the arguments are - improperly formatted. - - Any other unforeseen runtime exception. - - - A file object that points to the contents of 'url'. - """ - - # Do all of the arguments have the appropriate format? - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
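# In brief, the two entry points differ only in how strictly the length is
# enforced: safe_download() requires an exact match, unsafe_download() only an
# upper bound. A sketch (the fetcher must implement tuf's FetcherInterface;
# the URL and helper name are invented):
from tuf import download

def fetch_timestamp_metadata(fetcher, mirror_url, length_upper_bound):
  # Timestamp metadata carries no signed length, so only an upper bound can
  # be enforced; hence unsafe_download() rather than safe_download().
  return download.unsafe_download(mirror_url + '/timestamp.json',
      length_upper_bound, fetcher)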
- sslib_formats.URL_SCHEMA.check_match(url) - formats.LENGTH_SCHEMA.check_match(required_length) - - return _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=False) - - - - - -def _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=True): - """ - - Given the url and length of the desired file, this function opens a - connection to 'url' and downloads the file while ensuring its length - matches 'required_length' if 'STRICT_REQUIRED_LENGH' is True (If False, - the file's length is not checked and a slow retrieval exception is raised - if the downloaded rate falls below the acceptable rate). - - - url: - A URL string that represents the location of the file. - - required_length: - An integer value representing the length of the file. - - STRICT_REQUIRED_LENGTH: - A Boolean indicator used to signal whether we should perform strict - checking of required_length. True by default. We explicitly set this to - False when we know that we want to turn this off for downloading the - timestamp metadata, which has no signed required_length. - - - A file object is created on disk to store the contents of 'url'. - - - tuf.exceptions.DownloadLengthMismatchError, if there was a - mismatch of observed vs expected lengths while downloading the file. - - securesystemslib.exceptions.FormatError, if any of the arguments are - improperly formatted. - - Any other unforeseen runtime exception. - - - A file object that points to the contents of 'url'. - """ - # 'url.replace('\\', '/')' is needed for compatibility with Windows-based - # systems, because they might use back-slashes in place of forward-slashes. - # This converts it to the common format. unquote() replaces %xx escapes in a - # url with their single-character equivalent. A back-slash may be encoded as - # %5c in the url, which should also be replaced with a forward slash. - url = parse.unquote(url).replace('\\', '/') - logger.info('Downloading: ' + repr(url)) - - # This is the temporary file that we will return to contain the contents of - # the downloaded file. - temp_file = tempfile.TemporaryFile() - - average_download_speed = 0 - number_of_bytes_received = 0 - - try: - chunks = fetcher.fetch(url, required_length) - start_time = timeit.default_timer() - for chunk in chunks: - - stop_time = timeit.default_timer() - temp_file.write(chunk) - - # Measure the average download speed. - number_of_bytes_received += len(chunk) - seconds_spent_receiving = stop_time - start_time - average_download_speed = number_of_bytes_received / seconds_spent_receiving - - if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED: - logger.debug('The average download speed dropped below the minimum' - ' average download speed set in settings. Stopping the download!.') - break - - else: - logger.debug('The average download speed has not dipped below the' - ' minimum average download speed set in settings.') - - # Does the total number of downloaded bytes match the required length? - _check_downloaded_length(number_of_bytes_received, required_length, - STRICT_REQUIRED_LENGTH=STRICT_REQUIRED_LENGTH, - average_download_speed=average_download_speed) - - except Exception: - # Close 'temp_file'. Any written data is lost. 
- temp_file.close() - logger.debug('Could not download URL: ' + repr(url)) - raise - - else: - return temp_file - - - - -def _check_downloaded_length(total_downloaded, required_length, - STRICT_REQUIRED_LENGTH=True, - average_download_speed=None): - """ - - A helper function which checks whether the total number of downloaded bytes - matches our expectation. - - - total_downloaded: - The total number of bytes supposedly downloaded for the file in question. - - required_length: - The total number of bytes expected of the file as seen from its metadata. - The Timestamp role is always downloaded without a known file length, and - the Root role when the client cannot download any of the required - top-level roles. In both cases, 'required_length' is actually an upper - limit on the length of the downloaded file. - - STRICT_REQUIRED_LENGTH: - A Boolean indicator used to signal whether we should perform strict - checking of required_length. True by default. We explicitly set this to - False when we know that we want to turn this off for downloading the - timestamp metadata, which has no signed required_length. - - average_download_speed: - The average download speed for the downloaded file. - - - None. - - - securesystemslib.exceptions.DownloadLengthMismatchError, if - STRICT_REQUIRED_LENGTH is True and total_downloaded is not equal - required_length. - - tuf.exceptions.SlowRetrievalError, if the total downloaded was - done in less than the acceptable download speed (as set in - tuf.settings). - - - None. - """ - - if total_downloaded == required_length: - logger.info('Downloaded ' + str(total_downloaded) + ' bytes out of the' - ' expected ' + str(required_length) + ' bytes.') - - else: - difference_in_bytes = abs(total_downloaded - required_length) - - # What we downloaded is not equal to the required length, but did we ask - # for strict checking of required length? - if STRICT_REQUIRED_LENGTH: - logger.info('Downloaded ' + str(total_downloaded) + ' bytes, but' - ' expected ' + str(required_length) + ' bytes. There is a difference' - ' of ' + str(difference_in_bytes) + ' bytes.') - - # If the average download speed is below a certain threshold, we flag - # this as a possible slow-retrieval attack. - logger.debug('Average download speed: ' + repr(average_download_speed)) - logger.debug('Minimum average download speed: ' + repr(settings.MIN_AVERAGE_DOWNLOAD_SPEED)) - - if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED: - raise exceptions.SlowRetrievalError(average_download_speed) - - else: - logger.debug('Good average download speed: ' + - repr(average_download_speed) + ' bytes per second') - - raise exceptions.DownloadLengthMismatchError(required_length, total_downloaded) - - else: - # We specifically disabled strict checking of required length, but we - # will log a warning anyway. This is useful when we wish to download the - # Timestamp or Root metadata, for which we have no signed metadata; so, - # we must guess a reasonable required_length for it. 
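# A worked instance of the speed check that follows, with invented numbers
# (the real threshold comes from tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED):
number_of_bytes_received = 1500
seconds_spent_receiving = 60.0
average_download_speed = number_of_bytes_received / seconds_spent_receiving  # 25.0

MIN_AVERAGE_DOWNLOAD_SPEED = 50  # hypothetical threshold, in bytes per second
assert average_download_speed < MIN_AVERAGE_DOWNLOAD_SPEED  # would raise SlowRetrievalError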
-    if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED:
-      raise exceptions.SlowRetrievalError(average_download_speed)
-
-    else:
-      logger.debug('Good average download speed: ' +
-          repr(average_download_speed) + ' bytes per second')
-
-    logger.info('Downloaded ' + str(total_downloaded) + ' bytes out of an'
-      ' upper limit of ' + str(required_length) + ' bytes.')
diff --git a/tuf/exceptions.py b/tuf/exceptions.py
deleted file mode 100755
index 8ebc92c7d1..0000000000
--- a/tuf/exceptions.py
+++ /dev/null
@@ -1,338 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  exceptions.py
-
-<Author>
-  Vladimir Diaz
-
-<Started>
-  January 10, 2017
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  Define TUF Exceptions.
-  The names chosen for TUF Exception classes should end in 'Error' except
-  where there is a good reason not to; such cases should state that reason.
-"""
-
-from urllib import parse
-
-from typing import Any, Dict, Optional
-
-import logging
-logger = logging.getLogger(__name__)
-
-
-class Error(Exception):
-  """Indicate a generic error."""
-
-
-class UnsupportedSpecificationError(Error):
-  """
-  Metadata received claims to conform to a version of the specification that
-  is not supported by this client.
-  """
-
-class FormatError(Error):
-  """Indicate an error while validating an object's format."""
-
-
-class InvalidMetadataJSONError(FormatError):
-  """Indicate that a metadata file is not valid JSON."""
-
-  def __init__(self, exception: BaseException):
-    super(InvalidMetadataJSONError, self).__init__()
-
-    # Store the original exception.
-    self.exception = exception
-
-  def __str__(self) -> str:
-    return repr(self)
-
-  def __repr__(self) -> str:
-    # Show the original exception.
-    return self.__class__.__name__ + ' : wraps error: ' + repr(self.exception)
-
-    # # Directly instance-reproducing:
-    # return self.__class__.__name__ + '(' + repr(self.exception) + ')'
-
-
-class UnsupportedAlgorithmError(Error):
-  """Indicate an error while trying to identify a user-specified algorithm."""
-
-class LengthOrHashMismatchError(Error):
-  """Indicate an error while checking the length and hash values of an object."""
-
-class RepositoryError(Error):
-  """Indicate an error with a repository's state, such as a missing file."""
-
-class BadHashError(RepositoryError):
-  """Indicate an error while checking the value of a hash object."""
-
-  def __init__(self, expected_hash: str, observed_hash: str):
-    super(BadHashError, self).__init__()
-
-    self.expected_hash = expected_hash
-    self.observed_hash = observed_hash
-
-  def __str__(self) -> str:
-    return (
-        'Observed hash (' + repr(self.observed_hash) + ') != expected hash (' +
-        repr(self.expected_hash) + ')')
-
-  def __repr__(self) -> str:
-    return self.__class__.__name__ + ' : ' + str(self)
-
-    # # Directly instance-reproducing:
-    # return (
-    #     self.__class__.__name__ + '(' + repr(self.expected_hash) + ', ' +
-    #     repr(self.observed_hash) + ')')
-
-
-class BadPasswordError(Error):
-  """Indicate an error after encountering an invalid password."""
-
-
-class UnknownKeyError(Error):
-  """Indicate an error while verifying key-like objects (e.g., keyids)."""
-
-
-class BadVersionNumberError(RepositoryError):
-  """Indicate an error for metadata that contains an invalid version number."""
-
-
-class MissingLocalRepositoryError(RepositoryError):
-  """Raised when a local repository could not be found."""
-
-
-class InsufficientKeysError(Error):
-  """Indicate that metadata role lacks a threshold of public or private keys."""
-
-
-class ForbiddenTargetError(RepositoryError):
-  """Indicate that a role signed for a target that it was not delegated to."""
-
-
-class ExpiredMetadataError(RepositoryError):
-  """Indicate that a TUF Metadata file has expired."""
-
-
-class ReplayedMetadataError(RepositoryError):
-  """Indicate that some metadata has been replayed to the client."""
-
-  def __init__(self, metadata_role: str, downloaded_version: int,
-      current_version: int):
-    super(ReplayedMetadataError, self).__init__()
-
-    self.metadata_role = metadata_role
-    self.downloaded_version = downloaded_version
-    self.current_version = current_version
-
-  def __str__(self) -> str:
-    return (
-        'Downloaded ' + repr(self.metadata_role) + ' is older (' +
-        repr(self.downloaded_version) + ') than the version currently '
-        'installed (' + repr(self.current_version) + ').')
-
-  def __repr__(self) -> str:
-    return self.__class__.__name__ + ' : ' + str(self)
-
-
-class CryptoError(Error):
-  """Indicate any cryptography-related errors."""
-
-
-class BadSignatureError(CryptoError):
-  """Indicate that some metadata file has a bad signature."""
-
-  def __init__(self, metadata_role_name: str):
-    super(BadSignatureError, self).__init__()
-
-    self.metadata_role_name = metadata_role_name
-
-  def __str__(self) -> str:
-    return repr(self.metadata_role_name) + ' metadata has a bad signature.'
-
-  def __repr__(self) -> str:
-    return self.__class__.__name__ + ' : ' + str(self)
-
-    # # Directly instance-reproducing:
-    # return (
-    #     self.__class__.__name__ + '(' + repr(self.metadata_role_name) + ')')
-
-
-class UnknownMethodError(CryptoError):
-  """Indicate that a user-specified cryptographic method is unknown."""
-
-
-class UnsupportedLibraryError(Error):
-  """Indicate that a supported library could not be located or imported."""
-
-
-class DownloadError(Error):
-  """Indicate an error occurred while attempting to download a file."""
-
-
-class DownloadLengthMismatchError(DownloadError):
-  """Indicate that a mismatch of lengths was seen while downloading a file."""
-
-  def __init__(self, expected_length: int, observed_length: int):
-    super(DownloadLengthMismatchError, self).__init__()
-
-    self.expected_length = expected_length #bytes
-    self.observed_length = observed_length #bytes
-
-  def __str__(self) -> str:
-    return (
-        'Observed length (' + repr(self.observed_length) +
-        ') < expected length (' + repr(self.expected_length) + ').')
-
-  def __repr__(self) -> str:
-    return self.__class__.__name__ + ' : ' + str(self)
-
-    # # Directly instance-reproducing:
-    # return (
-    #     self.__class__.__name__ + '(' + repr(self.expected_length) + ', ' +
-    #     repr(self.observed_length) + ')')
-
-
-
-class SlowRetrievalError(DownloadError):
-  """Indicate that downloading a file took an unreasonably long time."""
-
-  def __init__(self, average_download_speed: Optional[int] = None):
-    super(SlowRetrievalError, self).__init__()
-
-    self.__average_download_speed = average_download_speed #bytes/second
-
-  def __str__(self) -> str:
-    msg = 'Download was too slow.'
-    if self.__average_download_speed is not None:
-      msg = ('Download was too slow. Average speed: ' +
-          repr(self.__average_download_speed) + ' bytes per second.')
-
-    return msg
-
-  def __repr__(self) -> str:
-    return self.__class__.__name__ + ' : ' + str(self)
-
-    # # Directly instance-reproducing:
-    # return (
-    #     self.__class__.__name__ + '(' + repr(self.__average_download_speed) + ')')
-
-
-class KeyAlreadyExistsError(Error):
-  """Indicate that a key already exists and cannot be added."""
-
-
-class RoleAlreadyExistsError(Error):
-  """Indicate that a role already exists and cannot be added."""
-
-
-class UnknownRoleError(Error):
-  """Indicate an error trying to locate or identify a specified TUF role."""
-
-
-class UnknownTargetError(Error):
-  """Indicate an error trying to locate or identify a specified target."""
-
-
-class InvalidNameError(Error):
-  """Indicate an error while trying to validate any type of named object."""
-
-
-class UnsignedMetadataError(RepositoryError):
-  """Indicate metadata object with insufficient threshold of signatures."""
-
-  # signable is not used but kept in method signature for backwards compat
-  def __init__(self, message: str, signable: Any = None):
-    super(UnsignedMetadataError, self).__init__()
-
-    self.exception_message = message
-    self.signable = signable
-
-  def __str__(self) -> str:
-    return self.exception_message
-
-  def __repr__(self) -> str:
-    return self.__class__.__name__ + ' : ' + str(self)
-
-    # # Directly instance-reproducing:
-    # return (
-    #     self.__class__.__name__ + '(' + repr(self.exception_message) + ', ' +
-    #     repr(self.signable) + ')')
-
-
-class NoWorkingMirrorError(Error):
-  """
-  An updater raises this exception when it could not download a metadata or
-  target file from any mirror. A dictionary of Exception instances indexed by
-  every mirror URL is also provided.
- """ - - def __init__(self, mirror_errors: Dict[str, BaseException]): - super(NoWorkingMirrorError, self).__init__() - - # Dictionary of URL strings to Exception instances - self.mirror_errors = mirror_errors - - def __str__(self) -> str: - all_errors = 'No working mirror was found:' - - for mirror_url, mirror_error in self.mirror_errors.items(): - try: - # http://docs.python.org/2/library/urlparse.html#urlparse.urlparse - mirror_url_tokens = parse.urlparse(mirror_url) - - except Exception: - logger.exception('Failed to parse mirror URL: ' + repr(mirror_url)) - mirror_netloc = mirror_url - - else: - mirror_netloc = mirror_url_tokens.netloc - - all_errors += '\n ' + repr(mirror_netloc) + ': ' + repr(mirror_error) - - return all_errors - - def __repr__(self) -> str: - return self.__class__.__name__ + ' : ' + str(self) - - # # Directly instance-reproducing: - # return ( - # self.__class__.__name__ + '(' + repr(self.mirror_errors) + ')') - - - -class NotFoundError(Error): - """If a required configuration or resource is not found.""" - - -class URLMatchesNoPatternError(Error): - """If a URL does not match a user-specified regular expression.""" - -class URLParsingError(Error): - """If we are unable to parse a URL -- for example, if a hostname element - cannot be isoalted.""" - -class InvalidConfigurationError(Error): - """If a configuration object does not match the expected format.""" - -class FetcherHTTPError(Exception): - """ - Returned by FetcherInterface implementations for HTTP errors. - - Args: - message (str): The HTTP error messsage - status_code (int): The HTTP status code - """ - def __init__(self, message: str, status_code: int): - super(FetcherHTTPError, self).__init__(message) - self.status_code = status_code diff --git a/tuf/formats.py b/tuf/formats.py deleted file mode 100755 index ca304ca9e4..0000000000 --- a/tuf/formats.py +++ /dev/null @@ -1,1009 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - formats.py - - - Geremy Condra - Vladimir Diaz - - - Refactored April 30, 2012. -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - A central location for all format-related checking of TUF objects. - Some crypto-related formats may also be defined in securesystemslib. - Note: 'formats.py' depends heavily on 'schema.py', so the 'schema.py' - module should be read and understood before tackling this module. - - 'formats.py' can be broken down into two sections. (1) Schemas and object - matching. (2) Functions that help produce or verify TUF objects. - - The first section deals with schemas and object matching based on format. - There are two ways of checking the format of objects. The first method - raises a 'securesystemslib.exceptions.FormatError' exception if the match - fails and the other returns a Boolean result. - - tuf.formats..check_match(object) - tuf.formats..matches(object) - - Example: - - rsa_key = {'keytype': 'rsa' - 'keyid': 34892fc465ac76bc3232fab - 'keyval': {'public': 'public_key', - 'private': 'private_key'} - - securesystemslib.formats.RSAKEY_SCHEMA.check_match(rsa_key) - securesystemslib.formats.RSAKEY_SCHEMA.matches(rsa_key) - - In this example, if a dict key or dict value is missing or incorrect, - the match fails. There are numerous variations of object checking - provided by 'formats.py' and 'schema.py'. - - The second section contains miscellaneous functions related to the format of - TUF objects. 
-  Example:
-
-  signable_object = make_signable(unsigned_object)
-"""
-
-import binascii
-import calendar
-import datetime
-import time
-import copy
-
-from securesystemslib import exceptions as sslib_exceptions
-from securesystemslib import formats as sslib_formats
-from securesystemslib import schema as SCHEMA
-
-import tuf
-from tuf import exceptions
-
-# As per TUF spec 1.0.0 the spec version field must follow the Semantic
-# Versioning 2.0.0 (semver) format. The regex pattern is provided by semver.
-# https://semver.org/spec/v2.0.0.html#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
-SEMVER_2_0_0_SCHEMA = SCHEMA.RegularExpression(
-    r'(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)'
-    r'(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)'
-    r'(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?'
-    r'(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?'
-)
-SPECIFICATION_VERSION_SCHEMA = SCHEMA.OneOf([
-    # However, temporarily allow "1.0" for backwards-compatibility in tuf-0.12.PATCH.
-    SCHEMA.String("1.0"),
-    SEMVER_2_0_0_SCHEMA
-])
-
-# A datetime in 'YYYY-MM-DDTHH:MM:SSZ' ISO 8601 format. The "Z" zone designator
-# for the zero UTC offset is always used (i.e., a numerical offset is not
-# supported.) Example: '2015-10-21T13:20:00Z'. Note: This is a simple format
-# check, and an ISO8601 string should be fully verified when it is parsed.
-ISO8601_DATETIME_SCHEMA = SCHEMA.RegularExpression(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z')
-
-# An integer representing the numbered version of a metadata file.
-# Must be 1, or greater.
-METADATAVERSION_SCHEMA = SCHEMA.Integer(lo=0)
-
-# A relative file path (e.g., 'metadata/root/').
-RELPATH_SCHEMA = SCHEMA.AnyString()
-RELPATHS_SCHEMA = SCHEMA.ListOf(RELPATH_SCHEMA)
-
-VERSIONINFO_SCHEMA = SCHEMA.Object(
-  object_name = 'VERSIONINFO_SCHEMA',
-  version = METADATAVERSION_SCHEMA)
-
-# A string representing a role's name.
-ROLENAME_SCHEMA = SCHEMA.AnyString()
-
-# A role's threshold value (i.e., the minimum number
-# of signatures required to sign a metadata file).
-# Must be 1 or greater.
-THRESHOLD_SCHEMA = SCHEMA.Integer(lo=1)
-
-# A hexadecimal value in '23432df87ab..' format.
-HEX_SCHEMA = SCHEMA.RegularExpression(r'[a-fA-F0-9]+')
-
-# A path hash prefix is a hexadecimal string.
-PATH_HASH_PREFIX_SCHEMA = HEX_SCHEMA
-
-# A list of path hash prefixes.
-PATH_HASH_PREFIXES_SCHEMA = SCHEMA.ListOf(PATH_HASH_PREFIX_SCHEMA)
-
-# Role object in {'keyids': [keyids..], 'name': 'ABC', 'threshold': 1,
-# 'paths':[filepaths..]} format.
-# TODO: This is not a role. In further #660-related PRs, fix it, similar to
-# the way I did in Uptane's TUF fork.
-ROLE_SCHEMA = SCHEMA.Object(
-  object_name = 'ROLE_SCHEMA',
-  name = SCHEMA.Optional(ROLENAME_SCHEMA),
-  keyids = sslib_formats.KEYIDS_SCHEMA,
-  threshold = THRESHOLD_SCHEMA,
-  terminating = SCHEMA.Optional(sslib_formats.BOOLEAN_SCHEMA),
-  paths = SCHEMA.Optional(RELPATHS_SCHEMA),
-  path_hash_prefixes = SCHEMA.Optional(PATH_HASH_PREFIXES_SCHEMA))
-
-# A dict of roles where the dict keys are role names and the dict values hold
-# the role data/information.
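# Quick illustration of which spec-version strings the schemas above accept;
# matches() returns a Boolean where check_match() would raise FormatError:
from tuf import formats

assert formats.SEMVER_2_0_0_SCHEMA.matches('1.0.19')
assert not formats.SEMVER_2_0_0_SCHEMA.matches('1.0')       # patch component required
assert formats.SPECIFICATION_VERSION_SCHEMA.matches('1.0')  # backwards-compat branch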
-ROLEDICT_SCHEMA = SCHEMA.DictOf( - key_schema = ROLENAME_SCHEMA, - value_schema = ROLE_SCHEMA) - -# A dictionary of ROLEDICT, where dictionary keys can be repository names, and -# dictionary values containing information for each role available on the -# repository (corresponding to the repository belonging to named repository in -# the dictionary key) -ROLEDICTDB_SCHEMA = SCHEMA.DictOf( - key_schema = sslib_formats.NAME_SCHEMA, - value_schema = ROLEDICT_SCHEMA) - -# Command argument list, as used by the CLI tool. -# Example: {'keytype': ed25519, 'expires': 365,} -COMMAND_SCHEMA = SCHEMA.DictOf( - key_schema = sslib_formats.NAME_SCHEMA, - value_schema = SCHEMA.Any()) - -# A dictionary holding version information. -VERSION_SCHEMA = SCHEMA.Object( - object_name = 'VERSION_SCHEMA', - major = SCHEMA.Integer(lo=0), - minor = SCHEMA.Integer(lo=0), - fix = SCHEMA.Integer(lo=0)) - -# A value that is either True or False, on or off, etc. -BOOLEAN_SCHEMA = SCHEMA.Boolean() - -# A hexadecimal value in '23432df87ab..' format. -HASH_SCHEMA = SCHEMA.RegularExpression(r'[a-fA-F0-9]+') - -# A key identifier (e.g., a hexadecimal value identifying an RSA key). -KEYID_SCHEMA = HASH_SCHEMA - -# A list of KEYID_SCHEMA. -KEYIDS_SCHEMA = SCHEMA.ListOf(KEYID_SCHEMA) - -# The actual values of a key, as opposed to meta data such as a key type and -# key identifier ('rsa', 233df889cb). For RSA keys, the key value is a pair of -# public and private keys in PEM Format stored as strings. -KEYVAL_SCHEMA = SCHEMA.Object( - object_name = 'KEYVAL_SCHEMA', - public = SCHEMA.AnyString(), - private = SCHEMA.Optional(SCHEMA.AnyString())) - -# A generic TUF key. All TUF keys should be saved to metadata files in this -# format. -KEY_SCHEMA = SCHEMA.Object( - object_name = 'KEY_SCHEMA', - keytype = SCHEMA.AnyString(), - keyval = KEYVAL_SCHEMA, - expires = SCHEMA.Optional(ISO8601_DATETIME_SCHEMA)) - -# A dict where the dict keys hold a keyid and the dict values a key object. -KEYDICT_SCHEMA = SCHEMA.DictOf( - key_schema = KEYID_SCHEMA, - value_schema = KEY_SCHEMA) - -# The format used by the key database to store keys. The dict keys hold a key -# identifier and the dict values any object. The key database should store -# key objects in the values (e.g., 'RSAKEY_SCHEMA', 'DSAKEY_SCHEMA'). -KEYDB_SCHEMA = SCHEMA.DictOf( - key_schema = KEYID_SCHEMA, - value_schema = SCHEMA.Any()) - -# A schema holding the result of checking the signatures of a particular -# 'SIGNABLE_SCHEMA' role. -# For example, how many of the signatures for the 'Target' role are -# valid? This SCHEMA holds this information. See 'sig.py' for -# more information. -SIGNATURESTATUS_SCHEMA = SCHEMA.Object( - object_name = 'SIGNATURESTATUS_SCHEMA', - threshold = SCHEMA.Integer(), - good_sigs = KEYIDS_SCHEMA, - bad_sigs = KEYIDS_SCHEMA, - unknown_sigs = KEYIDS_SCHEMA, - untrusted_sigs = KEYIDS_SCHEMA) - -# An integer representing length. Must be 0, or greater. -LENGTH_SCHEMA = SCHEMA.Integer(lo=0) - -# A dict in {'sha256': '23432df87ab..', 'sha512': '34324abc34df..', ...} format. -HASHDICT_SCHEMA = SCHEMA.DictOf( - key_schema = SCHEMA.AnyString(), - value_schema = HASH_SCHEMA) - -# Information about target files, like file length and file hash(es). This -# schema allows the storage of multiple hashes for the same file (e.g., sha256 -# and sha512 may be computed for the same file and stored). 
-# Information about target files, like file length and file hash(es).  This
-# schema allows the storage of multiple hashes for the same file (e.g., sha256
-# and sha512 may be computed for the same file and stored).
-TARGETS_FILEINFO_SCHEMA = SCHEMA.Object(
-  object_name = 'TARGETS_FILEINFO_SCHEMA',
-  length = LENGTH_SCHEMA,
-  hashes = HASHDICT_SCHEMA,
-  custom = SCHEMA.Optional(SCHEMA.Object()))
-
-# Information about snapshot and timestamp files.  This schema allows for
-# optional length and hashes, but version is mandatory.
-METADATA_FILEINFO_SCHEMA = SCHEMA.Object(
-  object_name = 'METADATA_FILEINFO_SCHEMA',
-  length = SCHEMA.Optional(LENGTH_SCHEMA),
-  hashes = SCHEMA.Optional(HASHDICT_SCHEMA),
-  version = METADATAVERSION_SCHEMA)
-
-# A dict holding the version or file information for a particular metadata
-# role.  The dict keys hold the relative file paths, and the dict values the
-# corresponding version numbers and/or file information.
-FILEINFODICT_SCHEMA = SCHEMA.DictOf(
-  key_schema = RELPATH_SCHEMA,
-  value_schema = SCHEMA.OneOf([VERSIONINFO_SCHEMA,
-                               METADATA_FILEINFO_SCHEMA]))
-
-# A dict holding the information for a particular target / file.  The dict
-# keys hold the relative file paths, and the dict values the corresponding
-# file information.
-FILEDICT_SCHEMA = SCHEMA.DictOf(
-  key_schema = RELPATH_SCHEMA,
-  value_schema = TARGETS_FILEINFO_SCHEMA)
-
-# A dict holding the info for a single target file.
-TARGETINFO_SCHEMA = SCHEMA.Object(
-  object_name = 'TARGETINFO_SCHEMA',
-  filepath = RELPATH_SCHEMA,
-  fileinfo = TARGETS_FILEINFO_SCHEMA)
-
-# A list of TARGETINFO_SCHEMA.
-TARGETINFOS_SCHEMA = SCHEMA.ListOf(TARGETINFO_SCHEMA)
-
-# A string representing a named object.
-NAME_SCHEMA = SCHEMA.AnyString()
-
-# A dict of repository names to mirrors.
-REPO_NAMES_TO_MIRRORS_SCHEMA = SCHEMA.DictOf(
-  key_schema = NAME_SCHEMA,
-  value_schema = SCHEMA.ListOf(sslib_formats.URL_SCHEMA))
-
-# An object containing the map file's "mapping" attribute.
-MAPPING_SCHEMA = SCHEMA.ListOf(SCHEMA.Object(
-  paths = RELPATHS_SCHEMA,
-  repositories = SCHEMA.ListOf(NAME_SCHEMA),
-  terminating = BOOLEAN_SCHEMA,
-  threshold = THRESHOLD_SCHEMA))
-
-# A dict containing the map file (named 'map.json', by default).  The format
-# of the map file is covered in TAP 4: Multiple repository consensus on
-# entrusted targets.
-MAPFILE_SCHEMA = SCHEMA.Object(
-  repositories = REPO_NAMES_TO_MIRRORS_SCHEMA,
-  mapping = MAPPING_SCHEMA)
-
-# Like ROLEDICT_SCHEMA, except that ROLE_SCHEMA instances are stored in order.
-ROLELIST_SCHEMA = SCHEMA.ListOf(ROLE_SCHEMA)
-
-# The delegated roles of a Targets role (a parent).
-DELEGATIONS_SCHEMA = SCHEMA.Object(
-  keys = KEYDICT_SCHEMA,
-  roles = ROLELIST_SCHEMA)
-
-# The number of hashed bins, or the number of delegated roles.  See
-# delegate_hashed_bins() in 'repository_tool.py' for an example.  Note:
-# Tools may require further restrictions on the number of bins, such
-# as requiring them to be a power of 2.
-NUMBINS_SCHEMA = SCHEMA.Integer(lo=1)
-# The fileinfo format of targets specified in the repository and
-# developer tools.  The fields match that of TARGETS_FILEINFO_SCHEMA, only all
-# fields are optional.
-CUSTOM_SCHEMA = SCHEMA.DictOf(
-  key_schema = SCHEMA.AnyString(),
-  value_schema = SCHEMA.Any()
-)
-LOOSE_TARGETS_FILEINFO_SCHEMA = SCHEMA.Object(
-  object_name = "LOOSE_TARGETS_FILEINFO_SCHEMA",
-  length = SCHEMA.Optional(LENGTH_SCHEMA),
-  hashes = SCHEMA.Optional(HASHDICT_SCHEMA),
-  version = SCHEMA.Optional(METADATAVERSION_SCHEMA),
-  custom = SCHEMA.Optional(SCHEMA.Object())
-)
-
-PATH_FILEINFO_SCHEMA = SCHEMA.DictOf(
-  key_schema = RELPATH_SCHEMA,
-  value_schema = LOOSE_TARGETS_FILEINFO_SCHEMA)
-
-# TUF roledb
-ROLEDB_SCHEMA = SCHEMA.Object(
-  object_name = 'ROLEDB_SCHEMA',
-  keyids = SCHEMA.Optional(KEYIDS_SCHEMA),
-  signing_keyids = SCHEMA.Optional(KEYIDS_SCHEMA),
-  previous_keyids = SCHEMA.Optional(KEYIDS_SCHEMA),
-  threshold = SCHEMA.Optional(THRESHOLD_SCHEMA),
-  previous_threshold = SCHEMA.Optional(THRESHOLD_SCHEMA),
-  version = SCHEMA.Optional(METADATAVERSION_SCHEMA),
-  expires = SCHEMA.Optional(ISO8601_DATETIME_SCHEMA),
-  signatures = SCHEMA.Optional(sslib_formats.SIGNATURES_SCHEMA),
-  paths = SCHEMA.Optional(SCHEMA.OneOf([RELPATHS_SCHEMA, PATH_FILEINFO_SCHEMA])),
-  path_hash_prefixes = SCHEMA.Optional(PATH_HASH_PREFIXES_SCHEMA),
-  delegations = SCHEMA.Optional(DELEGATIONS_SCHEMA),
-  partial_loaded = SCHEMA.Optional(BOOLEAN_SCHEMA))
-
-# A signable object.  Holds the signing role and its associated signatures.
-SIGNABLE_SCHEMA = SCHEMA.Object(
-  object_name = 'SIGNABLE_SCHEMA',
-  signed = SCHEMA.Any(),
-  signatures = SCHEMA.ListOf(sslib_formats.SIGNATURE_SCHEMA))
-
-# Root role: indicates root keys and top-level roles.
-ROOT_SCHEMA = SCHEMA.Object(
-  object_name = 'ROOT_SCHEMA',
-  _type = SCHEMA.String('root'),
-  spec_version = SPECIFICATION_VERSION_SCHEMA,
-  version = METADATAVERSION_SCHEMA,
-  consistent_snapshot = BOOLEAN_SCHEMA,
-  expires = ISO8601_DATETIME_SCHEMA,
-  keys = KEYDICT_SCHEMA,
-  roles = ROLEDICT_SCHEMA)
-
-# Targets role: indicates targets and delegates target paths to other roles.
-TARGETS_SCHEMA = SCHEMA.Object(
-  object_name = 'TARGETS_SCHEMA',
-  _type = SCHEMA.String('targets'),
-  spec_version = SPECIFICATION_VERSION_SCHEMA,
-  version = METADATAVERSION_SCHEMA,
-  expires = ISO8601_DATETIME_SCHEMA,
-  targets = FILEDICT_SCHEMA,
-  delegations = SCHEMA.Optional(DELEGATIONS_SCHEMA))
-
-# Snapshot role: indicates the latest versions of all metadata (except
-# timestamp).
-SNAPSHOT_SCHEMA = SCHEMA.Object(
-  object_name = 'SNAPSHOT_SCHEMA',
-  _type = SCHEMA.String('snapshot'),
-  version = METADATAVERSION_SCHEMA,
-  expires = sslib_formats.ISO8601_DATETIME_SCHEMA,
-  spec_version = SPECIFICATION_VERSION_SCHEMA,
-  meta = FILEINFODICT_SCHEMA)
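As a sketch, a minimal dict that should satisfy SNAPSHOT_SCHEMA. The values are illustrative; note that 'meta' entries may be plain version infos, since FILEINFODICT_SCHEMA accepts either VERSIONINFO_SCHEMA or METADATA_FILEINFO_SCHEMA values:

```python
snapshot = {
    '_type': 'snapshot',
    'spec_version': '1.0.0',
    'version': 4,
    'expires': '2030-01-01T00:00:00Z',
    'meta': {'targets.json': {'version': 3}},  # a VERSIONINFO_SCHEMA entry
}
SNAPSHOT_SCHEMA.check_match(snapshot)  # raises FormatError if malformed
```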
-# Timestamp role: indicates the latest version of the snapshot file.
-TIMESTAMP_SCHEMA = SCHEMA.Object(
-  object_name = 'TIMESTAMP_SCHEMA',
-  _type = SCHEMA.String('timestamp'),
-  spec_version = SPECIFICATION_VERSION_SCHEMA,
-  version = METADATAVERSION_SCHEMA,
-  expires = sslib_formats.ISO8601_DATETIME_SCHEMA,
-  meta = FILEINFODICT_SCHEMA)
-
-
-# project.cfg file: stores information about the project in a json dictionary
-PROJECT_CFG_SCHEMA = SCHEMA.Object(
-  object_name = 'PROJECT_CFG_SCHEMA',
-  project_name = SCHEMA.AnyString(),
-  layout_type = SCHEMA.OneOf([SCHEMA.String('repo-like'), SCHEMA.String('flat')]),
-  targets_location = sslib_formats.PATH_SCHEMA,
-  metadata_location = sslib_formats.PATH_SCHEMA,
-  prefix = sslib_formats.PATH_SCHEMA,
-  public_keys = sslib_formats.KEYDICT_SCHEMA,
-  threshold = SCHEMA.Integer(lo = 0, hi = 2)
-  )
-
-# A schema containing information a repository mirror may require,
-# such as a url, the path of the directory containing metadata files, etc.
-MIRROR_SCHEMA = SCHEMA.Object(
-  object_name = 'MIRROR_SCHEMA',
-  url_prefix = sslib_formats.URL_SCHEMA,
-  metadata_path = SCHEMA.Optional(RELPATH_SCHEMA),
-  targets_path = SCHEMA.Optional(RELPATH_SCHEMA),
-  confined_target_dirs = SCHEMA.Optional(RELPATHS_SCHEMA),
-  custom = SCHEMA.Optional(SCHEMA.Object()))
-
-# A dictionary of mirrors where the dict keys hold the mirror's name and
-# the dict values the mirror's data (i.e., 'MIRROR_SCHEMA').
-# The repository class of 'updater.py' accepts dictionaries
-# of this type provided by the TUF client.
-MIRRORDICT_SCHEMA = SCHEMA.DictOf(
-  key_schema = SCHEMA.AnyString(),
-  value_schema = MIRROR_SCHEMA)
-
-# A Mirrorlist: indicates all the live mirrors, and what documents they
-# serve.
-MIRRORLIST_SCHEMA = SCHEMA.Object(
-  object_name = 'MIRRORLIST_SCHEMA',
-  _type = SCHEMA.String('mirrors'),
-  version = METADATAVERSION_SCHEMA,
-  expires = sslib_formats.ISO8601_DATETIME_SCHEMA,
-  mirrors = SCHEMA.ListOf(MIRROR_SCHEMA))
-
-# Any of the role schemas (e.g., TIMESTAMP_SCHEMA, SNAPSHOT_SCHEMA, etc.)
-ANYROLE_SCHEMA = SCHEMA.OneOf([ROOT_SCHEMA, TARGETS_SCHEMA, SNAPSHOT_SCHEMA,
-                               TIMESTAMP_SCHEMA, MIRROR_SCHEMA])
-
-# The format of the resulting "scp config dict" after extraction from the
-# push configuration file (i.e., push.cfg).  In the case of a config file
-# utilizing the scp transfer module, it must contain the 'general' and 'scp'
-# sections, where 'general' must contain a 'transfer_module' and
-# 'metadata_path' entry, and 'scp' the 'host', 'user', 'identity_file', and
-# 'remote_directory' entries.
-SCPCONFIG_SCHEMA = SCHEMA.Object(
-  object_name = 'SCPCONFIG_SCHEMA',
-  general = SCHEMA.Object(
-    object_name = '[general]',
-    transfer_module = SCHEMA.String('scp'),
-    metadata_path = sslib_formats.PATH_SCHEMA,
-    targets_directory = sslib_formats.PATH_SCHEMA),
-  scp = SCHEMA.Object(
-    object_name = '[scp]',
-    host = sslib_formats.URL_SCHEMA,
-    user = sslib_formats.NAME_SCHEMA,
-    identity_file = sslib_formats.PATH_SCHEMA,
-    remote_directory = sslib_formats.PATH_SCHEMA))
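For illustration, a made-up mirror configuration that conforms to MIRROR_SCHEMA and MIRRORDICT_SCHEMA. All path fields are optional, and 'confined_target_dirs' limits which target paths a client may fetch from the mirror:

```python
mirror = {
    'url_prefix': 'http://localhost:8001',
    'metadata_path': 'metadata/',
    'targets_path': 'targets/',
    'confined_target_dirs': ['targets/'],
}
MIRROR_SCHEMA.check_match(mirror)
MIRRORDICT_SCHEMA.check_match({'mirror1': mirror})
```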
-# The format of the resulting "receive config dict" after extraction from the
-# receive configuration file (i.e., receive.cfg).  The receive config file
-# must contain a 'general' section, and this section the 'pushroots',
-# 'repository_directory', 'metadata_directory', 'targets_directory', and
-# 'backup_directory' entries.
-RECEIVECONFIG_SCHEMA = SCHEMA.Object(
-  object_name = 'RECEIVECONFIG_SCHEMA',
-  general = SCHEMA.Object(
-    object_name = '[general]',
-    pushroots = SCHEMA.ListOf(sslib_formats.PATH_SCHEMA),
-    repository_directory = sslib_formats.PATH_SCHEMA,
-    metadata_directory = sslib_formats.PATH_SCHEMA,
-    targets_directory = sslib_formats.PATH_SCHEMA,
-    backup_directory = sslib_formats.PATH_SCHEMA))
-
-
-
-def make_signable(role_schema):
-  """
-  <Purpose>
-    Return the role metadata 'role_schema' in 'SIGNABLE_SCHEMA' format.
-    'role_schema' is added to the 'signed' key, and an empty list
-    initialized to the 'signatures' key.  The caller adds signatures
-    to this second field.
-    Note: check_signable_object_format() should be called after
-    make_signable() and signatures added to ensure the final
-    signable object has a valid format (i.e., a signable containing
-    a supported role metadata).
-
-  <Arguments>
-    role_schema:
-      A role schema dict (e.g., 'ROOT_SCHEMA', 'SNAPSHOT_SCHEMA').
-
-  <Exceptions>
-    None.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    A dict in 'SIGNABLE_SCHEMA' format.
-  """
-
-  if not isinstance(role_schema, dict) or 'signed' not in role_schema:
-    return { 'signed' : role_schema, 'signatures' : [] }
-
-  else:
-    return role_schema
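A brief usage sketch of make_signable() with made-up timestamp metadata; signatures are appended to the returned object separately:

```python
unsigned = {'_type': 'timestamp', 'spec_version': '1.0.0', 'version': 1,
            'expires': '2030-01-01T00:00:00Z',
            'meta': {'snapshot.json': {'version': 2}}}

signable = make_signable(unsigned)
assert signable == {'signed': unsigned, 'signatures': []}

# Passing an already-signable dict is a no-op.
assert make_signable(signable) is signable
```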
-def build_dict_conforming_to_schema(schema, **kwargs):
-  """
-  <Purpose>
-    Given a schema.Object object (for example, TIMESTAMP_SCHEMA from this
-    module) and a set of keyword arguments, create a dictionary that conforms
-    to the given schema, using the keyword arguments to define the elements of
-    the new dict.
-
-    Checks the result to make sure that it conforms to the given schema,
-    raising an error if not.
-
-  <Arguments>
-    schema:
-      A schema.Object, like TIMESTAMP_SCHEMA, TARGETS_FILEINFO_SCHEMA,
-      securesystemslib.formats.SIGNATURE_SCHEMA, etc.
-
-    **kwargs:
-      A keyword argument for each element of the schema.  Optional arguments
-      may be included or skipped, but all required arguments must be included.
-
-      For example, for TIMESTAMP_SCHEMA, a call might look like:
-        build_dict_conforming_to_schema(
-            TIMESTAMP_SCHEMA,
-            _type='timestamp',
-            spec_version='1.0.0',
-            version=1,
-            expires='2020-01-01T00:00:00Z',
-            meta={...})
-      Some arguments will be filled in if excluded: _type, spec_version
-
-  <Returns>
-    A dictionary conforming to the given schema.  Adds certain required fields
-    if they are missing and can be deduced from the schema.  The data returned
-    is a deep copy.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError
-      if the provided data does not match the schema when assembled.
-
-  <Side Effects>
-    None.  In particular, the provided values are not modified, and the
-    returned dictionary does not include references to them.
-  """
-
-  # Check the schema argument type (must provide check_match and _required).
-  if not isinstance(schema, SCHEMA.Object):
-    raise ValueError(
-        'The first argument must be a schema.Object instance, but is not. '
-        'Given schema: ' + repr(schema))
-
-  # Make a copy of the provided fields so that the caller's provided values
-  # do not change when the returned values are changed.
-  dictionary = copy.deepcopy(kwargs)
-
-
-  # Automatically provide certain schema properties if they are not already
-  # provided and are required in objects of the given schema class.
-  # This includes:
-  #   _type:        the expected value of the schema's _type field
-  #   spec_version: SPECIFICATION_VERSION_SCHEMA
-  #
-  # (Please note that _required is slightly misleading, as it includes both
-  # required and optional elements.  It should probably be called _components.)
-  #
-  for key, element_type in schema._required: #pylint: disable=protected-access
-
-    if key in dictionary:
-      # If the field has been provided, proceed normally.
-      continue
-
-    elif isinstance(element_type, SCHEMA.Optional):
-      # If the field has NOT been provided but IS optional, proceed without it.
-      continue
-
-    else:
-      # If the field has not been provided and is required, check to see if
-      # the field is one of the fields we automatically fill.
-
-      # Currently, the list is limited to ['_type', 'spec_version'].
-
-      if key == '_type' and isinstance(element_type, SCHEMA.String):
-        # A SCHEMA.String stores its expected value in _string, so use that.
-        dictionary[key] = element_type._string #pylint: disable=protected-access
-
-      elif (key == 'spec_version' and
-          element_type == SPECIFICATION_VERSION_SCHEMA):
-        # If not provided, use the specification version in tuf/__init__.py
-        dictionary[key] = tuf.SPECIFICATION_VERSION
-
-
-  # If what we produce does not match the provided schema, raise a FormatError.
-  schema.check_match(dictionary)
-
-  return dictionary
-
-
-
-
-
-# A dict holding the recognized schemas for the top-level roles.
-SCHEMAS_BY_TYPE = {
-  'root' : ROOT_SCHEMA,
-  'targets' : TARGETS_SCHEMA,
-  'snapshot' : SNAPSHOT_SCHEMA,
-  'timestamp' : TIMESTAMP_SCHEMA,
-  'mirrors' : MIRRORLIST_SCHEMA}
-
-
-
-
-def expiry_string_to_datetime(expires):
-  """
-  <Purpose>
-    Convert an expiry string to a datetime object.
-
-  <Arguments>
-    expires:
-      The expiry date-time string in the ISO8601 format that is defined
-      in securesystemslib.ISO8601_DATETIME_SCHEMA.  E.g. '2038-01-19T03:14:08Z'
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if 'expires' cannot be
-    parsed correctly.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    A datetime object representing the expiry time.
-  """
-
-  # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
-  sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expires)
-
-  try:
-    return datetime.datetime.strptime(expires, "%Y-%m-%dT%H:%M:%SZ")
-  except ValueError as error:
-    raise sslib_exceptions.FormatError(
-        'Failed to parse ' + repr(expires) + ' as an expiry time') from error
-
-
-
-
-def datetime_to_unix_timestamp(datetime_object):
-  """
-  <Purpose>
-    Convert 'datetime_object' (in datetime.datetime() format) to a Unix/POSIX
-    timestamp.  For example, Python's time.time() returns a Unix timestamp, and
-    includes the number of microseconds.  'datetime_object' is converted to UTC.
-
-    >>> datetime_object = datetime.datetime(1985, 10, 26, 1, 22)
-    >>> timestamp = datetime_to_unix_timestamp(datetime_object)
-    >>> timestamp
-    499137720
-
-  <Arguments>
-    datetime_object:
-      The datetime.datetime() object to convert to a Unix timestamp.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if 'datetime_object' is not a
-    datetime.datetime() object.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    A unix (posix) timestamp (e.g., 499137720).
-  """
-
-  # Is 'datetime_object' a datetime.datetime() object?
-  # Raise 'securesystemslib.exceptions.FormatError' if not.
-  if not isinstance(datetime_object, datetime.datetime):
-    message = repr(datetime_object) + ' is not a datetime.datetime() object.'
-    raise sslib_exceptions.FormatError(message)
-
-  unix_timestamp = calendar.timegm(datetime_object.timetuple())
-
-  return unix_timestamp
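As a quick sketch of the auto-fill behavior described above, a call that omits '_type' and 'spec_version' and still produces a schema-conformant dict (values are made up):

```python
timestamp = build_dict_conforming_to_schema(
    TIMESTAMP_SCHEMA,
    version=1,
    expires='2030-01-01T00:00:00Z',
    meta={'snapshot.json': {'version': 2}})

# '_type' comes from the schema, 'spec_version' from tuf/__init__.py.
assert timestamp['_type'] == 'timestamp'
```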
-def unix_timestamp_to_datetime(unix_timestamp):
-  """
-  <Purpose>
-    Convert 'unix_timestamp' (i.e., POSIX time, in UNIX_TIMESTAMP_SCHEMA
-    format) to a datetime.datetime() object.  'unix_timestamp' is the number
-    of seconds since the epoch (January 1, 1970.)
-
-    >>> datetime_object = unix_timestamp_to_datetime(1445455680)
-    >>> datetime_object
-    datetime.datetime(2015, 10, 21, 19, 28)
-
-  <Arguments>
-    unix_timestamp:
-      An integer representing the time (e.g., 1445455680).  Conformant to
-      'securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA'.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if 'unix_timestamp' is improperly
-    formatted.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    A datetime.datetime() object corresponding to 'unix_timestamp'.
-  """
-
-  # Is 'unix_timestamp' properly formatted?
-  # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
-  sslib_formats.UNIX_TIMESTAMP_SCHEMA.check_match(unix_timestamp)
-
-  # Convert 'unix_timestamp' to a 'time.struct_time', in UTC.  The Daylight
-  # Savings Time (DST) flag is set to zero.  datetime.fromtimestamp() is not
-  # used because it returns a local datetime.
-  struct_time = time.gmtime(unix_timestamp)
-
-  # Extract the (year, month, day, hour, minutes, seconds) arguments for the
-  # datetime object to be returned.
-  datetime_object = datetime.datetime(*struct_time[:6])
-
-  return datetime_object
-
-
-
-def format_base64(data):
-  """
-  <Purpose>
-    Return the base64 encoding of 'data' with whitespace and '=' signs omitted.
-
-  <Arguments>
-    data:
-      Binary or buffer of data to convert.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if the base64 encoding fails or
-    the argument is invalid.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    A base64-encoded string.
-  """
-
-  try:
-    return binascii.b2a_base64(data).decode('utf-8').rstrip('=\n ')
-
-  except (TypeError, binascii.Error) as e:
-    raise sslib_exceptions.FormatError('Invalid base64'
-        ' encoding: ' + str(e))
-
-
-
-
-def parse_base64(base64_string):
-  """
-  <Purpose>
-    Parse a base64 encoding with whitespace and '=' signs omitted.
-
-  <Arguments>
-    base64_string:
-      A string holding a base64 value.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if 'base64_string' cannot be
-    parsed due to an invalid base64 encoding.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    A byte string representing the parsed base64 encoding of
-    'base64_string'.
-  """
-
-  if not isinstance(base64_string, str):
-    message = 'Invalid argument: ' + repr(base64_string)
-    raise sslib_exceptions.FormatError(message)
-
-  extra = len(base64_string) % 4
-  if extra:
-    padding = '=' * (4 - extra)
-    base64_string = base64_string + padding
-
-  try:
-    return binascii.a2b_base64(base64_string.encode('utf-8'))
-
-  except (TypeError, binascii.Error) as e:
-    raise sslib_exceptions.FormatError('Invalid base64'
-        ' encoding: ' + str(e))
-
-
-
-def make_targets_fileinfo(length, hashes, custom=None):
-  """
-  <Purpose>
-    Create a dictionary conformant to 'TARGETS_FILEINFO_SCHEMA'.
-    This dict describes a target file.
-
-  <Arguments>
-    length:
-      An integer representing the size of the file.
-
-    hashes:
-      A dict of hashes in 'HASHDICT_SCHEMA' format, which has the form:
-      {'sha256': '123df8a9b12', 'sha512': '324324dfc121', ...}
-
-    custom:
-      An optional object providing additional information about the file.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if the 'TARGETS_FILEINFO_SCHEMA'
-    to be returned does not have the correct format.
-
-  <Returns>
-    A dictionary conformant to 'TARGETS_FILEINFO_SCHEMA', representing the
-    file information of a target file.
-  """
-
-  fileinfo = {'length' : length, 'hashes' : hashes}
-
-  if custom is not None:
-    fileinfo['custom'] = custom
-
-  # Raise 'securesystemslib.exceptions.FormatError' if the check fails.
-  TARGETS_FILEINFO_SCHEMA.check_match(fileinfo)
-
-  return fileinfo
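A quick round-trip through the two base64 helpers above; the trailing '=' padding is omitted on encode and restored on parse:

```python
token = format_base64(b'hello world')
assert token == 'aGVsbG8gd29ybGQ'          # '=' padding stripped
assert parse_base64(token) == b'hello world'
```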
-def make_metadata_fileinfo(version, length=None, hashes=None):
-  """
-  <Purpose>
-    Create a dictionary conformant to 'METADATA_FILEINFO_SCHEMA'.
-    This dict describes one of the metadata files used for timestamp and
-    snapshot roles.
-
-  <Arguments>
-    version:
-      An integer representing the version of the file.
-
-    length:
-      An optional integer representing the size of the file.
-
-    hashes:
-      An optional dict of hashes in 'HASHDICT_SCHEMA' format, which has the
-      form: {'sha256': '123df8a9b12', 'sha512': '324324dfc121', ...}
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if the 'METADATA_FILEINFO_SCHEMA'
-    to be returned does not have the correct format.
-
-  <Returns>
-    A dictionary conformant to 'METADATA_FILEINFO_SCHEMA', representing the
-    file information of a metadata file.
-  """
-
-  fileinfo = {'version' : version}
-
-  if length:
-    fileinfo['length'] = length
-
-  if hashes:
-    fileinfo['hashes'] = hashes
-
-  # Raise 'securesystemslib.exceptions.FormatError' if the check fails.
-  METADATA_FILEINFO_SCHEMA.check_match(fileinfo)
-
-  return fileinfo
-
-
-
-def make_versioninfo(version_number):
-  """
-  <Purpose>
-    Create a dictionary conformant to 'VERSIONINFO_SCHEMA'.  This dict
-    describes both metadata and target files.
-
-  <Arguments>
-    version_number:
-      An integer representing the version of a particular metadata role.
-      The dictionary returned by this function is expected to be included
-      in Snapshot metadata.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if the dict to be returned does
-    not have the correct format (i.e., VERSIONINFO_SCHEMA).
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    A dictionary conformant to 'VERSIONINFO_SCHEMA', containing the version
-    information of a metadata role.
-  """
-
-  versioninfo = {'version': version_number}
-
-  # Raise 'securesystemslib.exceptions.FormatError' if 'versioninfo' is
-  # improperly formatted.
-  VERSIONINFO_SCHEMA.check_match(versioninfo)
-
-  return versioninfo
-
-
-
-
-
-def expected_meta_rolename(meta_rolename):
-  """
-  <Purpose>
-    Ensure 'meta_rolename' is properly formatted and return it normalized to
-    lower case.  E.g., 'Root' is returned as 'root', and 'Targets Role1' as
-    'targets role1'.
-
-  <Arguments>
-    meta_rolename:
-      A string representing the rolename.
-      E.g., 'root', 'targets'.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if 'meta_rolename' is improperly
-    formatted.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    A lower-case string (e.g., 'root', 'targets').
-  """
-
-  # Does 'meta_rolename' have the correct type?
-  # This check ensures 'meta_rolename' conforms to
-  # 'securesystemslib.formats.NAME_SCHEMA'.
-  # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
-  sslib_formats.NAME_SCHEMA.check_match(meta_rolename)
-
-  return meta_rolename.lower()
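A short sketch of the three constructors above. The digests are truncated, made-up hex strings; HASHDICT_SCHEMA only checks for hex values, not digest length:

```python
targets_fileinfo = make_targets_fileinfo(
    1024, {'sha256': '65b8c67f'}, custom={'type': 'package'})

metadata_fileinfo = make_metadata_fileinfo(
    3, length=512, hashes={'sha256': 'ab94afd3'})

versioninfo = make_versioninfo(3)   # {'version': 3}
```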
-def check_signable_object_format(signable):
-  """
-  <Purpose>
-    Ensure 'signable' is properly formatted, conformant to
-    'SIGNABLE_SCHEMA'.  Return the signing role on
-    success.  Note: The 'signed' field of a 'SIGNABLE_SCHEMA' is checked
-    against securesystemslib.schema.Any().  The 'signed' field, however,
-    should actually hold one of the supported role schemas (e.g.,
-    'ROOT_SCHEMA', 'TARGETS_SCHEMA').  The role schemas all differ in their
-    format, so this function determines exactly which schema is listed in the
-    'signed' field.
-
-  <Arguments>
-    signable:
-      The signable object compared against 'SIGNABLE_SCHEMA'.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if 'signable' does not have the
-    correct format.
-
-    tuf.exceptions.UnsignedMetadataError, if 'signable' does not have any
-    signatures.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    A string representing the signing role (e.g., 'root', 'targets').
-    The role string is returned with characters all lower case.
-  """
-
-  # Does 'signable' have the correct type?
-  # This check ensures 'signable' conforms to
-  # 'SIGNABLE_SCHEMA'.
-  SIGNABLE_SCHEMA.check_match(signable)
-
-  try:
-    role_type = signable['signed']['_type']
-
-  except (KeyError, TypeError) as error:
-    raise sslib_exceptions.FormatError('Untyped signable object.') from error
-
-  try:
-    schema = SCHEMAS_BY_TYPE[role_type]
-
-  except KeyError as error:
-    raise sslib_exceptions.FormatError('Unrecognized type '
-        + repr(role_type)) from error
-
-  if not signable['signatures']:
-    raise exceptions.UnsignedMetadataError('Signable object of type ' +
-        repr(role_type) + ' has no signatures', signable)
-
-  # 'securesystemslib.exceptions.FormatError' raised if 'signable' does not
-  # have a properly formatted role schema.
-  schema.check_match(signable['signed'])
-
-  return role_type.lower()
-
-
-
-if __name__ == '__main__':
-  # The interactive sessions of the documentation strings can
-  # be tested by running formats.py as a standalone module.
-  # python3 -B formats.py
-  import doctest
-  doctest.testmod()
diff --git a/tuf/keydb.py b/tuf/keydb.py
deleted file mode 100755
index a5681321f8..0000000000
--- a/tuf/keydb.py
+++ /dev/null
@@ -1,442 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  keydb.py
-
-<Author>
-  Vladimir Diaz
-
-<Started>
-  March 21, 2012.  Based on a previous version of this module by Geremy Condra.
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  Represent a collection of keys and their organization.  This module ensures
-  the layout of the collection remains consistent and easily verifiable.
-  Provided are functions to add and delete keys from the database, retrieve a
-  single key, and assemble a collection from keys stored in TUF 'Root'
-  metadata.  The Update Framework process maintains a set of key info for
-  multiple repositories.
-
-  RSA keys are currently supported and a collection of keys is organized as a
-  dictionary indexed by key ID.  Key IDs are used as identifiers for keys
-  (e.g., RSA key).  They are the hexadecimal representations of the hash of
-  key objects (specifically, the key object containing only the public key).
-  See 'rsa_key.py' and the '_get_keyid()' function to learn precisely how
-  keyids are generated.  One may get the keyid of a key object by simply
-  accessing the dictionary's 'keyid' key (i.e., rsakey['keyid']).
-"""
-
-import logging
-import copy
-
-import securesystemslib # pylint: disable=unused-import
-from securesystemslib import exceptions as sslib_exceptions
-from securesystemslib import formats as sslib_formats
-from securesystemslib import keys as sslib_keys
-
-from tuf import exceptions
-from tuf import formats
-
-# List of strings representing the key types supported by TUF.
-_SUPPORTED_KEY_TYPES = ['rsa', 'ed25519', 'ecdsa', 'ecdsa-sha2-nistp256']
-
-# See 'log.py' to learn how logging is handled in TUF.
-logger = logging.getLogger(__name__)
-
-# The key database.
-_keydb_dict = {}
-_keydb_dict['default'] = {}
-def create_keydb_from_root_metadata(root_metadata, repository_name='default'):
-  """
-  <Purpose>
-    Populate the key database with the unique keys found in 'root_metadata'.
-    The database dictionary will conform to
-    'tuf.formats.KEYDB_SCHEMA' and have the form: {keyid: key,
-    ...}.  The 'keyid' conforms to 'securesystemslib.formats.KEYID_SCHEMA' and
-    'key' to its respective type.  In the case of RSA keys, this object would
-    match 'RSAKEY_SCHEMA'.
-
-  <Arguments>
-    root_metadata:
-      A dictionary conformant to 'tuf.formats.ROOT_SCHEMA'.  The keys found
-      in the 'keys' field of 'root_metadata' are needed by this function.
-
-    repository_name:
-      The name of the repository to store the key information.  If not
-      supplied, the key database is populated for the 'default' repository.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if 'root_metadata' does not have
-    the correct format.
-
-    securesystemslib.exceptions.InvalidNameError, if 'repository_name' does
-    not exist in the key database.
-
-  <Side Effects>
-    A function to add the key to the database is called.  In the case of RSA
-    keys, this function is add_key().
-
-    The old keydb key database is replaced.
-
-  <Returns>
-    None.
-  """
-
-  # Does 'root_metadata' have the correct format?
-  # This check will ensure 'root_metadata' has the appropriate number of
-  # objects and object types, and that all dict keys are properly named.
-  # Raise 'securesystemslib.exceptions.FormatError' if the check fails.
-  formats.ROOT_SCHEMA.check_match(root_metadata)
-
-  # Does 'repository_name' have the correct format?
-  sslib_formats.NAME_SCHEMA.check_match(repository_name)
-
-  # Clear the key database for 'repository_name', or create it if non-existent.
-  if repository_name in _keydb_dict:
-    _keydb_dict[repository_name].clear()
-
-  else:
-    create_keydb(repository_name)
-
-  # Iterate the keys found in 'root_metadata' by converting them to
-  # 'RSAKEY_SCHEMA' if their type is 'rsa', and then adding them to the
-  # key database using the provided keyid.
-  for keyid, key_metadata in root_metadata['keys'].items():
-    if key_metadata['keytype'] in _SUPPORTED_KEY_TYPES:
-      # 'key_metadata' is stored in 'KEY_SCHEMA' format.  Call
-      # create_from_metadata_format() to get the key in 'RSAKEY_SCHEMA' format,
-      # which is the format expected by 'add_key()'.  Note: This call to
-      # format_metadata_to_key() uses the provided keyid as the default keyid.
-      # All other keyids returned are ignored.
-
-      key_dict, _ = sslib_keys.format_metadata_to_key(key_metadata,
-          keyid)
-
-      # Make sure to update key_dict['keyid'] to use one of the other valid
-      # keyids, otherwise add_key() will have no reference to it.
-      try:
-        add_key(key_dict, repository_name=repository_name)
-
-      # Although keyid duplicates should *not* occur (unique dict keys), log a
-      # warning and continue.  However, 'key_dict' may have already been
-      # added to the keydb elsewhere.
-      except exceptions.KeyAlreadyExistsError as e: # pragma: no cover
-        logger.warning(e)
-        continue
-
-    else:
-      logger.warning('Root Metadata file contains a key with an invalid keytype.')
-
-
-
-
-
-def create_keydb(repository_name):
-  """
-  <Purpose>
-    Create a key database for a non-default repository named
-    'repository_name'.
-
-  <Arguments>
-    repository_name:
-      The name of the repository.  An empty key database is created, and keys
-      may be added via add_key(key_dict, keyid, repository_name).
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if 'repository_name' is
-    improperly formatted.
-
-    securesystemslib.exceptions.InvalidNameError, if 'repository_name' already
-    exists.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    None.
-  """
-
-  # Is 'repository_name' properly formatted?  Raise
-  # 'securesystemslib.exceptions.FormatError' if not.
-  sslib_formats.NAME_SCHEMA.check_match(repository_name)
-
-  if repository_name in _keydb_dict:
-    raise sslib_exceptions.InvalidNameError('Repository name already exists:'
-        ' ' + repr(repository_name))
-
-  _keydb_dict[repository_name] = {}
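A brief usage sketch; 'root_metadata' is assumed to be a dict conformant to tuf.formats.ROOT_SCHEMA (for instance, the 'signed' portion of a root.json). The function creates the per-repository database itself if it does not yet exist:

```python
from tuf import keydb

keydb.create_keydb_from_root_metadata(root_metadata,
    repository_name='example-repo')
```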
-def remove_keydb(repository_name):
-  """
-  <Purpose>
-    Remove a key database for a non-default repository named
-    'repository_name'.  The 'default' repository cannot be removed.
-
-  <Arguments>
-    repository_name:
-      The name of the repository to remove.  The 'default' repository should
-      not be removed, so 'repository_name' cannot be 'default'.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if 'repository_name' is
-    improperly formatted.
-
-    securesystemslib.exceptions.InvalidNameError, if 'repository_name' is
-    'default'.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    None.
-  """
-
-  # Is 'repository_name' properly formatted?  Raise
-  # 'securesystemslib.exceptions.FormatError' if not.
-  sslib_formats.NAME_SCHEMA.check_match(repository_name)
-
-  if repository_name not in _keydb_dict:
-    logger.warning('Repository name does not exist: ' + repr(repository_name))
-    return
-
-  if repository_name == 'default':
-    raise sslib_exceptions.InvalidNameError('Cannot remove the default repository:'
-        ' ' + repr(repository_name))
-
-  del _keydb_dict[repository_name]
-
-
-
-
-def add_key(key_dict, keyid=None, repository_name='default'):
-  """
-  <Purpose>
-    Add 'key_dict' to the key database while avoiding duplicates.
-    If 'keyid' is provided, verify it is the correct keyid for 'key_dict'
-    and raise an exception if it is not.
-
-  <Arguments>
-    key_dict:
-      A dictionary conformant to 'securesystemslib.formats.ANYKEY_SCHEMA'.
-      It has the form:
-
-      {'keytype': 'rsa',
-       'keyid': keyid,
-       'keyval': {'public': '-----BEGIN RSA PUBLIC KEY----- ...',
-                  'private': '-----BEGIN RSA PRIVATE KEY----- ...'}}
-
-    keyid:
-      An object conformant to 'KEYID_SCHEMA'.  It is used as an identifier
-      for RSA keys.
-
-    repository_name:
-      The name of the repository to add the key.  If not supplied, the key is
-      added to the 'default' repository.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if the arguments do not have the
-    correct format.
-
-    securesystemslib.exceptions.Error, if 'keyid' does not match the keyid
-    for 'key_dict'.
-
-    tuf.exceptions.KeyAlreadyExistsError, if 'key_dict' is found in the key
-    database.
-
-    securesystemslib.exceptions.InvalidNameError, if 'repository_name' does
-    not exist in the key database.
-
-  <Side Effects>
-    The keydb key database is modified.
-
-  <Returns>
-    None.
-  """
-
-  # Does 'key_dict' have the correct format?
-  # This check will ensure 'key_dict' has the appropriate number of objects
-  # and object types, and that all dict keys are properly named.
-  # Raise 'securesystemslib.exceptions.FormatError' if the check fails.
-  sslib_formats.ANYKEY_SCHEMA.check_match(key_dict)
-
-  # Does 'repository_name' have the correct format?
-  sslib_formats.NAME_SCHEMA.check_match(repository_name)
-
-  # Does 'keyid' have the correct format?
-  if keyid is not None:
-    # Raise 'securesystemslib.exceptions.FormatError' if the check fails.
-    sslib_formats.KEYID_SCHEMA.check_match(keyid)
-
-    # Check if the keyid found in 'key_dict' matches 'keyid'.
-    if keyid != key_dict['keyid']:
-      raise sslib_exceptions.Error('Incorrect keyid.  Got ' + key_dict['keyid']
-          + ' but expected ' + keyid)
-
-  # Ensure 'repository_name' is actually set in the key database.
-  if repository_name not in _keydb_dict:
-    raise sslib_exceptions.InvalidNameError('Repository name does not exist:'
-        ' ' + repr(repository_name))
-
-  # Check if the keyid belonging to 'key_dict' is not already
-  # available in the key database before returning.
-  keyid = key_dict['keyid']
-  if keyid in _keydb_dict[repository_name]:
-    raise exceptions.KeyAlreadyExistsError('Key: ' + keyid)
-
-  _keydb_dict[repository_name][keyid] = copy.deepcopy(key_dict)
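For illustration, registering a freshly generated key; add_key() stores a deep copy indexed by the key's keyid:

```python
from securesystemslib import keys as sslib_keys
from tuf import keydb

# Generate an ed25519 key with securesystemslib and add it to the
# 'default' repository's key database.
ed25519_key = sslib_keys.generate_ed25519_key()
keydb.add_key(ed25519_key, keyid=ed25519_key['keyid'])
```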
-def get_key(keyid, repository_name='default'):
-  """
-  <Purpose>
-    Return the key belonging to 'keyid'.
-
-  <Arguments>
-    keyid:
-      An object conformant to 'securesystemslib.formats.KEYID_SCHEMA'.  It is
-      used as an identifier for keys.
-
-    repository_name:
-      The name of the repository to get the key.  If not supplied, the key is
-      retrieved from the 'default' repository.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if the arguments do not have the
-    correct format.
-
-    tuf.exceptions.UnknownKeyError, if 'keyid' is not found in the keydb
-    database.
-
-    securesystemslib.exceptions.InvalidNameError, if 'repository_name' does
-    not exist in the key database.
-
-  <Side Effects>
-    None.
-
-  <Returns>
-    The key matching 'keyid'.  In the case of RSA keys, a dictionary
-    conformant to 'securesystemslib.formats.RSAKEY_SCHEMA' is returned.
-  """
-
-  # Does 'keyid' have the correct format?
-  # Raise 'securesystemslib.exceptions.FormatError' if the match fails.
-  sslib_formats.KEYID_SCHEMA.check_match(keyid)
-
-  # Does 'repository_name' have the correct format?
-  sslib_formats.NAME_SCHEMA.check_match(repository_name)
-
-  if repository_name not in _keydb_dict:
-    raise sslib_exceptions.InvalidNameError('Repository name does not exist:'
-        ' ' + repr(repository_name))
-
-  # Return the key belonging to 'keyid', if found in the key database.
-  try:
-    return copy.deepcopy(_keydb_dict[repository_name][keyid])
-
-  except KeyError as error:
-    raise exceptions.UnknownKeyError('Key: ' + keyid) from error
-
-
-
-
-
-def remove_key(keyid, repository_name='default'):
-  """
-  <Purpose>
-    Remove the key belonging to 'keyid'.
-
-  <Arguments>
-    keyid:
-      An object conformant to 'securesystemslib.formats.KEYID_SCHEMA'.  It is
-      used as an identifier for keys.
-
-    repository_name:
-      The name of the repository to remove the key.  If not supplied, the key
-      is removed from the 'default' repository.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if the arguments do not have the
-    correct format.
-
-    tuf.exceptions.UnknownKeyError, if 'keyid' is not found in the key
-    database.
-
-    securesystemslib.exceptions.InvalidNameError, if 'repository_name' does
-    not exist in the key database.
-
-  <Side Effects>
-    The key, identified by 'keyid', is deleted from the key database.
-
-  <Returns>
-    None.
-  """
-
-  # Does 'keyid' have the correct format?
-  # Raise 'securesystemslib.exceptions.FormatError' if the match fails.
-  sslib_formats.KEYID_SCHEMA.check_match(keyid)
-
-  # Does 'repository_name' have the correct format?
-  sslib_formats.NAME_SCHEMA.check_match(repository_name)
-
-  if repository_name not in _keydb_dict:
-    raise sslib_exceptions.InvalidNameError('Repository name does not exist:'
-        ' ' + repr(repository_name))
-
-  # Remove the key belonging to 'keyid' if found in the key database.
-  if keyid in _keydb_dict[repository_name]:
-    del _keydb_dict[repository_name][keyid]
-
-  else:
-    raise exceptions.UnknownKeyError('Key: ' + keyid)
-def clear_keydb(repository_name='default', clear_all=False):
-  """
-  <Purpose>
-    Clear the keydb key database.
-
-  <Arguments>
-    repository_name:
-      The name of the repository to clear the key database.  If not supplied,
-      the key database is cleared for the 'default' repository.
-
-    clear_all:
-      Boolean indicating whether to clear the entire keydb.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if 'repository_name' is
-    improperly formatted.
-
-    securesystemslib.exceptions.InvalidNameError, if 'repository_name' does
-    not exist in the key database.
-
-  <Side Effects>
-    The keydb key database is reset.
-
-  <Returns>
-    None.
-  """
-
-  # Do the arguments have the correct format?  Raise
-  # 'securesystemslib.exceptions.FormatError' if 'repository_name' is
-  # improperly formatted.
-  sslib_formats.NAME_SCHEMA.check_match(repository_name)
-  sslib_formats.BOOLEAN_SCHEMA.check_match(clear_all)
-
-  global _keydb_dict
-
-  if clear_all:
-    _keydb_dict = {}
-    _keydb_dict['default'] = {}
-
-  if repository_name not in _keydb_dict:
-    raise sslib_exceptions.InvalidNameError('Repository name does not exist:'
-        ' ' + repr(repository_name))
-
-  _keydb_dict[repository_name] = {}
diff --git a/tuf/log.py b/tuf/log.py
deleted file mode 100755
index 62def62e83..0000000000
--- a/tuf/log.py
+++ /dev/null
@@ -1,451 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  log.py
-
-<Author>
-  Vladimir Diaz
-
-<Started>
-  April 4, 2012.  Based on a previous version of this module by Geremy Condra.
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  A central location for all logging-related configuration.  This module
-  should be imported once by the main program.  If other modules wish to
-  incorporate 'tuf' logging, they should do the following:
-
-  import logging
-  logger = logging.getLogger('tuf')
-
-  'logging' refers to the module name.  logging.getLogger() is a function of
-  the module 'logging'.  logging.getLogger(name) returns a Logger instance
-  associated with 'name'.  Calling getLogger(name) will always return the same
-  instance.  In this 'log.py' module, we perform the initial setup for the
-  name 'tuf'.  When any other module does a logging.getLogger('tuf'), it is
-  referring to the same 'tuf' instance, and its associated settings, set here
-  in 'log.py'.  See http://docs.python.org/library/logging.html#logger-objects
-  for more information.
-
-  We use multiple handlers to process log messages in various ways and to
-  configure each one independently.  Instead of using one single manner of
-  processing log messages, we can use two built-in handlers that have already
-  been configured for us.  For example, the built-in FileHandler will catch
-  log messages and dump them to a file.  If we wanted, we could set this file
-  handler to only catch CRITICAL (and greater) messages and save them to a
-  file.  Other handlers (e.g., StreamHandler) could handle INFO-level
-  (and greater) messages.
-
-  Logging Levels:
-
-    --Level--         --Value--
-  logging.CRITICAL        50
-  logging.ERROR           40
-  logging.WARNING         30
-  logging.INFO            20
-  logging.DEBUG           10
-  logging.NOTSET           0
-
-  The logging module is thread-safe.  Logging to a single file from
-  multiple threads in a single process is also thread-safe.  The logging
-  module is NOT thread-safe when logging to a single file across multiple
-  processes:
-  http://docs.python.org/library/logging.html#thread-safety
-  http://docs.python.org/howto/logging-cookbook.html
-"""
-
-import logging
-import time
-
-from securesystemslib import exceptions as sslib_exceptions
-from securesystemslib import formats as sslib_formats
-
-from tuf import exceptions
-from tuf import settings
-# Setting a handler's log level filters only logging messages of that level
-# (and above).  For example, setting the built-in StreamHandler's log level to
-# 'logging.WARNING' will cause the stream handler to only process messages
-# of levels: WARNING, ERROR, and CRITICAL.
-_DEFAULT_LOG_LEVEL = logging.DEBUG
-_DEFAULT_CONSOLE_LOG_LEVEL = logging.INFO
-_DEFAULT_FILE_LOG_LEVEL = logging.DEBUG
-
-# Set the format for logging messages.
-# Example format for '_FORMAT_STRING':
-# [2013-08-13 15:21:18,068 UTC] [tuf]
-# [INFO][_update_metadata:851@updater.py]
-_FORMAT_STRING = '[%(asctime)s UTC] [%(name)s] [%(levelname)s] '+\
-    '[%(funcName)s:%(lineno)s@%(filename)s]\n%(message)s\n'
-
-# Ask all Formatter instances to talk GMT.  Set the 'converter' attribute of
-# 'logging.Formatter' so that all formatters use Greenwich Mean Time.
-# http://docs.python.org/library/logging.html#logging.Formatter.formatTime
-# The 2nd paragraph in the link above contains the relevant information.
-# GMT = UTC (Coordinated Universal Time).  TUF metadata stores timestamps in
-# UTC.  We previously displayed the local time but this led to confusion when
-# visually comparing logger events and metadata information.  Unix time stamps
-# are fine but they may be less human-readable than UTC.
-logging.Formatter.converter = time.gmtime
-formatter = logging.Formatter(_FORMAT_STRING)
-
-# Set the handlers for the logger.  The console handler is unset by default.
-# A module importing 'log.py' should explicitly set the console handler if
-# outputting log messages to the screen is needed.  Adding a console handler
-# can be done with tuf.log.add_console_handler().  Logging messages to a file
-# is not set by default.
-console_handler = None
-file_handler = None
-
-# Set the logger and its settings.
-# Note: we're configuring the top-level hierarchy for the tuf package,
-# therefore we explicitly request the 'tuf' logger, rather than following
-# the standard pattern of logging.getLogger(__name__)
-logger = logging.getLogger('tuf')
-logger.setLevel(_DEFAULT_LOG_LEVEL)
-logger.addHandler(logging.NullHandler())
-
-# Set the built-in file handler.  Messages will be logged to
-# 'settings.LOG_FILENAME', and only those messages with a log level of
-# '_DEFAULT_FILE_LOG_LEVEL' or above.  The log level of messages handled by
-# 'file_handler' may be modified with 'set_filehandler_log_level()'.
-# 'settings.LOG_FILENAME' will be opened in append mode.
-if settings.ENABLE_FILE_LOGGING:
-  file_handler = logging.FileHandler(settings.LOG_FILENAME)
-  file_handler.setLevel(_DEFAULT_FILE_LOG_LEVEL)
-  file_handler.setFormatter(formatter)
-  logger.addHandler(file_handler)
-
-else:
-  pass
-
-# Silently ignore logger exceptions.
-logging.raiseExceptions = False
-class ConsoleFilter(logging.Filter):
-  def filter(self, record):
-    """
-    <Purpose>
-      Use Vinay Sajip's recommendation from Python issue #6435 to modify a
-      LogRecord object.  This is meant to be used with our console handler.
-
-      http://stackoverflow.com/q/6177520
-      http://stackoverflow.com/q/5875225
-      http://bugs.python.org/issue6435
-      http://docs.python.org/howto/logging-cookbook.html#filters-contextual
-      http://docs.python.org/library/logging.html#logrecord-attributes
-
-    <Arguments>
-      record:
-        A logging.LogRecord object.
-
-    <Exceptions>
-      None.
-
-    <Side Effects>
-      Replaces the LogRecord exception text attribute.
-
-    <Returns>
-      True.
-    """
-
-    # If this LogRecord object has an exception, then we will replace its text.
-    if record.exc_info:
-      # We replace the record's cached exception text (which usually contains
-      # the exception traceback) with much simpler exception information.
-      # This is most useful for the console handler, which we do not wish to
-      # deluge with too much data.  Assuming that this filter is not applied
-      # to the file logging handler, the user may always consult the file log
-      # for the original exception traceback.  The exc_info is explained here:
-      # http://docs.python.org/library/sys.html#sys.exc_info
-      exc_type, _, _ = record.exc_info
-
-      # Simply set the class name as the exception text.
-      record.exc_text = exc_type.__name__
-
-    # Always return True to signal that any given record must be formatted.
-    return True
-
-
-
-
-
-def set_log_level(log_level=_DEFAULT_LOG_LEVEL):
-  """
-  <Purpose>
-    Allow the default log level to be overridden.  If 'log_level' is not
-    provided, log level defaults to 'logging.DEBUG'.
-
-  <Arguments>
-    log_level:
-      The log level to set for the 'log.py' file handler.
-      'log_level' examples: logging.INFO; logging.CRITICAL.
-
-  <Exceptions>
-    None.
-
-  <Side Effects>
-    Overrides the logging level for the 'log.py' file handler.
-
-  <Returns>
-    None.
-  """
-
-  # Does 'log_level' have the correct format?
-  # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
-  sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level)
-
-  logger.setLevel(log_level)
-
-
-
-
-
-def set_filehandler_log_level(log_level=_DEFAULT_FILE_LOG_LEVEL):
-  """
-  <Purpose>
-    Allow the default file handler log level to be overridden.  If 'log_level'
-    is not provided, log level defaults to 'logging.DEBUG'.
-
-  <Arguments>
-    log_level:
-      The log level to set for the 'log.py' file handler.
-      'log_level' examples: logging.INFO; logging.CRITICAL.
-
-  <Exceptions>
-    None.
-
-  <Side Effects>
-    Overrides the logging level for the 'log.py' file handler.
-
-  <Returns>
-    None.
-  """
-
-  # Does 'log_level' have the correct format?
-  # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
-  sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level)
-
-  if file_handler:
-    file_handler.setLevel(log_level)
-
-  else:
-    raise exceptions.Error(
-        'File handler has not been set.  Enable file logging'
-        ' before attempting to set its log level')
-
-
-
-
-
-def set_console_log_level(log_level=_DEFAULT_CONSOLE_LOG_LEVEL):
-  """
-  <Purpose>
-    Allow the default log level for console messages to be overridden.  If
-    'log_level' is not provided, log level defaults to 'logging.INFO'.
-
-  <Arguments>
-    log_level:
-      The log level to set for the console handler.
-      'log_level' examples: logging.INFO; logging.CRITICAL.
-
-  <Exceptions>
-    securesystemslib.exceptions.Error, if the 'log.py' console handler has not
-    been set yet with add_console_handler().
-
-  <Side Effects>
-    Overrides the logging level for the console handler.
-
-  <Returns>
-    None.
-  """
-
-  # Does 'log_level' have the correct format?
-  # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
-  sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level)
-
-  # Assign to the global console_handler object.
-  global console_handler
-
-  if console_handler is not None:
-    console_handler.setLevel(log_level)
-
-  else:
-    message = 'The console handler has not been set with add_console_handler().'
-    raise sslib_exceptions.Error(message)
-def add_console_handler(log_level=_DEFAULT_CONSOLE_LOG_LEVEL):
-  """
-  <Purpose>
-    Add a console handler and set its log level to 'log_level'.
-
-  <Arguments>
-    log_level:
-      The log level to set for the console handler.
-      'log_level' examples: logging.INFO; logging.CRITICAL.
-
-  <Exceptions>
-    None.
-
-  <Side Effects>
-    Adds a console handler to the 'log.py' logger and sets its logging level
-    to 'log_level'.
-
-  <Returns>
-    None.
-  """
-
-  # Does 'log_level' have the correct format?
-  # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
-  sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level)
-
-  # Assign to the global console_handler object.
-  global console_handler
-
-  if not console_handler:
-    # Set the console handler for the logger.  The built-in console handler
-    # will log messages to 'sys.stderr' and capture 'log_level' messages.
-    console_handler = logging.StreamHandler()
-
-    # Get our filter for the console handler.
-    console_filter = ConsoleFilter()
-    console_format_string = '%(message)s'
-    console_formatter = logging.Formatter(console_format_string)
-
-    console_handler.setLevel(log_level)
-    console_handler.setFormatter(console_formatter)
-    console_handler.addFilter(console_filter)
-    logger.addHandler(console_handler)
-    logger.debug('Added a console handler.')
-
-  else:
-    logger.warning('We already have a console handler.')
-
-
-
-
-
-def remove_console_handler():
-  """
-  <Purpose>
-    Remove the console handler from the logger in 'log.py', if previously
-    added.
-
-  <Arguments>
-    None.
-
-  <Exceptions>
-    None.
-
-  <Side Effects>
-    A handler belonging to the console is removed from the 'log.py' logger
-    and the console handler is marked as unset.
-
-  <Returns>
-    None.
-  """
-
-  # Assign to the global 'console_handler' object.
-  global console_handler
-
-  if console_handler:
-    logger.removeHandler(console_handler)
-    console_handler = None
-    logger.debug('Removed a console handler.')
-
-  else:
-    logger.warning('We do not have a console handler.')
-
-
-
-def enable_file_logging(log_filename=settings.LOG_FILENAME):
-  """
-  <Purpose>
-    Log messages to a file (i.e., 'log_filename').  The log level for the file
-    handler can be set with set_filehandler_log_level().
-
-  <Arguments>
-    log_filename:
-      Logging messages are saved to this file.  If not provided, the log
-      filename specified in tuf.settings.LOG_FILENAME is used.
-
-  <Exceptions>
-    securesystemslib.exceptions.FormatError, if any of the arguments are
-    not the expected format.
-
-    tuf.exceptions.Error, if the file handler has already been set.
-
-  <Side Effects>
-    The global file handler is set.
-
-  <Returns>
-    None.
-  """
-
-  # Are the arguments properly formatted?
-  sslib_formats.PATH_SCHEMA.check_match(log_filename)
-
-  global file_handler
-
-  # Add a file handler to the logger if not already set.
-  if not file_handler:
-    file_handler = logging.FileHandler(log_filename)
-    file_handler.setLevel(_DEFAULT_FILE_LOG_LEVEL)
-    file_handler.setFormatter(formatter)
-    logger.addHandler(file_handler)
-
-  else:
-    raise exceptions.Error(
-        'The file handler has already been set.  A new file handler'
-        ' can be set by first calling disable_file_logging()')
-
-
-
-def disable_file_logging():
-  """
-  <Purpose>
-    Disable file logging by removing any previously set file handler.
-    A warning is logged if the file handler cannot be removed.
-
-    The file that was written to will not be deleted.
-
-  <Arguments>
-    None.
-
-  <Exceptions>
-    None.
-
-  <Side Effects>
-    The global file handler is unset.
-
-  <Returns>
-    None.
-  """
-
-  # Assign to the global 'file_handler' object.
-  global file_handler
-
-  if file_handler:
-    logger.removeHandler(file_handler)
-    file_handler.close()
-    file_handler = None
-    logger.debug('Removed the file handler.')
-
-  else:
-    logger.warning('A file handler has not been set.')
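A short usage sketch of the handler helpers above. It assumes file logging was not already enabled at import time via tuf.settings.ENABLE_FILE_LOGGING, since enable_file_logging() raises if a file handler is already set:

```python
import logging
import tuf.log

# Route WARNING-and-above messages to the console...
tuf.log.add_console_handler()
tuf.log.set_console_log_level(logging.WARNING)

# ...and capture everything in a file (path is illustrative).
tuf.log.enable_file_logging('tuf.log')
logging.getLogger('tuf').warning('reaches both handlers')
tuf.log.disable_file_logging()
```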
diff --git a/tuf/mirrors.py b/tuf/mirrors.py
deleted file mode 100755
index c7662d3eec..0000000000
--- a/tuf/mirrors.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  mirrors.py
-
-<Author>
-  Konstantin Andrianov.
-  Derived from original mirrors.py written by Geremy Condra.
-
-<Started>
-  March 12, 2012.
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  Extract a list of mirror urls corresponding to the file type and the
-  location of the file with respect to the base url.
-"""
-
-import os
-from urllib import parse
-
-import securesystemslib # pylint: disable=unused-import
-from securesystemslib import exceptions as sslib_exceptions
-from securesystemslib import formats as sslib_formats
-from securesystemslib.util import file_in_confined_directories
-
-from tuf import formats
-
-
-# The type of file to be downloaded from a repository.  The
-# 'get_list_of_mirrors' function supports these file types.
-_SUPPORTED_FILE_TYPES = ['meta', 'target']
-
-
-def get_list_of_mirrors(file_type, file_path, mirrors_dict):
-  """
-  <Purpose>
-    Get a list of mirror urls from a mirrors dictionary, provided the type
-    and the path of the file with respect to the base url.
-
-  <Arguments>
-    file_type:
-      Type of data needed for download, must correspond to one of the strings
-      in the list ['meta', 'target'].  'meta' for metadata file type or
-      'target' for target file type.  It should correspond to
-      NAME_SCHEMA format.
-
-    file_path:
-      A relative path to the file that corresponds to RELPATH_SCHEMA format.
-      Ex: 'http://url_prefix/targets_path/file_path'
-
-    mirrors_dict:
-      A mirrors_dict object that corresponds to MIRRORDICT_SCHEMA, where
-      keys are strings and values are MIRROR_SCHEMA.  An example format
-      of MIRROR_SCHEMA:
-
-      {'url_prefix': 'http://localhost:8001',
-       'metadata_path': 'metadata/',
-       'targets_path': 'targets/',
-       'confined_target_dirs': ['targets/snapshot1/', ...],
-       'custom': {...}}
-
-      The 'custom' field is optional.
-
-  <Exceptions>
-    securesystemslib.exceptions.Error, on unsupported 'file_type'.
-
-    securesystemslib.exceptions.FormatError, on bad argument.
-
-  <Returns>
-    List of mirror urls corresponding to the file_type and file_path.  If no
-    match is found, an empty list is returned.
-  """
-
-  # Check if all the arguments have the appropriate format.
-  formats.RELPATH_SCHEMA.check_match(file_path)
-  formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict)
-  sslib_formats.NAME_SCHEMA.check_match(file_type)
-
-  # Verify 'file_type' is supported.
-  if file_type not in _SUPPORTED_FILE_TYPES:
-    raise sslib_exceptions.Error('Invalid file_type argument.'
-        '  Supported file types: ' + repr(_SUPPORTED_FILE_TYPES))
-  path_key = 'metadata_path' if file_type == 'meta' else 'targets_path'
-
-  list_of_mirrors = []
-  for junk, mirror_info in mirrors_dict.items():
-    # Does the mirror serve this file type at all?
-    path = mirror_info.get(path_key)
-    if path is None:
-      continue
-
-    # For targets, ensure directory confinement.
-    if path_key == 'targets_path':
-      full_filepath = os.path.join(path, file_path)
-      confined_target_dirs = mirror_info.get('confined_target_dirs')
-      # confined_target_dirs is optional and can be used to confine the client
-      # to certain paths on a repository mirror when fetching target files.
-      if confined_target_dirs and not file_in_confined_directories(full_filepath,
-          confined_target_dirs):
-        continue
-    # parse.quote(string) replaces special characters in string using the %xx
-    # escape.  This is done to avoid parsing issues of the URL on the server
-    # side.  Do *NOT* pass URLs with Unicode characters without first encoding
-    # the URL as UTF-8.  We need a long-term solution with #61.
-    # http://bugs.python.org/issue1712522
-    file_path = parse.quote(file_path)
-    url = os.path.join(mirror_info['url_prefix'], path, file_path)
-
-    # The above os.path.join() result as well as input file_path may be
-    # invalid on windows (might contain both separator types), see #1077.
-    # Make sure the URL doesn't contain backward slashes on Windows.
-    list_of_mirrors.append(url.replace('\\', '/'))
-
-  return list_of_mirrors
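A brief usage sketch of get_list_of_mirrors() with a made-up single-mirror configuration; for 'meta' requests the 'metadata_path' is joined with the file path, and confinement only applies to targets:

```python
from tuf import mirrors

mirror_config = {'mirror1': {'url_prefix': 'http://localhost:8001',
                             'metadata_path': 'metadata/',
                             'targets_path': 'targets/',
                             'confined_target_dirs': ['']}}

urls = mirrors.get_list_of_mirrors('meta', 'root.json', mirror_config)
# ['http://localhost:8001/metadata/root.json']
```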
-                # so that we can defend against slow retrieval attacks.
-                # Furthermore, we do not wish to download an extremely
-                # large file in one shot. Before beginning the round, sleep
-                # (if set) for a short amount of time so that the CPU is not
-                # hogged in the while loop.
-                if self.sleep_before_round:
-                    time.sleep(self.sleep_before_round)
-
-                # NOTE: This may not handle some servers adding a
-                # Content-Encoding header, which may cause urllib3 to
-                # misbehave:
-                # https://github.com/pypa/pip/blob/404838abcca467648180b358598c597b74d568c9/src/pip/_internal/download.py#L547-L582
-                data = response.raw.read(self.chunk_size)
-
-                # We might have no more data to read, we signal
-                # that the download is complete.
-                if not data:
-                    break
-
+            for data in response.iter_content(self.chunk_size):
                 yield data
-
-        except urllib3.exceptions.ReadTimeoutError as e:
+        except (
+            requests.exceptions.ConnectionError,
+            requests.exceptions.Timeout,
+        ) as e:
             raise exceptions.SlowRetrievalError from e
 
         finally:
@@ -126,41 +110,28 @@ def _chunks(self, response: "requests.Response") -> Iterator[bytes]:
     def _get_session(self, url: str) -> requests.Session:
         """Returns a different customized requests.Session per schema+hostname
         combination.
+
+        Raises:
+            exceptions.DownloadError: When there is a problem parsing the url.
         """
         # Use a different requests.Session per schema+hostname combination, to
         # reuse connections while minimizing subtle security issues.
         parsed_url = parse.urlparse(url)
 
         if not parsed_url.scheme or not parsed_url.hostname:
-            raise exceptions.URLParsingError(
-                "Could not get scheme and hostname from URL: " + url
-            )
+            raise exceptions.DownloadError(f"Failed to parse URL {url}")
 
-        session_index = parsed_url.scheme + "+" + parsed_url.hostname
+        session_index = f"{parsed_url.scheme}+{parsed_url.hostname}"
         session = self._sessions.get(session_index)
 
        if not session:
            session = requests.Session()
            self._sessions[session_index] = session
 
-            # Attach some default headers to every Session.
-            requests_user_agent = session.headers["User-Agent"]
-            # Follows the RFC: https://tools.ietf.org/html/rfc7231#section-5.5.3
-            tuf_user_agent = (
-                "tuf/" + tuf.__version__ + " " + requests_user_agent
-            )
-            session.headers.update(
-                {
-                    # Tell the server not to compress or modify anything.
-                    # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding#Directives
-                    "Accept-Encoding": "identity",
-                    # The TUF user agent.
-                    "User-Agent": tuf_user_agent,
-                }
-            )
+            ua = f"tuf/{tuf.__version__} {session.headers['User-Agent']}"
+            session.headers["User-Agent"] = ua
 
            logger.debug("Made new session %s", session_index)
-
        else:
            logger.debug("Reusing session %s", session_index)
diff --git a/tuf/ngclient/_internal/trusted_metadata_set.py b/tuf/ngclient/_internal/trusted_metadata_set.py
index be1b0b44ed..d08694f091 100644
--- a/tuf/ngclient/_internal/trusted_metadata_set.py
+++ b/tuf/ngclient/_internal/trusted_metadata_set.py
@@ -3,21 +3,28 @@
 
 """Trusted collection of client-side TUF Metadata
 
-TrustedMetadataSet keeps track of the current valid set of metadata for the
+``TrustedMetadataSet`` keeps track of the current valid set of metadata for the
 client, and handles almost every step of the "Detailed client workflow" (
 https://theupdateframework.github.io/specification/latest#detailed-client-workflow)
 in the TUF specification: the remaining steps are related to filesystem and
 network IO, which are not handled here.
 
 Loaded metadata can be accessed via index access with rolename as key
-(trusted_set["root"]) or, in the case of top-level metadata, using the helper
-properties (trusted_set.root).
-
-The rules for top-level metadata are
- * Metadata is updatable only if metadata it depends on is loaded
- * Metadata is not updatable if any metadata depending on it has been loaded
- * Metadata must be updated in order:
-   root -> timestamp -> snapshot -> targets -> (delegated targets)
+(``trusted_set[Root.type]``) or, in the case of top-level metadata, using the
+helper properties (``trusted_set.root``).
+
+The rules that ``TrustedMetadataSet`` follows for top-level metadata are
+ * Metadata must be loaded in order:
+   root -> timestamp -> snapshot -> targets -> (delegated targets).
+ * Metadata can be loaded even if it is expired (or in the snapshot case if the
+   meta info does not match): this is called "intermediate metadata".
+ * Intermediate metadata can _only_ be used to load newer versions of the
+   same metadata: As an example an expired root can be used to load a new root.
+ * Metadata is loadable only if metadata before it in loading order is loaded
+   (and is not intermediate): As an example timestamp can be loaded if a
+   final (non-expired) root has been loaded.
+ * Metadata is not loadable if any metadata after it in loading order has been
+   loaded: As an example new roots cannot be loaded if timestamp is loaded.
 
 Exceptions are raised if metadata fails to load in any way.
 
@@ -28,7 +35,7 @@
 >>> trusted_set = TrustedMetadataSet(f.read())
 >>>
 >>> # update root from remote until no more are available
->>> with download("root", trusted_set.root.signed.version + 1) as f:
+>>> with download(Root.type, trusted_set.root.signed.version + 1) as f:
 >>>     trusted_set.update_root(f.read())
 >>>
 >>> # load local timestamp, then update from remote
@@ -38,7 +45,7 @@
 >>> except (RepositoryError, OSError):
 >>>     pass # failure to load a local file is ok
 >>>
->>> with download("timestamp") as f:
+>>> with download(Timestamp.type) as f:
 >>>     trusted_set.update_timestamp(f.read())
 >>>
 >>> # load local snapshot, then update from remote if needed
@@ -48,52 +55,43 @@
 >>> except (RepositoryError, OSError):
 >>>     # local snapshot is not valid, load from remote
 >>>     # (RepositoryErrors here stop the update)
->>>     with download("snapshot", version) as f:
+>>>     with download(Snapshot.type, version) as f:
 >>>         trusted_set.update_snapshot(f.read())
-
-TODO:
- * exceptions are not final: the idea is that client could just handle
-   a generic RepositoryError that covers every issue that server provided
-   metadata could inflict (other errors would be user errors), but this is not
-   yet the case
- * Progress through Specification update process should be documented
-   (not sure yet how: maybe a spec_logger that logs specification events?)
""" +import datetime import logging from collections import abc -from datetime import datetime from typing import Dict, Iterator, Optional -from tuf import exceptions +from tuf.api import exceptions from tuf.api.metadata import Metadata, Root, Snapshot, Targets, Timestamp -from tuf.api.serialization import DeserializationError logger = logging.getLogger(__name__) class TrustedMetadataSet(abc.Mapping): - """Internal class to keep track of trusted metadata in Updater + """Internal class to keep track of trusted metadata in ``Updater`` - TrustedMetadataSet ensures that the collection of metadata in it is valid + ``TrustedMetadataSet`` ensures that the collection of metadata in it is valid and trusted through the whole client update workflow. It provides easy ways to update the metadata with the caller making decisions on what is updated. """ def __init__(self, root_data: bytes): - """Initialize TrustedMetadataSet by loading trusted root metadata + """Initialize ``TrustedMetadataSet`` by loading trusted root metadata Args: root_data: Trusted root metadata as bytes. Note that this metadata will only be verified by itself: it is the source of trust for - all metadata in the TrustedMetadataSet + all metadata in the ``TrustedMetadataSet`` Raises: RepositoryError: Metadata failed to load or verify. The actual error type and content will contain more details. """ self._trusted_set: Dict[str, Metadata] = {} - self.reference_time = datetime.utcnow() + self.reference_time = datetime.datetime.utcnow() # Load and validate the local root metadata. Valid initial trusted root # metadata is required @@ -101,93 +99,105 @@ def __init__(self, root_data: bytes): self._load_trusted_root(root_data) def __getitem__(self, role: str) -> Metadata: - """Returns current Metadata for 'role'""" + """Returns current ``Metadata`` for ``role``""" return self._trusted_set[role] def __len__(self) -> int: - """Returns number of Metadata objects in TrustedMetadataSet""" + """Returns number of ``Metadata`` objects in ``TrustedMetadataSet``""" return len(self._trusted_set) def __iter__(self) -> Iterator[Metadata]: - """Returns iterator over all Metadata objects in TrustedMetadataSet""" + """Returns iterator over ``Metadata`` objects in ``TrustedMetadataSet``""" return iter(self._trusted_set.values()) # Helper properties for top level metadata @property def root(self) -> Metadata[Root]: - """Current root Metadata""" - return self._trusted_set["root"] + """Current root ``Metadata``""" + return self._trusted_set[Root.type] @property def timestamp(self) -> Optional[Metadata[Timestamp]]: - """Current timestamp Metadata or None""" - return self._trusted_set.get("timestamp") + """Current timestamp ``Metadata`` or ``None``""" + return self._trusted_set.get(Timestamp.type) @property def snapshot(self) -> Optional[Metadata[Snapshot]]: - """Current snapshot Metadata or None""" - return self._trusted_set.get("snapshot") + """Current snapshot ``Metadata`` or ``None``""" + return self._trusted_set.get(Snapshot.type) @property def targets(self) -> Optional[Metadata[Targets]]: - """Current targets Metadata or None""" - return self._trusted_set.get("targets") + """Current targets ``Metadata`` or ``None``""" + return self._trusted_set.get(Targets.type) # Methods for updating metadata - def update_root(self, data: bytes) -> None: - """Verifies and loads 'data' as new root metadata. + def update_root(self, data: bytes) -> Metadata[Root]: + """Verifies and loads ``data`` as new root metadata. 
         Note that an expired intermediate root is considered valid: expiry is
-        only checked for the final root in update_timestamp().
+        only checked for the final root in ``update_timestamp()``.
 
         Args:
-            data: unverified new root metadata as bytes
+            data: Unverified new root metadata as bytes
 
         Raises:
+            RuntimeError: This function is called after updating timestamp.
             RepositoryError: Metadata failed to load or verify. The actual
                 error type and content will contain more details.
+
+        Returns:
+            Deserialized and verified root ``Metadata`` object
         """
         if self.timestamp is not None:
             raise RuntimeError("Cannot update root after timestamp")
 
         logger.debug("Updating root")
 
-        try:
-            new_root = Metadata[Root].from_bytes(data)
-        except DeserializationError as e:
-            raise exceptions.RepositoryError("Failed to load root") from e
+        new_root = Metadata[Root].from_bytes(data)
 
-        if new_root.signed.type != "root":
+        if new_root.signed.type != Root.type:
             raise exceptions.RepositoryError(
                 f"Expected 'root', got '{new_root.signed.type}'"
             )
 
         # Verify that new root is signed by trusted root
-        self.root.verify_delegate("root", new_root)
+        self.root.verify_delegate(Root.type, new_root)
 
         if new_root.signed.version != self.root.signed.version + 1:
-            raise exceptions.ReplayedMetadataError(
-                "root", new_root.signed.version, self.root.signed.version
+            raise exceptions.BadVersionNumberError(
+                f"Expected root version {self.root.signed.version + 1}"
+                f" instead got version {new_root.signed.version}"
             )
 
         # Verify that new root is signed by itself
-        new_root.verify_delegate("root", new_root)
+        new_root.verify_delegate(Root.type, new_root)
 
-        self._trusted_set["root"] = new_root
-        logger.debug("Updated root")
+        self._trusted_set[Root.type] = new_root
+        logger.info("Updated root v%d", new_root.signed.version)
 
-    def update_timestamp(self, data: bytes) -> None:
-        """Verifies and loads 'data' as new timestamp metadata.
+        return new_root
 
-        Note that an expired intermediate timestamp is considered valid so it
-        can be used for rollback checks on newer, final timestamp. Expiry is
-        only checked for the final timestamp in update_snapshot().
+    def update_timestamp(self, data: bytes) -> Metadata[Timestamp]:
+        """Verifies and loads ``data`` as new timestamp metadata.
+
+        Note that an intermediate timestamp is allowed to be expired:
+        ``TrustedMetadataSet`` will throw an ``ExpiredMetadataError`` in
+        this case but the intermediate timestamp will be loaded. This way
+        a newer timestamp can still be loaded (and the intermediate
+        timestamp will be used for rollback protection). Expired timestamp
+        will prevent loading snapshot metadata.
 
         Args:
-            data: unverified new timestamp metadata as bytes
+            data: Unverified new timestamp metadata as bytes
 
         Raises:
-            RepositoryError: Metadata failed to load or verify. The actual
-                error type and content will contain more details.
+            RuntimeError: This function is called after updating snapshot.
+            RepositoryError: Metadata failed to load or verify as final
+                timestamp. The actual error type and content will contain
+                more details.
+
+        Returns:
+            Deserialized and verified timestamp ``Metadata`` object
         """
         if self.snapshot is not None:
             raise RuntimeError("Cannot update timestamp after snapshot")
@@ -198,60 +208,80 @@ def update_timestamp(self, data: bytes) -> None:
 
         # No need to check for 5.3.11 (fast forward attack recovery):
         # timestamp/snapshot can not yet be loaded at this point
 
-        try:
-            new_timestamp = Metadata[Timestamp].from_bytes(data)
-        except DeserializationError as e:
-            raise exceptions.RepositoryError("Failed to load timestamp") from e
+        new_timestamp = Metadata[Timestamp].from_bytes(data)
 
-        if new_timestamp.signed.type != "timestamp":
+        if new_timestamp.signed.type != Timestamp.type:
             raise exceptions.RepositoryError(
                 f"Expected 'timestamp', got '{new_timestamp.signed.type}'"
             )
 
-        self.root.verify_delegate("timestamp", new_timestamp)
+        self.root.verify_delegate(Timestamp.type, new_timestamp)
 
         # If an existing trusted timestamp is updated,
         # check for a rollback attack
         if self.timestamp is not None:
             # Prevent rolling back timestamp version
             if new_timestamp.signed.version < self.timestamp.signed.version:
-                raise exceptions.ReplayedMetadataError(
-                    "timestamp",
-                    new_timestamp.signed.version,
-                    self.timestamp.signed.version,
+                raise exceptions.BadVersionNumberError(
+                    f"New timestamp version {new_timestamp.signed.version} must"
+                    f" be >= {self.timestamp.signed.version}"
                 )
             # Prevent rolling back snapshot version
-            if (
-                new_timestamp.signed.meta["snapshot.json"].version
-                < self.timestamp.signed.meta["snapshot.json"].version
-            ):
-                raise exceptions.ReplayedMetadataError(
-                    "snapshot",
-                    new_timestamp.signed.meta["snapshot.json"].version,
-                    self.timestamp.signed.meta["snapshot.json"].version,
+            snapshot_meta = self.timestamp.signed.snapshot_meta
+            new_snapshot_meta = new_timestamp.signed.snapshot_meta
+            if new_snapshot_meta.version < snapshot_meta.version:
+                raise exceptions.BadVersionNumberError(
+                    f"New snapshot version must be >= {snapshot_meta.version}"
+                    f", got version {new_snapshot_meta.version}"
                 )
 
         # expiry not checked to allow old timestamp to be used for rollback
         # protection of new timestamp: expiry is checked in update_snapshot()
 
-        self._trusted_set["timestamp"] = new_timestamp
-        logger.debug("Updated timestamp")
+        self._trusted_set[Timestamp.type] = new_timestamp
+        logger.info("Updated timestamp v%d", new_timestamp.signed.version)
+
+        # timestamp is loaded: raise if it is not valid _final_ timestamp
+        self._check_final_timestamp()
+
+        return new_timestamp
+
+    def _check_final_timestamp(self) -> None:
+        """Raise if timestamp is expired"""
+
+        assert self.timestamp is not None  # nosec
+        if self.timestamp.signed.is_expired(self.reference_time):
+            raise exceptions.ExpiredMetadataError("timestamp.json is expired")
 
-    def update_snapshot(self, data: bytes) -> None:
-        """Verifies and loads 'data' as new snapshot metadata.
+    def update_snapshot(
+        self, data: bytes, trusted: Optional[bool] = False
+    ) -> Metadata[Snapshot]:
+        """Verifies and loads ``data`` as new snapshot metadata.
 
-        Note that intermediate snapshot is considered valid even if it is
-        expired or the version does not match the timestamp meta version. This
-        means the intermediate snapshot can be used for rollback checks on
-        newer, final snapshot. Expiry and meta version are only checked for
-        the final snapshot in update_delegated_targets().
+        Note that an intermediate snapshot is allowed to be expired and version
+        is allowed to not match timestamp meta version: ``TrustedMetadataSet``
+        will throw an ``ExpiredMetadataError``/``BadVersionNumberError`` in
+        these cases but the intermediate snapshot will be loaded. This way a
+        newer snapshot can still be loaded (and the intermediate snapshot will
+        be used for rollback protection). Expired snapshot or snapshot that
+        does not match timestamp meta version will prevent loading targets.
 
         Args:
-            data: unverified new snapshot metadata as bytes
+            data: Unverified new snapshot metadata as bytes
+            trusted: ``True`` if data has at some point been verified by
+                ``TrustedMetadataSet`` as a valid snapshot. Purpose of trusted
+                is to allow loading of locally stored snapshot as intermediate
+                snapshot even if hashes in current timestamp meta no longer
+                match data. Default is False.
 
         Raises:
-            RepositoryError: Metadata failed to load or verify. The actual
-                error type and content will contain more details.
+            RuntimeError: This function is called before updating timestamp
+                or after updating targets.
+            RepositoryError: Data failed to load or verify as final snapshot.
+                The actual error type and content will contain more details.
+
+        Returns:
+            Deserialized and verified snapshot ``Metadata`` object
         """
 
         if self.timestamp is None:
@@ -260,32 +290,24 @@ def update_snapshot(self, data: bytes) -> None:
             raise RuntimeError("Cannot update snapshot after targets")
         logger.debug("Updating snapshot")
 
-        # Local timestamp was allowed to be expired to allow for rollback
-        # checks on new timestamp but now timestamp must not be expired
-        if self.timestamp.signed.is_expired(self.reference_time):
-            raise exceptions.ExpiredMetadataError("timestamp.json is expired")
+        # Snapshot cannot be loaded if final timestamp is expired
+        self._check_final_timestamp()
 
-        meta = self.timestamp.signed.meta["snapshot.json"]
+        snapshot_meta = self.timestamp.signed.snapshot_meta
 
-        # Verify against the hashes in timestamp, if any
-        try:
-            meta.verify_length_and_hashes(data)
-        except exceptions.LengthOrHashMismatchError as e:
-            raise exceptions.RepositoryError(
-                "Snapshot length or hashes do not match"
-            ) from e
+        # Verify non-trusted data against the hashes in timestamp, if any.
+        # Trusted snapshot data has already been verified once.
+        if not trusted:
+            snapshot_meta.verify_length_and_hashes(data)
 
-        try:
-            new_snapshot = Metadata[Snapshot].from_bytes(data)
-        except DeserializationError as e:
-            raise exceptions.RepositoryError("Failed to load snapshot") from e
+        new_snapshot = Metadata[Snapshot].from_bytes(data)
 
-        if new_snapshot.signed.type != "snapshot":
+        if new_snapshot.signed.type != Snapshot.type:
             raise exceptions.RepositoryError(
                 f"Expected 'snapshot', got '{new_snapshot.signed.type}'"
             )
 
-        self.root.verify_delegate("snapshot", new_snapshot)
+        self.root.verify_delegate(Snapshot.type, new_snapshot)
 
         # version not checked against meta version to allow old snapshot to be
         # used in rollback protection: it is checked when targets is updated
@@ -311,59 +333,66 @@
         # expiry not checked to allow old snapshot to be used for rollback
         # protection of new snapshot: it is checked when targets is updated
 
-        self._trusted_set["snapshot"] = new_snapshot
-        logger.debug("Updated snapshot")
+        self._trusted_set[Snapshot.type] = new_snapshot
+        logger.info("Updated snapshot v%d", new_snapshot.signed.version)
+
+        # snapshot is loaded, but we raise if it's not valid _final_ snapshot
+        self._check_final_snapshot()
+
+        return new_snapshot
 
     def _check_final_snapshot(self) -> None:
-        """Check snapshot expiry and version before targets is updated"""
+        """Raise if snapshot is expired or meta version does not match"""
 
         assert self.snapshot is not None  # nosec
         assert self.timestamp is not None  # nosec
         if self.snapshot.signed.is_expired(self.reference_time):
             raise exceptions.ExpiredMetadataError("snapshot.json is expired")
-
-        if (
-            self.snapshot.signed.version
-            != self.timestamp.signed.meta["snapshot.json"].version
-        ):
+        snapshot_meta = self.timestamp.signed.snapshot_meta
+        if self.snapshot.signed.version != snapshot_meta.version:
             raise exceptions.BadVersionNumberError(
-                f"Expected snapshot version "
-                f"{self.timestamp.signed.meta['snapshot.json'].version}, "
+                f"Expected snapshot version {snapshot_meta.version}, "
                 f"got {self.snapshot.signed.version}"
             )
 
-    def update_targets(self, data: bytes) -> None:
-        """Verifies and loads 'data' as new top-level targets metadata.
+    def update_targets(self, data: bytes) -> Metadata[Targets]:
+        """Verifies and loads ``data`` as new top-level targets metadata.
 
         Args:
-            data: unverified new targets metadata as bytes
+            data: Unverified new targets metadata as bytes
 
         Raises:
             RepositoryError: Metadata failed to load or verify. The actual
                 error type and content will contain more details.
+
+        Returns:
+            Deserialized and verified targets ``Metadata`` object
         """
-        self.update_delegated_targets(data, "targets", "root")
+        return self.update_delegated_targets(data, Targets.type, Root.type)
 
     def update_delegated_targets(
         self, data: bytes, role_name: str, delegator_name: str
-    ) -> None:
-        """Verifies and loads 'data' as new metadata for target 'role_name'.
+    ) -> Metadata[Targets]:
+        """Verifies and loads ``data`` as new metadata for target ``role_name``.
 
         Args:
-            data: unverified new metadata as bytes
-            role_name: The role name of the new metadata
-            delegator_name: The name of the role delegating to the new metadata
+            data: Unverified new metadata as bytes
+            role_name: Role name of the new metadata
+            delegator_name: Name of the role delegating to the new metadata
 
         Raises:
+            RuntimeError: This function is called before updating snapshot.
             RepositoryError: Metadata failed to load or verify. The actual
                 error type and content will contain more details.
+
+        Returns:
+            Deserialized and verified targets ``Metadata`` object
         """
         if self.snapshot is None:
             raise RuntimeError("Cannot load targets before snapshot")
 
-        # Local snapshot was allowed to be expired and to not match meta
-        # version to allow for rollback checks on new snapshot but now
-        # snapshot must not be expired and must match meta version
+        # Targets cannot be loaded if final snapshot is expired or its version
+        # does not match meta version in timestamp
         self._check_final_snapshot()
 
         delegator: Optional[Metadata] = self.get(delegator_name)
@@ -379,54 +408,45 @@ def update_delegated_targets(
                 f"Snapshot does not contain information for '{role_name}'"
             )
 
-        try:
-            meta.verify_length_and_hashes(data)
-        except exceptions.LengthOrHashMismatchError as e:
-            raise exceptions.RepositoryError(
-                f"{role_name} length or hashes do not match"
-            ) from e
+        meta.verify_length_and_hashes(data)
 
-        try:
-            new_delegate = Metadata[Targets].from_bytes(data)
-        except DeserializationError as e:
-            raise exceptions.RepositoryError("Failed to load snapshot") from e
+        new_delegate = Metadata[Targets].from_bytes(data)
 
-        if new_delegate.signed.type != "targets":
+        if new_delegate.signed.type != Targets.type:
             raise exceptions.RepositoryError(
                 f"Expected 'targets', got '{new_delegate.signed.type}'"
             )
 
         delegator.verify_delegate(role_name, new_delegate)
 
-        if new_delegate.signed.version != meta.version:
+        version = new_delegate.signed.version
+        if version != meta.version:
             raise exceptions.BadVersionNumberError(
-                f"Expected {role_name} version "
-                f"{meta.version}, got {new_delegate.signed.version}."
+                f"Expected {role_name} v{meta.version}, got v{version}."
             )
 
         if new_delegate.signed.is_expired(self.reference_time):
             raise exceptions.ExpiredMetadataError(f"New {role_name} is expired")
 
         self._trusted_set[role_name] = new_delegate
-        logger.debug("Updated %s delegated by %s", role_name, delegator_name)
+        logger.info("Updated %s v%d", role_name, version)
+
+        return new_delegate
 
     def _load_trusted_root(self, data: bytes) -> None:
-        """Verifies and loads 'data' as trusted root metadata.
+        """Verifies and loads ``data`` as trusted root metadata.
 
         Note that an expired initial root is considered valid: expiry is
-        only checked for the final root in update_timestamp().
+        only checked for the final root in ``update_timestamp()``.
         """
-        try:
-            new_root = Metadata[Root].from_bytes(data)
-        except DeserializationError as e:
-            raise exceptions.RepositoryError("Failed to load root") from e
+        new_root = Metadata[Root].from_bytes(data)
 
-        if new_root.signed.type != "root":
+        if new_root.signed.type != Root.type:
             raise exceptions.RepositoryError(
                 f"Expected 'root', got '{new_root.signed.type}'"
             )
 
-        new_root.verify_delegate("root", new_root)
+        new_root.verify_delegate(Root.type, new_root)
 
-        self._trusted_set["root"] = new_root
-        logger.debug("Loaded trusted root")
+        self._trusted_set[Root.type] = new_root
+        logger.info("Loaded trusted root v%d", new_root.signed.version)
diff --git a/tuf/ngclient/config.py b/tuf/ngclient/config.py
index 177594bf49..e6213d0bed 100644
--- a/tuf/ngclient/config.py
+++ b/tuf/ngclient/config.py
@@ -1,7 +1,7 @@
 # Copyright 2021, New York University and the TUF contributors
 # SPDX-License-Identifier: MIT OR Apache-2.0
 
-"""Configuration options for Updater class
+"""Configuration options for ``Updater`` class
 """
 
 from dataclasses import dataclass
@@ -9,20 +9,20 @@
 
 @dataclass
 class UpdaterConfig:
-    """Used to store Updater configuration.
+    """Used to store ``Updater`` configuration.
 
-    Arguments:
-        max_root_rotations: The maximum number of root rotations.
-        max_delegations: The maximum number of delegations.
-        root_max_length: The maxmimum length of a root metadata file.
-        timestamp_max_length: The maximum length of a timestamp metadata file.
-        snapshot_max_length: The maximum length of a snapshot metadata file.
-        targets_max_length: The maximum length of a targets metadata file.
-        prefix_targets_with_hash: When consistent snapshots are used
-            (see https://theupdateframework.github.io/specification/latest/#consistent-snapshots), #pylint: disable=line-too-long
-            target download URLs are formed by prefixing the filename with a
-            hash digest of file content by default. This can be overridden by
-            setting prefix_targets_with_hash to False.
+    Args:
+        max_root_rotations: Maximum number of root rotations.
+        max_delegations: Maximum number of delegations.
+        root_max_length: Maximum length of a root metadata file.
+        timestamp_max_length: Maximum length of a timestamp metadata file.
+        snapshot_max_length: Maximum length of a snapshot metadata file.
+        targets_max_length: Maximum length of a targets metadata file.
+        prefix_targets_with_hash: When `consistent snapshots
+            <https://theupdateframework.github.io/specification/latest/#consistent-snapshots>`_
+            are used, target download URLs are formed by prefixing the filename
+            with a hash digest of file content by default. This can be
+            overridden by setting ``prefix_targets_with_hash`` to ``False``.
     """
diff --git a/tuf/ngclient/fetcher.py b/tuf/ngclient/fetcher.py
index 6e8f2df27d..3960477e17 100644
--- a/tuf/ngclient/fetcher.py
+++ b/tuf/ngclient/fetcher.py
@@ -10,9 +10,8 @@
 import tempfile
 from contextlib import contextmanager
 from typing import IO, Iterator
-from urllib import parse
 
-from tuf import exceptions
+from tuf.api import exceptions
 
 logger = logging.getLogger(__name__)
 
@@ -29,45 +28,70 @@ class FetcherInterface:
 
     __metaclass__ = abc.ABCMeta
 
     @abc.abstractmethod
-    def fetch(self, url: str) -> Iterator[bytes]:
-        """Fetches the contents of HTTP/HTTPS url from a remote server.
+    def _fetch(self, url: str) -> Iterator[bytes]:
+        """Fetches the contents of HTTP/HTTPS ``url`` from a remote server.
+
+        Implementations must raise ``DownloadHTTPError`` if they receive
+        an HTTP error code.
+
+        Implementations may raise any errors, but the ones that are not
+        ``DownloadErrors`` will be wrapped in a ``DownloadError`` by
+        ``fetch()``.
 
-        Arguments:
-            url: A URL string that represents a file location.
+        Args:
+            url: URL string that represents a file location.
 
         Raises:
-            tuf.exceptions.SlowRetrievalError: A timeout occurs while receiving
-                data.
-            tuf.exceptions.FetcherHTTPError: An HTTP error code is received.
+            exceptions.DownloadHTTPError: HTTP error code was received.
 
         Returns:
-            A bytes iterator
+            Bytes iterator
         """
         raise NotImplementedError  # pragma: no cover
 
+    def fetch(self, url: str) -> Iterator[bytes]:
+        """Fetches the contents of HTTP/HTTPS ``url`` from a remote server.
+
+        Args:
+            url: URL string that represents a file location.
+
+        Raises:
+            exceptions.DownloadError: An error occurred during download.
+            exceptions.DownloadHTTPError: An HTTP error code was received.
+
+        Returns:
+            Bytes iterator
+        """
+        # Ensure that fetch() only raises DownloadErrors, regardless of the
+        # fetcher implementation
+        try:
+            return self._fetch(url)
+        except exceptions.DownloadError as e:
+            raise e
+        except Exception as e:
+            raise exceptions.DownloadError(f"Failed to download {url}") from e
+
     @contextmanager
     def download_file(self, url: str, max_length: int) -> Iterator[IO]:
-        """Opens a connection to 'url' and downloads the content
-        up to 'max_length'.
+        """Download file from given ``url``.
+
+        It is recommended to use ``download_file()`` within a ``with``
+        block to guarantee that allocated file resources will always
+        be released even if download fails.
 
         Args:
-            url: a URL string that represents the location of the file.
-            max_length: an integer value representing the length of
-                the file or an upper bound.
+            url: URL string that represents the location of the file.
+            max_length: Upper bound of file size in bytes.
 
         Raises:
-            DownloadLengthMismatchError: downloaded bytes exceed 'max_length'.
+            exceptions.DownloadError: An error occurred during download.
+            exceptions.DownloadLengthMismatchError: Downloaded bytes exceed
+                ``max_length``.
+            exceptions.DownloadHTTPError: An HTTP error code was received.
 
         Yields:
-            A TemporaryFile object that points to the contents of 'url'.
+            ``TemporaryFile`` object that points to the contents of ``url``.
         """
-        # 'url.replace('\\', '/')' is needed for compatibility with
-        # Windows-based systems, because they might use back-slashes in place
-        # of forward-slashes. This converts it to the common format.
-        # unquote() replaces %xx escapes in a url with their single-character
-        # equivalent. A back-slash may beencoded as %5c in the url, which
-        # should also be replaced with a forward slash.
-        url = parse.unquote(url).replace("\\", "/")
         logger.debug("Downloading: %s", url)
 
         number_of_bytes_received = 0
@@ -78,7 +102,8 @@ def download_file(self, url: str, max_length: int) -> Iterator[IO]:
                 number_of_bytes_received += len(chunk)
                 if number_of_bytes_received > max_length:
                     raise exceptions.DownloadLengthMismatchError(
-                        max_length, number_of_bytes_received
+                        f"Downloaded {number_of_bytes_received} bytes exceeding"
+                        f" the maximum allowed length of {max_length}"
                     )
 
                 temp_file.write(chunk)
@@ -93,9 +118,22 @@ def download_file(self, url: str, max_length: int) -> Iterator[IO]:
             yield temp_file
 
     def download_bytes(self, url: str, max_length: int) -> bytes:
-        """Download bytes from given url
+        """Download bytes from given ``url``.
+
+        Returns the downloaded bytes, otherwise like ``download_file()``.
 
-        Returns the downloaded bytes, otherwise like download_file()
+        Args:
+            url: URL string that represents the location of the file.
+            max_length: Upper bound of data size in bytes.
+
+        Raises:
+            exceptions.DownloadError: An error occurred during download.
+            exceptions.DownloadLengthMismatchError: Downloaded bytes exceed
+                ``max_length``.
+            exceptions.DownloadHTTPError: An HTTP error code was received.
+
+        Returns:
+            Content of the file in bytes.
         """
         with self.download_file(url, max_length) as dl_file:
             return dl_file.read()
diff --git a/tuf/ngclient/updater.py b/tuf/ngclient/updater.py
index a3c7189d75..238fe30310 100644
--- a/tuf/ngclient/updater.py
+++ b/tuf/ngclient/updater.py
@@ -3,72 +3,52 @@
 
 """Client update workflow implementation
 
-The Updater class provides an implementation of the
+The ``Updater`` class provides an implementation of the
 `TUF client workflow
 <https://theupdateframework.github.io/specification/latest/#detailed-client-workflow>`_.
-Updater provides an API to query available targets and to download them in a
+``Updater`` provides an API to query available targets and to download them in a
 secure manner: All downloaded files are verified by signed metadata.
 
-High-level description of Updater functionality:
-  * Initializing an :class:`~tuf.ngclient.updater.Updater` loads and validates
-    the trusted local root metadata: This root metadata is used as the source
-    of trust for all other metadata.
-  * Calling :func:`~tuf.ngclient.updater.Updater.refresh()` will update root
-    metadata and load all other top-level metadata as described in the
-    specification, using both locally cached metadata and metadata downloaded
-    from the remote repository.
-  * When metadata is up-to-date, targets can be dowloaded. The repository
-    snapshot is consistent so multiple targets can be downloaded without
-    fear of repository content changing. For each target:
-
-    * :func:`~tuf.ngclient.updater.Updater.get_one_valid_targetinfo()` is
-      used to find information about a specific target. This will load new
-      targets metadata as needed (from local cache or remote repository).
-    * :func:`~tuf.ngclient.updater.Updater.updated_targets()` can be used to
-      check if target files are already locally cached.
-    * :func:`~tuf.ngclient.updater.Updater.download_target()` downloads a
-      target file and ensures it is verified correct by the metadata.
-
-Below is a simple example of using the Updater to download and verify
-"file.txt" from a remote repository. The required environment for this example
-is:
-
-    * A webserver running on http://localhost:8000, serving TUF repository
-      metadata at "/tuf-repo/" and targets at "/targets/"
-    * Local metadata directory "~/tufclient/metadata/" is writable and contains
-      a root metadata version for the remote repository
-    * Download directory "~/tufclient/downloads/" is writable
-
-Example::
-
-    from tuf.ngclient import Updater
-
-    # Load trusted local root metadata from client metadata cache. Define the
-    # remote repository metadata URL prefix and target URL prefix.
-    updater = Updater(
-        repository_dir="~/tufclient/metadata/",
-        metadata_base_url="http://localhost:8000/tuf-repo/",
-        target_base_url="http://localhost:8000/targets/",
-    )
-
-    # Update top-level metadata from remote
-    updater.refresh()
-
-    # Securely download a target:
-    # Update target metadata, then download and verify target
-    targetinfo = updater.get_one_valid_targetinfo("file.txt")
-    updater.download_target(targetinfo, "~/tufclient/downloads/")
+High-level description of ``Updater`` functionality:
+  * Initializing an ``Updater`` loads and validates the trusted local root
+    metadata: This root metadata is used as the source of trust for all other
+    metadata.
+  * ``refresh()`` can optionally be called to update and load all top-level
+    metadata as described in the specification, using both locally cached
+    metadata and metadata downloaded from the remote repository. If refresh is
+    not done explicitly, it will happen automatically during the first target
+    info lookup.
+  * ``Updater`` can be used to download targets. For each target:
+
+    * ``Updater.get_targetinfo()`` is first used to find information about a
+      specific target. This will load new targets metadata as needed (from
+      local cache or remote repository).
+    * ``Updater.find_cached_target()`` can optionally be used to check if a
+      target file is already locally cached.
+    * ``Updater.download_target()`` downloads a target file and ensures it is
+      verified as correct by the metadata (see the sketch after this list).
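To make the new client flow concrete, here is a minimal sketch of a download using the API introduced above. It reuses the localhost URLs and ~/tufclient directories from the removed docstring example; these are illustrative assumptions, not library defaults:

    from tuf.ngclient import Updater

    # Load trusted local root metadata and point the client at the
    # (hypothetical) remote repository
    updater = Updater(
        metadata_dir="~/tufclient/metadata/",
        metadata_base_url="http://localhost:8000/tuf-repo/",
        target_dir="~/tufclient/downloads/",
        target_base_url="http://localhost:8000/targets/",
    )
    updater.refresh()  # optional: the first get_targetinfo() call refreshes too

    targetinfo = updater.get_targetinfo("file.txt")
    if targetinfo is None:
        print("file.txt is not a target in this repository")
    else:
        # Reuse a valid cached copy if one exists, otherwise download and verify
        path = updater.find_cached_target(targetinfo)
        if path is None:
            path = updater.download_target(targetinfo)
        print(f"file.txt available at {path}")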
+
+A simple example of using the Updater to implement a Python TUF client that
+downloads target files is available in `examples/client_example
+<https://github.com/theupdateframework/python-tuf/tree/develop/examples/client_example>`_.
 """
 
 import logging
 import os
-from typing import List, Optional, Set, Tuple
+import shutil
+import tempfile
+from typing import Optional, Set
 from urllib import parse
 
-from securesystemslib import util as sslib_util
-
-from tuf import exceptions
-from tuf.api.metadata import TargetFile, Targets
+from tuf.api import exceptions
+from tuf.api.metadata import (
+    Metadata,
+    Root,
+    Snapshot,
+    TargetFile,
+    Targets,
+    Timestamp,
+)
 from tuf.ngclient._internal import requests_fetcher, trusted_metadata_set
 from tuf.ngclient.config import UpdaterConfig
 from tuf.ngclient.fetcher import FetcherInterface
@@ -77,40 +57,44 @@
 
 class Updater:
-    """Implementation of the TUF client workflow."""
+    """Creates a new ``Updater`` instance and loads trusted root metadata.
+
+    Args:
+        metadata_dir: Local metadata directory. Directory must be
+            writable and it must contain a trusted root.json file
+        metadata_base_url: Base URL for all remote metadata downloads
+        target_dir: Local targets directory. Directory must be writable. It
+            will be used as the default target download directory by
+            ``find_cached_target()`` and ``download_target()``
+        target_base_url: ``Optional``; Default base URL for all remote target
+            downloads. Can be individually set in ``download_target()``
+        fetcher: ``Optional``; ``FetcherInterface`` implementation used to
+            download both metadata and targets. Default is ``RequestsFetcher``
+
+    Raises:
+        OSError: Local root.json cannot be read
+        RepositoryError: Local root.json is invalid
+    """
 
     def __init__(
         self,
-        repository_dir: str,
+        metadata_dir: str,
         metadata_base_url: str,
+        target_dir: Optional[str] = None,
         target_base_url: Optional[str] = None,
         fetcher: Optional[FetcherInterface] = None,
        config: Optional[UpdaterConfig] = None,
    ):
-        """Creates a new Updater instance and loads trusted root metadata.
-
-        Args:
-            repository_dir: Local metadata directory. Directory must be
-                writable and it must contain a trusted root.json file.
-            metadata_base_url: Base URL for all remote metadata downloads
-            target_base_url: Optional; Default base URL for all remote target
-                downloads. Can be individually set in download_target()
-            fetcher: Optional; FetcherInterface implementation used to download
-                both metadata and targets. Default is RequestsFetcher
-
-        Raises:
-            OSError: Local root.json cannot be read
-            RepositoryError: Local root.json is invalid
-        """
-        self._dir = repository_dir
+        self._dir = metadata_dir
         self._metadata_base_url = _ensure_trailing_slash(metadata_base_url)
+        self.target_dir = target_dir
         if target_base_url is None:
             self._target_base_url = None
         else:
             self._target_base_url = _ensure_trailing_slash(target_base_url)
 
         # Read trusted local root metadata
-        data = self._load_local_metadata("root")
+        data = self._load_local_metadata(Root.type)
         self._trusted_set = trusted_metadata_set.TrustedMetadataSet(data)
         self._fetcher = fetcher or requests_fetcher.RequestsFetcher()
         self.config = config or UpdaterConfig()
@@ -122,121 +106,126 @@ def refresh(self) -> None:
         specified order (root -> timestamp -> snapshot -> targets)
         implementing all the checks required in the TUF client workflow.
 
-        The metadata for delegated roles are not refreshed by this method as
-        that happens on demand during get_one_valid_targetinfo().
+        A ``refresh()`` can be done only once during the lifetime of an Updater.
+        If ``refresh()`` has not been explicitly called before the first
+        ``get_targetinfo()`` call, it will be done implicitly at that time.
 
-        The refresh() method should be called by the client before any other
-        method calls.
+        The metadata for delegated roles is not updated by ``refresh()``:
+        that happens on demand during ``get_targetinfo()``. However, if the
+        repository uses `consistent_snapshot
+        <https://theupdateframework.github.io/specification/latest/#consistent-snapshots>`_,
+        then all metadata downloaded by the Updater will use the same
+        consistent repository state.
 
         Raises:
             OSError: New metadata could not be written to disk
             RepositoryError: Metadata failed to verify in some way
-            TODO: download-related errors
+            DownloadError: Download of a metadata file failed in some way
         """
 
         self._load_root()
         self._load_timestamp()
         self._load_snapshot()
-        self._load_targets("targets", "root")
+        self._load_targets(Targets.type, Root.type)
 
-    def get_one_valid_targetinfo(
-        self, target_path: str
-    ) -> Optional[TargetFile]:
-        """Returns TargetFile instance with information for 'target_path'.
+    def _generate_target_file_path(self, targetinfo: TargetFile) -> str:
+        if self.target_dir is None:
+            raise ValueError("target_dir must be set if filepath is not given")
+
+        # Use URL encoded target path as filename
+        filename = parse.quote(targetinfo.path, "")
+        return os.path.join(self.target_dir, filename)
+
+    def get_targetinfo(self, target_path: str) -> Optional[TargetFile]:
+        """Returns ``TargetFile`` instance with information for ``target_path``.
 
         The return value can be used as an argument to
-        :func:`download_target()` and :func:`updated_targets()`.
+        ``download_target()`` and ``find_cached_target()``.
 
-        :func:`refresh()` must be called before calling
-        `get_one_valid_targetinfo()`. Subsequent calls to
-        `get_one_valid_targetinfo()` will use the same consistent repository
-        state: Changes that happen in the repository between calling
-        :func:`refresh()` and `get_one_valid_targetinfo()` will not be
-        seen by the updater.
+        If ``refresh()`` has not been called before calling
+        ``get_targetinfo()``, the refresh will be done implicitly.
 
         As a side-effect this method downloads all the additional (delegated
         targets) metadata it needs to return the target information.
 
         Args:
-            target_path: A target identifier that is a path-relative-URL string
-                (https://url.spec.whatwg.org/#path-relative-url-string).
-                Typically this is also the unix file path of the eventually
-                downloaded file.
+            target_path: `path-relative-URL string
+                <https://url.spec.whatwg.org/#path-relative-url-string>`_
+                that uniquely identifies the target within the repository.
 
         Raises:
             OSError: New metadata could not be written to disk
             RepositoryError: Metadata failed to verify in some way
-            TODO: download-related errors
+            DownloadError: Download of a metadata file failed in some way
 
         Returns:
-            A TargetFile instance or None.
+            ``TargetFile`` instance or ``None``.
         """
+
+        if self._trusted_set.targets is None:
+            self.refresh()
         return self._preorder_depth_first_walk(target_path)
 
-    @staticmethod
-    def updated_targets(
-        targets: List[TargetFile], destination_directory: str
-    ) -> List[TargetFile]:
-        """Checks whether local cached target files are up to date
+    def find_cached_target(
+        self,
+        targetinfo: TargetFile,
+        filepath: Optional[str] = None,
+    ) -> Optional[str]:
+        """Checks whether a local file is an up to date target
 
-        After retrieving the target information for the targets that should be
-        updated, updated_targets() can be called to determine which targets
-        have changed compared to locally stored versions.
+        Args:
+            targetinfo: ``TargetFile`` from ``get_targetinfo()``.
+            filepath: Local path to file. If ``None``, a file path is
+                generated based on ``target_dir`` constructor argument.
+
+        Raises:
+            ValueError: Incorrect arguments
 
-        All the targets that are not up-to-date in destination_directory are
-        returned in a list. The list items can be downloaded with
-        'download_target()'.
+        Returns:
+            Local file path if the file is an up to date target file.
+            None if file is not found or it is not up to date.
         """
-        # Keep track of the target objects and filepaths of updated targets.
-        # Return 'updated_targets' and use 'updated_targetpaths' to avoid
-        # duplicates.
-        updated_targets = []
-        updated_targetpaths = []
-
-        for target in targets:
-            # Prepend 'destination_directory' to the target's relative filepath
-            # (as stored in metadata.) Verify the hash of 'target_filepath'
-            # against each hash listed for its fileinfo. Note: join() discards
-            # 'destination_directory' if 'filepath' contains a leading path
-            # separator (i.e., is treated as an absolute path).
-            target_filepath = os.path.join(destination_directory, target.path)
-
-            if target_filepath in updated_targetpaths:
-                continue
-            try:
-                with open(target_filepath, "rb") as target_file:
-                    target.verify_length_and_hashes(target_file)
-            # If the file does not exist locally or length and hashes
-            # do not match, append to updated targets.
-            except (OSError, exceptions.LengthOrHashMismatchError):
-                updated_targets.append(target)
-                updated_targetpaths.append(target_filepath)
+        if filepath is None:
+            filepath = self._generate_target_file_path(targetinfo)
 
-        return updated_targets
+        try:
+            with open(filepath, "rb") as target_file:
+                targetinfo.verify_length_and_hashes(target_file)
+            return filepath
+        except (OSError, exceptions.LengthOrHashMismatchError):
+            return None
 
     def download_target(
         self,
         targetinfo: TargetFile,
-        destination_directory: str,
+        filepath: Optional[str] = None,
         target_base_url: Optional[str] = None,
-    ) -> None:
-        """Downloads the target file specified by 'targetinfo'.
+    ) -> str:
+        """Downloads the target file specified by ``targetinfo``.
 
         Args:
-            targetinfo: TargetFile instance received from
-                get_one_valid_targetinfo() or updated_targets().
-            destination_directory: existing local directory to download into.
-                Note that new directories may be created inside
-                destination_directory as required.
-            target_base_url: Optional; Base URL used to form the final target
-                download URL. Default is the value provided in Updater()
+            targetinfo: ``TargetFile`` from ``get_targetinfo()``.
+            filepath: Local path to download into. If ``None``, the file is
+                downloaded into directory defined by ``target_dir`` constructor
+                argument using a generated filename. If file already exists,
+                it is overwritten.
+            target_base_url: Base URL used to form the final target
+                download URL. Default is the value provided in ``Updater()``
 
         Raises:
-            TODO: download-related errors
-            TODO: file write errors
+            ValueError: Invalid arguments
+            DownloadError: Download of the target file failed in some way
+            RepositoryError: Downloaded target failed to be verified in some way
+            OSError: Failed to write target to file
+
+        Returns:
+            Local path to downloaded file
         """
+        if filepath is None:
+            filepath = self._generate_target_file_path(targetinfo)
+
         if target_base_url is None:
             if self._target_base_url is None:
                 raise ValueError(
@@ -252,43 +241,58 @@ def download_target(
         consistent_snapshot = self._trusted_set.root.signed.consistent_snapshot
         if consistent_snapshot and self.config.prefix_targets_with_hash:
             hashes = list(targetinfo.hashes.values())
-            target_filepath = f"{hashes[0]}.{target_filepath}"
-        full_url = parse.urljoin(target_base_url, target_filepath)
+            dirname, sep, basename = target_filepath.rpartition("/")
+            target_filepath = f"{dirname}{sep}{hashes[0]}.{basename}"
+        full_url = f"{target_base_url}{target_filepath}"
 
         with self._fetcher.download_file(
             full_url, targetinfo.length
         ) as target_file:
-            try:
-                targetinfo.verify_length_and_hashes(target_file)
-            except exceptions.LengthOrHashMismatchError as e:
-                raise exceptions.RepositoryError(
-                    f"{target_filepath} length or hashes do not match"
-                ) from e
-
-            # Store the target file name without the HASH prefix.
-            local_filepath = os.path.join(
-                destination_directory, targetinfo.path
-            )
-            sslib_util.persist_temp_file(target_file, local_filepath)
+            targetinfo.verify_length_and_hashes(target_file)
+
+            target_file.seek(0)
+            with open(filepath, "wb") as destination_file:
+                shutil.copyfileobj(target_file, destination_file)
+
+        logger.info("Downloaded target %s", targetinfo.path)
+        return filepath
 
     def _download_metadata(
         self, rolename: str, length: int, version: Optional[int] = None
     ) -> bytes:
         """Download a metadata file and return it as bytes"""
+        encoded_name = parse.quote(rolename, "")
         if version is None:
-            filename = f"{rolename}.json"
+            url = f"{self._metadata_base_url}{encoded_name}.json"
         else:
-            filename = f"{version}.{rolename}.json"
-        url = parse.urljoin(self._metadata_base_url, filename)
+            url = f"{self._metadata_base_url}{version}.{encoded_name}.json"
         return self._fetcher.download_bytes(url, length)
 
     def _load_local_metadata(self, rolename: str) -> bytes:
-        with open(os.path.join(self._dir, f"{rolename}.json"), "rb") as f:
+        encoded_name = parse.quote(rolename, "")
+        with open(os.path.join(self._dir, f"{encoded_name}.json"), "rb") as f:
            return f.read()
 
     def _persist_metadata(self, rolename: str, data: bytes) -> None:
-        with open(os.path.join(self._dir, f"{rolename}.json"), "wb") as f:
-            f.write(data)
+        """Write metadata to disk atomically to avoid data loss."""
+        try:
+            # encode the rolename to avoid issues with e.g. path separators
+            encoded_name = parse.quote(rolename, "")
+            filename = os.path.join(self._dir, f"{encoded_name}.json")
+            with tempfile.NamedTemporaryFile(
+                dir=self._dir, delete=False
+            ) as temp_file:
+                temp_file.write(data)
+            os.replace(temp_file.name, filename)
+        except OSError as e:
+            # remove tempfile if we managed to create one,
+            # then let the exception happen
+            if temp_file:
+                try:
+                    os.remove(temp_file.name)
+                except FileNotFoundError:
+                    pass
+            raise e
 
     def _load_root(self) -> None:
         """Load remote root metadata.
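The temporary-file-plus-os.replace() pattern used by _persist_metadata() above generalizes to any crash-safe write. A minimal standalone sketch of the same idea (the helper name is hypothetical, not part of the tuf API):

    import os
    import tempfile

    def write_file_atomically(directory: str, filename: str, data: bytes) -> None:
        # Write into a temporary file in the destination directory, then swap
        # it into place with os.replace(): readers see either the old file or
        # the complete new one, never a partially written file.
        fd, tmp_path = tempfile.mkstemp(dir=directory)
        try:
            with os.fdopen(fd, "wb") as tmp_file:
                tmp_file.write(data)
            os.replace(tmp_path, os.path.join(directory, filename))
        except OSError:
            os.remove(tmp_path)  # clean up the leftover temporary file
            raise

Note that os.replace() is atomic only when source and destination are on the same filesystem, which keeping the temporary file in the destination directory guarantees.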
@@ -304,12 +308,14 @@ def _load_root(self) -> None:
         for next_version in range(lower_bound, upper_bound):
             try:
                 data = self._download_metadata(
-                    "root", self.config.root_max_length, next_version
+                    Root.type,
+                    self.config.root_max_length,
+                    next_version,
                 )
                 self._trusted_set.update_root(data)
-                self._persist_metadata("root", data)
+                self._persist_metadata(Root.type, data)
 
-            except exceptions.FetcherHTTPError as exception:
+            except exceptions.DownloadHTTPError as exception:
                 if exception.status_code not in {403, 404}:
                     raise
                 # 404/403 means current root is newest available
@@ -318,46 +324,54 @@ def _load_root(self) -> None:
 
     def _load_timestamp(self) -> None:
         """Load local and remote timestamp metadata"""
         try:
-            data = self._load_local_metadata("timestamp")
+            data = self._load_local_metadata(Timestamp.type)
             self._trusted_set.update_timestamp(data)
         except (OSError, exceptions.RepositoryError) as e:
             # Local timestamp does not exist or is invalid
-            logger.debug("Failed to load local timestamp %s", e)
+            logger.debug("Local timestamp not valid as final: %s", e)
 
         # Load from remote (whether local load succeeded or not)
         data = self._download_metadata(
-            "timestamp", self.config.timestamp_max_length
+            Timestamp.type, self.config.timestamp_max_length
         )
         self._trusted_set.update_timestamp(data)
-        self._persist_metadata("timestamp", data)
+        self._persist_metadata(Timestamp.type, data)
 
     def _load_snapshot(self) -> None:
         """Load local (and if needed remote) snapshot metadata"""
         try:
-            data = self._load_local_metadata("snapshot")
-            self._trusted_set.update_snapshot(data)
+            data = self._load_local_metadata(Snapshot.type)
+            self._trusted_set.update_snapshot(data, trusted=True)
             logger.debug("Local snapshot is valid: not downloading new one")
         except (OSError, exceptions.RepositoryError) as e:
             # Local snapshot does not exist or is invalid: update from remote
-            logger.debug("Failed to load local snapshot %s", e)
+            logger.debug("Local snapshot not valid as final: %s", e)
 
             assert self._trusted_set.timestamp is not None  # nosec
-            metainfo = self._trusted_set.timestamp.signed.meta["snapshot.json"]
-            length = metainfo.length or self.config.snapshot_max_length
+            snapshot_meta = self._trusted_set.timestamp.signed.snapshot_meta
+            length = snapshot_meta.length or self.config.snapshot_max_length
             version = None
             if self._trusted_set.root.signed.consistent_snapshot:
-                version = metainfo.version
+                version = snapshot_meta.version
 
-            data = self._download_metadata("snapshot", length, version)
+            data = self._download_metadata(Snapshot.type, length, version)
             self._trusted_set.update_snapshot(data)
-            self._persist_metadata("snapshot", data)
+            self._persist_metadata(Snapshot.type, data)
+
+    def _load_targets(self, role: str, parent_role: str) -> Metadata[Targets]:
+        """Load local (and if needed remote) metadata for ``role``."""
+
+        # Avoid loading 'role' more than once during "get_targetinfo"
+        if role in self._trusted_set:
+            return self._trusted_set[role]
 
-    def _load_targets(self, role: str, parent_role: str) -> None:
-        """Load local (and if needed remote) metadata for 'role'."""
         try:
             data = self._load_local_metadata(role)
-            self._trusted_set.update_delegated_targets(data, role, parent_role)
+            delegated_targets = self._trusted_set.update_delegated_targets(
+                data, role, parent_role
+            )
             logger.debug("Local %s is valid: not downloading new one", role)
+            return delegated_targets
         except (OSError, exceptions.RepositoryError) as e:
             # Local 'role' does not exist or is invalid: update from remote
             logger.debug("Failed to load local %s: %s", role, e)
@@ -370,9 +384,13 @@ def _load_targets(self, role: str, parent_role: str) -> None:
                 version = metainfo.version
 
             data = self._download_metadata(role, length, version)
-            self._trusted_set.update_delegated_targets(data, role, parent_role)
+            delegated_targets = self._trusted_set.update_delegated_targets(
+                data, role, parent_role
+            )
             self._persist_metadata(role, data)
+
+            return delegated_targets
+
     def _preorder_depth_first_walk(
         self, target_filepath: str
     ) -> Optional[TargetFile]:
@@ -384,43 +402,41 @@
 
         # List of delegations to be interrogated. A (role, parent role) pair
         # is needed to load and verify the delegated targets metadata.
-        delegations_to_visit = [("targets", "root")]
-        visited_role_names: Set[Tuple[str, str]] = set()
-        number_of_delegations = self.config.max_delegations
+        delegations_to_visit = [(Targets.type, Root.type)]
+        visited_role_names: Set[str] = set()
 
         # Preorder depth-first traversal of the graph of target delegations.
-        while number_of_delegations > 0 and len(delegations_to_visit) > 0:
+        while (
+            len(visited_role_names) <= self.config.max_delegations
+            and len(delegations_to_visit) > 0
+        ):
 
             # Pop the role name from the top of the stack.
             role_name, parent_role = delegations_to_visit.pop(-1)
 
             # Skip any visited current role to prevent cycles.
-            if (role_name, parent_role) in visited_role_names:
+            if role_name in visited_role_names:
                 logger.debug("Skipping visited current role %s", role_name)
                 continue
 
             # The metadata for 'role_name' must be downloaded/updated before
             # its targets, delegations, and child roles can be inspected.
-            self._load_targets(role_name, parent_role)
+            targets = self._load_targets(role_name, parent_role).signed
 
-            role_metadata: Targets = self._trusted_set[role_name].signed
-            target = role_metadata.targets.get(target_filepath)
+            target = targets.targets.get(target_filepath)
 
             if target is not None:
                 logger.debug("Found target in current role %s", role_name)
                 return target
 
             # After preorder check, add current role to set of visited roles.
-            visited_role_names.add((role_name, parent_role))
-
-            # And also decrement number of visited roles.
-            number_of_delegations -= 1
+            visited_role_names.add(role_name)
 
-            if role_metadata.delegations is not None:
+            if targets.delegations is not None:
                 child_roles_to_visit = []
                 # NOTE: This may be a slow operation if there are many
                 # delegated roles.
-                for child_role in role_metadata.delegations.roles:
+                for child_role in targets.delegations.roles.values():
                     if child_role.is_delegated_path(target_filepath):
                         logger.debug("Adding child role %s", child_role.name)
@@ -428,7 +444,7 @@
                             (child_role.name, role_name)
                         )
                         if child_role.terminating:
-                            logger.debug("Not backtracking to other roles.")
+                            logger.debug("Not backtracking to other roles")
                             delegations_to_visit = []
                             break
             # Push 'child_roles_to_visit' in reverse order of appearance
@@ -437,10 +453,9 @@
             child_roles_to_visit.reverse()
             delegations_to_visit.extend(child_roles_to_visit)
 
-        if number_of_delegations == 0 and len(delegations_to_visit) > 0:
+        if len(delegations_to_visit) > 0:
             logger.debug(
-                "%d roles left to visit, but allowed to "
-                "visit at most %d delegations.",
+                "%d roles left to visit, but allowed at most %d delegations",
                 len(delegations_to_visit),
                 self.config.max_delegations,
             )
diff --git a/.gitmodules b/tuf/py.typed
similarity index 100%
rename from .gitmodules
rename to tuf/py.typed
diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py
deleted file mode 100644
index 642447d8b3..0000000000
--- a/tuf/repository_lib.py
+++ /dev/null
@@ -1,2306 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2014 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  repository_lib.py
-
-<Author>
-  Vladimir Diaz <vladimir.v.diaz@gmail.com>
-
-<Started>
-  June 1, 2014.
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  Provide a library for the repository tool that can create a TUF repository.
-  The repository tool can be used with the Python interpreter in interactive
-  mode, or imported directly into a Python module. See 'tuf/README' for the
-  complete guide to using 'tuf.repository_tool.py'.
-"""
-
-import os
-import errno
-import time
-import logging
-import shutil
-import json
-import tempfile
-
-import securesystemslib # pylint: disable=unused-import
-from securesystemslib import exceptions as sslib_exceptions
-from securesystemslib import formats as sslib_formats
-from securesystemslib import hash as sslib_hash
-from securesystemslib import interface as sslib_interface
-from securesystemslib import keys as sslib_keys
-from securesystemslib import util as sslib_util
-from securesystemslib import storage as sslib_storage
-
-from tuf import exceptions
-from tuf import formats
-from tuf import keydb
-from tuf import log
-from tuf import roledb
-from tuf import settings
-from tuf import sig
-
-
-# See 'log.py' to learn how logging is handled in TUF.
-logger = logging.getLogger(__name__)
-
-# The extension of TUF metadata.
-METADATA_EXTENSION = '.json'
-
-# The targets and metadata directory names. Metadata files are written
-# to the staged metadata directory instead of the "live" one.
-METADATA_STAGED_DIRECTORY_NAME = 'metadata.staged'
-METADATA_DIRECTORY_NAME = 'metadata'
-TARGETS_DIRECTORY_NAME = 'targets'
-
-# The metadata filenames of the top-level roles.
-ROOT_FILENAME = 'root' + METADATA_EXTENSION
-TARGETS_FILENAME = 'targets' + METADATA_EXTENSION
-SNAPSHOT_FILENAME = 'snapshot' + METADATA_EXTENSION
-TIMESTAMP_FILENAME = 'timestamp' + METADATA_EXTENSION
-
-# Log warning when metadata expires in n days, or less.
-# root = 1 month, snapshot = 1 day, targets = 10 days, timestamp = 1 day.
-ROOT_EXPIRES_WARN_SECONDS = 2630000
-SNAPSHOT_EXPIRES_WARN_SECONDS = 86400
-TARGETS_EXPIRES_WARN_SECONDS = 864000
-TIMESTAMP_EXPIRES_WARN_SECONDS = 86400
-
-# Supported key types.
-SUPPORTED_KEY_TYPES = ['rsa', 'ed25519', 'ecdsa', 'ecdsa-sha2-nistp256'] - -# The algorithm used by the repository to generate the path hash prefixes -# of hashed bin delegations. Please see delegate_hashed_bins() -HASH_FUNCTION = settings.DEFAULT_HASH_ALGORITHM - - - - -def _generate_and_write_metadata(rolename, metadata_filename, - targets_directory, metadata_directory, storage_backend, - consistent_snapshot=False, filenames=None, allow_partially_signed=False, - increment_version_number=True, repository_name='default', - use_existing_fileinfo=False, use_timestamp_length=True, - use_timestamp_hashes=True, use_snapshot_length=False, - use_snapshot_hashes=False): - """ - Non-public function that can generate and write the metadata for the - specified 'rolename'. It also increments the version number of 'rolename' if - the 'increment_version_number' argument is True. - """ - - metadata = None - - # Retrieve the roleinfo of 'rolename' to extract the needed metadata - # attributes, such as version number, expiration, etc. - roleinfo = roledb.get_roleinfo(rolename, repository_name) - previous_keyids = roleinfo.get('previous_keyids', []) - previous_threshold = roleinfo.get('previous_threshold', 1) - signing_keyids = sorted(set(roleinfo['signing_keyids'])) - - # Generate the appropriate role metadata for 'rolename'. - if rolename == 'root': - metadata = generate_root_metadata(roleinfo['version'], roleinfo['expires'], - consistent_snapshot, repository_name) - - _log_warning_if_expires_soon(ROOT_FILENAME, roleinfo['expires'], - ROOT_EXPIRES_WARN_SECONDS) - - - - elif rolename == 'snapshot': - metadata = generate_snapshot_metadata(metadata_directory, - roleinfo['version'], roleinfo['expires'], - storage_backend, consistent_snapshot, repository_name, - use_length=use_snapshot_length, use_hashes=use_snapshot_hashes) - - - _log_warning_if_expires_soon(SNAPSHOT_FILENAME, roleinfo['expires'], - SNAPSHOT_EXPIRES_WARN_SECONDS) - - elif rolename == 'timestamp': - # If filenames don't have "snapshot_filename" key, defaults to "snapshot.json" - snapshot_file_path = (filenames and filenames['snapshot']) \ - or SNAPSHOT_FILENAME - - metadata = generate_timestamp_metadata(snapshot_file_path, roleinfo['version'], - roleinfo['expires'], storage_backend, repository_name, - use_length=use_timestamp_length, use_hashes=use_timestamp_hashes) - - _log_warning_if_expires_soon(TIMESTAMP_FILENAME, roleinfo['expires'], - TIMESTAMP_EXPIRES_WARN_SECONDS) - - # All other roles are either the top-level 'targets' role, or - # a delegated role. - else: - # Only print a warning if the top-level 'targets' role expires soon. - if rolename == 'targets': - _log_warning_if_expires_soon(TARGETS_FILENAME, roleinfo['expires'], - TARGETS_EXPIRES_WARN_SECONDS) - - # Don't hash-prefix consistent target files if they are handled out of band - consistent_targets = consistent_snapshot and not use_existing_fileinfo - - metadata = generate_targets_metadata(targets_directory, - roleinfo['paths'], roleinfo['version'], roleinfo['expires'], - roleinfo['delegations'], consistent_targets, use_existing_fileinfo, - storage_backend, repository_name) - - # Update roledb with the latest delegations info collected during - # generate_targets_metadata() - roledb.update_roleinfo(rolename, roleinfo, - repository_name=repository_name) - - - # Before writing 'rolename' to disk, automatically increment its version - # number (if 'increment_version_number' is True) so that the caller does not - # have to manually perform this action. 
The version number should be - # incremented in both the metadata file and roledb (required so that Snapshot - # references the latest version). - - # Store the 'current_version' in case the version number must be restored - # (e.g., if 'rolename' cannot be written to disk because its metadata is not - # properly signed). - current_version = metadata['version'] - if increment_version_number: - roleinfo = roledb.get_roleinfo(rolename, repository_name) - metadata['version'] = metadata['version'] + 1 - roleinfo['version'] = roleinfo['version'] + 1 - roledb.update_roleinfo(rolename, roleinfo, - repository_name=repository_name) - - else: - logger.debug('Not incrementing ' + repr(rolename) + '\'s version number.') - - if rolename in roledb.TOP_LEVEL_ROLES and not allow_partially_signed: - # Verify that the top-level 'rolename' is fully signed. Only a delegated - # role should not be written to disk without full verification of its - # signature(s), since it can only be considered fully signed depending on - # the delegating role. - signable = sign_metadata(metadata, signing_keyids, metadata_filename, - repository_name) - - - def should_write(): - # Root must be signed by its previous keys and threshold. - if rolename == 'root' and len(previous_keyids) > 0: - if not sig.verify(signable, rolename, repository_name, - previous_threshold, previous_keyids): - return False - - else: - logger.debug('Root is signed by a threshold of its previous keyids.') - - # In the normal case, we should write metadata if the threshold is met. - return sig.verify(signable, rolename, repository_name, - roleinfo['threshold'], roleinfo['signing_keyids']) - - - if should_write(): - _remove_invalid_and_duplicate_signatures(signable, repository_name) - - # Root should always be written as if consistent_snapshot is True (i.e., - # write .root.json and root.json to disk). - if rolename == 'root': - consistent_snapshot = True - filename = write_metadata_file(signable, metadata_filename, - metadata['version'], consistent_snapshot, storage_backend) - - # 'signable' contains an invalid threshold of signatures. - else: - # Since new metadata cannot be successfully written, restore the current - # version number. - roleinfo = roledb.get_roleinfo(rolename, repository_name) - roleinfo['version'] = current_version - roledb.update_roleinfo(rolename, roleinfo, - repository_name=repository_name) - - # Note that 'signable' is an argument to tuf.UnsignedMetadataError(). - raise exceptions.UnsignedMetadataError('Not enough' - ' signatures for ' + repr(metadata_filename), signable) - - # 'rolename' is a delegated role or a top-level role that is partially - # signed, and thus its signatures should not be verified. - else: - signable = sign_metadata(metadata, signing_keyids, metadata_filename, - repository_name) - _remove_invalid_and_duplicate_signatures(signable, repository_name) - - # Root should always be written as if consistent_snapshot is True (i.e., - # .root.json and root.json). 
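Two details above are easy to misread: Root is always written both as a version-prefixed file (a '<VERSION>.root.json' consistent-snapshot name) and as plain root.json, and the version bump is rolled back whenever the signature threshold is not met, so a version number that was never written to disk is never consumed. A toy sketch of the bump-then-restore pattern follows; the write_ok flag is a stand-in for the sig.verify threshold check, and write_role is not a real library function.

def write_role(roleinfo, write_ok):
    # Optimistically bump the version before signing/writing, as the
    # code above does when increment_version_number is True.
    current_version = roleinfo["version"]
    roleinfo["version"] += 1
    if not write_ok:
        # Not enough signatures: restore the old version number so the
        # next write attempt does not skip a version.
        roleinfo["version"] = current_version
        raise RuntimeError("not enough signatures")  # stands in for UnsignedMetadataError

info = {"version": 3}
write_role(info, write_ok=True)
assert info["version"] == 4
try:
    write_role(info, write_ok=False)
except RuntimeError:
    pass
assert info["version"] == 4  # the failed write left the version untouched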
- if rolename == 'root': - filename = write_metadata_file(signable, metadata_filename, - metadata['version'], consistent_snapshot=True, - storage_backend=storage_backend) - - else: - filename = write_metadata_file(signable, metadata_filename, - metadata['version'], consistent_snapshot, storage_backend) - - return signable, filename - - - - - -def _metadata_is_partially_loaded(rolename, signable, repository_name): - """ - Non-public function that determines whether 'rolename' is loaded with - at least zero good signatures, but an insufficient threshold (which means - 'rolename' was written to disk with repository.write_partial()). A repository - maintainer may write partial metadata without including a valid signature. - However, the final repository.write() must include a threshold number of - signatures. - - If 'rolename' is found to be partially loaded, mark it as partially loaded in - its 'roledb' roleinfo. This function exists to assist in deciding whether - a role's version number should be incremented when write() or write_partial() - is called. Return True if 'rolename' was partially loaded, False otherwise. - """ - - # The signature status lists the number of good signatures, including - # bad, untrusted, unknown, etc. - status = sig.get_signature_status(signable, rolename, repository_name) - - if len(status['good_sigs']) < status['threshold'] and \ - len(status['good_sigs']) >= 0: - return True - - else: - return False - - - - - -def _check_role_keys(rolename, repository_name): - """ - Non-public function that verifies the public and signing keys of 'rolename'. - If either contains an invalid threshold of keys, raise an exception. - """ - - # Extract the total number of public and private keys of 'rolename' from its - # roleinfo in 'roledb'. - roleinfo = roledb.get_roleinfo(rolename, repository_name) - total_keyids = len(roleinfo['keyids']) - threshold = roleinfo['threshold'] - total_signatures = len(roleinfo['signatures']) - total_signing_keys = len(roleinfo['signing_keyids']) - - # Raise an exception for an invalid threshold of public keys. - if total_keyids < threshold: - raise exceptions.InsufficientKeysError(repr(rolename) + ' role contains' - ' ' + repr(total_keyids) + ' / ' + repr(threshold) + ' public keys.') - - # Raise an exception for an invalid threshold of signing keys. - if total_signatures == 0 and total_signing_keys < threshold: - raise exceptions.InsufficientKeysError(repr(rolename) + ' role contains' - ' ' + repr(total_signing_keys) + ' / ' + repr(threshold) + ' signing keys.') - - - - - -def _remove_invalid_and_duplicate_signatures(signable, repository_name): - """ - Non-public function that removes invalid or duplicate signatures from - 'signable'. 'signable' may contain signatures (invalid) from previous - versions of the metadata that were loaded with load_repository(). Invalid - or duplicate signatures are removed from 'signable'. - """ - - # Store the keyids of valid signatures. 'signature_keyids' is checked for - # duplicates rather than comparing signature objects because PSS may generate - # duplicate valid signatures for the same data, yet contain different - # signatures. - signature_keyids = [] - - for signature in signable['signatures']: - signed = sslib_formats.encode_canonical(signable['signed']).encode('utf-8') - keyid = signature['keyid'] - key = None - - # Remove 'signature' from 'signable' if the listed keyid does not exist - # in 'keydb'.
- try: - key = keydb.get_key(keyid, repository_name=repository_name) - - except exceptions.UnknownKeyError: - signable['signatures'].remove(signature) - continue - - # Remove 'signature' from 'signable' if it is an invalid signature. - if not sslib_keys.verify_signature(key, signature, signed): - logger.debug('Removing invalid signature for ' + repr(keyid)) - signable['signatures'].remove(signature) - - # Although valid, it may still need removal if it is a duplicate. Check - # the keyid, rather than the signature, to remove duplicate PSS signatures. - # PSS may generate multiple different signatures for the same keyid. - else: - if keyid in signature_keyids: - signable['signatures'].remove(signature) - - # 'keyid' is valid and not a duplicate, so add it to 'signature_keyids'. - else: - signature_keyids.append(keyid) - - - - - -def _delete_obsolete_metadata(metadata_directory, snapshot_metadata, - consistent_snapshot, repository_name, storage_backend): - """ - Non-public function that deletes metadata files marked as removed by - 'repository_tool.py'. Revoked metadata files are not actually deleted until - this function is called. Obsolete metadata should *not* be retained in - "metadata.staged", otherwise they may be re-loaded by 'load_repository()'. - - Note: Obsolete metadata may not always be easily detected (by inspecting - top-level metadata during loading) due to partial metadata and top-level - metadata that have not been written yet. - """ - - # Walk the repository's metadata sub-directory, which is where all metadata - # is stored (including delegated roles). The 'django.json' role (e.g., - # delegated by Targets) would be located in the - # '{repository_directory}/metadata/' directory. - metadata_files = sorted(storage_backend.list_folder(metadata_directory)) - for metadata_role in metadata_files: - if metadata_role.endswith('root.json'): - continue - - metadata_path = os.path.join(metadata_directory, metadata_role) - - # Strip the version number if 'consistent_snapshot' is True. Example: - # '10.django.json' --> 'django.json'. Consistent and non-consistent - # metadata might co-exist if write() and - # write(consistent_snapshot=True) are mixed, so ensure only - # '.filename' metadata is stripped. - - # Should we check if 'consistent_snapshot' is True? It might have been - # set previously, but 'consistent_snapshot' can potentially be False - # now. We'll proceed with the understanding that 'metadata_name' can - # have a prepended version number even though the repository is now - # a non-consistent one. - if metadata_role not in snapshot_metadata['meta']: - metadata_role, junk = _strip_version_number(metadata_role, - consistent_snapshot) - - else: - logger.debug(repr(metadata_role) + ' found in the snapshot role.') - - # Strip metadata extension from filename. The role database does not - # include the metadata extension. - if metadata_role.endswith(METADATA_EXTENSION): - metadata_role = metadata_role[:-len(METADATA_EXTENSION)] - else: - logger.debug(repr(metadata_role) + ' does not match' - ' supported extension ' + repr(METADATA_EXTENSION)) - - if metadata_role in roledb.TOP_LEVEL_ROLES: - logger.debug('Not removing top-level metadata ' + repr(metadata_role)) - return - - # Delete the metadata file if it does not exist in 'roledb'. - # 'repository_tool.py' might have removed 'metadata_name,' - # but its metadata file is not actually deleted yet. Do it now. 
- if not roledb.role_exists(metadata_role, repository_name): - logger.info('Removing outdated metadata: ' + repr(metadata_path)) - storage_backend.remove(metadata_path) - - else: - logger.debug('Not removing metadata: ' + repr(metadata_path)) - - # TODO: Should we delete outdated consistent snapshots, or does it make - # more sense for integrators to remove outdated consistent snapshots? - - - - -def _get_written_metadata(metadata_signable): - """ - Non-public function that returns the actual content of written metadata. - """ - - # Explicitly specify the JSON separators for Python 2 + 3 consistency. - written_metadata_content = json.dumps(metadata_signable, indent=1, - separators=(',', ': '), sort_keys=True).encode('utf-8') - - return written_metadata_content - - - - - -def _strip_version_number(metadata_filename, consistent_snapshot): - """ - Strip from 'metadata_filename' any version number (in the - expected '{dirname}/<version_number>.rolename.<ext>' format) that - it may contain, and return the stripped filename and version number, - as a tuple. 'consistent_snapshot' is a boolean indicating if a version - number is prepended to 'metadata_filename'. - """ - - # Strip the version number if 'consistent_snapshot' is True. - # Example: '10.django.json' --> 'django.json' - if consistent_snapshot: - dirname, basename = os.path.split(metadata_filename) - version_number, basename = basename.split('.', 1) - stripped_metadata_filename = os.path.join(dirname, basename) - - if not version_number.isdigit(): - return metadata_filename, '' - - else: - return stripped_metadata_filename, version_number - - else: - return metadata_filename, '' - - - - -def _load_top_level_metadata(repository, top_level_filenames, repository_name): - """ - Load the metadata of the Root, Timestamp, Targets, and Snapshot roles. At a - minimum, the Root role must exist and load successfully. - """ - - root_filename = top_level_filenames[ROOT_FILENAME] - targets_filename = top_level_filenames[TARGETS_FILENAME] - snapshot_filename = top_level_filenames[SNAPSHOT_FILENAME] - timestamp_filename = top_level_filenames[TIMESTAMP_FILENAME] - - root_metadata = None - targets_metadata = None - snapshot_metadata = None - timestamp_metadata = None - - # Load 'root.json'. A Root role file without a version number is always - # written. - try: - # Initialize the key and role metadata of the top-level roles. - signable = sslib_util.load_json_file(root_filename) - try: - formats.check_signable_object_format(signable) - except exceptions.UnsignedMetadataError: - # Downgrade the error to a warning because a use case exists where - # metadata may be generated unsigned on one machine and signed on another. - logger.warning('Unsigned metadata object: ' + repr(signable)) - - root_metadata = signable['signed'] - keydb.create_keydb_from_root_metadata(root_metadata, repository_name) - roledb.create_roledb_from_root_metadata(root_metadata, repository_name) - - # Load Root's roleinfo and update 'roledb'. - roleinfo = roledb.get_roleinfo('root', repository_name) - roleinfo['consistent_snapshot'] = root_metadata['consistent_snapshot'] - roleinfo['signatures'] = [] - for signature in signable['signatures']: - if signature not in roleinfo['signatures']: - roleinfo['signatures'].append(signature) - - else: - logger.debug('Found a Root signature that is already loaded:' - ' ' + repr(signature)) - - # By default, roleinfo['partial_loaded'] of top-level roles should be set - # to False in 'create_roledb_from_root_metadata()'.
Update this field, if - # necessary, now that we have its signable object. - if _metadata_is_partially_loaded('root', signable, repository_name): - roleinfo['partial_loaded'] = True - - else: - logger.debug('Root was not partially loaded.') - - _log_warning_if_expires_soon(ROOT_FILENAME, roleinfo['expires'], - ROOT_EXPIRES_WARN_SECONDS) - - roledb.update_roleinfo('root', roleinfo, mark_role_as_dirty=False, - repository_name=repository_name) - - # Ensure the 'consistent_snapshot' field is extracted. - consistent_snapshot = root_metadata['consistent_snapshot'] - - except sslib_exceptions.StorageError as error: - raise exceptions.RepositoryError('Cannot load the required' - ' root file: ' + repr(root_filename)) from error - - # Load 'timestamp.json'. A Timestamp role file without a version number is - # always written. - try: - signable = sslib_util.load_json_file(timestamp_filename) - timestamp_metadata = signable['signed'] - for signature in signable['signatures']: - repository.timestamp.add_signature(signature, mark_role_as_dirty=False) - - # Load Timestamp's roleinfo and update 'roledb'. - roleinfo = roledb.get_roleinfo('timestamp', repository_name) - roleinfo['expires'] = timestamp_metadata['expires'] - roleinfo['version'] = timestamp_metadata['version'] - - if _metadata_is_partially_loaded('timestamp', signable, repository_name): - roleinfo['partial_loaded'] = True - - else: - logger.debug('The Timestamp role was not partially loaded.') - - _log_warning_if_expires_soon(TIMESTAMP_FILENAME, roleinfo['expires'], - TIMESTAMP_EXPIRES_WARN_SECONDS) - - roledb.update_roleinfo('timestamp', roleinfo, mark_role_as_dirty=False, - repository_name=repository_name) - - except sslib_exceptions.StorageError as error: - raise exceptions.RepositoryError('Cannot load the Timestamp ' - 'file: ' + repr(timestamp_filename)) from error - - # Load 'snapshot.json'. A consistent snapshot.json must be calculated if - # 'consistent_snapshot' is True. - # The Snapshot and Root roles are both accessed by their hashes. - if consistent_snapshot: - snapshot_version = timestamp_metadata['meta'][SNAPSHOT_FILENAME]['version'] - - dirname, basename = os.path.split(snapshot_filename) - basename = basename.split(METADATA_EXTENSION, 1)[0] - snapshot_filename = os.path.join(dirname, - str(snapshot_version) + '.' + basename + METADATA_EXTENSION) - - try: - signable = sslib_util.load_json_file(snapshot_filename) - try: - formats.check_signable_object_format(signable) - except exceptions.UnsignedMetadataError: - # Downgrade the error to a warning because a use case exists where - # metadata may be generated unsigned on one machine and signed on another. - logger.warning('Unsigned metadata object: ' + repr(signable)) - - snapshot_metadata = signable['signed'] - - for signature in signable['signatures']: - repository.snapshot.add_signature(signature, mark_role_as_dirty=False) - - # Load Snapshot's roleinfo and update 'roledb'. 
- roleinfo = roledb.get_roleinfo('snapshot', repository_name) - roleinfo['expires'] = snapshot_metadata['expires'] - roleinfo['version'] = snapshot_metadata['version'] - - if _metadata_is_partially_loaded('snapshot', signable, repository_name): - roleinfo['partial_loaded'] = True - - else: - logger.debug('Snapshot was not partially loaded.') - - _log_warning_if_expires_soon(SNAPSHOT_FILENAME, roleinfo['expires'], - SNAPSHOT_EXPIRES_WARN_SECONDS) - - roledb.update_roleinfo('snapshot', roleinfo, mark_role_as_dirty=False, - repository_name=repository_name) - - except sslib_exceptions.StorageError as error: - raise exceptions.RepositoryError('The Snapshot file ' - 'cannot be loaded: '+ repr(snapshot_filename)) from error - - # Load 'targets.json'. A consistent snapshot of the Targets role must be - # calculated if 'consistent_snapshot' is True. - if consistent_snapshot: - targets_version = snapshot_metadata['meta'][TARGETS_FILENAME]['version'] - dirname, basename = os.path.split(targets_filename) - targets_filename = os.path.join(dirname, str(targets_version) + '.' + basename) - - try: - signable = sslib_util.load_json_file(targets_filename) - try: - formats.check_signable_object_format(signable) - except exceptions.UnsignedMetadataError: - # Downgrade the error to a warning because a use case exists where - # metadata may be generated unsigned on one machine and signed on another. - logger.warning('Unsigned metadata object: ' + repr(signable)) - - targets_metadata = signable['signed'] - - for signature in signable['signatures']: - repository.targets.add_signature(signature, mark_role_as_dirty=False) - - # Update 'targets.json' in 'roledb' - roleinfo = roledb.get_roleinfo('targets', repository_name) - roleinfo['paths'] = targets_metadata['targets'] - roleinfo['version'] = targets_metadata['version'] - roleinfo['expires'] = targets_metadata['expires'] - roleinfo['delegations'] = targets_metadata['delegations'] - - if _metadata_is_partially_loaded('targets', signable, repository_name): - roleinfo['partial_loaded'] = True - - else: - logger.debug('Targets file was not partially loaded.') - - _log_warning_if_expires_soon(TARGETS_FILENAME, roleinfo['expires'], - TARGETS_EXPIRES_WARN_SECONDS) - - roledb.update_roleinfo('targets', roleinfo, mark_role_as_dirty=False, - repository_name=repository_name) - - # Add the keys specified in the delegations field of the Targets role. - for keyid, key_metadata in targets_metadata['delegations']['keys'].items(): - - # Use the keyid found in the delegation - key_object, _ = sslib_keys.format_metadata_to_key(key_metadata, - keyid) - - # Add 'key_object' to the list of recognized keys. Keys may be shared, - # so do not raise an exception if 'key_object' has already been loaded. - # In contrast to the methods that may add duplicate keys, do not log - # a warning as there may be many such duplicate key warnings. The - # repository maintainer should have also been made aware of the duplicate - # key when it was added. 
- try: - keydb.add_key(key_object, keyid=None, repository_name=repository_name) - - except exceptions.KeyAlreadyExistsError: - pass - - except sslib_exceptions.StorageError as error: - raise exceptions.RepositoryError('The Targets file ' - 'can not be loaded: ' + repr(targets_filename)) from error - - return repository, consistent_snapshot - - - - -def _log_warning_if_expires_soon(rolename, expires_iso8601_timestamp, - seconds_remaining_to_warn): - """ - Non-public function that logs a warning if 'rolename' expires in - 'seconds_remaining_to_warn' seconds, or less. - """ - - # Metadata stores expiration datetimes in ISO8601 format. Convert to - # unix timestamp, subtract from current time.time() (also in POSIX time) - # and compare against 'seconds_remaining_to_warn'. Log a warning message - # to console if 'rolename' expires soon. - datetime_object = formats.expiry_string_to_datetime( - expires_iso8601_timestamp) - expires_unix_timestamp = \ - formats.datetime_to_unix_timestamp(datetime_object) - seconds_until_expires = expires_unix_timestamp - int(time.time()) - - if seconds_until_expires <= seconds_remaining_to_warn: - if seconds_until_expires <= 0: - logger.warning( - repr(rolename) + ' expired ' + repr(datetime_object.ctime() + ' (UTC).')) - - else: - days_until_expires = seconds_until_expires / 86400 - logger.warning(repr(rolename) + ' expires ' + datetime_object.ctime() + '' - ' (UTC). ' + repr(days_until_expires) + ' day(s) until it expires.') - - else: - pass - - - - - -def import_rsa_privatekey_from_file(filepath, password=None): - """ - - Import the encrypted PEM file in 'filepath', decrypt it, and return the key - object in 'securesystemslib.RSAKEY_SCHEMA' format. - - - filepath: - file, an RSA encrypted PEM file. Unlike the public RSA PEM - key file, 'filepath' does not have an extension. - - password: - The passphrase to decrypt 'filepath'. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - securesystemslib.exceptions.CryptoError, if 'filepath' is not a valid - encrypted key file. - - - The contents of 'filepath' are read, decrypted, and the key stored. - - - An RSA key object, conformant to 'securesystemslib.RSAKEY_SCHEMA'. - """ - - # Note: securesystemslib.interface.import_rsa_privatekey_from_file() does not - # allow both 'password' and 'prompt' to be True, nor does it automatically - # prompt for a password if the key file is encrypted and a password isn't - # given. - try: - private_key = sslib_interface.import_rsa_privatekey_from_file( - filepath, password) - - # The user might not have given a password for an encrypted private key. - # Prompt for a password for convenience. - except sslib_exceptions.CryptoError: - if password is None: - private_key = sslib_interface.import_rsa_privatekey_from_file( - filepath, password, prompt=True) - - else: - raise - - return private_key - - - - - - - -def import_ed25519_privatekey_from_file(filepath, password=None): - """ - - Import the encrypted ed25519 TUF key file in 'filepath', decrypt it, and - return the key object in 'securesystemslib.ED25519KEY_SCHEMA' format. - - The TUF private key (may also contain the public part) is encrypted with - AES-256 in CTR mode of operation. The password is strengthened with - PBKDF2-HMAC-SHA256. - - - filepath: - file, an encrypted ed25519 TUF key file. - - password: - The password, or passphrase, to import the private key (i.e., the - encrypted key file 'filepath' must be decrypted before the ed25519 key - object can be returned).
- - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted or the imported key object contains an invalid key type (i.e., - not 'ed25519'). - - securesystemslib.exceptions.CryptoError, if 'filepath' cannot be decrypted. - - securesystemslib.exceptions.UnsupportedLibraryError, if 'filepath' cannot be - decrypted due to an invalid configuration setting (i.e., invalid - 'tuf.settings' setting). - - - 'password' is used to decrypt the 'filepath' key file. - - - An ed25519 key object of the form: 'securesystemslib.ED25519KEY_SCHEMA'. - """ - - # Note: securesystemslib.interface.import_ed25519_privatekey_from_file() does - # not allow both 'password' and 'prompt' to be True, nor does it - # automatically prompt for a password if the key file is encrypted and a - # password isn't given. - try: - private_key = sslib_interface.import_ed25519_privatekey_from_file( - filepath, password) - - # The user might not have given a password for an encrypted private key. - # Prompt for a password for convenience. - except sslib_exceptions.CryptoError: - if password is None: - private_key = sslib_interface.import_ed25519_privatekey_from_file( - filepath, password, prompt=True) - - else: - raise - - return private_key - - - -def get_delegated_roles_metadata_filenames(metadata_directory, - consistent_snapshot, storage_backend=None): - """ - Return a dictionary containing all filenames in 'metadata_directory' - except the top-level roles. - If multiple versions of a file exist because of a consistent snapshot, - only the file with biggest version prefix is included. - """ - - filenames = {} - metadata_files = sorted(storage_backend.list_folder(metadata_directory), - reverse=True) - - # Iterate over role metadata files, sorted by their version-number prefix, with - # more recent versions first, and only add the most recent version of any - # (non top-level) metadata to the list of returned filenames. Note that there - # should only be one version of each file, if consistent_snapshot is False. - for metadata_role in metadata_files: - metadata_path = os.path.join(metadata_directory, metadata_role) - - # Strip the version number if 'consistent_snapshot' is True, - # or if 'metadata_role' is Root. - # Example: '10.django.json' --> 'django.json' - consistent = \ - metadata_role.endswith('root.json') or consistent_snapshot == True - metadata_name, junk = _strip_version_number(metadata_role, - consistent) - - if metadata_name.endswith(METADATA_EXTENSION): - extension_length = len(METADATA_EXTENSION) - metadata_name = metadata_name[:-extension_length] - - else: - logger.debug('Skipping file with unsupported metadata' - ' extension: ' + repr(metadata_path)) - continue - - # Skip top-level roles, only interested in delegated roles. - if metadata_name in roledb.TOP_LEVEL_ROLES: - continue - - # Prevent reloading duplicate versions if consistent_snapshot is True - if metadata_name not in filenames: - filenames[metadata_name] = metadata_path - - return filenames - - - -def get_top_level_metadata_filenames(metadata_directory): - """ - - Return a dictionary containing the filenames of the top-level roles. - If 'metadata_directory' is set to 'metadata', the dictionary - returned would contain: - - filenames = {'root.json': 'metadata/root.json', - 'targets.json': 'metadata/targets.json', - 'snapshot.json': 'metadata/snapshot.json', - 'timestamp.json': 'metadata/timestamp.json'} - - If 'metadata_directory' is not set by the caller, the current directory is - used. 
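get_delegated_roles_metadata_filenames above combines two conventions: _strip_version_number removes a consistent-snapshot prefix ('10.django.json' -> 'django.json'), and the reverse-sorted directory listing plus the "metadata_name not in filenames" guard keeps only the first, i.e. highest-prefix, hit per role. A standalone re-implementation of that selection follows; note it uses single-digit prefixes so that the lexicographic sort coincides with numeric order.

import os

def strip_version_number(filename):
    # '10.django.json' -> 'django.json'; leave unprefixed names alone.
    dirname, basename = os.path.split(filename)
    version, _, rest = basename.partition(".")
    return os.path.join(dirname, rest) if version.isdigit() else filename

files = ["3.django.json", "7.django.json", "5.flask.json"]
newest = {}
for name in sorted(files, reverse=True):   # highest version prefix first
    role = strip_version_number(name)      # e.g. '7.django.json' -> 'django.json'
    newest.setdefault(role, name)          # first (newest) hit per role wins
assert newest == {"django.json": "7.django.json", "flask.json": "5.flask.json"}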
- - - metadata_directory: - The directory containing the metadata files. - - - securesystemslib.exceptions.FormatError, if 'metadata_directory' is - improperly formatted. - - - None. - - - A dictionary containing the expected filenames of the top-level - metadata files, such as 'root.json' and 'snapshot.json'. - """ - - # Does 'metadata_directory' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATH_SCHEMA.check_match(metadata_directory) - - # Store the filepaths of the top-level roles, including the - # 'metadata_directory' for each one. - filenames = {} - - filenames[ROOT_FILENAME] = \ - os.path.join(metadata_directory, ROOT_FILENAME) - - filenames[TARGETS_FILENAME] = \ - os.path.join(metadata_directory, TARGETS_FILENAME) - - filenames[SNAPSHOT_FILENAME] = \ - os.path.join(metadata_directory, SNAPSHOT_FILENAME) - - filenames[TIMESTAMP_FILENAME] = \ - os.path.join(metadata_directory, TIMESTAMP_FILENAME) - - return filenames - - - - - -def get_targets_metadata_fileinfo(filename, storage_backend, custom=None): - """ - - Retrieve the file information of 'filename'. The object returned - conforms to 'tuf.formats.TARGETS_FILEINFO_SCHEMA'. The information - generated for 'filename' is stored in metadata files like 'targets.json'. - The fileinfo object returned has the form: - - fileinfo = {'length': 1024, - 'hashes': {'sha256': 1233dfba312, ...}, - 'custom': {...}} - - - filename: - The metadata file whose file information is needed. It must exist. - - custom: - An optional object providing additional information about the file. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. - - - securesystemslib.exceptions.FormatError, if 'filename' is improperly - formatted. - - - The file is opened and information about the file is generated, - such as file size and its hash. - - - A dictionary conformant to 'tuf.formats.TARGETS_FILEINFO_SCHEMA'. This - dictionary contains the length, hashes, and custom data about the - 'filename' metadata file. SHA256 hashes are generated by default. - """ - - # Does 'filename' and 'custom' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATH_SCHEMA.check_match(filename) - if custom is not None: - formats.CUSTOM_SCHEMA.check_match(custom) - - # Note: 'filehashes' is a dictionary of the form - # {'sha256': 1233dfba312, ...}. 'custom' is an optional - # dictionary that a client might define to include additional - # file information, such as the file's author, version/revision - # numbers, etc. - filesize, filehashes = sslib_util.get_file_details(filename, - settings.FILE_HASH_ALGORITHMS, storage_backend) - - return formats.make_targets_fileinfo(filesize, filehashes, custom=custom) - - - - - -def get_metadata_versioninfo(rolename, repository_name): - """ - - Retrieve the version information of 'rolename'. The object returned - conforms to 'tuf.formats.VERSIONINFO_SCHEMA'. The information - generated for 'rolename' is stored in 'snapshot.json'. - The versioninfo object returned has the form: - - versioninfo = {'version': 14} - - - rolename: - The metadata role whose versioninfo is needed. 
It must exist, otherwise - a 'tuf.exceptions.UnknownRoleError' exception is raised. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if 'rolename' is improperly - formatted. - - tuf.exceptions.UnknownRoleError, if 'rolename' does not exist. - - - None. - - - A dictionary conformant to 'tuf.formats.VERSIONINFO_SCHEMA'. - This dictionary contains the version number of 'rolename'. - """ - - # Does 'rolename' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - formats.ROLENAME_SCHEMA.check_match(rolename) - - roleinfo = roledb.get_roleinfo(rolename, repository_name) - versioninfo = {'version': roleinfo['version']} - - return versioninfo - - - - - -def create_bin_name(low, high, prefix_len): - """ - - Create a string name of a delegated hash bin, where name will be a range of - zero-padded (up to prefix_len) strings i.e. for low=00, high=07, - prefix_len=3 the returned name would be '000-007'. - - - low: - The low end of the prefix range to be binned - - high: - The high end of the prefix range to be binned - - prefix_len: - The length of the prefix range components - - - A string bin name, with each end of the range zero-padded up to prefix_len - """ - if low == high: - return "{low:0{len}x}".format(low=low, len=prefix_len) - - return "{low:0{len}x}-{high:0{len}x}".format(low=low, high=high, - len=prefix_len) - - - - - -def get_bin_numbers(number_of_bins): - """ - - Given the desired number of bins (number_of_bins) calculate the prefix - length (prefix_length), total number of prefixes (prefix_count) and the - number of prefixes to be stored in each bin (bin_size). - Example: number_of_bins = 32 - prefix_length = 2 - prefix_count = 256 - bin_size = 8 - That is, each of the 32 hashed bins are responsible for 8 hash prefixes, - i.e. 00-07, 08-0f, ..., f8-ff. - - - number_of_bins: - The number of hashed bins in use - - - A tuple of three values: - 1. prefix_length: the length of each prefix - 2. prefix_count: the total number of prefixes in use - 3. bin_size: the number of hash prefixes to be stored in each bin - """ - # Convert 'number_of_bins' to hexadecimal and determine the number of - # hexadecimal digits needed by each hash prefix - prefix_length = len("{:x}".format(number_of_bins - 1)) - # Calculate the total number of hash prefixes (e.g., 000 - FFF total values) - prefix_count = 16 ** prefix_length - # Determine how many prefixes to assign to each bin - bin_size = prefix_count // number_of_bins - - # For simplicity, ensure that 'prefix_count' (16 ^ n) can be evenly - # distributed over 'number_of_bins' (must be 2 ^ n). Each bin will contain - # (prefix_count / number_of_bins) hash prefixes. - if prefix_count % number_of_bins != 0: - # Note: x % y != 0 does not guarantee that y is not a power of 2 for - # arbitrary x and y values. However, due to the relationship between - # number_of_bins and prefix_count, it is true for them. - raise sslib_exceptions.Error('The "number_of_bins" argument' - ' must be a power of 2.') - - return prefix_length, prefix_count, bin_size - - - - - -def find_bin_for_target_hash(target_hash, number_of_bins): - """ - - For a given hashed filename, target_hash, calculate the name of a hashed bin - into which this file would be delegated given number_of_bins bins are in - use. 
- - - target_hash: - The hash of the target file's path - - number_of_bins: - The number of hashed_bins in use - - - The name of the hashed bin target_hash would be binned into - """ - - prefix_length, _, bin_size = get_bin_numbers(number_of_bins) - - prefix = int(target_hash[:prefix_length], 16) - - low = prefix - (prefix % bin_size) - high = (low + bin_size - 1) - - return create_bin_name(low, high, prefix_length) - - - - - -def get_target_hash(target_filepath): - """ - - Compute the hash of 'target_filepath'. This is useful in conjunction with - the "path_hash_prefixes" attribute in a delegated targets role, which - tells us which paths a role is implicitly responsible for. - - The repository may optionally organize targets into hashed bins to ease - target delegations and role metadata management. The use of consistent - hashing allows for a uniform distribution of targets into bins. - - - target_filepath: - The path to the target file on the repository. This will be relative to - the 'targets' (or equivalent) directory on a given mirror. - - - None. - - - None. - - - The hash of 'target_filepath'. - - """ - formats.RELPATH_SCHEMA.check_match(target_filepath) - - digest_object = sslib_hash.digest(algorithm=HASH_FUNCTION) - digest_object.update(target_filepath.encode('utf-8')) - return digest_object.hexdigest() - - - - -def generate_root_metadata(version, expiration_date, consistent_snapshot, - repository_name='default'): - """ - - Create the root metadata. 'roledb' and 'keydb' - are read and the information returned by these modules is used to generate - the root metadata object. - - - version: - The metadata version number. Clients use the version number to - determine if the downloaded version is newer than the one currently - trusted. - - expiration_date: - The expiration date of the metadata file. Conformant to - 'securesystemslib.formats.ISO8601_DATETIME_SCHEMA'. - - consistent_snapshot: - Boolean. If True, a file digest is expected to be prepended to the - filename of any target file located in the targets directory. Each digest - is stripped from the target filename and listed in the snapshot metadata. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if the generated root metadata - object could not be generated with the correct format. - - securesystemslib.exceptions.Error, if an error is encountered while - generating the root metadata object (e.g., a required top-level role not - found in 'roledb'.) - - - The contents of 'keydb' and 'roledb' are read. - - - A root metadata object, conformant to 'tuf.formats.ROOT_SCHEMA'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any of the arguments are - # improperly formatted. - formats.METADATAVERSION_SCHEMA.check_match(version) - sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # The role and key dictionaries to be saved in the root metadata object. - # Conformant to 'ROLEDICT_SCHEMA' and 'KEYDICT_SCHEMA', respectively. - roledict = {} - keydict = {} - keylist = [] - - # Extract the role, threshold, and keyid information of the top-level roles, - # which Root stores in its metadata. 
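The hashed-bin helpers above are small enough to check end to end: for number_of_bins=32, the arithmetic in get_bin_numbers yields a two-hex-digit prefix space of 256 values split into bins of 8, and find_bin_for_target_hash maps a path's hash into one of '00-07', '08-0f', ..., 'f8-ff'. Here is a standalone sketch mirroring (not importing) those helpers, assuming SHA-256 as the HASH_FUNCTION default:

import hashlib

def bin_numbers(number_of_bins):
    prefix_length = len("{:x}".format(number_of_bins - 1))
    prefix_count = 16 ** prefix_length
    return prefix_length, prefix_count, prefix_count // number_of_bins

def bin_name(low, high, prefix_len):
    if low == high:
        return "{low:0{l}x}".format(low=low, l=prefix_len)
    return "{low:0{l}x}-{high:0{l}x}".format(low=low, high=high, l=prefix_len)

def bin_for_path(target_path, number_of_bins):
    prefix_len, _, bin_size = bin_numbers(number_of_bins)
    digest = hashlib.sha256(target_path.encode("utf-8")).hexdigest()
    prefix = int(digest[:prefix_len], 16)
    low = prefix - (prefix % bin_size)
    return bin_name(low, low + bin_size - 1, prefix_len)

assert bin_numbers(32) == (2, 256, 8)       # 32 bins x 8 prefixes: 00-07 ... f8-ff
print(bin_for_path("foo/bar.tar.gz", 32))   # one of '00-07', '08-0f', ..., 'f8-ff'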
The necessary role metadata is generated - # from this information. - for rolename in roledb.TOP_LEVEL_ROLES: - - # If a top-level role is missing from 'roledb', raise an exception. - if not roledb.role_exists(rolename, repository_name): - raise sslib_exceptions.Error(repr(rolename) + ' not in' - ' "roledb".') - - # Collect keys from all roles in a list - keyids = roledb.get_role_keyids(rolename, repository_name) - for keyid in keyids: - key = keydb.get_key(keyid, repository_name=repository_name) - keylist.append(key) - - # Generate the authentication information Root establishes for each - # top-level role. - role_threshold = roledb.get_role_threshold(rolename, repository_name) - role_metadata = formats.build_dict_conforming_to_schema( - formats.ROLE_SCHEMA, - keyids=keyids, - threshold=role_threshold) - roledict[rolename] = role_metadata - - # Create the root metadata 'keys' dictionary - _, keydict = keys_to_keydict(keylist) - - # Use generalized build_dict_conforming_to_schema func to produce a dict that - # contains all the appropriate information for this type of metadata, - # checking that the result conforms to the appropriate schema. - # TODO: Later, probably after the rewrite for TUF Issue #660, generalize - # further, upward, by replacing generate_targets_metadata, - # generate_root_metadata, etc. with one function that generates - # metadata, possibly rolling that upwards into the calling function. - # There are very few things that really need to be done differently. - return formats.build_dict_conforming_to_schema( - formats.ROOT_SCHEMA, - version=version, - expires=expiration_date, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - - - - -def generate_targets_metadata(targets_directory, target_files, version, - expiration_date, delegations=None, write_consistent_targets=False, - use_existing_fileinfo=False, storage_backend=None, - repository_name='default'): - """ - - Generate the targets metadata object. The targets in 'target_files' must - exist at the same path they should on the repo. 'target_files' is a list - of targets. The 'custom' field of the targets metadata is not currently - supported. - - - targets_directory: - The absolute path to a directory containing the target files and - directories of the repository. - - target_files: - The target files tracked by 'targets.json'. 'target_files' is a - dictionary mapping target paths (relative to the targets directory) to - a dict matching tuf.formats.LOOSE_FILEINFO_SCHEMA. LOOSE_FILEINFO_SCHEMA - can support multiple different value patterns: - 1) an empty dictionary - for when fileinfo should be generated - 2) a dictionary matching tuf.formats.CUSTOM_SCHEMA - for when fileinfo - should be generated, with the supplied custom metadata attached - 3) a dictionary matching tuf.formats.FILEINFO_SCHEMA - for when full - fileinfo is provided in conjunction with use_existing_fileinfo - - version: - The metadata version number. Clients use the version number to - determine if the downloaded version is newer than the one currently - trusted. - - expiration_date: - The expiration date of the metadata file. Conformant to - 'securesystemslib.formats.ISO8601_DATETIME_SCHEMA'. - - delegations: - The delegations made by the targets role to be generated. 'delegations' - must match 'tuf.formats.DELEGATIONS_SCHEMA'. - - write_consistent_targets: - Boolean that indicates whether file digests should be prepended to the - target files. 
- NOTE: it is an error for write_consistent_targets to be True when - use_existing_fileinfo is also True. We can not create consistent targets - for a target file where the fileinfo isn't generated by tuf. - - use_existing_fileinfo: - Boolean that indicates whether to use the complete fileinfo, including - hashes, as already exists in the roledb (True) or whether to generate - hashes (False). - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. - - repository_name: - The name of the repository. If not supplied, 'default' repository - is used. - - - securesystemslib.exceptions.FormatError, if an error occurred trying to - generate the targets metadata object. - - securesystemslib.exceptions.Error, if use_existing_fileinfo is False and - any of the target files cannot be read. - - securesystemslib.exceptions.Error, if use_existing_fileinfo is True and - some of the target files do not have corresponding hashes in the roledb. - - securesystemslib.exceptions.Error, if both of use_existing_fileinfo and - write_consistent_targets are True. - - - If use_existing_fileinfo is False, the target files are read from storage - and file information about them is generated. - If 'write_consistent_targets' is True, each target in 'target_files' will be - copied to a file with a digest prepended to its filename. For example, if - 'some_file.txt' is one of the targets of 'target_files', consistent targets - .some_file.txt, .some_file.txt, etc., are created - and the content of 'some_file.txt' will be copied into them. - - - A targets metadata object, conformant to - 'tuf.formats.TARGETS_SCHEMA'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATH_SCHEMA.check_match(targets_directory) - formats.PATH_FILEINFO_SCHEMA.check_match(target_files) - formats.METADATAVERSION_SCHEMA.check_match(version) - sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - sslib_formats.BOOLEAN_SCHEMA.check_match(write_consistent_targets) - sslib_formats.BOOLEAN_SCHEMA.check_match(use_existing_fileinfo) - - if write_consistent_targets and use_existing_fileinfo: - raise sslib_exceptions.Error('Cannot support writing consistent' - ' targets and using existing fileinfo.') - - if delegations is not None: - formats.DELEGATIONS_SCHEMA.check_match(delegations) - # If targets role has delegations, collect the up-to-date 'keyids' and - # 'threshold' for each role. Update the delegations keys dictionary. - delegations_keys = [] - # Update 'keyids' and 'threshold' for each delegated role - for role in delegations['roles']: - role['keyids'] = roledb.get_role_keyids(role['name'], - repository_name) - role['threshold'] = roledb.get_role_threshold(role['name'], - repository_name) - - # Collect all delegations keys for generating the delegations keydict - for keyid in role['keyids']: - key = keydb.get_key(keyid, repository_name=repository_name) - delegations_keys.append(key) - - _, delegations['keys'] = keys_to_keydict(delegations_keys) - - - # Store the file attributes of targets in 'target_files'. 'filedict', - # conformant to 'tuf.formats.FILEDICT_SCHEMA', is added to the - # targets metadata object returned. 
- filedict = {} - - if use_existing_fileinfo: - # Use the provided fileinfo dicts, conforming to FILEINFO_SCHEMA, rather than - # generating fileinfo - for target, fileinfo in target_files.items(): - - # Ensure all fileinfo entries in target_files have a non-empty hashes dict - if not fileinfo.get('hashes', None): - raise sslib_exceptions.Error('use_existing_fileinfo option' - ' set but no hashes exist in fileinfo for ' + repr(target)) - - # and a non-empty length - if fileinfo.get('length', -1) < 0: - raise sslib_exceptions.Error('use_existing_fileinfo option' - ' set but no length exists in fileinfo for ' + repr(target)) - - filedict[target] = fileinfo - - else: - # Generate the fileinfo dicts by accessing the target files on storage. - # Default to accessing files on local storage. - if storage_backend is None: - storage_backend = sslib_storage.FilesystemBackend() - - filedict = _generate_targets_fileinfo(target_files, targets_directory, - write_consistent_targets, storage_backend) - - # Generate the targets metadata object. - # Use generalized build_dict_conforming_to_schema func to produce a dict that - # contains all the appropriate information for targets metadata, - # checking that the result conforms to the appropriate schema. - # TODO: Later, probably after the rewrite for TUF Issue #660, generalize - # further, upward, by replacing generate_targets_metadata, - # generate_root_metadata, etc. with one function that generates - # metadata, possibly rolling that upwards into the calling function. - # There are very few things that really need to be done differently. - if delegations is not None: - return formats.build_dict_conforming_to_schema( - formats.TARGETS_SCHEMA, - version=version, - expires=expiration_date, - targets=filedict, - delegations=delegations) - else: - return formats.build_dict_conforming_to_schema( - formats.TARGETS_SCHEMA, - version=version, - expires=expiration_date, - targets=filedict) - # TODO: As an alternative to the odd if/else above where we decide whether or - # not to include the delegations argument based on whether or not it is - # None, consider instead adding a check in - # build_dict_conforming_to_schema that skips a keyword if that keyword - # is optional in the schema and the value passed in is set to None.... - - - - - -def _generate_targets_fileinfo(target_files, targets_directory, - write_consistent_targets, storage_backend): - """ - Iterate over target_files and: - * ensure they exist in the targets_directory - * generate a fileinfo dict for the target file, including hashes - * copy 'target_path' to 'digest_target' if write_consistent_targets - add all generated fileinfo dicts to a dictionary mapping - targetpath: fileinfo and return the dict. - """ - - filedict = {} - - # Generate the fileinfo of all the target files listed in 'target_files'. - for target, fileinfo in target_files.items(): - - # The root-most folder of the targets directory should not be included in - # target paths listed in targets metadata. - # (e.g., 'targets/more_targets/somefile.txt' -> 'more_targets/somefile.txt') - relative_targetpath = target - - # Note: join() discards 'targets_directory' if 'target' contains a leading - # path separator (i.e., is treated as an absolute path). - target_path = os.path.join(targets_directory, target.lstrip(os.sep)) - - # Add 'custom' if it has been provided. Custom data about the target is - # optional and will only be included in metadata (i.e., a 'custom' field in - # the target's fileinfo dictionary) if specified here. 
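Taken together, generate_targets_metadata and _generate_targets_fileinfo accept three shapes of target_files values, matching the LOOSE_FILEINFO_SCHEMA discussion above. An illustration of the three forms (the paths and custom fields here are made up):

target_files = {
    # 1) Empty dict: length and hashes are generated from the file on storage.
    "file1.txt": {},
    # 2) Custom data only: fileinfo is generated, 'custom' is attached to it.
    "file2.txt": {"custom": {"type": "firmware", "revision": 7}},
    # 3) Full fileinfo: used as-is, but only with use_existing_fileinfo=True.
    "file3.txt": {"length": 1024, "hashes": {"sha256": "00" * 32}},  # dummy digest
}

With shape 3, the non-empty 'hashes' and non-negative 'length' checks above are exactly what reject incomplete entries.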
- custom_data = fileinfo.get('custom', None) - - filedict[relative_targetpath] = \ - get_targets_metadata_fileinfo(target_path, storage_backend, custom_data) - - # Copy 'target_path' to 'digest_target' if consistent hashing is enabled. - if write_consistent_targets: - for target_digest in filedict[relative_targetpath]['hashes'].values(): - dirname, basename = os.path.split(target_path) - digest_filename = target_digest + '.' + basename - digest_target = os.path.join(dirname, digest_filename) - shutil.copyfile(target_path, digest_target) - - return filedict - - - -def _get_hashes_and_length_if_needed(use_length, use_hashes, full_file_path, - storage_backend): - """ - Calculate length and hashes only if they are required, - otherwise, for adopters of tuf with lots of delegations, - this will cause unnecessary overhead. - """ - - length = None - hashes = None - if use_length: - length = sslib_util.get_file_length(full_file_path, - storage_backend) - - if use_hashes: - hashes = sslib_util.get_file_hashes(full_file_path, - settings.FILE_HASH_ALGORITHMS, storage_backend) - - return length, hashes - - - -def generate_snapshot_metadata(metadata_directory, version, expiration_date, - storage_backend, consistent_snapshot=False, - repository_name='default', use_length=False, use_hashes=False): - """ - - Create the snapshot metadata. The minimum metadata must exist (i.e., - 'root.json' and 'targets.json'). This function searches - 'metadata_directory' and the resulting snapshot file will list all the - delegated roles found there. - - - metadata_directory: - The directory containing the 'root.json' and 'targets.json' metadata - files. - - version: - The metadata version number. Clients use the version number to - determine if the downloaded version is newer than the one currently - trusted. - - expiration_date: - The expiration date of the metadata file. - Conformant to 'securesystemslib.formats.ISO8601_DATETIME_SCHEMA'. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. - - consistent_snapshot: - Boolean. If True, a file digest is expected to be prepended to the - filename of any target file located in the targets directory. Each digest - is stripped from the target filename and listed in the snapshot metadata. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - use_length: - Whether to include the optional length attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - use_hashes: - Whether to include the optional hashes attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - securesystemslib.exceptions.Error, if an error occurred trying to generate - the snapshot metadata object. - - - The 'root.json' and 'targets.json' files are read. - - - The snapshot metadata object, conformant to 'tuf.formats.SNAPSHOT_SCHEMA'. - """ - - # Do the arguments have the correct format? 
- # This check ensures arguments have the appropriate number of objects and - # object types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - sslib_formats.PATH_SCHEMA.check_match(metadata_directory) - formats.METADATAVERSION_SCHEMA.check_match(version) - sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - sslib_formats.BOOLEAN_SCHEMA.check_match(use_length) - sslib_formats.BOOLEAN_SCHEMA.check_match(use_hashes) - - # Snapshot's 'fileinfodict' shall contain the version number of Root, - # Targets, and all delegated roles of the repository. - fileinfodict = {} - - length, hashes = _get_hashes_and_length_if_needed(use_length, use_hashes, - os.path.join(metadata_directory, TARGETS_FILENAME), storage_backend) - - targets_role = TARGETS_FILENAME[:-len(METADATA_EXTENSION)] - - targets_file_version = get_metadata_versioninfo(targets_role, - repository_name) - - # Make file info dictionary with make_metadata_fileinfo because - # in the tuf spec length and hashes are optional for all - # METAFILES in snapshot.json including the top-level targets file. - fileinfodict[TARGETS_FILENAME] = formats.make_metadata_fileinfo( - targets_file_version['version'], length, hashes) - - # Search the metadata directory and generate the versioninfo of all the role - # files found there. This information is stored in the 'meta' field of - # 'snapshot.json'. - - metadata_files = sorted(storage_backend.list_folder(metadata_directory), - reverse=True) - for metadata_filename in metadata_files: - # Strip the version number if 'consistent_snapshot' is True. - # Example: '10.django.json' --> 'django.json' - metadata_name, junk = _strip_version_number(metadata_filename, - consistent_snapshot) - - # All delegated roles are added to the snapshot file. - if metadata_filename.endswith(METADATA_EXTENSION): - rolename = metadata_filename[:-len(METADATA_EXTENSION)] - - # Obsolete role files may still be found. Ensure only roles loaded - # in the roledb are included in the Snapshot metadata. Since the - # snapshot and timestamp roles are not listed in snapshot.json, do not - # list these roles found in the metadata directory. - if roledb.role_exists(rolename, repository_name) and \ - rolename not in roledb.TOP_LEVEL_ROLES: - - length, hashes = _get_hashes_and_length_if_needed(use_length, use_hashes, - os.path.join(metadata_directory, metadata_filename), storage_backend) - - file_version = get_metadata_versioninfo(rolename, - repository_name) - - fileinfodict[metadata_name] = formats.make_metadata_fileinfo( - file_version['version'], length, hashes) - - else: - logger.debug('Metadata file has an unsupported file' - ' extension: ' + metadata_filename) - - # Generate the Snapshot metadata object. - # Use generalized build_dict_conforming_to_schema func to produce a dict that - # contains all the appropriate information for snapshot metadata, - # checking that the result conforms to the appropriate schema. - # TODO: Later, probably after the rewrite for TUF Issue #660, generalize - # further, upward, by replacing generate_targets_metadata, - # generate_root_metadata, etc. with one function that generates - # metadata, possibly rolling that upwards into the calling function. - # There are very few things that really need to be done differently. 
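Because use_length and use_hashes default to False here, a snapshot 'meta' entry usually carries only a version number. The minimal Mercury-style entry and a fully populated one can be sketched as below, assuming make_metadata_fileinfo simply omits the optional fields when they are not requested (the digest value is a dummy):

def make_meta_entry(version, length=None, hashes=None):
    # Optional fields are omitted entirely when they are not requested.
    entry = {"version": version}
    if length is not None:
        entry["length"] = length
    if hashes is not None:
        entry["hashes"] = hashes
    return entry

assert make_meta_entry(14) == {"version": 14}
assert make_meta_entry(14, 2048, {"sha256": "00" * 32}) == {
    "version": 14,
    "length": 2048,
    "hashes": {"sha256": "00" * 32},
}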
-  return formats.build_dict_conforming_to_schema(
-      formats.SNAPSHOT_SCHEMA,
-      version=version,
-      expires=expiration_date,
-      meta=fileinfodict)
-
-
-
-
-
-def generate_timestamp_metadata(snapshot_file_path, version, expiration_date,
-    storage_backend, repository_name, use_length=True, use_hashes=True):
-  """
-
-    Generate the timestamp metadata object.  The 'snapshot.json' file must
-    exist.
-
-
-    snapshot_file_path:
-      Path to the required snapshot metadata file.  The timestamp role
-      needs to calculate the file size and hash of this file.
-
-    version:
-      The timestamp's version number.  Clients use the version number to
-      determine if the downloaded version is newer than the one currently
-      trusted.
-
-    expiration_date:
-      The expiration date of the metadata file, conformant to
-      'securesystemslib.formats.ISO8601_DATETIME_SCHEMA'.
-
-    storage_backend:
-      An object which implements
-      securesystemslib.storage.StorageBackendInterface.
-
-    repository_name:
-      The name of the repository the metadata belongs to (e.g., 'default').
-
-    use_length:
-      Whether to include the optional length attribute of the snapshot
-      metadata file in the timestamp metadata.
-      Default is True.
-
-    use_hashes:
-      Whether to include the optional hashes attribute of the snapshot
-      metadata file in the timestamp metadata.
-      Default is True.
-
-
-    securesystemslib.exceptions.FormatError, if the generated timestamp metadata
-    object cannot be formatted correctly, or one of the arguments is improperly
-    formatted.
-
-
-    None.
-
-
-    A timestamp metadata object, conformant to 'tuf.formats.TIMESTAMP_SCHEMA'.
-  """
-
-  # Do the arguments have the correct format?
-  # This check ensures arguments have the appropriate number of objects and
-  # object types, and that all dict keys are properly named.
-  # Raise 'securesystemslib.exceptions.FormatError' if the check fails.
-  sslib_formats.PATH_SCHEMA.check_match(snapshot_file_path)
-  formats.METADATAVERSION_SCHEMA.check_match(version)
-  sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date)
-  sslib_formats.NAME_SCHEMA.check_match(repository_name)
-  sslib_formats.BOOLEAN_SCHEMA.check_match(use_length)
-  sslib_formats.BOOLEAN_SCHEMA.check_match(use_hashes)
-
-  snapshot_fileinfo = {}
-
-  length, hashes = _get_hashes_and_length_if_needed(use_length, use_hashes,
-      snapshot_file_path, storage_backend)
-
-  snapshot_filename = os.path.basename(snapshot_file_path)
-  # Retrieve the versioninfo of the Snapshot metadata file.
-  snapshot_version = get_metadata_versioninfo('snapshot', repository_name)
-  snapshot_fileinfo[snapshot_filename] = \
-      formats.make_metadata_fileinfo(snapshot_version['version'],
-          length, hashes)
-
-  # Generate the timestamp metadata object.
-  # Use generalized build_dict_conforming_to_schema func to produce a dict that
-  # contains all the appropriate information for timestamp metadata,
-  # checking that the result conforms to the appropriate schema.
-  # TODO: Later, probably after the rewrite for TUF Issue #660, generalize
-  # further, upward, by replacing generate_targets_metadata,
-  # generate_root_metadata, etc. with one function that generates
-  # metadata, possibly rolling that upwards into the calling function.
-  # There are very few things that really need to be done differently.
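Analogously, a sketch of calling generate_timestamp_metadata() (the snapshot path and dates are hypothetical; unlike snapshot metadata, length and hashes are included by default here):

    from securesystemslib import storage as sslib_storage

    timestamp = generate_timestamp_metadata(
        'repo/metadata.staged/snapshot.json', 2, '2030-01-01T00:00:00Z',
        sslib_storage.FilesystemBackend(), 'default')

    # The single 'meta' entry carries version, length and hashes for
    # snapshot.json, since use_length and use_hashes default to True.
    print(timestamp['meta']['snapshot.json'])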
- return formats.build_dict_conforming_to_schema( - formats.TIMESTAMP_SCHEMA, - version=version, - expires=expiration_date, - meta=snapshot_fileinfo) - - - - - -def sign_metadata(metadata_object, keyids, filename, repository_name): - """ - - Sign a metadata object. If any of the keyids have already signed the file, - the old signature is replaced. The keys in 'keyids' must already be - loaded in 'keydb'. - - - metadata_object: - The metadata object to sign. For example, 'metadata' might correspond to - 'tuf.formats.ROOT_SCHEMA' or - 'tuf.formats.TARGETS_SCHEMA'. - - keyids: - The keyids list of the signing keys. - - filename: - The intended filename of the signed metadata object. - For example, 'root.json' or 'targets.json'. This function - does NOT save the signed metadata to this filename. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if a valid 'signable' object could - not be generated or the arguments are improperly formatted. - - securesystemslib.exceptions.Error, if an invalid keytype was found in the - keystore. - - - None. - - - A signable object conformant to 'tuf.formats.SIGNABLE_SCHEMA'. - """ - - # Do the arguments have the correct format? - # This check ensures arguments have the appropriate number of objects and - # object types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - formats.ANYROLE_SCHEMA.check_match(metadata_object) - sslib_formats.KEYIDS_SCHEMA.check_match(keyids) - sslib_formats.PATH_SCHEMA.check_match(filename) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Make sure the metadata is in 'signable' format. That is, - # it contains a 'signatures' field containing the result - # of signing the 'signed' field of 'metadata' with each - # keyid of 'keyids'. - signable = formats.make_signable(metadata_object) - - # Sign the metadata with each keyid in 'keyids'. 'signable' should have - # zero signatures (metadata_object contained none). - for keyid in keyids: - - # Load the signing key. - key = keydb.get_key(keyid, repository_name=repository_name) - # Generate the signature using the appropriate signing method. - if key['keytype'] in SUPPORTED_KEY_TYPES: - if 'private' in key['keyval']: - signed = sslib_formats.encode_canonical(signable['signed']).encode('utf-8') - try: - signature = sslib_keys.create_signature(key, signed) - signable['signatures'].append(signature) - - except Exception: - logger.warning('Unable to create signature for keyid: ' + repr(keyid)) - - else: - logger.debug('Private key unset. Skipping: ' + repr(keyid)) - - else: - raise sslib_exceptions.Error('The keydb contains a key with' - ' an invalid key type.' + repr(key['keytype'])) - - # Raise 'securesystemslib.exceptions.FormatError' if the resulting 'signable' - # is not formatted correctly. - try: - formats.check_signable_object_format(signable) - except exceptions.UnsignedMetadataError: - # Downgrade the error to a warning because a use case exists where - # metadata may be generated unsigned on one machine and signed on another. - logger.warning('Unsigned metadata object: ' + repr(signable)) - - - return signable - - - - - -def write_metadata_file(metadata, filename, version_number, consistent_snapshot, - storage_backend): - """ - - If necessary, write the 'metadata' signable object to 'filename'. 
- - - metadata: - The object that will be saved to 'filename', conformant to - 'tuf.formats.SIGNABLE_SCHEMA'. - - filename: - The filename of the metadata to be written (e.g., 'root.json'). - - version_number: - The version number of the metadata file to be written. The version - number is needed for consistent snapshots, which prepend the version - number to 'filename'. - - consistent_snapshot: - Boolean that determines whether the metadata file's digest should be - prepended to the filename. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - securesystemslib.exceptions.Error, if the directory of 'filename' does not - exist. - - Any other runtime (e.g., IO) exception. - - - The 'filename' file is created, or overwritten if it exists. - - - The filename of the written file. - """ - - # Do the arguments have the correct format? - # This check ensures arguments have the appropriate number of objects and - # object types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - formats.SIGNABLE_SCHEMA.check_match(metadata) - sslib_formats.PATH_SCHEMA.check_match(filename) - formats.METADATAVERSION_SCHEMA.check_match(version_number) - sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - - if storage_backend is None: - storage_backend = sslib_storage.FilesystemBackend() - - # Generate the actual metadata file content of 'metadata'. Metadata is - # saved as JSON and includes formatting, such as indentation and sorted - # objects. The new digest of 'metadata' is also calculated to help determine - # if re-saving is required. - file_content = _get_written_metadata(metadata) - - # We previously verified whether new metadata needed to be written (i.e., has - # not been previously written or has changed). It is now assumed that the - # caller intends to write changes that have been marked as dirty. - - # The 'metadata' object is written to 'file_object'. To avoid partial - # metadata from being written, 'metadata' is first written to a temporary - # location (i.e., 'file_object') and then moved to 'filename'. - file_object = tempfile.TemporaryFile() - - # Serialize 'metadata' to the file-like object and then write 'file_object' - # to disk. The dictionary keys of 'metadata' are sorted and indentation is - # used. - file_object.write(file_content) - - if consistent_snapshot: - dirname, basename = os.path.split(filename) - basename = basename.split(METADATA_EXTENSION, 1)[0] - version_and_filename = str(version_number) + '.' + basename + METADATA_EXTENSION - written_consistent_filename = os.path.join(dirname, version_and_filename) - - # If we were to point consistent snapshots to 'written_filename', they - # would always point to the current version. Example: 1.root.json and - # 2.root.json -> root.json. If consistent snapshot is True, we should save - # the consistent snapshot and point 'written_filename' to it. 
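The version-prefixed naming used above can be shown with a small standalone sketch that mirrors the splitting logic (the paths are hypothetical):

    import os

    METADATA_EXTENSION = '.json'

    def consistent_filename(filename, version_number):
        # e.g., '/repo/metadata/root.json', 2 -> '/repo/metadata/2.root.json'
        dirname, basename = os.path.split(filename)
        basename = basename.split(METADATA_EXTENSION, 1)[0]
        return os.path.join(
            dirname, str(version_number) + '.' + basename + METADATA_EXTENSION)

    assert consistent_filename('/repo/metadata/root.json', 2) == \
        '/repo/metadata/2.root.json'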
- logger.debug('Creating a consistent file for ' + repr(filename)) - logger.debug('Saving ' + repr(written_consistent_filename)) - sslib_util.persist_temp_file(file_object, - written_consistent_filename, should_close=False) - - else: - logger.debug('Not creating a consistent snapshot for ' + repr(filename)) - - logger.debug('Saving ' + repr(filename)) - storage_backend.put(file_object, filename) - - file_object.close() - - return filename - - - - - -def _log_status_of_top_level_roles(targets_directory, metadata_directory, - repository_name, storage_backend): - """ - Non-public function that logs whether any of the top-level roles contain an - invalid number of public and private keys, or an insufficient threshold of - signatures. Considering that the top-level metadata have to be verified in - the expected root -> targets -> snapshot -> timestamp order, this function - logs the error message and returns as soon as a required metadata file is - found to be invalid. It is assumed here that the delegated roles have been - written and verified. Example output: - - 'root' role contains 1 / 1 signatures. - 'targets' role contains 1 / 1 signatures. - 'snapshot' role contains 1 / 1 signatures. - 'timestamp' role contains 1 / 1 signatures. - - Note: Temporary metadata is generated so that file hashes & sizes may be - computed and verified against the attached signatures. 'metadata_directory' - should be a directory in a temporary repository directory. - """ - - # The expected full filenames of the top-level roles needed to write them to - # disk. - filenames = get_top_level_metadata_filenames(metadata_directory) - root_filename = filenames[ROOT_FILENAME] - targets_filename = filenames[TARGETS_FILENAME] - snapshot_filename = filenames[SNAPSHOT_FILENAME] - timestamp_filename = filenames[TIMESTAMP_FILENAME] - - # Verify that the top-level roles contain a valid number of public keys and - # that their corresponding private keys have been loaded. - for rolename in ['root', 'targets', 'snapshot', 'timestamp']: - try: - _check_role_keys(rolename, repository_name) - - except exceptions.InsufficientKeysError as e: - logger.info(str(e)) - - # Do the top-level roles contain a valid threshold of signatures? Top-level - # metadata is verified in Root -> Targets -> Snapshot -> Timestamp order. - # Verify the metadata of the Root role. - dirty_rolenames = roledb.get_dirty_roles(repository_name) - - root_roleinfo = roledb.get_roleinfo('root', repository_name) - root_is_dirty = None - if 'root' in dirty_rolenames: - root_is_dirty = True - - else: - root_is_dirty = False - - try: - signable, root_filename = \ - _generate_and_write_metadata('root', root_filename, targets_directory, - metadata_directory, storage_backend, repository_name=repository_name) - _log_status('root', signable, repository_name) - - # 'tuf.exceptions.UnsignedMetadataError' raised if metadata contains an - # invalid threshold of signatures. log the valid/threshold message, where - # valid < threshold. - except exceptions.UnsignedMetadataError as e: - _log_status('root', e.signable, repository_name) - return - - finally: - roledb.unmark_dirty(['root'], repository_name) - roledb.update_roleinfo('root', root_roleinfo, - mark_role_as_dirty=root_is_dirty, repository_name=repository_name) - - # Verify the metadata of the Targets role. 
- targets_roleinfo = roledb.get_roleinfo('targets', repository_name) - targets_is_dirty = None - if 'targets' in dirty_rolenames: - targets_is_dirty = True - - else: - targets_is_dirty = False - - try: - signable, targets_filename = \ - _generate_and_write_metadata('targets', targets_filename, - targets_directory, metadata_directory, storage_backend, - repository_name=repository_name) - _log_status('targets', signable, repository_name) - - except exceptions.UnsignedMetadataError as e: - _log_status('targets', e.signable, repository_name) - return - - finally: - roledb.unmark_dirty(['targets'], repository_name) - roledb.update_roleinfo('targets', targets_roleinfo, - mark_role_as_dirty=targets_is_dirty, repository_name=repository_name) - - # Verify the metadata of the snapshot role. - snapshot_roleinfo = roledb.get_roleinfo('snapshot', repository_name) - snapshot_is_dirty = None - if 'snapshot' in dirty_rolenames: - snapshot_is_dirty = True - - else: - snapshot_is_dirty = False - - filenames = {'root': root_filename, 'targets': targets_filename} - try: - signable, snapshot_filename = \ - _generate_and_write_metadata('snapshot', snapshot_filename, - targets_directory, metadata_directory, storage_backend, False, - filenames, repository_name=repository_name) - _log_status('snapshot', signable, repository_name) - - except exceptions.UnsignedMetadataError as e: - _log_status('snapshot', e.signable, repository_name) - return - - finally: - roledb.unmark_dirty(['snapshot'], repository_name) - roledb.update_roleinfo('snapshot', snapshot_roleinfo, - mark_role_as_dirty=snapshot_is_dirty, repository_name=repository_name) - - # Verify the metadata of the Timestamp role. - timestamp_roleinfo = roledb.get_roleinfo('timestamp', repository_name) - timestamp_is_dirty = None - if 'timestamp' in dirty_rolenames: - timestamp_is_dirty = True - - else: - timestamp_is_dirty = False - - filenames = {'snapshot': snapshot_filename} - try: - signable, timestamp_filename = \ - _generate_and_write_metadata('timestamp', timestamp_filename, - targets_directory, metadata_directory, storage_backend, - False, filenames, repository_name=repository_name) - _log_status('timestamp', signable, repository_name) - - except exceptions.UnsignedMetadataError as e: - _log_status('timestamp', e.signable, repository_name) - return - - finally: - roledb.unmark_dirty(['timestamp'], repository_name) - roledb.update_roleinfo('timestamp', timestamp_roleinfo, - mark_role_as_dirty=timestamp_is_dirty, repository_name=repository_name) - - - -def _log_status(rolename, signable, repository_name): - """ - Non-public function logs the number of (good/threshold) signatures of - 'rolename'. - """ - - status = sig.get_signature_status(signable, rolename, repository_name) - - logger.info(repr(rolename) + ' role contains ' + \ - repr(len(status['good_sigs'])) + ' / ' + repr(status['threshold']) + \ - ' signatures.') - - - - - -def create_tuf_client_directory(repository_directory, client_directory): - """ - - Create client directory structure as 'tuf.client.updater' expects it. - Metadata files downloaded from a remote TUF repository are saved to - 'client_directory'. - The Root file must initially exist before an update request can be - satisfied. create_tuf_client_directory() ensures the minimum metadata - is copied and that required directories ('previous' and 'current') are - created in 'client_directory'. Software updaters integrating TUF may - use the client directory created as an initial copy of the repository's - metadata. 
-
-
-    repository_directory:
-      The path of the root repository directory.  The 'metadata' and 'targets'
-      sub-directories should be available in 'repository_directory'.  The
-      metadata files of 'repository_directory' are copied to 'client_directory'.
-
-    client_directory:
-      The path of the root client directory.  The 'current' and 'previous'
-      sub-directories are created and will store the metadata files copied
-      from 'repository_directory'.  'client_directory' will store metadata
-      and target files downloaded from a TUF repository.
-
-
-    securesystemslib.exceptions.FormatError, if the arguments are improperly
-    formatted.
-
-    tuf.exceptions.RepositoryError, if the metadata directory in
-    'client_directory' already exists.
-
-
-    Copies metadata files and directories from 'repository_directory' to
-    'client_directory'.  Parent directories are created if they do not exist.
-
-
-    None.
-  """
-
-  # Do the arguments have the correct format?
-  # This check ensures arguments have the appropriate number of objects and
-  # object types, and that all dict keys are properly named.
-  # Raise 'securesystemslib.exceptions.FormatError' if the check fails.
-  sslib_formats.PATH_SCHEMA.check_match(repository_directory)
-  sslib_formats.PATH_SCHEMA.check_match(client_directory)
-
-  # Set the absolute path of the Repository's metadata directory.  The metadata
-  # directory should be the one served by the live repository.  At a minimum,
-  # the repository's root file must be copied.
-  repository_directory = os.path.abspath(repository_directory)
-  metadata_directory = os.path.join(repository_directory,
-      METADATA_DIRECTORY_NAME)
-
-  # Set the client's metadata directory, which will store the metadata copied
-  # from the repository directory set above.
-  client_directory = os.path.abspath(client_directory)
-  client_metadata_directory = os.path.join(client_directory,
-      METADATA_DIRECTORY_NAME)
-
-  # If the client's metadata directory does not already exist, create it and
-  # any of its parent directories, otherwise raise an exception.  An exception
-  # is raised to avoid accidentally overwriting previous metadata.
-  try:
-    os.makedirs(client_metadata_directory)
-
-  except OSError as e:
-    if e.errno == errno.EEXIST:
-      message = 'Cannot create a fresh client metadata directory: ' +\
-          repr(client_metadata_directory) + '.  Already exists.'
-      raise exceptions.RepositoryError(message)
-
-    # Handling of non-errno.EEXIST exceptions has been verified on all
-    # supported OSs.  An unexpected exception (e.g., the '/' directory already
-    # exists, rather than a disallowed path) is possible on Travis, so the
-    # '#pragma: no branch' below is included to prevent coverage failure.
-    else: #pragma: no branch
-      raise
-
-  # Move all metadata to the client's 'current' and 'previous' directories.
-  # The root metadata file MUST exist in '{client_metadata_directory}/current'.
-  # 'tuf.client.updater' expects the 'current' and 'previous' directories to
-  # exist under 'metadata'.
-  client_current = os.path.join(client_metadata_directory, 'current')
-  client_previous = os.path.join(client_metadata_directory, 'previous')
-  shutil.copytree(metadata_directory, client_current)
-  shutil.copytree(metadata_directory, client_previous)
-
-
-
-def disable_console_log_messages():
-  """
-
-    Disable logger messages printed to the console.  For example, repository
-    maintainers may want to call this function if many roles will be sharing
-    keys, otherwise detected duplicate keys will continually log a warning
-    message.
-
-
-    None.
-
-
-    None.
- - - Removes the 'tuf.log' console handler, added by default when - 'tuf.repository_tool.py' is imported. - - - None. - """ - - log.remove_console_handler() - - - -def keys_to_keydict(keys): - """ - - Iterate over a list of keys and return a list of keyids and a dict mapping - keyid to key metadata - - - keys: - A list of key objects conforming to - securesystemslib.formats.ANYKEYLIST_SCHEMA. - - - keyids: - A list of keyids conforming to securesystemslib.formats.KEYID_SCHEMA - keydict: - A dictionary conforming to securesystemslib.formats.KEYDICT_SCHEMA - """ - keyids = [] - keydict = {} - - for key in keys: - keyid = key['keyid'] - key_metadata_format = sslib_keys.format_keyval_to_metadata( - key['keytype'], key['scheme'], key['keyval']) - - new_keydict = {keyid: key_metadata_format} - keydict.update(new_keydict) - keyids.append(keyid) - return keyids, keydict - - - - -if __name__ == '__main__': - # The interactive sessions of the documentation strings can - # be tested by running repository_lib.py as a standalone module: - # $ python repository_lib.py. - import doctest - doctest.testmod() diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py deleted file mode 100755 index af78b2ba32..0000000000 --- a/tuf/repository_tool.py +++ /dev/null @@ -1,3291 +0,0 @@ - -#!/usr/bin/env python - -# Copyright 2013 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - repository_tool.py - - - Vladimir Diaz - - - October 19, 2013 - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Provide a tool that can create a TUF repository. It can be used with the - Python interpreter in interactive mode, or imported directly into a Python - module. See 'tuf/README' for the complete guide to using - 'tuf.repository_tool.py'. 
-""" - -import os -import time -import datetime -import logging -import tempfile -import shutil -import json - -from collections import deque - -from securesystemslib import exceptions as sslib_exceptions -from securesystemslib import formats as sslib_formats -from securesystemslib import util as sslib_util -from securesystemslib import storage as sslib_storage - -from tuf import exceptions -from tuf import formats -from tuf import keydb -from tuf import log -from tuf import repository_lib as repo_lib -from tuf import roledb - - -# Copy API -# pylint: disable=unused-import - -# Copy generic repository API functions to be used via `repository_tool` -from tuf.repository_lib import ( - create_tuf_client_directory, - disable_console_log_messages) - - -# Copy key-related API functions to be used via `repository_tool` -from tuf.repository_lib import ( - import_rsa_privatekey_from_file, - import_ed25519_privatekey_from_file) - -from securesystemslib.interface import ( - generate_and_write_rsa_keypair, - generate_and_write_rsa_keypair_with_prompt, - generate_and_write_unencrypted_rsa_keypair, - generate_and_write_ecdsa_keypair, - generate_and_write_ecdsa_keypair_with_prompt, - generate_and_write_unencrypted_ecdsa_keypair, - generate_and_write_ed25519_keypair, - generate_and_write_ed25519_keypair_with_prompt, - generate_and_write_unencrypted_ed25519_keypair, - import_rsa_publickey_from_file, - import_ecdsa_publickey_from_file, - import_ed25519_publickey_from_file, - import_ecdsa_privatekey_from_file) - -from securesystemslib.keys import ( - format_metadata_to_key, - generate_rsa_key, - generate_ecdsa_key, - generate_ed25519_key, - import_rsakey_from_pem, - import_ecdsakey_from_pem) - - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - -# Add a console handler so that users are aware of potentially unintended -# states, such as multiple roles that share keys. -log.add_console_handler() -log.set_console_log_level(logging.INFO) - -# Recommended RSA key sizes: -# https://en.wikipedia.org/wiki/Key_size#Asymmetric_algorithm_key_lengths -# Based on the above, RSA keys of size 3072 are expected to provide security -# through 2031 and beyond. -DEFAULT_RSA_KEY_BITS=3072 - -# The default number of hashed bin delegations -DEFAULT_NUM_BINS=1024 - -# The targets and metadata directory names. Metadata files are written -# to the staged metadata directory instead of the "live" one. -METADATA_STAGED_DIRECTORY_NAME = 'metadata.staged' -METADATA_DIRECTORY_NAME = 'metadata' -TARGETS_DIRECTORY_NAME = 'targets' - -# The extension of TUF metadata. -METADATA_EXTENSION = '.json' - -# Expiration date delta, in seconds, of the top-level roles. A metadata -# expiration date is set by taking the current time and adding the expiration -# seconds listed below. - -# Initial 'root.json' expiration time of 1 year. -ROOT_EXPIRATION = 31556900 - -# Initial 'targets.json' expiration time of 3 months. -TARGETS_EXPIRATION = 7889230 - -# Initial 'snapshot.json' expiration time of 1 week. -SNAPSHOT_EXPIRATION = 604800 - -# Initial 'timestamp.json' expiration time of 1 day. -TIMESTAMP_EXPIRATION = 86400 - - -class Repository(object): - """ - - Represent a TUF repository that contains the metadata of the top-level - roles, including all those delegated from the 'targets.json' role. The - repository object returned provides access to the top-level roles, and any - delegated targets that are added as the repository is modified. 
For - example, a Repository object named 'repository' provides the following - access by default: - - repository.root.version = 2 - repository.timestamp.expiration = datetime.datetime(2015, 8, 8, 12, 0) - repository.snapshot.add_verification_key(...) - repository.targets.delegate('unclaimed', ...) - - Delegating a role from 'targets' updates the attributes of the parent - delegation, which then provides: - - repository.targets('unclaimed').add_verification_key(...) - - - - repository_directory: - The root folder of the repository that contains the metadata and targets - sub-directories. - - metadata_directory: - The metadata sub-directory contains the files of the top-level - roles, including all roles delegated from 'targets.json'. - - targets_directory: - The targets sub-directory contains all the target files that are - downloaded by clients and are referenced in TUF Metadata. The hashes and - file lengths are listed in Metadata files so that they are securely - downloaded. Metadata files are similarly referenced in the top-level - metadata. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - use_timestamp_length: - Whether to include the optional length attribute of the snapshot - metadata file in the timestamp metadata. - Default is True. - - use_timestamp_hashes: - Whether to include the optional hashes attribute of the snapshot - metadata file in the timestamp metadata. - Default is True. - - use_snapshot_length: - Whether to include the optional length attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - use_snapshot_hashes: - Whether to include the optional hashes attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - Creates top-level role objects and assigns them as attributes. - - - A Repository object that contains default Metadata objects for the top-level - roles. - """ - - def __init__(self, repository_directory, metadata_directory, - targets_directory, storage_backend, repository_name='default', - use_timestamp_length=True, use_timestamp_hashes=True, - use_snapshot_length=False, use_snapshot_hashes=False): - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
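For orientation, a sketch of constructing a Repository directly (the directory names are hypothetical and must already exist; a higher-level helper such as create_new_repository(), not shown in this excerpt, would normally set them up):

    from securesystemslib import storage as sslib_storage

    repository = Repository(
        'repo',                  # repository root
        'repo/metadata.staged',  # metadata directory
        'repo/targets',          # targets directory
        sslib_storage.FilesystemBackend())

    # The constructor registers role/key databases for 'default' and exposes
    # the four top-level roles as attributes, e.g.:
    repository.root.threshold = 1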
-    sslib_formats.PATH_SCHEMA.check_match(repository_directory)
-    sslib_formats.PATH_SCHEMA.check_match(metadata_directory)
-    sslib_formats.PATH_SCHEMA.check_match(targets_directory)
-    sslib_formats.NAME_SCHEMA.check_match(repository_name)
-    sslib_formats.BOOLEAN_SCHEMA.check_match(use_timestamp_length)
-    sslib_formats.BOOLEAN_SCHEMA.check_match(use_timestamp_hashes)
-    sslib_formats.BOOLEAN_SCHEMA.check_match(use_snapshot_length)
-    sslib_formats.BOOLEAN_SCHEMA.check_match(use_snapshot_hashes)
-
-    self._repository_directory = repository_directory
-    self._metadata_directory = metadata_directory
-    self._targets_directory = targets_directory
-    self._repository_name = repository_name
-    self._storage_backend = storage_backend
-    self._use_timestamp_length = use_timestamp_length
-    self._use_timestamp_hashes = use_timestamp_hashes
-    self._use_snapshot_length = use_snapshot_length
-    self._use_snapshot_hashes = use_snapshot_hashes
-
-    try:
-      roledb.create_roledb(repository_name)
-      keydb.create_keydb(repository_name)
-
-    except sslib_exceptions.InvalidNameError:
-      logger.debug(repr(repository_name) + ' already exists.  Overwriting'
-          ' its contents.')
-
-    # Set the top-level role objects.
-    self.root = Root(self._repository_name)
-    self.snapshot = Snapshot(self._repository_name)
-    self.timestamp = Timestamp(self._repository_name)
-    self.targets = Targets(self._targets_directory, 'targets',
-        repository_name=self._repository_name)
-
-
-
-  def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False):
-    """
-
-      Write all the JSON Metadata objects to their corresponding files for
-      roles which have changed.
-      writeall() raises an exception if any of the role metadata to be written
-      to disk is invalid, such as an insufficient threshold of signatures,
-      missing private keys, etc.
-
-
-      consistent_snapshot:
-        A boolean indicating whether role metadata files should have their
-        version numbers as filename prefix when written to disk, i.e.
-        'VERSION.ROLENAME.json', and target files should be copied to a
-        filename that has their hex digest as filename prefix, i.e.
-        'HASH.FILENAME'.  Note that:
-        - root metadata is always written with a version prefix, independently
-          of 'consistent_snapshot'
-        - the latest version of each metadata file is always also written
-          without version prefix
-        - target files are only copied to a hash-prefixed filename if
-          'consistent_snapshot' is True and 'use_existing_fileinfo' is False.
-          If both are True, hash-prefixed target file copies must be created
-          out-of-band.
-
-      use_existing_fileinfo:
-        Boolean indicating whether the fileinfo dicts in the roledb should be
-        written as-is (True) or whether hashes should be generated (False,
-        requires access to the targets files on-disk).
-
-
-      tuf.exceptions.UnsignedMetadataError, if any of the top-level
-      and delegated roles do not have the minimum threshold of signatures.
-
-
-      Creates metadata files in the repository's metadata directory.
-
-
-      None.
-    """
-
-    # Do the arguments have the correct format?
-    # Ensure the arguments have the appropriate number of objects and object
-    # types, and that all dict keys are properly named.  Raise
-    # 'securesystemslib.exceptions.FormatError' if any are improperly
-    # formatted.
-    sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot)
-
-    # At this point, keydb and roledb must be fully populated, otherwise
-    # writeall() throws a 'tuf.exceptions.UnsignedMetadataError' if any of
-    # the top-level roles are missing signatures, keys, etc.
- - # Write the metadata files of all the Targets roles that are dirty (i.e., - # have been modified via roledb.update_roleinfo()). - filenames = {'root': os.path.join(self._metadata_directory, - repo_lib.ROOT_FILENAME), 'targets': os.path.join(self._metadata_directory, - repo_lib.TARGETS_FILENAME), 'snapshot': os.path.join(self._metadata_directory, - repo_lib.SNAPSHOT_FILENAME), 'timestamp': os.path.join(self._metadata_directory, - repo_lib.TIMESTAMP_FILENAME)} - - snapshot_signable = None - dirty_rolenames = roledb.get_dirty_roles(self._repository_name) - - for dirty_rolename in dirty_rolenames: - - # Ignore top-level roles, they will be generated later in this method. - if dirty_rolename in roledb.TOP_LEVEL_ROLES: - continue - - dirty_filename = os.path.join(self._metadata_directory, - dirty_rolename + METADATA_EXTENSION) - repo_lib._generate_and_write_metadata(dirty_rolename, dirty_filename, - self._targets_directory, self._metadata_directory, - self._storage_backend, consistent_snapshot, filenames, - repository_name=self._repository_name, - use_existing_fileinfo=use_existing_fileinfo) - - # Metadata should be written in (delegated targets -> root -> targets -> - # snapshot -> timestamp) order. Begin by generating the 'root.json' - # metadata file. _generate_and_write_metadata() raises a - # 'securesystemslib.exceptions.Error' exception if the metadata cannot be - # written. - root_roleinfo = roledb.get_roleinfo('root', self._repository_name) - old_consistent_snapshot = root_roleinfo['consistent_snapshot'] - if 'root' in dirty_rolenames or consistent_snapshot != old_consistent_snapshot: - repo_lib._generate_and_write_metadata('root', filenames['root'], - self._targets_directory, self._metadata_directory, - self._storage_backend, consistent_snapshot, filenames, - repository_name=self._repository_name) - - # Generate the 'targets.json' metadata file. - if 'targets' in dirty_rolenames: - repo_lib._generate_and_write_metadata('targets', filenames['targets'], - self._targets_directory, self._metadata_directory, - self._storage_backend, consistent_snapshot, - repository_name=self._repository_name, - use_existing_fileinfo=use_existing_fileinfo) - - # Generate the 'snapshot.json' metadata file. - if 'snapshot' in dirty_rolenames: - snapshot_signable, junk = repo_lib._generate_and_write_metadata('snapshot', - filenames['snapshot'], self._targets_directory, - self._metadata_directory, self._storage_backend, - consistent_snapshot, filenames, - repository_name=self._repository_name, - use_snapshot_length=self._use_snapshot_length, - use_snapshot_hashes=self._use_snapshot_hashes) - - # Generate the 'timestamp.json' metadata file. - if 'timestamp' in dirty_rolenames: - repo_lib._generate_and_write_metadata('timestamp', filenames['timestamp'], - self._targets_directory, self._metadata_directory, - self._storage_backend, consistent_snapshot, - filenames, repository_name=self._repository_name, - use_timestamp_length=self._use_timestamp_length, - use_timestamp_hashes=self._use_timestamp_hashes) - - roledb.unmark_dirty(dirty_rolenames, self._repository_name) - - # Delete the metadata of roles no longer in 'roledb'. Obsolete roles - # may have been revoked and should no longer have their metadata files - # available on disk, otherwise loading a repository may unintentionally - # load them. 
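To put the dirty-role handling in context, a typical maintainer flow, as a sketch (it assumes signing keys for all four top-level roles were loaded beforehand with load_signing_key()):

    repository.timestamp.version = 2  # updating a role marks it dirty
    repository.writeall()             # regenerates and signs all dirty roles,
                                      # then prunes obsolete role metadata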
-    if snapshot_signable is not None:
-      repo_lib._delete_obsolete_metadata(self._metadata_directory,
-          snapshot_signable['signed'], consistent_snapshot, self._repository_name,
-          self._storage_backend)
-
-
-
-  def write(self, rolename, consistent_snapshot=False, increment_version_number=True,
-      use_existing_fileinfo=False):
-    """
-
-      Write the JSON metadata for 'rolename' to its corresponding file on disk.
-      Unlike writeall(), write() allows the metadata file to contain an invalid
-      threshold of signatures.
-
-
-      rolename:
-        The name of the role to be written to disk.
-
-      consistent_snapshot:
-        A boolean indicating whether the role metadata file should have its
-        version number as filename prefix when written to disk, i.e.
-        'VERSION.ROLENAME.json'.  Note that:
-        - root metadata is always written with a version prefix, independently
-          of 'consistent_snapshot'
-        - the latest version of the metadata file is always also written
-          without version prefix
-        - if the metadata is targets metadata and 'consistent_snapshot' is
-          True, the corresponding target files are copied to a filename with
-          their hex digest as filename prefix, i.e. 'HASH.FILENAME', unless
-          'use_existing_fileinfo' is also True.
-          If 'consistent_snapshot' and 'use_existing_fileinfo' both are True,
-          hash-prefixed target file copies must be created out-of-band.
-
-      increment_version_number:
-        Boolean indicating whether the version number of 'rolename' should be
-        automatically incremented.
-
-      use_existing_fileinfo:
-        Boolean indicating whether the fileinfo dicts in the roledb should be
-        written as-is (True) or whether hashes should be generated (False,
-        requires access to the targets files on-disk).
-
-
-      None.
-
-
-      Creates metadata files in the repository's metadata directory.
-
-
-      None.
-    """
-
-    rolename_filename = os.path.join(self._metadata_directory,
-        rolename + METADATA_EXTENSION)
-
-    filenames = {'root': os.path.join(self._metadata_directory, repo_lib.ROOT_FILENAME),
-        'targets': os.path.join(self._metadata_directory, repo_lib.TARGETS_FILENAME),
-        'snapshot': os.path.join(self._metadata_directory, repo_lib.SNAPSHOT_FILENAME),
-        'timestamp': os.path.join(self._metadata_directory, repo_lib.TIMESTAMP_FILENAME)}
-
-    repo_lib._generate_and_write_metadata(rolename, rolename_filename,
-        self._targets_directory, self._metadata_directory,
-        self._storage_backend, consistent_snapshot,
-        filenames=filenames, allow_partially_signed=True,
-        increment_version_number=increment_version_number,
-        repository_name=self._repository_name,
-        use_existing_fileinfo=use_existing_fileinfo)
-
-    # Ensure 'rolename' is no longer marked as dirty after the successful write().
-    roledb.unmark_dirty([rolename], self._repository_name)
-
-
-
-
-
-  def status(self):
-    """
-
-      Determine the status of the top-level roles.  status() checks if each
-      role provides sufficient public and private keys, signatures, and that a
-      valid metadata file is generated if writeall() or write() were to be
-      called.  Metadata files are temporarily written so that file hashes and
-      lengths may be verified, delegated role trust may be checked, and target
-      paths validated according to parent roles.  status() does not do a simple
-      check of the number of threshold keys and signatures.
-
-
-      None.
-
-
-      None.
-
-
-      Generates and writes temporary metadata files.
-
-
-      None.
-    """
-
-    temp_repository_directory = None
-
-    # Generate and write temporary metadata so that full verification of
-    # metadata is possible, such as verifying signatures, digests, and file
-    # content.  Ensure temporary files are removed after verification results
-    # are completed.
-    try:
-      temp_repository_directory = tempfile.mkdtemp()
-      targets_directory = self._targets_directory
-      metadata_directory = os.path.join(temp_repository_directory,
-          METADATA_STAGED_DIRECTORY_NAME)
-      os.mkdir(metadata_directory)
-
-      # Verify the top-level roles and log the results.
-      repo_lib._log_status_of_top_level_roles(targets_directory,
-          metadata_directory, self._repository_name, self._storage_backend)
-
-    finally:
-      shutil.rmtree(temp_repository_directory, ignore_errors=True)
-
-
-
-  def dirty_roles(self):
-    """
-
-      Print/log the roles that have been modified.  For example, if some role's
-      version number is changed (repository.timestamp.version = 2), it is
-      considered dirty and will be included in the list of dirty roles
-      printed/logged here.  Unlike status(), signatures, public keys, targets,
-      etc. are not verified.  status() should be called instead if the caller
-      would like to verify whether a valid role file would be generated if
-      writeall() were called.
-
-
-      None.
-
-
-      None.
-
-
-      None.
-
-
-      None.
-    """
-
-    logger.info('Dirty roles: ' + str(roledb.get_dirty_roles(self._repository_name)))
-
-
-
-  def mark_dirty(self, roles):
-    """
-
-      Mark the list of 'roles' as dirty.
-
-
-      roles:
-        A list of roles to mark as dirty.  On the next write, these roles
-        will be written to disk.
-
-
-      None.
-
-
-      None.
-
-
-      None.
-    """
-
-    roledb.mark_dirty(roles, self._repository_name)
-
-
-
-  def unmark_dirty(self, roles):
-    """
-
-      No longer mark the list of 'roles' as dirty.
-
-
-      roles:
-        A list of roles to unmark as dirty, so that they are not written to
-        disk on the next write unless they are modified again.
-
-
-      None.
-
-
-      None.
-
-
-      None.
-    """
-
-    roledb.unmark_dirty(roles, self._repository_name)
-
-
-
-  @staticmethod
-  def get_filepaths_in_directory(files_directory, recursive_walk=False,
-      followlinks=True):
-    """
-
-      Walk the given 'files_directory' and build a list of target files found.
-
-
-      files_directory:
-        The path to a directory of target files.
-
-      recursive_walk:
-        To recursively walk the directory, set recursive_walk=True.
-
-      followlinks:
-        To follow symbolic links, set followlinks=True.
-
-
-      securesystemslib.exceptions.FormatError, if the arguments are improperly
-      formatted.
-
-      securesystemslib.exceptions.Error, if 'files_directory' is not a valid
-      directory.
-
-      Python IO exceptions.
-
-
-      None.
-
-
-      A list of absolute paths to target files in the given 'files_directory'.
-    """
-
-    # Do the arguments have the correct format?
-    # Ensure the arguments have the appropriate number of objects and object
-    # types, and that all dict keys are properly named.  Raise
-    # 'securesystemslib.exceptions.FormatError' if any are improperly formatted.
-    sslib_formats.PATH_SCHEMA.check_match(files_directory)
-    sslib_formats.BOOLEAN_SCHEMA.check_match(recursive_walk)
-    sslib_formats.BOOLEAN_SCHEMA.check_match(followlinks)
-
-    # Ensure a valid directory is given.
-    if not os.path.isdir(files_directory):
-      raise sslib_exceptions.Error(repr(files_directory) + ' is not'
-          ' a directory.')
-
-    # A list of the target filepaths found in 'files_directory'.
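As a usage sketch (with a hypothetical directory), the method can be called without an instance, since it is a staticmethod:

    target_files = Repository.get_filepaths_in_directory(
        'repo/targets', recursive_walk=True, followlinks=True)

    for target_path in target_files:
        print(target_path)  # absolute paths to the files found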
- targets = [] - - # FIXME: We need a way to tell Python 2, but not Python 3, to return - # filenames in Unicode; see #61 and: - # http://docs.python.org/howto/unicode.html#unicode-filenames - for dirpath, dirnames, filenames in os.walk(files_directory, - followlinks=followlinks): - for filename in filenames: - full_target_path = os.path.join(os.path.abspath(dirpath), filename) - targets.append(full_target_path) - - # Prune the subdirectories to walk right now if we do not wish to - # recursively walk 'files_directory'. - if recursive_walk is False: - del dirnames[:] - - else: - logger.debug('Not pruning subdirectories ' + repr(dirnames)) - - return targets - - - - - -class Metadata(object): - """ - - Provide a base class to represent a TUF Metadata role. There are four - top-level roles: Root, Targets, Snapshot, and Timestamp. The Metadata - class provides methods that are needed by all top-level roles, such as - adding and removing public keys, private keys, and signatures. Metadata - attributes, such as rolename, version, threshold, expiration, and key list - are also provided by the Metadata base class. - - - None. - - - None. - - - None. - - - None. - """ - - def __init__(self): - self._rolename = None - self._repository_name = None - - - def add_verification_key(self, key, expires=None): - """ - - Add 'key' to the role. Adding a key, which should contain only the - public portion, signifies the corresponding private key and signatures - the role is expected to provide. A threshold of signatures is required - for a role to be considered properly signed. If a metadata file contains - an insufficient threshold of signatures, it must not be accepted. - - >>> - >>> - >>> - - - key: - The role key to be added, conformant to - 'securesystemslib.formats.ANYKEY_SCHEMA'. Adding a public key to a role - means that its corresponding private key must generate and add its - signature to the role. A threshold number of signatures is required - for a role to be fully signed. - - expires: - The date in which 'key' expires. 'expires' is a datetime.datetime() - object. - - - securesystemslib.exceptions.FormatError, if any of the arguments are - improperly formatted. - - securesystemslib.exceptions.Error, if the 'expires' datetime has already - expired. - - - The role's entries in 'keydb' and 'roledb' are updated. - - - None. - """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - sslib_formats.ANYKEY_SCHEMA.check_match(key) - - # If 'expires' is unset, choose a default expiration for 'key'. By - # default, Root, Targets, Snapshot, and Timestamp keys are set to expire - # 1 year, 3 months, 1 week, and 1 day from the current time, respectively. 
-    if expires is None:
-      if self.rolename == 'root':
-        expires = \
-          formats.unix_timestamp_to_datetime(int(time.time() + ROOT_EXPIRATION))
-
-      elif self.rolename == 'targets':
-        expires = \
-          formats.unix_timestamp_to_datetime(int(time.time() + TARGETS_EXPIRATION))
-
-      elif self.rolename == 'snapshot':
-        expires = \
-          formats.unix_timestamp_to_datetime(int(time.time() + SNAPSHOT_EXPIRATION))
-
-      elif self.rolename == 'timestamp':
-        expires = \
-          formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION))
-
-      else:
-        expires = \
-          formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION))
-
-    # Is 'expires' a datetime.datetime() object?
-    # Raise 'securesystemslib.exceptions.FormatError' if not.
-    if not isinstance(expires, datetime.datetime):
-      raise sslib_exceptions.FormatError(repr(expires) + ' is not a'
-          ' datetime.datetime() object.')
-
-    # Truncate the microseconds value to produce a correct schema string
-    # of the form 'yyyy-mm-ddThh:mm:ssZ'.
-    expires = expires.replace(microsecond = 0)
-
-    # Ensure the expiration has not already passed.
-    current_datetime = \
-      formats.unix_timestamp_to_datetime(int(time.time()))
-
-    if expires < current_datetime:
-      raise sslib_exceptions.Error(repr(key) + ' has already'
-          ' expired.')
-
-    # Update the key's 'expires' entry.
-    expires = expires.isoformat() + 'Z'
-    key['expires'] = expires
-
-    # Ensure 'key', which should contain the public portion, is added to
-    # 'keydb'.  Add 'key' to the list of recognized keys.
-    # Keys may be shared, so do not raise an exception if 'key' has already
-    # been loaded.
-    try:
-      keydb.add_key(key, repository_name=self._repository_name)
-
-    except exceptions.KeyAlreadyExistsError:
-      logger.warning('Adding a verification key that has already been used.')
-
-    keyid = key['keyid']
-    roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name)
-
-    # Save the keyids that are being replaced since certain roles will need to
-    # re-sign metadata with these keys (e.g., root).  Use list() to make a copy
-    # of roleinfo['keyids'] to ensure we're modifying distinct lists.
-    previous_keyids = list(roleinfo['keyids'])
-
-    # Add 'key' to the role's entry in 'roledb', and avoid duplicates.
-    if keyid not in roleinfo['keyids']:
-      roleinfo['keyids'].append(keyid)
-      roleinfo['previous_keyids'] = previous_keyids
-
-      roledb.update_roleinfo(self._rolename, roleinfo,
-          repository_name=self._repository_name)
-
-
-
-  def remove_verification_key(self, key):
-    """
-
-      Remove 'key' from the role's currently recognized list of role keys.
-      The role expects a threshold number of signatures.
-
-      >>>
-      >>>
-      >>>
-
-
-      key:
-        The role's key, conformant to 'securesystemslib.formats.ANYKEY_SCHEMA'.
-        'key' should contain only the public portion, as only the public key is
-        needed.  The 'add_verification_key()' method should have previously
-        added 'key'.
-
-
-      securesystemslib.exceptions.FormatError, if the 'key' argument is
-      improperly formatted.
-
-      securesystemslib.exceptions.Error, if the 'key' argument has not been
-      previously added.
-
-
-      Updates the role's 'roledb' entry.
-
-
-      None.
-    """
-
-    # Does 'key' have the correct format?
-    # Ensure the arguments have the appropriate number of objects and object
-    # types, and that all dict keys are properly named.  Raise
-    # 'securesystemslib.exceptions.FormatError' if any are improperly formatted.
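To make the add/remove workflow concrete, a sketch of rotating a root verification key (file names and the password are hypothetical; the generate/import helpers are re-exported by this module from securesystemslib.interface):

    generate_and_write_ed25519_keypair(
        password='correct horse', filepath='keystore/new_root_key')
    new_public = import_ed25519_publickey_from_file('keystore/new_root_key.pub')

    repository.root.add_verification_key(new_public)
    # 'old_public' stands for the previously added key object being rotated out.
    repository.root.remove_verification_key(old_public)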
- sslib_formats.ANYKEY_SCHEMA.check_match(key) - - keyid = key['keyid'] - roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - - if keyid in roleinfo['keyids']: - roleinfo['keyids'].remove(keyid) - - roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - else: - raise sslib_exceptions.Error('Verification key not found.') - - - - def load_signing_key(self, key): - """ - - Load the role key, which must contain the private portion, so that role - signatures may be generated when the role's metadata file is eventually - written to disk. - - >>> - >>> - >>> - - - key: - The role's key, conformant to 'securesystemslib.formats.ANYKEY_SCHEMA'. - It must contain the private key, so that role signatures may be - generated when writeall() or write() is eventually called to generate - valid metadata files. - - - securesystemslib.exceptions.FormatError, if 'key' is improperly formatted. - - securesystemslib.exceptions.Error, if the private key is not found in 'key'. - - - Updates the role's 'keydb' and 'roledb' entries. - - - None. - """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - sslib_formats.ANYKEY_SCHEMA.check_match(key) - - # Ensure the private portion of the key is available, otherwise signatures - # cannot be generated when the metadata file is written to disk. - if 'private' not in key['keyval'] or not len(key['keyval']['private']): - raise sslib_exceptions.Error('This is not a private key.') - - # Has the key, with the private portion included, been added to the keydb? - # The public version of the key may have been previously added. - try: - keydb.add_key(key, repository_name=self._repository_name) - - except exceptions.KeyAlreadyExistsError: - keydb.remove_key(key['keyid'], self._repository_name) - keydb.add_key(key, repository_name=self._repository_name) - - # Update the role's 'signing_keys' field in 'roledb'. - roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - if key['keyid'] not in roleinfo['signing_keyids']: - roleinfo['signing_keyids'].append(key['keyid']) - - roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - - - def unload_signing_key(self, key): - """ - - Remove a previously loaded role private key (i.e., load_signing_key()). - The keyid of the 'key' is removed from the list of recognized signing - keys. - - >>> - >>> - >>> - - - key: - The role key to be unloaded, conformant to - 'securesystemslib.formats.ANYKEY_SCHEMA'. - - - securesystemslib.exceptions.FormatError, if the 'key' argument is - improperly formatted. - - securesystemslib.exceptions.Error, if the 'key' argument has not been - previously loaded. - - - Updates the signing keys of the role in 'roledb'. - - - None. - """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - sslib_formats.ANYKEY_SCHEMA.check_match(key) - - # Update the role's 'signing_keys' field in 'roledb'. - roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - - # TODO: Should we consider removing keys from keydb that are no longer - # associated with any roles? 
There could be many no-longer-used keys - # stored in the keydb if not. For now, just unload the key. - if key['keyid'] in roleinfo['signing_keyids']: - roleinfo['signing_keyids'].remove(key['keyid']) - - roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - else: - raise sslib_exceptions.Error('Signing key not found.') - - - - def add_signature(self, signature, mark_role_as_dirty=True): - """ - - Add a signature to the role. A role is considered fully signed if it - contains a threshold of signatures. The 'signature' should have been - generated by the private key corresponding to one of the role's expected - keys. - - >>> - >>> - >>> - - - signature: - The signature to be added to the role, conformant to - 'securesystemslib.formats.SIGNATURE_SCHEMA'. - - mark_role_as_dirty: - A boolean indicating whether the updated 'roleinfo' for 'rolename' - should be marked as dirty. The caller might not want to mark - 'rolename' as dirty if it is loading metadata from disk and only wants - to populate roledb.py. Likewise, add_role() would support a similar - boolean to allow the repository tools to successfully load roles via - load_repository() without needing to mark these roles as dirty (default - behavior). - - - securesystemslib.exceptions.FormatError, if the 'signature' argument is - improperly formatted. - - - Adds 'signature', if not already added, to the role's 'signatures' field - in 'roledb'. - - - None. - """ - - # Does 'signature' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - sslib_formats.SIGNATURE_SCHEMA.check_match(signature) - sslib_formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) - - roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - - # Ensure the roleinfo contains a 'signatures' field. - if 'signatures' not in roleinfo: - roleinfo['signatures'] = [] - - # Update the role's roleinfo by adding 'signature', if it has not been - # added. - if signature not in roleinfo['signatures']: - roleinfo['signatures'].append(signature) - roledb.update_roleinfo(self.rolename, roleinfo, mark_role_as_dirty, - repository_name=self._repository_name) - - else: - logger.debug('Signature already exists for role: ' + repr(self.rolename)) - - - - def remove_signature(self, signature): - """ - - Remove a previously loaded, or added, role 'signature'. A role must - contain a threshold number of signatures to be considered fully signed. - - >>> - >>> - >>> - - - signature: - The role signature to remove, conformant to - 'securesystemslib.formats.SIGNATURE_SCHEMA'. - - - securesystemslib.exceptions.FormatError, if the 'signature' argument is - improperly formatted. - - securesystemslib.exceptions.Error, if 'signature' has not been previously - added to this role. - - - Updates the 'signatures' field of the role in 'roledb'. - - - None. - """ - - # Does 'signature' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
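For illustration, a sketch of attaching an externally created signature with add_signature() (the key and payload are illustrative only; in practice the bytes signed are the canonical form of the role's 'signed' metadata):

    from securesystemslib import keys as sslib_keys

    key = sslib_keys.generate_ed25519_key()
    signature = sslib_keys.create_signature(key, b'canonical signed bytes')

    repository.root.add_signature(signature)     # marks 'root' as dirty
    repository.root.remove_signature(signature)  # and the inverse operation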
-    sslib_formats.SIGNATURE_SCHEMA.check_match(signature)
-
-    roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name)
-
-    if signature in roleinfo['signatures']:
-      roleinfo['signatures'].remove(signature)
-
-      roledb.update_roleinfo(self.rolename, roleinfo,
-          repository_name=self._repository_name)
-
-    else:
-      raise sslib_exceptions.Error('Signature not found.')
-
-
-
-  @property
-  def signatures(self):
-    """
-
-      A getter method that returns the role's signatures.  A role is considered
-      fully signed if it contains a threshold number of signatures, where each
-      signature must be generated by the private key corresponding to one of
-      the role's verification keys.  Keys are added to a role with the
-      add_verification_key() method.
-
-
-      None.
-
-
-      None.
-
-
-      None.
-
-
-      A list of signatures, conformant to
-      'securesystemslib.formats.SIGNATURES_SCHEMA'.
-    """
-
-    roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name)
-    signatures = roleinfo['signatures']
-
-    return signatures
-
-
-
-  @property
-  def keys(self):
-    """
-
-      A getter method that returns the keyids of the role's keys.  The role
-      is expected to eventually contain a threshold of signatures generated
-      by the private keys of each of the role's keys (returned here as keyids).
-
-
-      None.
-
-
-      None.
-
-
-      None.
-
-
-      A list of the role's keyids (i.e., keyids of the keys).
-    """
-
-    roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name)
-    keyids = roleinfo['keyids']
-
-    return keyids
-
-
-
-  @property
-  def rolename(self):
-    """
-
-      Return the role's name.
-      Examples: 'root', 'timestamp', 'targets/unclaimed/django'.
-
-
-      None.
-
-
-      None.
-
-
-      None.
-
-
-      The role's name, conformant to 'tuf.formats.ROLENAME_SCHEMA'.
-      Examples: 'root', 'timestamp', 'targets/unclaimed/django'.
-    """
-
-    return self._rolename
-
-
-
-  @property
-  def version(self):
-    """
-
-      A getter method that returns the role's version number, conformant to
-      'tuf.formats.VERSION_SCHEMA'.
-
-
-      None.
-
-
-      None.
-
-
-      None.
-
-
-      The role's version number, conformant to
-      'tuf.formats.VERSION_SCHEMA'.
-    """
-
-    roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name)
-    version = roleinfo['version']
-
-    return version
-
-
-
-  @version.setter
-  def version(self, version):
-    """
-
-      A setter method that updates the role's version number.  TUF clients
-      download new metadata with a version number greater than the version
-      currently trusted.  New metadata starts at version 1 when either write()
-      or write_partial() is called.  Version numbers are automatically
-      incremented when the write methods are called, as follows:
-
-      1.  write_partial==True and the metadata is the first to be written.
-
-      2.  write_partial=False (i.e., write()), the metadata was not loaded as
-          partially written, and a write_partial is not needed.
-
-      >>>
-      >>>
-      >>>
-
-
-      version:
-        The role's version number, conformant to
-        'tuf.formats.VERSION_SCHEMA'.
-
-
-      securesystemslib.exceptions.FormatError, if the 'version' argument is
-      improperly formatted.
-
-
-      Modifies the 'version' attribute of the Repository object and updates the
-      role's version in 'roledb'.
-
-
-      None.
-    """
-
-    # Does 'version' have the correct format?
-    # Ensure the arguments have the appropriate number of objects and object
-    # types, and that all dict keys are properly named.  Raise
-    # 'securesystemslib.exceptions.FormatError' if any are improperly formatted.
- formats.METADATAVERSION_SCHEMA.check_match(version) - - roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - roleinfo['version'] = version - - roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - - - @property - def threshold(self): - """ - - Return the role's threshold value. A role is considered fully signed if - a threshold number of signatures is available. - - - None. - - - None. - - - None. - - - The role's threshold value, conformant to - 'tuf.formats.THRESHOLD_SCHEMA'. - """ - - roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) - threshold = roleinfo['threshold'] - - return threshold - - - - @threshold.setter - def threshold(self, threshold): - """ - - A setter method that modifies the threshold value of the role. Metadata - is considered fully signed if a 'threshold' number of signatures is - available. - - >>> - >>> - >>> - - - threshold: - An integer value that sets the role's threshold value, or the minimum - number of signatures needed for metadata to be considered fully - signed. Conformant to 'tuf.formats.THRESHOLD_SCHEMA'. - - - securesystemslib.exceptions.FormatError, if the 'threshold' argument is - improperly formatted. - - - Modifies the threshold attribute of the Repository object and updates - the role's threshold in 'roledb'. - - - None. - """ - - # Does 'threshold' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - formats.THRESHOLD_SCHEMA.check_match(threshold) - - roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) - roleinfo['previous_threshold'] = roleinfo['threshold'] - roleinfo['threshold'] = threshold - - roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - - @property - def expiration(self): - """ - - A getter method that returns the role's expiration datetime. - - - None. - - - securesystemslib.exceptions.FormatError, if the expiration cannot be - parsed correctly. - - - None. - - - The role's expiration datetime, a datetime.datetime() object. - """ - - roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - expires = roleinfo['expires'] - - return formats.expiry_string_to_datetime(expires) - - - - @expiration.setter - def expiration(self, datetime_object): - """ - - A setter method for the role's expiration datetime. The top-level - roles have a default expiration (e.g., ROOT_EXPIRATION), but may later - be modified by this setter method. - - >>> - >>> - >>> - - - datetime_object: - The datetime expiration of the role, a datetime.datetime() object. - - - securesystemslib.exceptions.FormatError, if 'datetime_object' is not a - datetime.datetime() object. - - securesystemslib.exceptions.Error, if 'datetime_object' has already - expired. - - - Modifies the expiration attribute of the Repository object. - The datetime given will be truncated to microseconds = 0. - - - None. - """ - - # Is 'datetime_object' a datetime.datetime() object? - # Raise 'securesystemslib.exceptions.FormatError' if not.
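As a usage illustration of the setters documented above, the following minimal sketch bumps a role's version, threshold, and expiration. The repository path is hypothetical, and the date must lie in the future:

import datetime
from tuf import repository_tool as repo_tool

repository = repo_tool.load_repository('repository')  # hypothetical path

# Each assignment updates the role's entry in roledb; metadata files on
# disk only change once write()/writeall() is called.
repository.root.version = 2        # VERSION_SCHEMA-conformant integer
repository.root.threshold = 2      # the previous threshold is recorded too
repository.root.expiration = datetime.datetime(2030, 1, 1)  # microseconds truncated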
- if not isinstance(datetime_object, datetime.datetime): - raise sslib_exceptions.FormatError( - repr(datetime_object) + ' is not a datetime.datetime() object.') - - # truncate the microseconds value to produce a correct schema string - # of the form yyyy-mm-ddThh:mm:ssZ - datetime_object = datetime_object.replace(microsecond = 0) - - # Ensure the expiration has not already passed. - current_datetime_object = \ - formats.unix_timestamp_to_datetime(int(time.time())) - - if datetime_object < current_datetime_object: - raise sslib_exceptions.Error(repr(self.rolename) + ' has' - ' already expired.') - - # Update the role's 'expires' entry in 'roledb'. - roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - expires = datetime_object.isoformat() + 'Z' - roleinfo['expires'] = expires - - roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - - - @property - def signing_keys(self): - """ - - A getter method that returns a list of the role's signing keys. - - >>> - >>> - >>> - - - None. - - - None. - - - None. - - - A list of keyids of the role's signing keys, conformant to - 'securesystemslib.formats.KEYIDS_SCHEMA'. - """ - - roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - signing_keyids = roleinfo['signing_keyids'] - - return signing_keyids - - - - - -class Root(Metadata): - """ - - Represent a Root role object. The root role is responsible for - listing the public keys and threshold of all the top-level roles, including - itself. Top-level metadata is rejected if it does not comply with what is - specified by the Root role. - - This Root object sub-classes Metadata, so the expected Metadata - operations like adding/removing public keys, signatures, private keys, and - updating metadata attributes (e.g., version and expiration) are supported. - Since Root is a top-level role and must exist, a default Root object - is instantiated when a new Repository object is created. - - >>> - >>> - >>> - - - repository_name: - The name of the repository. If not supplied, the role is added to the - 'default' repository. - - - tuf.exceptions.FormatError, if the argument is improperly formatted. - - - A 'root' role is added to 'roledb'. - - - None. - """ - - def __init__(self, repository_name): - - super(Root, self).__init__() - - self._rolename = 'root' - self._repository_name = repository_name - - # Is 'repository_name' properly formatted? Otherwise, raise a - # tuf.exceptions.FormatError exception. - formats.ROLENAME_SCHEMA.check_match(repository_name) - - # By default, 'root' metadata is set to expire 1 year from the current - # time. The expiration may be modified. - expiration = formats.unix_timestamp_to_datetime( - int(time.time() + ROOT_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'consistent_snapshot': False, - 'expires': expiration, 'partial_loaded': False} - try: - roledb.add_role(self._rolename, roleinfo, self._repository_name) - - except exceptions.RoleAlreadyExistsError: - pass - - - - - -class Timestamp(Metadata): - """ - - Represent a Timestamp role object. The timestamp role is responsible for - referencing the latest version of the Snapshot role. Under normal - conditions, it is the only role to be downloaded from a remote repository - without a known file length and hash. An upper length limit is set, though. - Its signatures are also verified to be valid according to the Root - role.
If only invalid metadata can be downloaded by the client, Root - is the only other role that is downloaded without a known length and hash. - This case may occur if a role's signing keys have been revoked and a newer - Root file is needed to list the updated keys. - - This Timestamp object sub-classes Metadata, so the expected Metadata - operations like adding/removing public keys, signatures, private keys, and - updating metadata attributes (e.g., version and expiration) are supported. - Since Timestamp is a top-level role and must exist, a default Timestamp - object is instantiated when a new Repository object is created. - - >>> - >>> - >>> - - - repository_name: - The name of the repository. If not supplied, the role is added to the - 'default' repository. - - - tuf.exceptions.FormatError, if the argument is improperly formatted. - - - A 'timestamp' role is added to 'roledb'. - - - None. - """ - - def __init__(self, repository_name): - - super(Timestamp, self).__init__() - - self._rolename = 'timestamp' - self._repository_name = repository_name - - # Is 'repository_name' properly formatted? Otherwise, raise a - # tuf.exceptions.FormatError exception. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # By default, 'timestamp' metadata is set to expire TIMESTAMP_EXPIRATION - # seconds from the current time. The expiration may be modified. - expiration = formats.unix_timestamp_to_datetime( - int(time.time() + TIMESTAMP_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'expires': expiration, - 'partial_loaded': False} - - try: - roledb.add_role(self.rolename, roleinfo, self._repository_name) - - except exceptions.RoleAlreadyExistsError: - pass - - - - - -class Snapshot(Metadata): - """ - - Represent a Snapshot role object. The snapshot role is responsible for - referencing the other top-level roles (excluding Timestamp) and all - delegated roles. - - This Snapshot object sub-classes Metadata, so the expected - Metadata operations like adding/removing public keys, signatures, private - keys, and updating metadata attributes (e.g., version and expiration) are - supported. Since Snapshot is a top-level role and must exist, a default - Snapshot object is instantiated when a new Repository object is created. - - >>> - >>> - >>> - - - repository_name: - The name of the repository. If not supplied, the role is added to the - 'default' repository. - - - tuf.exceptions.FormatError, if the argument is improperly formatted. - - - A 'snapshot' role is added to 'roledb'. - - - None. - """ - - def __init__(self, repository_name): - - super(Snapshot, self).__init__() - - self._rolename = 'snapshot' - self._repository_name = repository_name - - # Is 'repository_name' properly formatted? Otherwise, raise a - # tuf.exceptions.FormatError exception. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # By default, 'snapshot' metadata is set to expire 1 week from the current - # time. The expiration may be modified. - expiration = formats.unix_timestamp_to_datetime( - int(time.time() + SNAPSHOT_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'expires': expiration, - 'partial_loaded': False} - - try: - roledb.add_role(self._rolename, roleinfo, self._repository_name) - - except exceptions.RoleAlreadyExistsError: - pass - - - - - -class Targets(Metadata): - """ - - Represent a Targets role object.
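To make the four top-level role classes concrete: a new Repository instantiates one of each, with threshold 1 and the class-specific default expiration. A quick sketch (the repository path is hypothetical):

from tuf import repository_tool as repo_tool

repository = repo_tool.create_new_repository('repository')

# Root, Timestamp, Snapshot and Targets objects are created implicitly
# and are reachable as attributes of the Repository object.
for role in (repository.root, repository.timestamp,
    repository.snapshot, repository.targets):
  print(role.rolename, role.threshold, role.expiration)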
Targets roles include the top-level role - 'targets.json' and all delegated roles (e.g., 'targets/unclaimed/django'). - The expected operations of Targets metadata are included, such as adding - and removing repository target files, making and revoking delegations, and - listing the target files provided by it. - - Adding or removing a delegation causes the attributes of the Targets object - to be updated. That is, if the 'django' Targets object is delegated by - 'targets/unclaimed', a new attribute is added so that the following - code statement is supported: - repository.targets('unclaimed')('django').version = 2 - - Likewise, revoking a delegation causes removal of the delegation attribute. - - This Targets object sub-classes Metadata, so the expected Metadata - operations like adding/removing public keys, signatures, private keys, and - updating metadata attributes (e.g., version and expiration) are supported. - Since Targets is a top-level role and must exist, a default Targets object - (for 'targets.json', not delegated roles) is instantiated when a new - Repository object is created. - - >>> - >>> - >>> - - - targets_directory: - The targets directory of the Repository object. - - rolename: - The rolename of this Targets object. - - roleinfo: - An already populated roleinfo object of 'rolename'. Conformant to - 'tuf.formats.ROLEDB_SCHEMA'. - - repository_name: - The name of the repository. If not supplied, the role is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - Modifies the roleinfo of the targets role in 'roledb', or creates - a default one named 'targets'. - - - None. - """ - - def __init__(self, targets_directory, rolename='targets', roleinfo=None, - parent_targets_object=None, repository_name='default'): - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - sslib_formats.PATH_SCHEMA.check_match(targets_directory) - formats.ROLENAME_SCHEMA.check_match(rolename) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - if roleinfo is not None: - formats.ROLEDB_SCHEMA.check_match(roleinfo) - - super(Targets, self).__init__() - self._targets_directory = targets_directory - self._rolename = rolename - self._target_files = [] - self._delegated_roles = {} - self._parent_targets_object = self - self._repository_name = repository_name - - # Keep a reference to the top-level 'targets' object. Any delegated roles - # that may be created can be added to and accessed via the top-level - # 'targets' object. - if parent_targets_object is not None: - self._parent_targets_object = parent_targets_object - - # By default, Targets objects are set to expire 3 months from the current - # time. The expiration may be modified later. - expiration = formats.unix_timestamp_to_datetime( - int(time.time() + TARGETS_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - # If 'roleinfo' is not provided, set an initial default. - if roleinfo is None: - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'version': 0, 'expires': expiration, - 'signatures': [], 'paths': {}, 'path_hash_prefixes': [], - 'partial_loaded': False, 'delegations': {'keys': {}, - 'roles': []}} - - # Add the new role to the 'roledb'.
- try: - roledb.add_role(self.rolename, roleinfo, self._repository_name) - - except exceptions.RoleAlreadyExistsError: - pass - - - - def __call__(self, rolename): - """ - - Allow callable Targets object so that delegated roles may be referenced - by their string rolenames. Rolenames may include characters like '-' and - are not restricted to Python identifiers. - - - rolename: - The rolename of the delegated role. 'rolename' must be a role - previously delegated by this Targets role. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - tuf.exceptions.UnknownRoleError, if 'rolename' has not been - delegated by this Targets object. - - - Modifies the roleinfo of the targets role in 'roledb'. - - - The Targets object of 'rolename'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - formats.ROLENAME_SCHEMA.check_match(rolename) - - if rolename in self._delegated_roles: - return self._delegated_roles[rolename] - - else: - raise exceptions.UnknownRoleError(repr(rolename) + ' has' - ' not been delegated by ' + repr(self.rolename)) - - - - def add_delegated_role(self, rolename, targets_object): - """ - - Add 'targets_object' to this Targets object's list of known delegated - roles. Specifically, delegated Targets roles should call 'super(Targets, - self).add_delegated_role(...)' so that the top-level 'targets' role - contains a dictionary of all the available roles on the repository. - - - rolename: - The rolename of the delegated role. 'rolename' must be a role - previously delegated by this Targets role. - - targets_object: - A Targets() object. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - Updates the Target object's dictionary of delegated targets. - - - The Targets object of 'rolename'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - formats.ROLENAME_SCHEMA.check_match(rolename) - - if not isinstance(targets_object, Targets): - raise sslib_exceptions.FormatError(repr(targets_object) + ' is' - ' not a Targets object.') - - - if rolename in self._delegated_roles: - logger.debug(repr(rolename) + ' already exists.') - - else: - self._delegated_roles[rolename] = targets_object - - - - def remove_delegated_role(self, rolename): - """ - Remove 'rolename' from this Targets object's list of delegated roles. - This method does not update roledb and others. - - - rolename: - The rolename of the delegated role to remove. 'rolename' should be a - role previously delegated by this Targets role. - - - securesystemslib.exceptions.FormatError, if the argument is improperly - formatted. - - - Updates the Target object's dictionary of delegated targets. - - - None. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
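The callable syntax implemented above is what makes nested delegations addressable by name. A brief sketch, assuming the 'unclaimed' and 'django' delegations already exist in the loaded repository (path hypothetical):

from tuf import repository_tool as repo_tool

repository = repo_tool.load_repository('repository')

# Unknown rolenames raise tuf.exceptions.UnknownRoleError.
unclaimed = repository.targets('unclaimed')
django = unclaimed('django')
django.version = 2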
- formats.ROLENAME_SCHEMA.check_match(rolename) - - if rolename not in self._delegated_roles: - logger.debug(repr(rolename) + ' has not been delegated.') - return - - else: - del self._delegated_roles[rolename] - - - - @property - def target_files(self): - """ - - A getter method that returns the target files added thus far to this - Targets object. - - >>> - >>> - >>> - - - None. - - - None. - - - None. - - - None. - """ - - target_files = roledb.get_roleinfo(self._rolename, - self._repository_name)['paths'] - return target_files - - - - def add_paths(self, paths, child_rolename): - """ - - Add 'paths' to the delegated paths of 'child_rolename'. 'paths' can be a - list of either file paths or glob patterns. The updater client verifies - the target paths specified by child roles, and searches for targets by - visiting these delegated paths. A child role may only provide targets - specifically listed in the delegations field of the delegating role, or a - target that matches a delegated path. - - >>> - >>> - >>> - - - paths: - A list of glob patterns, or file paths, that 'child_rolename' is - trusted to provide. - - child_rolename: - The child delegation that requires an update to its delegated or - trusted paths, as listed in the parent role's delegations (e.g., - 'Django' in 'unclaimed'). - - - securesystemslib.exceptions.FormatError, if a path or glob pattern in - 'paths' is not a string, or if 'child_rolename' is not a formatted - rolename. - - securesystemslib.exceptions.Error, if 'child_rolename' has not been - delegated yet. - - tuf.exceptions.InvalidNameError, if any path in 'paths' does not match - pattern. - - - Modifies this Targets' delegations field. - - - None. - """ - - # Do the argument have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATHS_SCHEMA.check_match(paths) - formats.ROLENAME_SCHEMA.check_match(child_rolename) - - # Ensure that 'child_rolename' exists, otherwise it will not have an entry - # in the parent role's delegations field. - if not roledb.role_exists(child_rolename, self._repository_name): - raise sslib_exceptions.Error(repr(child_rolename) + ' does' - ' not exist.') - - for path in paths: - # Check if the delegated paths or glob patterns are relative and use - # forward slash as a separator or raise an exception. Paths' existence - # on the file system is not verified. If the path is incorrect, - # the targetfile won't be matched successfully during a client update. - self._check_path(path) - - # Get the current role's roleinfo, so that its delegations field can be - # updated. - roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) - - # Update the delegated paths of 'child_rolename' to add relative paths. - for role in roleinfo['delegations']['roles']: - if role['name'] == child_rolename: - for relative_path in paths: - if relative_path not in role['paths']: - role['paths'].append(relative_path) - - else: - logger.debug(repr(relative_path) + ' is already a delegated path.') - else: - logger.debug(repr(role['name']) + ' does not match child rolename.') - - roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - - - def add_target(self, filepath, custom=None, fileinfo=None): - """ - - Add a filepath (must be relative to the repository's targets directory) - to the Targets object. 
- - If 'filepath' has already been added, it will be replaced with any new - file or 'custom' information. - - >>> - >>> - >>> - - - filepath: - The path of the target file. It must be relative to the repository's - targets directory. - - custom: - An optional dictionary providing additional information about the file. - NOTE: if a custom value is passed, the fileinfo parameter must be None. - This parameter will be deprecated in a future release of tuf, use of - the fileinfo parameter is preferred. - - fileinfo: - An optional fileinfo dictionary, conforming to - tuf.formats.TARGETS_FILEINFO_SCHEMA, providing full information about the - file, i.e: - { 'length': 101, - 'hashes': { 'sha256': '123EDF...' }, - 'custom': { 'permissions': '600'} # optional - } - NOTE: if a custom value is passed, the fileinfo parameter must be None. - - - securesystemslib.exceptions.FormatError, if 'filepath' is improperly - formatted. - - tuf.exceptions.InvalidNameError, if 'filepath' does not match pattern. - - - Adds 'filepath' to this role's list of targets. This role's - 'roledb' entry is also updated. - - - None. - """ - - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if there is a mismatch. - formats.RELPATH_SCHEMA.check_match(filepath) - - if fileinfo and custom: - raise sslib_exceptions.Error("Can only take one of" - " custom or fileinfo, not both.") - - if fileinfo: - formats.TARGETS_FILEINFO_SCHEMA.check_match(fileinfo) - - if custom is None: - custom = {} - else: - formats.CUSTOM_SCHEMA.check_match(custom) - - # Add 'filepath' (i.e., relative to the targets directory) to the role's - # list of targets. 'filepath' will not be verified as an allowed path - # according to some delegating role. Not verifying 'filepath' here allows - # freedom to add targets and parent restrictions in any order, minimize - # the number of times these checks are performed, and allow any role to - # delegate trust of packages to this Targets role. - - # Check if the target is relative and uses forward slash as a separator - # or raise an exception. File's existence on the file system is not - # verified. If the file does not exist relative to the targets directory, - # later calls to write() will fail. - self._check_path(filepath) - - # Update the role's 'roledb' entry and avoid duplicates. - roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) - - if filepath not in roleinfo['paths']: - logger.debug('Adding new target: ' + repr(filepath)) - - else: - logger.debug('Replacing target: ' + repr(filepath)) - - if fileinfo: - roleinfo['paths'].update({filepath: fileinfo}) - else: - roleinfo['paths'].update({filepath: {'custom': custom}}) - - roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - - - def add_targets(self, list_of_targets): - """ - - Add a list of target filepaths (all relative to 'self.targets_directory'). - This method does not actually create files on the file system. The - list of targets must already exist on disk. - - >>> - >>> - >>> - - - list_of_targets: - A list of target filepaths that are added to the paths of this Targets - object. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - tuf.exceptions.InvalidNameError, if any target in 'list_of_targets' - does not match pattern. 
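A short sketch of add_target() as documented above. Paths and digests are placeholders, and 'custom' and 'fileinfo' are mutually exclusive:

from tuf import repository_tool as repo_tool

repository = repo_tool.load_repository('repository')  # hypothetical path

# Relative path plus optional out-of-band information.
repository.targets.add_target('foo/bar.tar.gz', custom={'type': 'package'})

# Alternatively, supply a complete TARGETS_FILEINFO_SCHEMA dict.
repository.targets.add_target('baz.tar.gz', fileinfo={
    'length': 101,
    'hashes': {'sha256': '123edf...'},   # placeholder digest
})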
- - - This Targets' roleinfo is updated with the paths in 'list_of_targets'. - - - None. - """ - - # Does 'list_of_targets' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - formats.RELPATHS_SCHEMA.check_match(list_of_targets) - - # Ensure the paths in 'list_of_targets' are relative and use forward slash - # as a separator or raise an exception. The paths of 'list_of_targets' - # will be verified as existing and allowed paths according to this Targets - # parent role when write() or writeall() is called. Not verifying - # filepaths here allows the freedom to add targets and parent restrictions - # in any order and minimize the number of times these checks are performed. - for target in list_of_targets: - self._check_path(target) - - # Update this Targets 'roledb' entry. - roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) - for relative_target in list_of_targets: - if relative_target not in roleinfo['paths']: - logger.debug('Adding new target: ' + repr(relative_target)) - else: - logger.debug('Replacing target: ' + repr(relative_target)) - roleinfo['paths'].update({relative_target: {}}) - - roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - - - def remove_target(self, filepath): - """ - - Remove the target 'filepath' from this Targets' 'paths' field. 'filepath' - is relative to the targets directory. - - >>> - >>> - >>> - - - filepath: - The target to remove from this Targets object, relative to the - repository's targets directory. - - - securesystemslib.exceptions.FormatError, if 'filepath' is improperly - formatted. - - securesystemslib.exceptions.Error, if 'filepath' is not located in the - repository's targets directory, or not found. - - - Modifies this Targets 'roledb' entry. - - - None. - """ - - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if there is a mismatch. - formats.RELPATH_SCHEMA.check_match(filepath) - - # Remove 'relative_filepath', if found, and update this Targets roleinfo. - fileinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - if filepath in fileinfo['paths']: - del fileinfo['paths'][filepath] - roledb.update_roleinfo(self.rolename, fileinfo, - repository_name=self._repository_name) - - else: - raise sslib_exceptions.Error('Target file path not found.') - - - - def clear_targets(self): - """ - - Remove all the target filepaths in the "paths" field of this Targets. - - >>> - >>> - >>> - - - None - - - None. - - - Modifies this Targets' 'roledb' entry. - - - None. - """ - - roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - roleinfo['paths'] = {} - - roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - - - - - def get_delegated_rolenames(self): - """ - - Return all delegations of a role. If ['a/b/', 'a/b/c/', 'a/b/c/d'] have - been delegated by the delegated role 'django', - repository.targets('django').get_delegated_rolenames() returns: ['a/b', - 'a/b/c', 'a/b/c/d']. - - - None. - - - None. - - - None. - - - A list of rolenames. 
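The bulk target-management helpers described above compose naturally; a minimal sketch with hypothetical paths:

from tuf import repository_tool as repo_tool

repository = repo_tool.load_repository('repository')  # hypothetical path

repository.targets.add_targets(['foo/a.tgz', 'foo/b.tgz'])
print(repository.targets.target_files)          # {'foo/a.tgz': {}, ...}

repository.targets.remove_target('foo/a.tgz')   # Error if not present
repository.targets.clear_targets()              # empties the 'paths' field
print(repository.targets.get_delegated_rolenames())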
- """ - - return roledb.get_delegated_rolenames(self.rolename, self._repository_name) - - - - - - def _create_delegated_target(self, rolename, keyids, threshold, paths): - """ - Create a new Targets object for the 'rolename' delegation. An initial - expiration is set (3 months from the current time). - """ - - expiration = formats.unix_timestamp_to_datetime( - int(time.time() + TARGETS_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'name': rolename, 'keyids': keyids, 'signing_keyids': [], - 'threshold': threshold, 'version': 0, - 'expires': expiration, 'signatures': [], 'partial_loaded': False, - 'paths': paths, 'delegations': {'keys': {}, 'roles': []}} - - # The new targets object is added as an attribute to this Targets object. - new_targets_object = Targets(self._targets_directory, rolename, roleinfo, - parent_targets_object=self._parent_targets_object, - repository_name=self._repository_name) - - return new_targets_object - - - - - - def _update_roledb_delegations(self, keydict, delegations_roleinfo): - """ - Update the roledb to include delegations of the keys in keydict and the - roles in delegations_roleinfo - """ - - current_roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - current_roleinfo['delegations']['keys'].update(keydict) - - for roleinfo in delegations_roleinfo: - current_roleinfo['delegations']['roles'].append(roleinfo) - - roledb.update_roleinfo(self.rolename, current_roleinfo, - repository_name=self._repository_name) - - - - - - def delegate(self, rolename, public_keys, paths, threshold=1, - terminating=False, list_of_targets=None, path_hash_prefixes=None): - """ - - Create a new delegation, where 'rolename' is a child delegation of this - Targets object. The keys and roles database is updated, including the - delegations field of this Targets. The delegation of 'rolename' is added - and accessible (i.e., repository.targets(rolename)). - - Actual metadata files are not created, only when repository.writeall() or - repository.write() is called. - - >>> - >>> - >>> - - - rolename: - The name of the delegated role, as in 'django' or 'unclaimed'. - - public_keys: - A list of TUF key objects in 'ANYKEYLIST_SCHEMA' format. The list - may contain any of the supported key types: RSAKEY_SCHEMA, - ED25519KEY_SCHEMA, etc. - - paths: - The paths, or glob patterns, delegated to 'rolename'. Any targets - added to 'rolename', via add_targets() or 'list_of_targets', must - match one of the paths or glob patterns in 'paths'. Apart from the - public keys of 'rolename', the delegated 'paths' is often known and - specified when a delegation is first performed. If the delegator - is unsure of which 'paths' to delegate, 'paths' can be set to ['']. - - threshold: - The threshold number of keys of 'rolename'. - - terminating: - Boolean that indicates whether this role allows the updater client to - continue searching for targets (target files it is trusted to list but - has not yet specified) in other delegations. If 'terminating' is True - and 'updater.target()' does not find 'example_target.tar.gz' in this - role, a 'tuf.exceptions.UnknownTargetError' exception should be raised. - If 'terminating' is False (default), and 'target/other_role' is also - trusted with 'example_target.tar.gz' and has listed it, - updater.target() should backtrack and return the target file specified - by 'target/other_role'. - - list_of_targets: - A list of target filepaths that are added to 'rolename'. 
- 'list_of_targets' is a list of target filepaths, can be empty, and each - filepath must be located in the repository's targets directory. The - list of targets should also exist at the specified paths, otherwise - non-existent target paths might not be added when the targets file is - written to disk with writeall() or write(). - - path_hash_prefixes: - A list of hash prefixes in - 'tuf.formats.PATH_HASH_PREFIXES_SCHEMA' format, used in - hashed bin delegations. Targets may be located and stored in hashed - bins by calculating the target path's hash prefix. - - - securesystemslib.exceptions.FormatError, if any of the arguments are - improperly formatted. - - securesystemslib.exceptions.Error, if the delegated role already exists. - - tuf.exceptions.InvalidNameError, if any path in 'paths' or target in - 'list_of_targets' does not match pattern. - - - A new Target object is created for 'rolename' that is accessible to the - caller (i.e., targets.). The 'keydb' and - 'roledb' stores are updated with 'public_keys'. - - - None. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - formats.ROLENAME_SCHEMA.check_match(rolename) - sslib_formats.ANYKEYLIST_SCHEMA.check_match(public_keys) - formats.RELPATHS_SCHEMA.check_match(paths) - formats.THRESHOLD_SCHEMA.check_match(threshold) - sslib_formats.BOOLEAN_SCHEMA.check_match(terminating) - - if list_of_targets is not None: - formats.RELPATHS_SCHEMA.check_match(list_of_targets) - - if path_hash_prefixes is not None: - formats.PATH_HASH_PREFIXES_SCHEMA.check_match(path_hash_prefixes) - - # Keep track of the valid keyids (added to the new Targets object) and - # their keydicts (added to this Targets delegations). - keyids, keydict = repo_lib.keys_to_keydict(public_keys) - - # Ensure the paths of 'list_of_targets' are located in the repository's - # targets directory. - relative_targetpaths = {} - - if list_of_targets: - for target in list_of_targets: - # Check if the target path is relative or raise an exception. File's - # existence on the file system is not verified. If the file does not - # exist relative to the targets directory, later calls to write() - # will fail. - self._check_path(target) - relative_targetpaths.update({target: {}}) - - for path in paths: - # Check if the delegated paths or glob patterns are relative or - # raise an exception. Paths' existence on the file system is not - # verified. If the path is incorrect, the targetfile won't be matched - # successfully during a client update. - self._check_path(path) - - # The new targets object is added as an attribute to this Targets object. - new_targets_object = self._create_delegated_target(rolename, keyids, - threshold, relative_targetpaths) - - # Update the roleinfo of this role. A ROLE_SCHEMA object requires only - # 'keyids', 'threshold', and 'paths'. - roleinfo = {'name': rolename, - 'keyids': keyids, - 'threshold': threshold, - 'terminating': terminating, - 'paths': list(relative_targetpaths.keys())} - - if paths: - roleinfo['paths'] = paths - - if path_hash_prefixes: - roleinfo['path_hash_prefixes'] = path_hash_prefixes - # A role in a delegations must list either 'path_hash_prefixes' - # or 'paths'. - del roleinfo['paths'] - - # Update the public keys of 'new_targets_object'. 
- for key in public_keys: - new_targets_object.add_verification_key(key) - - # Add the new delegation to the top-level 'targets' role object (i.e., - # 'repository.targets()'). For example, 'django', which was delegated by - # repository.targets('claimed'), is added as 'repository.targets('django')'. - if self.rolename != 'targets': - self._parent_targets_object.add_delegated_role(rolename, - new_targets_object) - - # Add 'new_targets_object' to the delegating role object (this object). - self.add_delegated_role(rolename, new_targets_object) - - # Update the 'delegations' field of the current role. - self._update_roledb_delegations(keydict, [roleinfo]) - - - - - - def revoke(self, rolename): - """ - - Revoke this Targets' 'rolename' delegation. Its 'rolename' attribute is - deleted, including the entries in its 'delegations' field and in - 'roledb'. - - Actual metadata files are not updated until repository.writeall() or - repository.write() is called. - - >>> - >>> - >>> - - - rolename: - The rolename (e.g., 'django') of the child delegation the - parent role (this role) wants to revoke. - - - securesystemslib.exceptions.FormatError, if 'rolename' is improperly - formatted. - - - The delegations dictionary of 'rolename' is modified, and its 'roledb' - entry is updated. This Targets' 'rolename' delegation attribute is also - deleted. - - - None. - """ - - # Does 'rolename' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - formats.ROLENAME_SCHEMA.check_match(rolename) - - # Remove 'rolename' from this Target's delegations dict. - roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) - - for role in roleinfo['delegations']['roles']: - if role['name'] == rolename: - roleinfo['delegations']['roles'].remove(role) - - roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - # Remove 'rolename' from 'roledb'. - try: - roledb.remove_role(rolename, self._repository_name) - # Remove the rolename delegation from the current role. For example, the - # 'django' role is removed from repository.targets('django'). - del self._delegated_roles[rolename] - self._parent_targets_object.remove_delegated_role(rolename) - - except (exceptions.UnknownRoleError, KeyError): - pass - - - - def delegate_hashed_bins(self, list_of_targets, keys_of_hashed_bins, - number_of_bins=DEFAULT_NUM_BINS): - """ - - Distribute a large number of target files over multiple delegated roles - (hashed bins). The metadata files of delegated roles will be nearly - equal in size (i.e., 'list_of_targets' is uniformly distributed by - calculating the target filepath's hash and determining which bin it should - reside in). The updater client will use "lazy bin walk" to find a target - file's hashed bin destination. The parent role lists a range of path - hash prefixes each hashed bin contains. This method is intended for - repositories with a large number of target files, a way of easily - distributing and managing the metadata that lists the targets, and - minimizing the number of metadata files (and their size) downloaded by - the client.
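A sketch of the delegate()/revoke() pair documented above. The key is generated in-process purely for illustration; a real delegation would use the delegatee's public key, and the paths are hypothetical:

import securesystemslib.keys
from tuf import repository_tool as repo_tool

repository = repo_tool.load_repository('repository')  # hypothetical path
key = securesystemslib.keys.generate_ed25519_key()

# Delegate the 'django/' namespace from the top-level targets role.
repository.targets.delegate('django', [key], paths=['django/'],
    threshold=1, terminating=False)
repository.targets('django').add_target('django/pkg-1.0.tgz')  # assumed file

# Revoking removes the roledb entry and this role's delegation attribute.
repository.targets.revoke('django')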
See tuf-spec.txt and the following link for more - information: - http://www.python.org/dev/peps/pep-0458/#metadata-scalability - - >>> - >>> - >>> - - - list_of_targets: - The target filepaths of the targets that should be stored in hashed - bins created (i.e., delegated roles). A repository object's - get_filepaths_in_directory() can generate a list of valid target - paths. - - keys_of_hashed_bins: - The initial public keys of the delegated roles. Public keys may be - later added or removed by calling the usual methods of the delegated - Targets object. For example: - repository.targets('000-003').add_verification_key() - - number_of_bins: - The number of delegated roles, or hashed bins, that should be generated - and contain the target file attributes listed in 'list_of_targets'. - 'number_of_bins' must be a power of 2. Each bin may contain a - range of path hash prefixes (e.g., target filepath digests that range - from [000]... - [003]..., where the series of digits in brackets is - considered the hash prefix). - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - securesystemslib.exceptions.Error, if 'number_of_bins' is not a power of - 2, or one of the targets in 'list_of_targets' is not relative to the - repository's targets directory. - - tuf.exceptions.InvalidNameError, if any target in 'list_of_targets' - does not match pattern. - - - Delegates multiple target roles from the current parent role. - - - None. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATHS_SCHEMA.check_match(list_of_targets) - sslib_formats.ANYKEYLIST_SCHEMA.check_match(keys_of_hashed_bins) - formats.NUMBINS_SCHEMA.check_match(number_of_bins) - - prefix_length, prefix_count, bin_size = repo_lib.get_bin_numbers(number_of_bins) - - logger.info('Creating hashed bin delegations.\n' + - repr(len(list_of_targets)) + ' total targets.\n' + - repr(number_of_bins) + ' hashed bins.\n' + - repr(prefix_count) + ' total hash prefixes.\n' + - 'Each bin ranges over ' + repr(bin_size) + ' hash prefixes.') - - # Generate a list of bin names, the range of prefixes to be delegated to - # that bin, along with the corresponding full list of target prefixes - # to be delegated to that bin - ordered_roles = [] - for idx in range(0, prefix_count, bin_size): - high = idx + bin_size - 1 - name = repo_lib.create_bin_name(idx, high, prefix_length) - if bin_size == 1: - target_hash_prefixes = [name] - else: - target_hash_prefixes = [] - for idy in range(idx, idx+bin_size): - target_hash_prefixes.append("{prefix:0{len}x}".format(prefix=idy, - len=prefix_length)) - - role = {"name": name, - "target_paths": [], - "target_hash_prefixes": target_hash_prefixes} - ordered_roles.append(role) - - for target_path in list_of_targets: - # Check if the target path is relative or raise an exception. File's - # existence on the file system is not verified. If the file does not - # exist relative to the targets directory, later calls to write() and - # writeall() will fail. - self._check_path(target_path) - - # Determine the hash prefix of 'target_path' by computing the digest of - # its path relative to the targets directory. 
- # We must hash a target path as it appears in the metadata - hash_prefix = repo_lib.get_target_hash(target_path)[:prefix_length] - ordered_roles[int(hash_prefix, 16) // bin_size]["target_paths"].append(target_path) - - keyids, keydict = repo_lib.keys_to_keydict(keys_of_hashed_bins) - - # A queue of roleinfo's that need to be updated in the roledb - delegated_roleinfos = [] - - for bin_role in ordered_roles: - # TODO: originally we just called self.delegate() for each item in this - # iteration. However, this is *extremely* slow when creating a large - # number of hashed bins, i.e. 16k as is recommended for PyPI usage in - # PEP 458: https://www.python.org/dev/peps/pep-0458/ - # The source of the slowness is the interactions with the roledb, which - # causes several deep copies of roleinfo dictionaries: - # https://github.com/theupdateframework/python-tuf/issues/1005 - # Once the underlying issues in #1005 are resolved, i.e. some combination - # of the intermediate and long-term fixes, we may simplify here by - # switching back to just calling self.delegate(), but until that time we - # queue roledb interactions and perform all updates to the roledb in one - # operation at the end of the iteration. - - relative_paths = {} - for path in bin_role['target_paths']: - relative_paths.update({path: {}}) - - # Delegate from the "unclaimed" targets role to each 'bin_role' - target = self._create_delegated_target(bin_role['name'], keyids, 1, - relative_paths) - - roleinfo = {'name': bin_role['name'], - 'keyids': keyids, - 'threshold': 1, - 'terminating': False, - 'path_hash_prefixes': bin_role['target_hash_prefixes']} - delegated_roleinfos.append(roleinfo) - - for key in keys_of_hashed_bins: - target.add_verification_key(key) - - # Add the new delegation to the top-level 'targets' role object (i.e., - # 'repository.targets()'). - if self.rolename != 'targets': - self._parent_targets_object.add_delegated_role(bin_role['name'], - target) - - # Add 'new_targets_object' to the 'targets' role object (this object). - self.add_delegated_role(bin_role['name'], target) - logger.debug('Delegated from ' + repr(self.rolename) + ' to ' + repr(bin_role)) - - - self._update_roledb_delegations(keydict, delegated_roleinfos) - - - - - def add_target_to_bin(self, target_filepath, number_of_bins=DEFAULT_NUM_BINS, - fileinfo=None): - """ - - Add the fileinfo of 'target_filepath' to the expected hashed bin, if the - bin is available. The hashed bin should have been created by - {targets_role}.delegate_hashed_bins(). Assuming the target filepath is - located in the repository's targets directory, determine the filepath's - hash prefix, locate the expected bin (if any), and then add the fileinfo - to the expected bin. Example: 'targets/foo.tar.gz' may be added to the - 'targets/unclaimed/58-5f.json' role's list of targets by calling this - method. - - - target_filepath: - The filepath of the target to be added to a hashed bin. The filepath - must be located in the repository's targets directory. - - number_of_bins: - The number of delegated roles, or hashed bins, in use by the repository. - Note: 'number_of_bins' must be a power of 2. - - fileinfo: - An optional fileinfo object, conforming to tuf.formats.TARGETS_FILEINFO_SCHEMA, - providing full information about the file. - - - securesystemslib.exceptions.FormatError, if 'target_filepath' is - improperly formatted. 
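A sketch of the hashed-bin workflow: delegate_hashed_bins() creates the bins, and add_target_to_bin() routes a path to the bin owning its hash prefix. Paths and the key are illustrative:

import securesystemslib.keys
from tuf import repository_tool as repo_tool

repository = repo_tool.load_repository('repository')  # hypothetical path
bin_key = securesystemslib.keys.generate_ed25519_key()

targets = ['packages/a.tgz', 'packages/b.tgz']
repository.targets.delegate_hashed_bins(targets, [bin_key],
    number_of_bins=32)   # must be a power of 2

bin_name = repository.targets.add_target_to_bin('packages/c.tgz',
    number_of_bins=32)
print('routed to bin', bin_name)   # a range name such as '00-07' (illustrative)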
- - securesystemslib.exceptions.Error, if 'target_filepath' cannot be added to - a hashed bin (e.g., an invalid target filepath, or the expected hashed - bin does not exist). - - - The fileinfo of 'target_filepath' is added to a hashed bin of this Targets - object. - - - The name of the hashed bin that the target was added to. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATH_SCHEMA.check_match(target_filepath) - formats.NUMBINS_SCHEMA.check_match(number_of_bins) - - # TODO: check target_filepath is sane - - path_hash = repo_lib.get_target_hash(target_filepath) - bin_name = repo_lib.find_bin_for_target_hash(path_hash, number_of_bins) - - # Ensure the Targets object has delegated to hashed bins. - if not self._delegated_roles.get(bin_name, None): - raise sslib_exceptions.Error(self.rolename + ' does not have' - ' a delegated role ' + bin_name) - - self._delegated_roles[bin_name].add_target(target_filepath, - fileinfo=fileinfo) - - return bin_name - - - - def remove_target_from_bin(self, target_filepath, - number_of_bins=DEFAULT_NUM_BINS): - """ - - Remove the fileinfo of 'target_filepath' from the expected hashed bin, if - the bin is available. The hashed bin should have been created by - {targets_role}.delegate_hashed_bins(). Assuming the target filepath is - located in the repository's targets directory, determine the filepath's - hash prefix, locate the expected bin (if any), and then remove the - fileinfo from the expected bin. Example: 'targets/foo.tar.gz' may be - removed from the '58-5f.json' role's list of targets by calling this - method. - - - target_filepath: - The filepath of the target to be removed from a hashed bin. The filepath - must be located in the repository's targets directory. - - number_of_bins: - The number of delegated roles, or hashed bins, in use by the repository. - Note: 'number_of_bins' must be a power of 2. - - - securesystemslib.exceptions.FormatError, if 'target_filepath' is - improperly formatted. - - securesystemslib.exceptions.Error, if 'target_filepath' cannot be removed - from a hashed bin (e.g., an invalid target filepath, or the expected - hashed bin does not exist). - - - The fileinfo of 'target_filepath' is removed from a hashed bin of this - Targets object. - - - The name of the hashed bin that the target was removed from. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATH_SCHEMA.check_match(target_filepath) - formats.NUMBINS_SCHEMA.check_match(number_of_bins) - - # TODO: check target_filepath is sane? - - path_hash = repo_lib.get_target_hash(target_filepath) - bin_name = repo_lib.find_bin_for_target_hash(path_hash, number_of_bins) - - # Ensure the Targets object has delegated to hashed bins. - if not self._delegated_roles.get(bin_name, None): - raise sslib_exceptions.Error(self.rolename + ' does not have' - ' a delegated role ' + bin_name) - - self._delegated_roles[bin_name].remove_target(target_filepath) - - return bin_name - - - @property - def delegations(self): - """ - - A getter method that returns the delegations made by this Targets role. - - >>> - >>> - >>> - - - None.
- - - tuf.exceptions.UnknownRoleError, if this Targets' rolename - does not exist in 'roledb'. - - - None. - - - A list containing the Targets objects of this Targets' delegations. - """ - - return list(self._delegated_roles.values()) - - - - - - def _check_path(self, pathname): - """ - - Check if a path matches the definition of a PATHPATTERN or a - TARGETPATH (uses the forward slash (/) as directory separator and - does not start with a directory separator). Checks are performed only - on the path string, without accessing the file system. - - - pathname: - A file path or a glob pattern. - - - securesystemslib.exceptions.FormatError, if 'pathname' is improperly - formatted. - - tuf.exceptions.InvalidNameError, if 'pathname' does not match pattern. - - - None. - """ - - formats.RELPATH_SCHEMA.check_match(pathname) - - if '\\' in pathname: - raise exceptions.InvalidNameError('Path ' + repr(pathname) - + ' does not use the forward slash (/) as directory separator.') - - if pathname.startswith('/'): - raise exceptions.InvalidNameError('Path ' + repr(pathname) - + ' starts with a directory separator. All paths should be relative' - ' to targets directory.') - - - - -def create_new_repository(repository_directory, repository_name='default', - storage_backend=None, use_timestamp_length=True, use_timestamp_hashes=True, - use_snapshot_length=False, use_snapshot_hashes=False): - """ - - Create a new repository, instantiate barebones metadata for the top-level - roles, and return a Repository object. On disk, create_new_repository() - only creates the directories needed to hold the metadata and targets files. - The repository object returned may be modified to update the newly created - repository. The methods of the returned object may be called to create - actual repository files (e.g., repository.write()). - - - repository_directory: - The directory that will eventually hold the metadata and target files of - the TUF repository. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. When no object is - passed a FilesystemBackend will be instantiated and used. - - use_timestamp_length: - Whether to include the optional length attribute of the snapshot - metadata file in the timestamp metadata. - Default is True. - - use_timestamp_hashes: - Whether to include the optional hashes attribute of the snapshot - metadata file in the timestamp metadata. - Default is True. - - use_snapshot_length: - Whether to include the optional length attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - use_snapshot_hashes: - Whether to include the optional hashes attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - The 'repository_directory' is created if it does not exist, including its - metadata and targets sub-directories. - - - A 'tuf.repository_tool.Repository' object. 
- """ - - # Does 'repository_directory' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATH_SCHEMA.check_match(repository_directory) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - if storage_backend is None: - storage_backend = sslib_storage.FilesystemBackend() - - # Set the repository, metadata, and targets directories. These directories - # are created if they do not exist. - repository_directory = os.path.abspath(repository_directory) - metadata_directory = None - targets_directory = None - - # Ensure the 'repository_directory' exists - logger.info('Creating ' + repr(repository_directory)) - storage_backend.create_folder(repository_directory) - - # Set the metadata and targets directories. The metadata directory is a - # staged one so that the "live" repository is not affected. The - # staged metadata changes may be moved over to "live" after all updated - # have been completed. - metadata_directory = os.path.join(repository_directory, - METADATA_STAGED_DIRECTORY_NAME) - targets_directory = os.path.join(repository_directory, TARGETS_DIRECTORY_NAME) - - # Ensure the metadata directory exists - logger.info('Creating ' + repr(metadata_directory)) - storage_backend.create_folder(metadata_directory) - - # Ensure the targets directory exists - logger.info('Creating ' + repr(targets_directory)) - storage_backend.create_folder(targets_directory) - - # Create the bare bones repository object, where only the top-level roles - # have been set and contain default values (e.g., Root roles has a threshold - # of 1, expires 1 year into the future, etc.) - repository = Repository(repository_directory, metadata_directory, - targets_directory, storage_backend, repository_name, use_timestamp_length, - use_timestamp_hashes, use_snapshot_length, use_snapshot_hashes) - - return repository - - - - - -def load_repository(repository_directory, repository_name='default', - storage_backend=None, use_timestamp_length=True, use_timestamp_hashes=True, - use_snapshot_length=False, use_snapshot_hashes=False): - """ - - Return a repository object containing the contents of metadata files loaded - from the repository. - - - repository_directory: - The root folder of the repository that contains the metadata and targets - sub-directories. - - repository_name: - The name of the repository. If not supplied, 'default' is used as the - repository name. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. When no object is - passed a FilesystemBackend will be instantiated and used. - - use_timestamp_length: - Whether to include the optional length attribute of the snapshot - metadata file in the timestamp metadata. - Default is True. - - use_timestamp_hashes: - Whether to include the optional hashes attribute of the snapshot - metadata file in the timestamp metadata. - Default is True. - - use_snapshot_length: - Whether to include the optional length attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - use_snapshot_hashes: - Whether to include the optional hashes attribute for targets - metadata files in the snapshot metadata. 
- Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - - securesystemslib.exceptions.FormatError, if 'repository_directory' or any of - the metadata files are improperly formatted. - - tuf.exceptions.RepositoryError, if the Root role cannot be - found. At a minimum, a repository must contain 'root.json' - - - All the metadata files found in the repository are loaded and their contents - stored in a repository_tool.Repository object. - - - repository_tool.Repository object. - """ - - # Does 'repository_directory' have the correct format? - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - sslib_formats.PATH_SCHEMA.check_match(repository_directory) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - if storage_backend is None: - storage_backend = sslib_storage.FilesystemBackend() - - repository_directory = os.path.abspath(repository_directory) - metadata_directory = os.path.join(repository_directory, - METADATA_STAGED_DIRECTORY_NAME) - targets_directory = os.path.join(repository_directory, TARGETS_DIRECTORY_NAME) - - # The Repository() object loaded (i.e., containing all the metadata roles - # found) and returned. - repository = Repository(repository_directory, metadata_directory, - targets_directory, storage_backend, repository_name, use_timestamp_length, - use_timestamp_hashes, use_snapshot_length, use_snapshot_hashes) - - filenames = repo_lib.get_top_level_metadata_filenames(metadata_directory) - - # The Root file is always available without a version number (a consistent - # snapshot) attached to the filename. Store the 'consistent_snapshot' value - # and read the loaded Root file so that other metadata files may be located. - consistent_snapshot = False - - # Load the metadata of the top-level roles (i.e., Root, Timestamp, Targets, - # and Snapshot). - repository, consistent_snapshot = repo_lib._load_top_level_metadata(repository, - filenames, repository_name) - - delegated_roles_filenames = repo_lib.get_delegated_roles_metadata_filenames( - metadata_directory, consistent_snapshot, storage_backend) - - # Load the delegated targets metadata and their fileinfo. - # The delegated targets roles form a tree/graph which is traversed in a - # breadth-first-search manner starting from 'targets' in order to correctly - # load the delegations hierarchy. - parent_targets_object = repository.targets - - # Keep the next delegations to be loaded in a deque structure which - # has the properties of a list but is designed to have fast appends - # and pops from both ends - delegations = deque() - # A set used to keep the already loaded delegations and avoid an infinite - # loop in case of cycles in the delegations graph - loaded_delegations = set() - - # Top-level roles are already loaded, fetch targets and get its delegations. - # Store the delegations in the form of delegated-delegating role tuples, - # starting from the top-level targets: - # [('role1', 'targets'), ('role2', 'targets'), ... ] - roleinfo = roledb.get_roleinfo('targets', repository_name) - for role in roleinfo['delegations']['roles']: - delegations.append((role, 'targets')) - - # Traverse the graph by appending the next delegation to the deque and - # 'pop'-ing and loading the left-most element. 
- while delegations: - delegation_info, delegating_role = delegations.popleft() - - rolename = delegation_info['name'] - if (rolename, delegating_role) in loaded_delegations: - logger.warning('Detected cycle in the delegation graph: ' + - repr(delegating_role) + ' -> ' + - repr(rolename) + - ' is reached more than once.') - continue - - # Instead of adding only rolename to the set, store the already loaded - # delegated-delegating role tuples. This way a delegated role is added - # to each of its delegating roles but when the role is reached twice - # from the same delegating role an infinite loop is avoided. - loaded_delegations.add((rolename, delegating_role)) - - metadata_path = delegated_roles_filenames[rolename] - signable = None - - try: - signable = sslib_util.load_json_file(metadata_path) - - except (sslib_exceptions.Error, ValueError, IOError): - logger.debug('Tried to load metadata with invalid JSON' - ' content: ' + repr(metadata_path)) - continue - - metadata_object = signable['signed'] - - # Extract the metadata attributes of 'metadata_object' and update its - # corresponding roleinfo. - roleinfo = {'name': rolename, - 'signing_keyids': [], - 'signatures': [], - 'partial_loaded': False - } - - roleinfo['signatures'].extend(signable['signatures']) - roleinfo['version'] = metadata_object['version'] - roleinfo['expires'] = metadata_object['expires'] - roleinfo['paths'] = metadata_object['targets'] - roleinfo['delegations'] = metadata_object['delegations'] - roleinfo['threshold'] = delegation_info['threshold'] - roleinfo['keyids'] = delegation_info['keyids'] - - # Generate the Targets object of the delegated role, - # add it to the top-level 'targets' object and to its - # direct delegating role object. - new_targets_object = Targets(targets_directory, rolename, - roleinfo, parent_targets_object=parent_targets_object, - repository_name=repository_name) - - parent_targets_object.add_delegated_role(rolename, - new_targets_object) - if delegating_role != 'targets': - parent_targets_object(delegating_role).add_delegated_role(rolename, - new_targets_object) - - # Append the next level delegations to the deque: - # the 'delegated' role becomes the 'delegating' - for delegation in metadata_object['delegations']['roles']: - delegations.append((delegation, rolename)) - - # Extract the keys specified in the delegations field of the Targets - # role. Add 'key_object' to the list of recognized keys. Keys may be - # shared, so do not raise an exception if 'key_object' has already been - # added. In contrast to the methods that may add duplicate keys, do not - # log a warning here as there may be many such duplicate key warnings. - # The repository maintainer should have also been made aware of the - # duplicate key when it was added. - for key_metadata in metadata_object['delegations']['keys'].values(): - - # The repo may have used hashing algorithms for the generated keyids - # that doesn't match the client's set of hash algorithms. Make sure - # to only used the repo's selected hashing algorithms. - key_object, keyids = format_metadata_to_key(key_metadata, - keyid_hash_algorithms=key_metadata['keyid_hash_algorithms']) - try: - for keyid in keyids: # pragma: no branch - key_object['keyid'] = keyid - keydb.add_key(key_object, keyid=None, - repository_name=repository_name) - - except exceptions.KeyAlreadyExistsError: - pass - - return repository - - - - - -def dump_signable_metadata(metadata_filepath): - """ - - Dump the "signed" portion of metadata. 
It is the portion that is normally - signed by the repository tool, which is in canonicalized JSON form. - This function is intended for external tools that wish to independently - sign metadata. - - The normal workflow for this use case is to: - (1) call dump_signable_metadata(metadata_filepath) - (2) sign the output with an external tool - (3) call append_signature(signature, metadata_filepath) - - - metadata_filepath: - The path to the metadata file. For example, - repository/metadata/root.json. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - IOError, if 'metadata_filepath' cannot be opened. - - - None. - - - Metadata content that is normally signed by the repository tool (i.e., the - "signed" portion of a metadata file). - """ - - # Are the argument properly formatted? - sslib_formats.PATH_SCHEMA.check_match(metadata_filepath) - - signable = sslib_util.load_json_file(metadata_filepath) - - # Is 'signable' a valid metadata file? - formats.SIGNABLE_SCHEMA.check_match(signable) - - return sslib_formats.encode_canonical(signable['signed']) - - - - - -def append_signature(signature, metadata_filepath): - """ - - Append 'signature' to the metadata at 'metadata_filepath'. The signature - is assumed to be valid, and externally generated by signing the output of - dump_signable_metadata(metadata_filepath). This function is intended for - external tools that wish to independently sign metadata. - - The normal workflow for this use case is to: - (1) call dump_signable_metadata(metadata_filepath) - (2) sign the output with an external tool - (3) call append_signature(signature, metadata_filepath) - - - signature: - A TUF signature structure that contains the KEYID, signing method, and - the signature. It conforms to securesystemslib.formats.SIGNATURE_SCHEMA. - - For example: - - { - "keyid": "a0a0f0cf08...", - "method": "ed25519", - "sig": "14f6e6566ec13..." - } - - metadata_filepath: - The path to the metadata file. For example, - repository/metadata/root.json. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - 'metadata_filepath' is overwritten. - - - None. - """ - - # Are the arguments properly formatted? - sslib_formats.SIGNATURE_SCHEMA.check_match(signature) - sslib_formats.PATH_SCHEMA.check_match(metadata_filepath) - - signable = sslib_util.load_json_file(metadata_filepath) - - # Is 'signable' a valid metadata file? - formats.SIGNABLE_SCHEMA.check_match(signable) - - signable['signatures'].append(signature) - - file_object = tempfile.TemporaryFile() - - written_metadata_content = json.dumps(signable, indent=1, - separators=(',', ': '), sort_keys=True).encode('utf-8') - - file_object.write(written_metadata_content) - sslib_util.persist_temp_file(file_object, metadata_filepath) - - - - - -if __name__ == '__main__': - # The interactive sessions of the documentation strings can - # be tested by running repository_tool.py as a standalone module: - # $ python3 repository_tool.py. - import doctest - doctest.testmod() diff --git a/tuf/requests_fetcher.py b/tuf/requests_fetcher.py deleted file mode 100644 index 1692ebee7c..0000000000 --- a/tuf/requests_fetcher.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright 2021, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -"""Provides an implementation of FetcherInterface using the Requests HTTP - library. 
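
Before the fetcher module: the dump/sign/append workflow documented above can be exercised end to end. A minimal sketch, assuming an Ed25519 key handled with the 'cryptography' package; the keyid below is a placeholder that would have to match a key actually trusted for the role:

from cryptography.hazmat.primitives.asymmetric import ed25519
from tuf.repository_tool import dump_signable_metadata, append_signature

# (1) Extract the canonicalized "signed" portion of the metadata.
content = dump_signable_metadata('repository/metadata/root.json')

# (2) Sign it externally; here, an in-process stand-in for the external tool.
private_key = ed25519.Ed25519PrivateKey.generate()
sig = private_key.sign(content.encode('utf-8'))

# (3) Append the resulting signature object to the metadata file.
signature = {
    'keyid': 'a0a0f0cf08...',  # placeholder; use the trusted key's keyid
    'method': 'ed25519',
    'sig': sig.hex(),
}
append_signature(signature, 'repository/metadata/root.json')
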
-""" - -# Imports -import requests -import logging -import time -from urllib import parse -from urllib3.exceptions import ReadTimeoutError - -import tuf -from tuf import exceptions -from tuf import settings - -from tuf.client.fetcher import FetcherInterface - -# Globals -logger = logging.getLogger(__name__) - -# Classess -class RequestsFetcher(FetcherInterface): - """A concrete implementation of FetcherInterface based on the Requests - library. - - Attributes: - _sessions: A dictionary of Requests.Session objects storing a separate - session per scheme+hostname combination. - """ - - def __init__(self): - # From http://docs.python-requests.org/en/master/user/advanced/#session-objects: - # - # "The Session object allows you to persist certain parameters across - # requests. It also persists cookies across all requests made from the - # Session instance, and will use urllib3's connection pooling. So if you're - # making several requests to the same host, the underlying TCP connection - # will be reused, which can result in a significant performance increase - # (see HTTP persistent connection)." - # - # NOTE: We use a separate requests.Session per scheme+hostname combination, - # in order to reuse connections to the same hostname to improve efficiency, - # but avoiding sharing state between different hosts-scheme combinations to - # minimize subtle security issues. Some cookies may not be HTTP-safe. - self._sessions = {} - - - def fetch(self, url, required_length): - """Fetches the contents of HTTP/HTTPS url from a remote server. - - Ensures the length of the downloaded data is up to 'required_length'. - - Arguments: - url: A URL string that represents a file location. - required_length: An integer value representing the file length in bytes. - - Raises: - tuf.exceptions.SlowRetrievalError: A timeout occurs while receiving data. - tuf.exceptions.FetcherHTTPError: An HTTP error code is received. - - Returns: - A bytes iterator - """ - # Get a customized session for each new schema+hostname combination. - session = self._get_session(url) - - # Get the requests.Response object for this URL. - # - # Defer downloading the response body with stream=True. - # Always set the timeout. This timeout value is interpreted by requests as: - # - connect timeout (max delay before first byte is received) - # - read (gap) timeout (max delay between bytes received) - response = session.get(url, stream=True, - timeout=settings.SOCKET_TIMEOUT) - # Check response status. - try: - response.raise_for_status() - except requests.HTTPError as e: - response.close() - status = e.response.status_code - raise exceptions.FetcherHTTPError(str(e), status) - - - # Define a generator function to be returned by fetch. This way the caller - # of fetch can differentiate between connection and actual data download - # and measure download times accordingly. - def chunks(): - try: - bytes_received = 0 - while True: - # We download a fixed chunk of data in every round. This is so that we - # can defend against slow retrieval attacks. Furthermore, we do not - # wish to download an extremely large file in one shot. - # Before beginning the round, sleep (if set) for a short amount of - # time so that the CPU is not hogged in the while loop. 
- if settings.SLEEP_BEFORE_ROUND: - time.sleep(settings.SLEEP_BEFORE_ROUND) - - read_amount = min( - settings.CHUNK_SIZE, required_length - bytes_received) - - # NOTE: This may not handle some servers adding a Content-Encoding - # header, which may cause urllib3 to misbehave: - # https://github.com/pypa/pip/blob/404838abcca467648180b358598c597b74d568c9/src/pip/_internal/download.py#L547-L582 - data = response.raw.read(read_amount) - bytes_received += len(data) - - # We might have no more data to read. Check number of bytes downloaded. - if not data: - logger.debug('Downloaded ' + repr(bytes_received) + '/' + - repr(required_length) + ' bytes.') - - # Finally, we signal that the download is complete. - break - - yield data - - if bytes_received >= required_length: - break - - except ReadTimeoutError as e: - raise exceptions.SlowRetrievalError(str(e)) - - finally: - response.close() - - return chunks() - - - - def _get_session(self, url): - """Returns a different customized requests.Session per schema+hostname - combination. - """ - # Use a different requests.Session per schema+hostname combination, to - # reuse connections while minimizing subtle security issues. - parsed_url = parse.urlparse(url) - - if not parsed_url.scheme or not parsed_url.hostname: - raise exceptions.URLParsingError( - 'Could not get scheme and hostname from URL: ' + url) - - session_index = parsed_url.scheme + '+' + parsed_url.hostname - - logger.debug('url: ' + url) - logger.debug('session index: ' + session_index) - - session = self._sessions.get(session_index) - - if not session: - session = requests.Session() - self._sessions[session_index] = session - - # Attach some default headers to every Session. - requests_user_agent = session.headers['User-Agent'] - # Follows the RFC: https://tools.ietf.org/html/rfc7231#section-5.5.3 - tuf_user_agent = 'tuf/' + tuf.__version__ + ' ' + requests_user_agent - session.headers.update({ - # Tell the server not to compress or modify anything. - # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding#Directives - 'Accept-Encoding': 'identity', - # The TUF user agent. - 'User-Agent': tuf_user_agent}) - - logger.debug('Made new session for ' + session_index) - - else: - logger.debug('Reusing session for ' + session_index) - - return session diff --git a/tuf/roledb.py b/tuf/roledb.py deleted file mode 100755 index 53d1c094f0..0000000000 --- a/tuf/roledb.py +++ /dev/null @@ -1,1063 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - roledb.py - - - Vladimir Diaz - - - March 21, 2012. Based on a previous version of this module by Geremy Condra. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Represent a collection of roles and their organization. The caller may - create a collection of roles from those found in the 'root.json' metadata - file by calling 'create_roledb_from_root_metadata()', or individually by - adding roles with 'add_role()'. There are many supplemental functions - included here that yield useful information about the roles contained in the - database, such as extracting all the parent rolenames for a specified - rolename, deleting all the delegated roles, retrieving role paths, etc. The - Update Framework process maintains a role database for each repository. 
- - The role database is a dictionary conformant to - 'tuf.formats.ROLEDICT_SCHEMA' and has the form: - - {'repository_name': { - 'rolename': {'keyids': ['34345df32093bd12...'], - 'threshold': 1 - 'signatures': ['abcd3452...'], - 'paths': ['role.json'], - 'path_hash_prefixes': ['ab34df13'], - 'delegations': {'keys': {}, 'roles': {}}} - - The 'name', 'paths', 'path_hash_prefixes', and 'delegations' dict keys are - optional. -""" - -import logging -import copy - -import securesystemslib # pylint: disable=unused-import -from securesystemslib import exceptions as sslib_exceptions -from securesystemslib import formats as sslib_formats - -from tuf import exceptions -from tuf import formats - -# See 'tuf.log' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - -# The role database. -_roledb_dict = {} -_roledb_dict['default'] = {} - -# A dictionary (where the keys are repository names) containing a set of roles -# that have been modified (e.g., via update_roleinfo()) and should be written -# to disk. -_dirty_roles = {} -_dirty_roles['default'] = set() - - -TOP_LEVEL_ROLES = ['root', 'targets', 'snapshot', 'timestamp'] - - -def create_roledb_from_root_metadata(root_metadata, repository_name='default'): - """ - - Create a role database containing all of the unique roles found in - 'root_metadata'. - - - root_metadata: - A dictionary conformant to 'tuf.formats.ROOT_SCHEMA'. The - roles found in the 'roles' field of 'root_metadata' is needed by this - function. - - repository_name: - The name of the repository to store 'root_metadata'. If not supplied, - 'rolename' is added to the 'default' repository. - - - securesystemslib.exceptions.FormatError, if 'root_metadata' does not have - the correct object format. - - securesystemslib.exceptions.Error, if one of the roles found in - 'root_metadata' contains an invalid delegation (i.e., a nonexistent parent - role). - - - Calls add_role(). The old role database for 'repository_name' is replaced. - - - None. - """ - - # Does 'root_metadata' have the correct object format? - # This check will ensure 'root_metadata' has the appropriate number of objects - # and object types, and that all dict keys are properly named. - # Raises securesystemslib.exceptions.FormatError. - formats.ROOT_SCHEMA.check_match(root_metadata) - - # Is 'repository_name' formatted correctly? - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - global _roledb_dict - global _dirty_roles - - # Clear the role database. - if repository_name in _roledb_dict: - _roledb_dict[repository_name].clear() - - # Ensure _roledb_dict and _dirty_roles contains an entry for - # 'repository_name' so that adding the newly created roleinfo succeeds. - _roledb_dict[repository_name] = {} - _dirty_roles[repository_name] = set() - - # Do not modify the contents of the 'root_metadata' argument. - root_metadata = copy.deepcopy(root_metadata) - - # Iterate the roles found in 'root_metadata' and add them to '_roledb_dict'. - # Duplicates are avoided. 
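# Editorial note: for illustration, the entry the loop below builds for
# 'root' ends up shaped like the following (keyid, version and expiry
# values are placeholders, not real repository data):
#
#   {'keyids': ['1a2b...'], 'threshold': 1,
#    'version': 1, 'expires': '2030-01-01T00:00:00Z',
#    'previous_keyids': ['1a2b...'], 'previous_threshold': 1,
#    'signatures': [], 'signing_keyids': [], 'partial_loaded': False}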
- for rolename, roleinfo in root_metadata['roles'].items(): - if rolename == 'root': - roleinfo['version'] = root_metadata['version'] - roleinfo['expires'] = root_metadata['expires'] - roleinfo['previous_keyids'] = roleinfo['keyids'] - roleinfo['previous_threshold'] = roleinfo['threshold'] - - roleinfo['signatures'] = [] - roleinfo['signing_keyids'] = [] - roleinfo['partial_loaded'] = False - - if rolename.startswith('targets'): - roleinfo['paths'] = {} - roleinfo['delegations'] = {'keys': {}, 'roles': []} - - add_role(rolename, roleinfo, repository_name) - - - - - -def create_roledb(repository_name): - """ - - Create a roledb for the repository named 'repository_name'. This function - is intended for creation of a non-default roledb. - - - repository_name: - The name of the repository to create. An empty roledb is created, and - roles may be added via add_role(rolename, roleinfo, repository_name) or - create_roledb_from_root_metadata(root_metadata, repository_name). - - - securesystemslib.exceptions.FormatError, if 'repository_name' is improperly - formatted. - - securesystemslib.exceptions.InvalidNameError, if 'repository_name' already - exists in the roledb. - - - None. - - - None. - """ - - # Is 'repository_name' properly formatted? If not, raise - # 'securesystemslib.exceptions.FormatError'. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - global _roledb_dict - global _dirty_roles - - if repository_name in _roledb_dict or repository_name in _dirty_roles: - raise sslib_exceptions.InvalidNameError('Repository name' - ' already exists: ' + repr(repository_name)) - - _roledb_dict[repository_name] = {} - _dirty_roles[repository_name] = set() - - - - - -def remove_roledb(repository_name): - """ - - Remove the roledb belonging to 'repository_name'. - - - repository_name: - The name of the repository to remove. 'repository_name' cannot be - 'default' because the default repository is expected to always exist. - - - securesystemslib.exceptions.FormatError, if 'repository_name' is improperly - formatted. - - securesystemslib.exceptions.InvalidNameError, if 'repository_name' is the - 'default' repository name. The 'default' repository name should always - exist. - - - None. - - - None. - """ - - # Is 'repository_name' properly formatted? If not, raise - # 'securesystemslib.exceptions.FormatError'. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - global _roledb_dict - global _dirty_roles - - if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - logger.warning('Repository name does not exist:' - ' ' + repr(repository_name)) - return - - if repository_name == 'default': - raise sslib_exceptions.InvalidNameError('Cannot remove the' - ' default repository: ' + repr(repository_name)) - - del _roledb_dict[repository_name] - del _dirty_roles[repository_name] - - - -def add_role(rolename, roleinfo, repository_name='default'): - """ - - Add to the role database the 'roleinfo' associated with 'rolename'. - - - rolename: - An object representing the role's name, conformant to 'ROLENAME_SCHEMA' - (e.g., 'root', 'snapshot', 'timestamp'). - - roleinfo: - An object representing the role associated with 'rolename', conformant to - ROLEDB_SCHEMA. 'roleinfo' has the form: - {'keyids': ['34345df32093bd12...'], - 'threshold': 1, - 'signatures': ['ab23dfc32'] - 'paths': ['path/to/target1', 'path/to/target2', ...], - 'path_hash_prefixes': ['a324fcd...', ...], - 'delegations': {'keys': } - - The 'paths', 'path_hash_prefixes', and 'delegations' dict keys are - optional. 
- - The 'target' role has an additional 'paths' key. Its value is a list of - strings representing the path of the target file(s). - - repository_name: - The name of the repository to store 'rolename'. If not supplied, - 'rolename' is added to the 'default' repository. - - - securesystemslib.exceptions.FormatError, if 'rolename' or 'roleinfo' does - not have the correct object format. - - securesystemslib.exceptions.RoleAlreadyExistsError, if 'rolename' has - already been added. - - securesystemslib.exceptions.InvalidNameError, if 'rolename' is improperly - formatted, or 'repository_name' does not exist. - - - The role database is modified. - - - None. - """ - - # Does 'rolename' have the correct object format? - # This check will ensure 'rolename' has the appropriate number of objects - # and object types, and that all dict keys are properly named. - formats.ROLENAME_SCHEMA.check_match(rolename) - - # Does 'roleinfo' have the correct object format? - formats.ROLEDB_SCHEMA.check_match(roleinfo) - - # Is 'repository_name' correctly formatted? - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - global _roledb_dict - - # Raises securesystemslib.exceptions.InvalidNameError. - _validate_rolename(rolename) - - if repository_name not in _roledb_dict: - raise sslib_exceptions.InvalidNameError('Repository name does not exist: ' + repository_name) - - if rolename in _roledb_dict[repository_name]: - raise exceptions.RoleAlreadyExistsError('Role already exists: ' + rolename) - - _roledb_dict[repository_name][rolename] = copy.deepcopy(roleinfo) - - - - - -def update_roleinfo(rolename, roleinfo, mark_role_as_dirty=True, repository_name='default'): - """ - - Modify 'rolename's _roledb_dict entry to include the new 'roleinfo'. - 'rolename' is also added to the _dirty_roles set. Roles added to - '_dirty_roles' are marked as modified and can be used by the repository - tools to determine which roles need to be written to disk. - - - rolename: - An object representing the role's name, conformant to 'ROLENAME_SCHEMA' - (e.g., 'root', 'snapshot', 'timestamp'). - - roleinfo: - An object representing the role associated with 'rolename', conformant to - ROLEDB_SCHEMA. 'roleinfo' has the form: - {'name': 'role_name', - 'keyids': ['34345df32093bd12...'], - 'threshold': 1, - 'paths': ['path/to/target1', 'path/to/target2', ...], - 'path_hash_prefixes': ['a324fcd...', ...]} - - The 'name', 'paths', and 'path_hash_prefixes' dict keys are optional. - - The 'target' role has an additional 'paths' key. Its value is a list of - strings representing the path of the target file(s). - - mark_role_as_dirty: - A boolean indicating whether the updated 'roleinfo' for 'rolename' should - be marked as dirty. The caller might not want to mark 'rolename' as - dirty if it is loading metadata from disk and only wants to populate - roledb.py. Likewise, add_role() would support a similar boolean to allow - the repository tools to successfully load roles via load_repository() - without needing to mark these roles as dirty (default behavior). - - repository_name: - The name of the repository to update the roleinfo of 'rolename'. If not - supplied, the 'default' repository is searched. - - - securesystemslib.exceptions.FormatError, if 'rolename' or 'roleinfo' does - not have the correct object format. - - tuf.exceptions.UnknownRoleError, if 'rolename' cannot be found - in the role database. 
- - securesystemslib.exceptions.InvalidNameError, if 'rolename' is improperly - formatted, or 'repository_name' does not exist in the role database. - - - The role database is modified. - - - None. - """ - - # Does the arguments have the correct object format? - # This check will ensure arguments have the appropriate number of objects - # and object types, and that all dict keys are properly named. - formats.ROLENAME_SCHEMA.check_match(rolename) - sslib_formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Does 'roleinfo' have the correct object format? - formats.ROLEDB_SCHEMA.check_match(roleinfo) - - # Raises securesystemslib.exceptions.InvalidNameError. - _validate_rolename(rolename) - - global _roledb_dict - global _dirty_roles - - if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise sslib_exceptions.InvalidNameError('Repository name does not' ' exist: ' + - repository_name) - - if rolename not in _roledb_dict[repository_name]: - raise exceptions.UnknownRoleError('Role does not exist: ' + rolename) - - # Update the global _roledb_dict and _dirty_roles structures so that - # the latest 'roleinfo' is available to other modules, and the repository - # tools know which roles should be saved to disk. - _roledb_dict[repository_name][rolename] = copy.deepcopy(roleinfo) - - if mark_role_as_dirty: - _dirty_roles[repository_name].add(rolename) - - - - - -def get_dirty_roles(repository_name='default'): - """ - - A function that returns a list of the roles that have been modified. Tools - that write metadata to disk can use the list returned to determine which - roles should be written. - - - repository_name: - The name of the repository to get the dirty roles. If not supplied, the - 'default' repository is searched. - - - securesystemslib.exceptions.FormatError, if 'repository_name' is improperly - formatted. - - securesystemslib.exceptions.InvalidNameError, if 'repository_name' does not - exist in the role database. - - - None. - - - A sorted list of the roles that have been modified. - """ - - # Does 'repository_name' have the correct format? Raise - # 'securesystemslib.exceptions.FormatError' if not. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - global _roledb_dict - global _dirty_roles - - if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise sslib_exceptions.InvalidNameError('Repository name does' - ' not' ' exist: ' + repository_name) - - return sorted(list(_dirty_roles[repository_name])) - - - -def mark_dirty(roles, repository_name='default'): - """ - - Mark the list of 'roles' as dirty. - - - repository_name: - The name of the repository to get the dirty roles. If not supplied, the - 'default' repository is searched. - - roles: - A list of roles that should be marked as dirty. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - securesystemslib.exceptions.InvalidNameError, if 'repository_name' does not - exist in the role database. - - - None. - - - None. - """ - - # Are the arguments properly formatted? If not, raise - # securesystemslib.exceptions.FormatError. 
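# Editorial sketch: the dirty-role bookkeeping above enables a simple
# write-back loop in the tools that consume this module ('write_role' is a
# hypothetical writer, not part of this file):
#
#   update_roleinfo('snapshot', roleinfo)      # marks 'snapshot' dirty
#   for rolename in get_dirty_roles():
#       write_role(rolename)                   # persist the role to disk
#   unmark_dirty(get_dirty_roles())            # reset the bookkeeping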
- sslib_formats.NAMES_SCHEMA.check_match(roles) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - global _roledb_dict - global _dirty_roles - - if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise sslib_exceptions.InvalidNameError('Repository name does' - ' not' ' exist: ' + repository_name) - - _dirty_roles[repository_name].update(roles) - - - -def unmark_dirty(roles, repository_name='default'): - """ - - No longer mark the roles in 'roles' as dirty. - - - repository_name: - The name of the repository to get the dirty roles. If not supplied, the - 'default' repository is searched. - - roles: - A list of roles that should no longer be marked as dirty. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - securesystemslib.exceptions.InvalidNameError, if 'repository_name' does not - exist in the role database. - - - None. - - - None. - """ - - # Are the arguments properly formatted? If not, raise - # securesystemslib.exceptions.FormatError. - sslib_formats.NAMES_SCHEMA.check_match(roles) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - global _roledb_dict - global _dirty_roles - - if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise sslib_exceptions.InvalidNameError('Repository name does' - ' not exist: ' + repository_name) - - for role in roles: - try: - _dirty_roles[repository_name].remove(role) - - except (KeyError, ValueError): - logger.debug(repr(role) + ' is not dirty.') - - - -def role_exists(rolename, repository_name='default'): - """ - - Verify whether 'rolename' is stored in the role database. - - - rolename: - An object representing the role's name, conformant to 'ROLENAME_SCHEMA' - (e.g., 'root', 'snapshot', 'timestamp'). - - repository_name: - The name of the repository to check whether 'rolename' exists. If not - supplied, the 'default' repository is searched. - - - securesystemslib.exceptions.FormatError, if 'rolename' does not have the - correct object format. - - securesystemslib.exceptions.InvalidNameError, if 'rolename' is incorrectly - formatted, or 'repository_name' does not exist in the role database. - - - None. - - - Boolean. True if 'rolename' is found in the role database, False otherwise. - """ - - # Raise securesystemslib.exceptions.FormatError, - # securesystemslib.exceptions.InvalidNameError if the arguments are invalid. - # We do not intercept securesystemslib.exceptions.FormatError - # or securesystemslib.exceptions.InvalidNameError exceptions. - try: - _check_rolename(rolename, repository_name) - - except exceptions.UnknownRoleError: - return False - - return True - - - - - -def remove_role(rolename, repository_name='default'): - """ - - Remove 'rolename'. Delegated roles were previously removed as well, - but this step is longer supported since the repository can resemble - a graph of delegations. That is, we shouldn't delete rolename's - delegations because another role may have a valid delegation - to it, whereas before the only valid delegation to it must be from - 'rolename' (repository resembles a tree of delegations). - - - rolename: - An object representing the role's name, conformant to 'ROLENAME_SCHEMA' - (e.g., 'root', 'snapshot', 'timestamp'). - - repository_name: - The name of the repository to remove the role. If not supplied, the - 'default' repository is searched. - - - securesystemslib.exceptions.FormatError, if 'rolename' does not have the - correct object format. 
- - tuf.exceptions.UnknownRoleError, if 'rolename' cannot be found - in the role database. - - securesystemslib.exceptions.InvalidNameError, if 'rolename' is incorrectly - formatted, or 'repository_name' does not exist in the role database. - - - A role may be removed from the role database. - - - None. - """ - - # Does 'repository_name' have the correct format? Raise - # 'securesystemslib.exceptions.FormatError' if it is improperly formatted. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Raises securesystemslib.exceptions.FormatError, - # tuf.exceptions.UnknownRoleError, or - # securesystemslib.exceptions.InvalidNameError. - _check_rolename(rolename, repository_name) - - global _roledb_dict - global _dirty_roles - - # 'rolename' was verified to exist in _check_rolename(). - # Remove 'rolename' now. - del _roledb_dict[repository_name][rolename] - - - - - -def get_rolenames(repository_name='default'): - """ - - Return a list of the rolenames found in the role database. - - - repository_name: - The name of the repository to get the rolenames. If not supplied, the - 'default' repository is searched. - - - securesystemslib.exceptions.FormatError, if 'repository_name' is improperly - formatted. - - securesystemslib.exceptions.InvalidNameError, if 'repository_name' does not - exist in the role database. - - - None. - - - A list of rolenames. - """ - - # Does 'repository_name' have the correct format? Raise - # 'securesystemslib.exceptions.FormatError' if it is improperly formatted. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - global _roledb_dict - global _dirty_roles - - if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise sslib_exceptions.InvalidNameError('Repository name does' - ' not' ' exist: ' + repository_name) - - return list(_roledb_dict[repository_name].keys()) - - - - - -def get_roleinfo(rolename, repository_name='default'): - """ - - Return the roleinfo of 'rolename'. - - {'keyids': ['34345df32093bd12...'], - 'threshold': 1, - 'signatures': ['ab453bdf...', ...], - 'paths': ['path/to/target1', 'path/to/target2', ...], - 'path_hash_prefixes': ['a324fcd...', ...], - 'delegations': {'keys': {}, 'roles': []}} - - The 'signatures', 'paths', 'path_hash_prefixes', and 'delegations' dict keys - are optional. - - - rolename: - An object representing the role's name, conformant to 'ROLENAME_SCHEMA' - (e.g., 'root', 'snapshot', 'timestamp'). - - repository_name: - The name of the repository to get the role info. If not supplied, the - 'default' repository is searched. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - tuf.exceptions.UnknownRoleError, if 'rolename' does not exist. - - securesystemslib.exceptions.InvalidNameError, if 'rolename' is incorrectly - formatted, or 'repository_name' does not exist in the role database. - - - - None. - - - The roleinfo of 'rolename'. - """ - - # Is 'repository_name' properly formatted? If not, raise - # 'securesystemslib.exceptions.FormatError'. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Raises securesystemslib.exceptions.FormatError, - # tuf.exceptions.UnknownRoleError, or - # securesystemslib.exceptions.InvalidNameError. - _check_rolename(rolename, repository_name) - - global _roledb_dict - global _dirty_roles - - return copy.deepcopy(_roledb_dict[repository_name][rolename]) - - - - - -def get_role_keyids(rolename, repository_name='default'): - """ - - Return a list of the keyids associated with 'rolename'. 
Keyids are used as - identifiers for keys (e.g., rsa key). A list of keyids are associated with - each rolename. Signing a metadata file, such as 'root.json' (Root role), - involves signing or verifying the file with a list of keys identified by - keyid. - - - rolename: - An object representing the role's name, conformant to 'ROLENAME_SCHEMA' - (e.g., 'root', 'snapshot', 'timestamp'). - - repository_name: - The name of the repository to get the role keyids. If not supplied, the - 'default' repository is searched. - - - securesystemslib.exceptions.FormatError, if the arguments do not have the - correct object format. - - tuf.exceptions.UnknownRoleError, if 'rolename' cannot be found - in the role database. - - securesystemslib.exceptions.InvalidNameError, if 'rolename' is incorrectly - formatted, or 'repository_name' does not exist in the role database. - - - None. - - - A list of keyids. - """ - - # Raise 'securesystemslib.exceptions.FormatError' if 'repository_name' is - # improperly formatted. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Raises securesystemslib.exceptions.FormatError, - # tuf.exceptions.UnknownRoleError, or - # securesystemslib.exceptions.InvalidNameError. - _check_rolename(rolename, repository_name) - - global _roledb_dict - global _dirty_roles - - roleinfo = _roledb_dict[repository_name][rolename] - - return roleinfo['keyids'] - - - - - -def get_role_threshold(rolename, repository_name='default'): - """ - - Return the threshold value of the role associated with 'rolename'. - - - rolename: - An object representing the role's name, conformant to 'ROLENAME_SCHEMA' - (e.g., 'root', 'snapshot', 'timestamp'). - - repository_name: - The name of the repository to get the role threshold. If not supplied, - the 'default' repository is searched. - - - - securesystemslib.exceptions.FormatError, if the arguments do not have the - correct object format. - - tuf.exceptions.UnknownRoleError, if 'rolename' cannot be found - in the role database. - - securesystemslib.exceptions.InvalidNameError, if 'rolename' is incorrectly - formatted, or 'repository_name' does not exist in the role database. - - - None. - - - A threshold integer value. - """ - - # Raise 'securesystemslib.exceptions.FormatError' if 'repository_name' is - # improperly formatted. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Raises securesystemslib.exceptions.FormatError, - # tuf.exceptions.UnknownRoleError, or - # securesystemslib.exceptions.InvalidNameError. - _check_rolename(rolename, repository_name) - - global _roledb_dict - global _dirty_roles - - roleinfo = _roledb_dict[repository_name][rolename] - - return roleinfo['threshold'] - - - - - -def get_role_paths(rolename, repository_name='default'): - """ - - Return the paths of the role associated with 'rolename'. - - - rolename: - An object representing the role's name, conformant to 'ROLENAME_SCHEMA' - (e.g., 'root', 'snapshot', 'timestamp'). - - repository_name: - The name of the repository to get the role paths. If not supplied, the - 'default' repository is searched. - - - securesystemslib.exceptions.FormatError, if the arguments do not have the - correct object format. - - tuf.exceptions.UnknownRoleError, if 'rolename' cannot be found - in the role database. - - securesystemslib.exceptions.InvalidNameError, if 'rolename' is incorrectly - formatted, or 'repository_name' does not exist in the role database. - - - None. - - - A list of paths. 
- """ - - # Raise 'securesystemslib.exceptions.FormatError' if 'repository_name' is - # improperly formatted. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Raises securesystemslib.exceptions.FormatError, - # tuf.exceptions.UnknownRoleError, or - # securesystemslib.exceptions.InvalidNameError. - _check_rolename(rolename, repository_name) - - global _roledb_dict - global _dirty_roles - - roleinfo = _roledb_dict[repository_name][rolename] - - # Paths won't exist for non-target roles. - try: - return roleinfo['paths'] - - except KeyError: - return dict() - - - - - -def get_delegated_rolenames(rolename, repository_name='default'): - """ - - Return the delegations of a role. If 'rolename' is 'tuf' and the role - database contains ['django', 'requests', 'cryptography'], in 'tuf's - delegations field, return ['django', 'requests', 'cryptography']. - - - rolename: - An object representing the role's name, conformant to 'ROLENAME_SCHEMA' - (e.g., 'root', 'snapshot', 'timestamp'). - - repository_name: - The name of the repository to get the delegated rolenames. If not - supplied, the 'default' repository is searched. - - - securesystemslib.exceptions.FormatError, if the arguments do not have the - correct object format. - - tuf.exceptions.UnknownRoleError, if 'rolename' cannot be found - in the role database. - - securesystemslib.exceptions.InvalidNameError, if 'rolename' is incorrectly - formatted, or 'repository_name' does not exist in the role database. - - - None. - - - A list of rolenames. Note that the rolenames are *NOT* sorted by order of - delegation. - """ - - - # Does 'repository_name' have the correct format? Raise - # 'securesystemslib.exceptions.FormatError' if it does not. - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Raises securesystemslib.exceptions.FormatError, - # tuf.exceptions.UnknownRoleError, or - # securesystemslib.exceptions.InvalidNameError. - _check_rolename(rolename, repository_name) - - global _roledb_dict - global _dirty_roles - - # get_roleinfo() raises a 'securesystemslib.exceptions.InvalidNameError' if - # 'repository_name' does not exist in the role database. - roleinfo = get_roleinfo(rolename, repository_name) - delegated_roles = [] - - for delegated_role in roleinfo['delegations']['roles']: - delegated_roles.append(delegated_role['name']) - - return delegated_roles - - - - - -def clear_roledb(repository_name='default', clear_all=False): - """ - - Reset the roledb database. - - - repository_name: - The name of the repository to clear. If not supplied, the 'default' - repository is cleared. - - clear_all: - Boolean indicating whether to clear the entire roledb. - - - securesystemslib.exceptions.FormatError, if 'repository_name' does not have - the correct format. - - securesystemslib.exceptions.InvalidNameError, if 'repository_name' does not - exist in the role database. - - - None. - - - None. - """ - - # Do the arguments have the correct format? 
If not, raise - # 'securesystemslib.exceptions.FormatError' - sslib_formats.NAME_SCHEMA.check_match(repository_name) - sslib_formats.BOOLEAN_SCHEMA.check_match(clear_all) - - global _roledb_dict - global _dirty_roles - - if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise sslib_exceptions.InvalidNameError('Repository name does not' - ' exist: ' + repository_name) - - if clear_all: - _roledb_dict = {} - _roledb_dict['default'] = {} - _dirty_roles = {} - _dirty_roles['default'] = set() - return - - _roledb_dict[repository_name] = {} - _dirty_roles[repository_name] = set() - - - - - -def _check_rolename(rolename, repository_name='default'): - """ Raise securesystemslib.exceptions.FormatError if 'rolename' does not match - 'tuf.formats.ROLENAME_SCHEMA', - tuf.exceptions.UnknownRoleError if 'rolename' is not found in the - role database, or securesystemslib.exceptions.InvalidNameError if - 'repository_name' does not exist in the role database. - """ - - # Does 'rolename' have the correct object format? - # This check will ensure 'rolename' has the appropriate number of objects - # and object types, and that all dict keys are properly named. - formats.ROLENAME_SCHEMA.check_match(rolename) - - # Does 'repository_name' have the correct format? - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Raises securesystemslib.exceptions.InvalidNameError. - _validate_rolename(rolename) - - global _roledb_dict - global _dirty_roles - - if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise sslib_exceptions.InvalidNameError('Repository name does not' - ' exist: ' + repository_name) - - if rolename not in _roledb_dict[repository_name]: - raise exceptions.UnknownRoleError('Role name does not exist: ' + rolename) - - - - - -def _validate_rolename(rolename): - """ - Raise securesystemslib.exceptions.InvalidNameError if 'rolename' is not - formatted correctly. It is assumed 'rolename' has been checked against - 'ROLENAME_SCHEMA' prior to calling this function. """ - - if rolename == '': - raise sslib_exceptions.InvalidNameError('Rolename must *not* be' - ' an empty string.') - - if rolename != rolename.strip(): - raise sslib_exceptions.InvalidNameError('Invalid rolename.' - ' Cannot start or end with whitespace: ' + rolename) - - if rolename.startswith('/') or rolename.endswith('/'): - raise sslib_exceptions.InvalidNameError('Invalid rolename.' - ' Cannot start or end with a "/": ' + rolename) diff --git a/tuf/scripts/__init__.py b/tuf/scripts/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tuf/scripts/client.py b/tuf/scripts/client.py deleted file mode 100755 index 8f30c53648..0000000000 --- a/tuf/scripts/client.py +++ /dev/null @@ -1,236 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2018, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - client.py - - - Vladimir Diaz - - - September 2012. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Provide a basic TUF client that can update all of the metatada and target - files provided by the user-specified repository mirror. Updated files are - saved to the 'targets' directory in the current working directory. The - repository mirror is specified by the user through the '--repo' command- - line option. 
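
Recapping the roledb module that ends above: its API composes into a short lifecycle. A minimal sketch with illustrative values (a real roleinfo is populated from root.json; add_role stores a deep copy, and update_roleinfo marks the role dirty):

from tuf import roledb

roledb.create_roledb('myrepo')
roleinfo = {'keyids': ['34345df32093bd12...'], 'threshold': 1,
            'signatures': [], 'signing_keyids': [], 'partial_loaded': False}
roledb.add_role('snapshot', roleinfo, repository_name='myrepo')
roleinfo['threshold'] = 2
roledb.update_roleinfo('snapshot', roleinfo, repository_name='myrepo')
print(roledb.get_dirty_roles('myrepo'))    # ['snapshot']
roledb.remove_roledb('myrepo')
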
- - Normally, a software updater integrating TUF will develop their own costum - client module by importing 'tuf.client.updater', instantiating the required - object, and calling the desired methods to perform an update. This basic - client is provided to users who wish to give TUF a quick test run without the - hassle of writing client code. This module can also used by updaters that do - not need the customization and only require their clients to perform an - update of all the files provided by their repository mirror(s). - - For software updaters that DO require customization, see the - 'example_client.py' script. The 'example_client.py' script provides an - outline of the client code that software updaters may develop and then tailor - to their specific software updater or package manager. - - Additional tools for clients running legacy applications will also be made - available. These tools will allow secure software updates using The Update - Framework without the need to modify the original application. - - - $ client.py --repo http://localhost:8001 - $ client.py --repo http://localhost:8001 --verbose 3 - - - --verbose: - Set the verbosity level of logging messages. Accepts values 1-5. - - Example: - $ client.py --repo http://localhost:8001 --verbose 3 README.txt - - --repo: - Set the repository mirror that will be responding to client requests. - E.g., 'http://localhost:8001'. - - Example: - $ client.py --repo http://localhost:8001 README.txt -""" - -import sys -import argparse -import logging - -from tuf import exceptions -from tuf import log -from tuf import settings -from tuf.client.updater import Updater - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - - -def update_client(parsed_arguments): - """ - - Perform an update of the metadata and target files located at - 'repository_mirror'. Target files are saved to the 'targets' directory - in the current working directory. The current directory must already - include a 'metadata' directory, which in turn must contain the 'current' - and 'previous' directories. At a minimum, these two directories require - the 'root.json' metadata file. - - - parsed_arguments: - An argparse Namespace object, containing the parsed arguments. - - - tuf.exceptions.Error, if 'parsed_arguments' is not a Namespace object. - - - Connects to a repository mirror and updates the local metadata files and - any target files. Obsolete, local targets are also removed. - - - None. - """ - - if not isinstance(parsed_arguments, argparse.Namespace): - raise exceptions.Error('Invalid namespace object.') - - else: - logger.debug('We have a valid argparse Namespace object.') - - # Set the local repositories directory containing all of the metadata files. - settings.repositories_directory = '.' - - # Set the repository mirrors. This dictionary is needed by the Updater - # class of updater.py. - repository_mirrors = {'mirror': {'url_prefix': parsed_arguments.repo, - 'metadata_path': 'metadata', 'targets_path': 'targets'}} - - # Create the repository object using the repository name 'repository' - # and the repository mirrors defined above. - updater = Updater('tufrepo', repository_mirrors) - - # The local destination directory to save the target files. - destination_directory = './tuftargets' - - # Refresh the repository's top-level roles... - updater.refresh(unsafely_update_root_if_necessary=False) - - # ... 
and store the target information for the target file specified on the - # command line, and determine which of these targets have been updated. - target_fileinfo = [] - for target in parsed_arguments.targets: - target_fileinfo.append(updater.get_one_valid_targetinfo(target)) - - updated_targets = updater.updated_targets(target_fileinfo, destination_directory) - - # Retrieve each of these updated targets and save them to the destination - # directory. - for target in updated_targets: - try: - updater.download_target(target, destination_directory) - - except exceptions.DownloadError: - pass - - # Remove any files from the destination directory that are no longer being - # tracked. - updater.remove_obsolete_targets(destination_directory) - - - - - -def parse_arguments(): - """ - - Parse the command-line options and set the logging level - as specified by the user through the --verbose option. - 'client' expects the '--repo' to be set by the user. - - Example: - $ client.py --repo http://localhost:8001 LICENSE - - If the required option is unset, a parser error is printed - and the scripts exits. - - - None. - - - None. - - - Sets the logging level for TUF logging. - - - The parsed_arguments (i.e., a argparse Namespace object). - """ - - parser = argparse.ArgumentParser( - description='Retrieve file from TUF repository.') - - # Add the options supported by 'basic_client' to the option parser. - parser.add_argument('-v', '--verbose', type=int, default=2, - choices=range(0, 6), help='Set the verbosity level of logging messages.' - ' The lower the setting, the greater the verbosity. Supported logging' - ' levels: 0=UNSET, 1=DEBUG, 2=INFO, 3=WARNING, 4=ERROR,' - ' 5=CRITICAL') - - parser.add_argument('-r', '--repo', type=str, required=True, metavar='', - help='Specify the remote repository\'s URI' - ' (e.g., http://www.example.com:8001/tuf/). The client retrieves' - ' updates from the remote repository.') - - parser.add_argument('targets', nargs='+', metavar='', help='Specify' - ' the target files to retrieve from the specified TUF repository.') - - parsed_arguments = parser.parse_args() - - - # Set the logging level. - if parsed_arguments.verbose == 5: - log.set_log_level(logging.CRITICAL) - - elif parsed_arguments.verbose == 4: - log.set_log_level(logging.ERROR) - - elif parsed_arguments.verbose == 3: - log.set_log_level(logging.WARNING) - - elif parsed_arguments.verbose == 2: - log.set_log_level(logging.INFO) - - elif parsed_arguments.verbose == 1: - log.set_log_level(logging.DEBUG) - - else: - log.set_log_level(logging.NOTSET) - - # Return the repository mirror containing the metadata and target files. - return parsed_arguments - - - -if __name__ == '__main__': - - # Parse the command-line arguments and set the logging level. - arguments = parse_arguments() - - # Perform an update of all the files in the 'targets' directory located in - # the current directory. - try: - update_client(arguments) - - except (exceptions.NoWorkingMirrorError, exceptions.RepositoryError, - exceptions.FormatError, exceptions.Error) as e: - sys.stderr.write('Error: ' + str(e) + '\n') - sys.exit(1) - - # Successfully updated the client's target files. 
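# Editorial sketch: the update flow implemented above, condensed; the mirror
# URL and target name are examples:
#
#   settings.repositories_directory = '.'
#   mirrors = {'mirror': {'url_prefix': 'http://localhost:8001',
#                         'metadata_path': 'metadata',
#                         'targets_path': 'targets'}}
#   updater = Updater('tufrepo', mirrors)
#   updater.refresh(unsafely_update_root_if_necessary=False)
#   info = updater.get_one_valid_targetinfo('README.txt')
#   for target in updater.updated_targets([info], './tuftargets'):
#       updater.download_target(target, './tuftargets')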
- sys.exit(0) diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py deleted file mode 100755 index 0b61b2bc59..0000000000 --- a/tuf/scripts/repo.py +++ /dev/null @@ -1,1149 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - repo.py - - - Vladimir Diaz - - - January 2018. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Provide a command-line interface to create and modify TUF repositories. The - CLI removes the need to write Python code when creating or modifying - repositories, which is the case with repository_tool.py and - developer_tool.py. - - Note: - 'python3 -m pip install securesystemslib[crypto,pynacl]' is required by the CLI, - which installs the 3rd-party dependencies: cryptography and pynacl. - - - Note: arguments within brackets are optional. - - $ repo.py --init - [--consistent, --bare, --path, --root_pw, --targets_pw, - --snapshot_pw, --timestamp_pw] - $ repo.py --add
      <target> <dir>
    ... [--path, --recursive] - $ repo.py --remove - $ repo.py --distrust --pubkeys [--role] - $ repo.py --trust --pubkeys [--role] - $ repo.py --sign [--role ] - $ repo.py --key - [--filename - --path , --pw [my_password]] - $ repo.py --delegate --delegatee - --pubkeys - [--role --terminating --threshold - --sign ] - $ repo.py --revoke --delegatee - [--role --sign ] - $ repo.py --verbose <0-5> - $ repo.py --clean [--path] - - - --init: - Create new TUF repository in current working or specified directory. - - --consistent: - Enable consistent snapshots for newly created TUF repository. - - --bare: - Specify creation of bare TUF repository with no key created or set. - - --path: - Choose specified path location of a TUF repository or key(s). - - --role: - Specify top-level role(s) affected by the main command-line option. - - --pubkeys: - Indicate location of key(s) affected by the main command-line option. - - --root_pw: - Set password for encrypting top-level key file of root role. - - --targets_pw: - Set password for encrypting top-level key file of targets role. - - --snapshot_pw: - Set password for encrypting top-level key file of snapshot role. - - --timestamp_pw: - Set password for encrypting top-level key file of timestamp role. - - --add: - Add file specified by to the Targets metadata. - - --recursive: - Include files in subdirectories of specified directory . - - --remove: - Remove target files from Targets metadata matching . - - --distrust: - Discontinue trust of keys located in directory of a role. - - --trust: - Indicate trusted keys located in directory of a role. - - --sign: - Sign metadata of target role(s) with keys in specified directory. - - --key: - Generate cryptographic key of specified type (default: Ed25519). - - --filename: - Specify filename associated with generated top-level key. - - --pw: - Set password for the generated key of specified type . - - --delegate: - Delegate trust of target files from Targets role (or specified - in --role) to --delegatee role with specified . - - --delegatee: - Specify role that is targeted by delegator in --role to sign for - target files matching delegated or in revocation of trust. - - --terminating: - Mark delegation to --delegatee role from delegator as a terminating one. - - --threshold: - Specify signature threshold of --delegatee role as the value . - - --revoke: - Revoke trust of target files from delegated role (--delegatee) - - --verbose: - Set the verbosity level of logging messages. Accepts values 1-5. - - --clean: - Delete repo in current working or specified directory. -""" - -import os -import sys -import logging -import argparse -import shutil -import time -import fnmatch - -import securesystemslib # pylint: disable=unused-import -from securesystemslib import exceptions as sslib_exceptions -from securesystemslib import formats as sslib_formats -from securesystemslib import interface as sslib_interface -from securesystemslib import keys as sslib_keys -from securesystemslib import settings as sslib_settings -from securesystemslib import util as sslib_util - -from tuf import exceptions -from tuf import formats -from tuf import keydb -from tuf import log -from tuf import repository_tool as repo_tool -from tuf import roledb - - -# See 'log.py' to learn how logging is handled in TUF. 
-logger = logging.getLogger(__name__) - -repo_tool.disable_console_log_messages() - -PROG_NAME = 'repo.py' - -REPO_DIR = 'tufrepo' -CLIENT_DIR = 'tufclient' -KEYSTORE_DIR = 'tufkeystore' - -ROOT_KEY_NAME = 'root_key' -TARGETS_KEY_NAME = 'targets_key' -SNAPSHOT_KEY_NAME = 'snapshot_key' -TIMESTAMP_KEY_NAME = 'timestamp_key' - -STAGED_METADATA_DIR = 'metadata.staged' -METADATA_DIR = 'metadata' - -# The keytype strings, as expected on the command line. -ED25519_KEYTYPE = 'ed25519' -ECDSA_KEYTYPE = 'ecdsa' -RSA_KEYTYPE = 'rsa' -SUPPORTED_CLI_KEYTYPES = (ECDSA_KEYTYPE, ED25519_KEYTYPE, RSA_KEYTYPE) - -# The supported keytype strings (as they appear in metadata) are listed here -# because they won't necessarily match the key types supported by -# securesystemslib. -SUPPORTED_KEY_TYPES = ('rsa', 'ed25519', 'ecdsa', 'ecdsa-sha2-nistp256') - -# pylint: disable=protected-access -# ... to allow use of sslib _generate_and_write_*_keypair convenience methods - -def process_command_line_arguments(parsed_arguments): - """ - - Perform the relevant operations on the repo according to the chosen - command-line options. Which functions are executed depends on - 'parsed_arguments'. For instance, the --init and --clean options will - cause the init_repo() and clean_repo() functions to be called. - Multiple operations can be executed in one invocation of the CLI. - - - parsed_arguments: - The parsed arguments returned by argparse.parse_args(). - - - securesystemslib.exceptions.Error, if any of the arguments are - improperly formatted or if any of the argument could not be processed. - - - None. - - - None. - """ - - # Do we have a valid argparse Namespace? - if not isinstance(parsed_arguments, argparse.Namespace): - raise exceptions.Error('Invalid namespace: ' + repr(parsed_arguments)) - - else: - logger.debug('We have a valid argparse Namespace.') - - # TODO: Make sure the order that the arguments are processed allows for the - # most convenient use of multiple options in one invocation of the CLI. For - # instance, it might be best for --clean to be processed first before --init - # so that a user can do the following: repo.py --clean --init (that is, first - # clear the repo in the current working directory, and then initialize a new - # one. - if parsed_arguments.clean: - clean_repo(parsed_arguments) - - if parsed_arguments.init: - init_repo(parsed_arguments) - - if parsed_arguments.remove: - remove_targets(parsed_arguments) - - if parsed_arguments.add: - add_targets(parsed_arguments) - - if parsed_arguments.distrust: - remove_verification_key(parsed_arguments) - - if parsed_arguments.trust: - add_verification_key(parsed_arguments) - - if parsed_arguments.key: - gen_key(parsed_arguments) - - if parsed_arguments.revoke: - revoke(parsed_arguments) - - if parsed_arguments.delegate: - delegate(parsed_arguments) - - # --sign should be processed last, after the other options, so that metadata - # is signed last after potentially being modified by the other options. 
- if parsed_arguments.sign: - sign_role(parsed_arguments) - - - -def delegate(parsed_arguments): - - if not parsed_arguments.delegatee: - raise exceptions.Error( - '--delegatee must be set to perform the delegation.') - - if parsed_arguments.delegatee in ('root', 'snapshot', 'timestamp', 'targets'): - raise exceptions.Error( - 'Cannot delegate to the top-level role: ' + repr(parsed_arguments.delegatee)) - - if not parsed_arguments.pubkeys: - raise exceptions.Error( - '--pubkeys must be set to perform the delegation.') - - public_keys = [] - for public_key in parsed_arguments.pubkeys: - imported_pubkey = import_publickey_from_file(public_key) - public_keys.append(imported_pubkey) - - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - if parsed_arguments.role == 'targets': - repository.targets.delegate(parsed_arguments.delegatee, public_keys, - parsed_arguments.delegate, parsed_arguments.threshold, - parsed_arguments.terminating, list_of_targets=None, - path_hash_prefixes=None) - - targets_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), - parsed_arguments.targets_pw) - - repository.targets.load_signing_key(targets_private) - - # A delegated (non-top-level-Targets) role. - else: - repository.targets(parsed_arguments.role).delegate( - parsed_arguments.delegatee, public_keys, - parsed_arguments.delegate, parsed_arguments.threshold, - parsed_arguments.terminating, list_of_targets=None, - path_hash_prefixes=None) - - # Update the required top-level roles, Snapshot and Timestamp, to make a new - # release. Automatically making a new release can be disabled via - # --no_release. - if not parsed_arguments.no_release: - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - consistent_snapshot = roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - repository.writeall(consistent_snapshot=consistent_snapshot) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def revoke(parsed_arguments): - - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - if parsed_arguments.role == 'targets': - repository.targets.revoke(parsed_arguments.delegatee) - - targets_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), - parsed_arguments.targets_pw) - - repository.targets.load_signing_key(targets_private) - - # A non-top-level role. - else: - repository.targets(parsed_arguments.role).revoke(parsed_arguments.delegatee) - - role_privatekey = import_privatekey_from_file(parsed_arguments.sign) - - repository.targets(parsed_arguments.role).load_signing_key(role_privatekey) - - # Update the required top-level roles, Snapshot and Timestamp, to make a new - # release. Automatically making a new release can be disabled via - # --no_release. 
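Stripped of its CLI plumbing, the delegate() function above reduces to a handful of repository_tool calls. A sketch, assuming an existing 'tufrepo' whose top-level keys live in 'tufkeystore' with password 'pw'; the 'unclaimed' rolename and 'foo*.tgz' pattern are illustrative.

from tuf import repository_tool as repo_tool

repository = repo_tool.load_repository('tufrepo')

# Trust 'unclaimed' to sign for any target matching 'foo*.tgz'.
unclaimed_pubkey = import_publickey_from_file('tufkeystore/unclaimed_key.pub')
repository.targets.delegate('unclaimed', [unclaimed_pubkey], ['foo*.tgz'],
    threshold=1, terminating=False)

# The delegating role re-signs, and Snapshot/Timestamp are bumped for a
# new release, exactly as in delegate() above.
repository.targets.load_signing_key(
    import_privatekey_from_file('tufkeystore/targets_key', 'pw'))
repository.snapshot.load_signing_key(
    import_privatekey_from_file('tufkeystore/snapshot_key', 'pw'))
repository.timestamp.load_signing_key(
    import_privatekey_from_file('tufkeystore/timestamp_key', 'pw'))
repository.writeall()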
- if not parsed_arguments.no_release: - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - consistent_snapshot = roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - repository.writeall(consistent_snapshot=consistent_snapshot) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def gen_key(parsed_arguments): - - if parsed_arguments.filename: - parsed_arguments.filename = os.path.join(parsed_arguments.path, - KEYSTORE_DIR, parsed_arguments.filename) - - keypath = None - - keygen_kwargs = { - "password": parsed_arguments.pw, - "filepath": parsed_arguments.filename, - "prompt": (not parsed_arguments.pw) # prompt if no default or passed pw - } - - if parsed_arguments.key not in SUPPORTED_CLI_KEYTYPES: - raise exceptions.Error( - 'Invalid key type: ' + repr(parsed_arguments.key) + '. Supported' - ' key types: ' + repr(SUPPORTED_CLI_KEYTYPES)) - - elif parsed_arguments.key == ECDSA_KEYTYPE: - keypath = sslib_interface._generate_and_write_ecdsa_keypair( - **keygen_kwargs) - - elif parsed_arguments.key == ED25519_KEYTYPE: - keypath = sslib_interface._generate_and_write_ed25519_keypair( - **keygen_kwargs) - - # RSA key.. - else: - keypath = sslib_interface._generate_and_write_rsa_keypair( - **keygen_kwargs) - - - # If a filename is not given, the generated keypair is saved to the current - # working directory. By default, the keypair is written to <KEYID>.pub - # and <KEYID> (private key). - if not parsed_arguments.filename: - privkey_repo_path = os.path.join(parsed_arguments.path, - KEYSTORE_DIR, os.path.basename(keypath)) - pubkey_repo_path = os.path.join(parsed_arguments.path, - KEYSTORE_DIR, os.path.basename(keypath + '.pub')) - - sslib_util.ensure_parent_dir(privkey_repo_path) - sslib_util.ensure_parent_dir(pubkey_repo_path) - - # Move them from the CWD to the repo's keystore. - shutil.move(keypath, privkey_repo_path) - shutil.move(keypath + '.pub', pubkey_repo_path) - - - -def import_privatekey_from_file(keypath, password=None): - # Note: should securesystemslib support this functionality (import any - # privatekey type)? - # If the caller does not provide a password argument, prompt for one. - # Password confirmation is disabled here, which should ideally happen only - # when creating encrypted key files. - if password is None: # pragma: no cover - - # It is safe to specify the full path of 'filepath' in the prompt and not - # worry about leaking sensitive information about the key's location. - # However, care should be taken when including the full path in exceptions - # and log files. - password = sslib_interface.get_password('Enter a password for' - ' the encrypted key (' + sslib_interface.TERM_RED + repr(keypath) + sslib_interface.TERM_RESET + '): ', - confirm=False) - - # Does 'password' have the correct format? - sslib_formats.PASSWORD_SCHEMA.check_match(password) - - # Store the encrypted contents of 'filepath' prior to calling the decryption - # routine.
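gen_key() can likewise be driven directly; a sketch with a hypothetical 'my_key' filename (all four attributes the function reads are shown):

import argparse

# Writes an encrypted Ed25519 keypair under ./tufkeystore/: 'my_key'
# (private) and 'my_key.pub' (public).
gen_key(argparse.Namespace(
    key='ed25519', filename='my_key', path='.', pw='pw'))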
- encrypted_key = None - - with open(keypath, 'rb') as file_object: - encrypted_key = file_object.read().decode('utf-8') - - # Decrypt the loaded key file, calling the 'cryptography' library to generate - # the derived encryption key from 'password'. Raise - # 'securesystemslib.exceptions.CryptoError' if the decryption fails. - try: - key_object = sslib_keys.decrypt_key(encrypted_key, password) - - except sslib_exceptions.CryptoError: - try: - logger.debug( - 'Decryption failed. Attempting to import a private PEM instead.') - key_object = sslib_keys.import_rsakey_from_private_pem( - encrypted_key, 'rsassa-pss-sha256', password) - - except sslib_exceptions.CryptoError as error: - raise exceptions.Error(repr(keypath) + ' cannot be' - ' imported, possibly because an invalid key file is given or' - ' the decryption password is incorrect.') from error - - if key_object['keytype'] not in SUPPORTED_KEY_TYPES: - raise exceptions.Error('Trying to import an unsupported key' - ' type: ' + repr(key_object['keytype']) + '.' - ' Supported key types: ' + repr(SUPPORTED_KEY_TYPES)) - - else: - # Add "keyid_hash_algorithms" so that equal keys with different keyids can - # be associated using supported keyid_hash_algorithms. - key_object['keyid_hash_algorithms'] = sslib_settings.HASH_ALGORITHMS - - return key_object - - - -def import_publickey_from_file(keypath): - - try: - key_metadata = sslib_util.load_json_file(keypath) - - # An RSA public key is saved to disk in PEM format (not JSON), so the - # load_json_file() call above can fail for this reason. Try to potentially - # load the PEM string in keypath if an exception is raised. - except sslib_exceptions.Error: - key_metadata = sslib_interface.import_rsa_publickey_from_file( - keypath) - - key_object, junk = sslib_keys.format_metadata_to_key(key_metadata) - - if key_object['keytype'] not in SUPPORTED_KEY_TYPES: - raise exceptions.Error('Trying to import an unsupported key' - ' type: ' + repr(key_object['keytype']) + '.' - ' Supported key types: ' + repr(SUPPORTED_KEY_TYPES)) - - else: - return key_object - - - -def add_verification_key(parsed_arguments): - if not parsed_arguments.pubkeys: - raise exceptions.Error('--pubkeys must be given with --trust.') - - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - for keypath in parsed_arguments.pubkeys: - imported_pubkey = import_publickey_from_file(keypath) - - if parsed_arguments.role not in ('root', 'targets', 'snapshot', 'timestamp'): - raise exceptions.Error('The given --role is not a top-level role.') - - elif parsed_arguments.role == 'root': - repository.root.add_verification_key(imported_pubkey) - - elif parsed_arguments.role == 'targets': - repository.targets.add_verification_key(imported_pubkey) - - elif parsed_arguments.role == 'snapshot': - repository.snapshot.add_verification_key(imported_pubkey) - - # The timestamp role.. - else: - repository.timestamp.add_verification_key(imported_pubkey) - - consistent_snapshot = roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - repository.write('root', consistent_snapshot=consistent_snapshot, - increment_version_number=False) - - # Move staged metadata directory to "live" metadata directory.
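The two import helpers accept different on-disk formats: JSON-serialized keys parse via load_json_file(), while an RSA key in PEM format takes the fallback path. A short sketch, with illustrative key paths and assuming the files were produced by gen_key() or set_top_level_keys():

pubkey = import_publickey_from_file('tufkeystore/targets_key.pub')
privkey = import_privatekey_from_file('tufkeystore/targets_key', password='pw')

assert pubkey['keytype'] in SUPPORTED_KEY_TYPES
assert privkey['keyid'] == pubkey['keyid']  # same keypair, same keyid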
- write_to_live_repo(parsed_arguments) - - - -def remove_verification_key(parsed_arguments): - if not parsed_arguments.pubkeys: - raise exceptions.Error('--pubkeys must be given with --distrust.') - - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - for keypath in parsed_arguments.pubkeys: - imported_pubkey = import_publickey_from_file(keypath) - - try: - if parsed_arguments.role not in ('root', 'targets', 'snapshot', 'timestamp'): - raise exceptions.Error('The given --role is not a top-level role.') - - elif parsed_arguments.role == 'root': - repository.root.remove_verification_key(imported_pubkey) - - elif parsed_arguments.role == 'targets': - repository.targets.remove_verification_key(imported_pubkey) - - elif parsed_arguments.role == 'snapshot': - repository.snapshot.remove_verification_key(imported_pubkey) - - # The Timestamp key.. - else: - repository.timestamp.remove_verification_key(imported_pubkey) - - # It is assumed remove_verification_key() only raises - # securesystemslib.exceptions.Error and - # securesystemslib.exceptions.FormatError, and the latter is not raised - # because a valid key should have been returned by - # import_publickey_from_file(). - except sslib_exceptions.Error: - print(repr(keypath) + ' is not a trusted key. Skipping.') - - consistent_snapshot = roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - repository.write('root', consistent_snapshot=consistent_snapshot, - increment_version_number=False) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def sign_role(parsed_arguments): - - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - consistent_snapshot = roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - - for keypath in parsed_arguments.sign: - - role_privatekey = import_privatekey_from_file(keypath) - - if parsed_arguments.role == 'targets': - repository.targets.load_signing_key(role_privatekey) - - elif parsed_arguments.role == 'root': - repository.root.load_signing_key(role_privatekey) - - elif parsed_arguments.role == 'snapshot': - repository.snapshot.load_signing_key(role_privatekey) - - elif parsed_arguments.role == 'timestamp': - repository.timestamp.load_signing_key(role_privatekey) - - else: - # TODO: repository_tool.py will be refactored to clean up the following - # code, which adds and signs for a non-existent role. - if not roledb.role_exists(parsed_arguments.role): - - # Load the private key keydb and set the roleinfo in roledb so that - # metadata can be written with repository.write(). - keydb.remove_key(role_privatekey['keyid'], - repository_name = repository._repository_name) - keydb.add_key( - role_privatekey, repository_name = repository._repository_name) - - # Set the delegated metadata file to expire in 3 months. - expiration = formats.unix_timestamp_to_datetime( - int(time.time() + 7889230)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'name': parsed_arguments.role, - 'keyids': [role_privatekey['keyid']], - 'signing_keyids': [role_privatekey['keyid']], - 'partial_loaded': False, 'paths': {}, - 'signatures': [], 'version': 1, 'expires': expiration, - 'delegations': {'keys': {}, 'roles': []}} - - roledb.add_role(parsed_arguments.role, roleinfo, - repository_name=repository._repository_name) - - # Generate the Targets object of --role, and add it to the top-level - # 'targets' object. 
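Taken together, --trust and --distrust amount to a key rotation for a top-level role. A sketch that mirrors how the two functions above are used from the CLI (the key filenames are illustrative):

import argparse

args = argparse.Namespace(path='.', role='root',
    pubkeys=['tufkeystore/new_root_key.pub'])
add_verification_key(args)       # --trust: the new key enters Root metadata

args.pubkeys = ['tufkeystore/old_root_key.pub']
remove_verification_key(args)    # --distrust: the old key is dropped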
- new_targets_object = repo_tool.Targets(repository._targets_directory, - parsed_arguments.role, roleinfo, - repository_name=repository._repository_name) - repository.targets._delegated_roles[parsed_arguments.role] = new_targets_object - - else: - repository.targets(parsed_arguments.role).load_signing_key(role_privatekey) - - # Write the Targets metadata now that it's been modified. Once write() is - # called on a role, it is no longer considered "dirty" and the role will not - # be written again if another write() or writeall() were subsequently made. - repository.write(parsed_arguments.role, - consistent_snapshot=consistent_snapshot, increment_version_number=False) - - # Write the updated top-level roles, if any. Also write Snapshot and - # Timestamp to make a new release. Automatically making a new release can be - # disabled via --no_release. - if not parsed_arguments.no_release: - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - repository.writeall(consistent_snapshot=consistent_snapshot) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def clean_repo(parsed_arguments): - repo_dir = os.path.join(parsed_arguments.path, REPO_DIR) - client_dir = os.path.join(parsed_arguments.path, CLIENT_DIR) - keystore_dir = os.path.join(parsed_arguments.path, KEYSTORE_DIR) - - shutil.rmtree(repo_dir, ignore_errors=True) - shutil.rmtree(client_dir, ignore_errors=True) - shutil.rmtree(keystore_dir, ignore_errors=True) - - - -def write_to_live_repo(parsed_arguments): - staged_meta_directory = os.path.join( - parsed_arguments.path, REPO_DIR, STAGED_METADATA_DIR) - live_meta_directory = os.path.join( - parsed_arguments.path, REPO_DIR, METADATA_DIR) - - shutil.rmtree(live_meta_directory, ignore_errors=True) - shutil.copytree(staged_meta_directory, live_meta_directory) - - - -def add_target_to_repo(parsed_arguments, target_path, repo_targets_path, - repository, custom=None): - """ - (1) Copy 'target_path' to 'repo_targets_path'. - (2) Add 'target_path' to Targets metadata of 'repository'. - """ - - if custom is None: - custom = {} - - if not os.path.exists(target_path): - logger.debug(repr(target_path) + ' does not exist. Skipping.') - - else: - sslib_util.ensure_parent_dir(os.path.join(repo_targets_path, target_path)) - shutil.copy(target_path, os.path.join(repo_targets_path, target_path)) - - - roleinfo = roledb.get_roleinfo( - parsed_arguments.role, repository_name=repository._repository_name) - - # It is assumed we have a delegated role, and that the caller has made - # sure to reject top-level roles specified with --role. 
- if target_path not in roleinfo['paths']: - logger.debug('Adding new target: ' + repr(target_path)) - roleinfo['paths'].update({target_path: custom}) - - else: - logger.debug('Replacing target: ' + repr(target_path)) - roleinfo['paths'].update({target_path: custom}) - - roledb.update_roleinfo(parsed_arguments.role, roleinfo, - mark_role_as_dirty=True, repository_name=repository._repository_name) - - - -def remove_target_files_from_metadata(parsed_arguments, repository): - - if parsed_arguments.role in ('root', 'snapshot', 'timestamp'): - raise exceptions.Error( - 'Invalid rolename specified: ' + repr(parsed_arguments.role) + '.' - ' It must be "targets" or a delegated rolename.') - - else: - # NOTE: The following approach of using roledb to update the target - # files will be modified in the future when the repository tool's API is - # refactored. - roleinfo = roledb.get_roleinfo( - parsed_arguments.role, repository._repository_name) - - for glob_pattern in parsed_arguments.remove: - for path in list(roleinfo['paths'].keys()): - if fnmatch.fnmatch(path, glob_pattern): - del roleinfo['paths'][path] - - else: - logger.debug('Delegated path ' + repr(path) + ' does not match' - ' given path/glob pattern ' + repr(glob_pattern)) - continue - - roledb.update_roleinfo( - parsed_arguments.role, roleinfo, mark_role_as_dirty=True, - repository_name=repository._repository_name) - - - -def add_targets(parsed_arguments): - repo_targets_path = os.path.join(parsed_arguments.path, REPO_DIR, 'targets') - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - # Copy the target files in --path to the repo directory, and - # add them to Targets metadata. Make sure to also copy & add files - # in directories (and subdirectories, if --recursive is True). - for target_path in parsed_arguments.add: - if os.path.isdir(target_path): - for sub_target_path in repository.get_filepaths_in_directory( - target_path, parsed_arguments.recursive): - add_target_to_repo(parsed_arguments, sub_target_path, - repo_targets_path, repository) - - else: - add_target_to_repo(parsed_arguments, target_path, - repo_targets_path, repository) - - consistent_snapshot = roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - - if parsed_arguments.role == 'targets': - # Load the top-level, non-root, keys to make a new release. - targets_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), - parsed_arguments.targets_pw) - repository.targets.load_signing_key(targets_private) - - elif parsed_arguments.role not in ('root', 'snapshot', 'timestamp'): - repository.write(parsed_arguments.role, - consistent_snapshot=consistent_snapshot, increment_version_number=True) - return - - # Update the required top-level roles, Snapshot and Timestamp, to make a new - # release. Automatically making a new release can be disabled via - # --no_release. - if not parsed_arguments.no_release: - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - repository.writeall(consistent_snapshot=consistent_snapshot) - - # Move staged metadata directory to "live" metadata directory. 
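remove_target_files_from_metadata() matches the stored target paths with fnmatch, so the --remove option understands shell-style wildcards; for instance:

import fnmatch

paths = ['foo1.tgz', 'foo2.tgz', 'docs/readme.txt']

# 'foo*.tgz' matches the first two entries and leaves 'docs/readme.txt':
assert [p for p in paths if fnmatch.fnmatch(p, 'foo*.tgz')] == \
    ['foo1.tgz', 'foo2.tgz']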
- write_to_live_repo(parsed_arguments) - - - -def remove_targets(parsed_arguments): - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - # Remove target files from the Targets metadata (or the role specified in - # --role) that match the glob patterns specified in --remove. - remove_target_files_from_metadata(parsed_arguments, repository) - - # Examples of how the --pw command-line option is interpreted: - # repo.py --init: parsed_arguments.pw = 'pw' - # repo.py --init --pw my_password: parsed_arguments.pw = 'my_password' - # repo.py --init --pw: The user is prompted for a password, as follows: - if not parsed_arguments.pw: - parsed_arguments.pw = sslib_interface.get_password( - prompt='Enter a password for the top-level role keys: ', confirm=True) - - targets_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), - parsed_arguments.targets_pw) - repository.targets.load_signing_key(targets_private) - - # Load the top-level keys for Snapshot and Timestamp to make a new release. - # Automatically making a new release can be disabled via --no_release. - if not parsed_arguments.no_release: - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - consistent_snapshot = roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - repository.writeall(consistent_snapshot=consistent_snapshot) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def init_repo(parsed_arguments): - """ - Create a repo at the specified location in --path (the current working - directory, by default). Each top-level role has one key, if --bare is False - (default). - """ - - repo_path = os.path.join(parsed_arguments.path, REPO_DIR) - repository = repo_tool.create_new_repository(repo_path) - - if not parsed_arguments.bare: - set_top_level_keys(repository, parsed_arguments) - repository.writeall(consistent_snapshot=parsed_arguments.consistent) - - else: - repository.write( - 'root', consistent_snapshot=parsed_arguments.consistent) - repository.write('targets', consistent_snapshot=parsed_arguments.consistent) - repository.write('snapshot', consistent_snapshot=parsed_arguments.consistent) - repository.write('timestamp', consistent_snapshot=parsed_arguments.consistent) - - write_to_live_repo(parsed_arguments) - - # Create the client files. The client directory contains the required - # directory structure and metadata files for clients to successfully perform - # an update. - repo_tool.create_tuf_client_directory( - os.path.join(parsed_arguments.path, REPO_DIR), - os.path.join(parsed_arguments.path, CLIENT_DIR, REPO_DIR)) - - - -def set_top_level_keys(repository, parsed_arguments): - """ - Generate, write, and set the top-level keys. 'repository' is modified. - """ - - # Examples of how the --*_pw command-line options are interpreted: - # repo.py --init: parsed_arguments.*_pw = 'pw' - # repo.py --init --*_pw my_pw: parsed_arguments.*_pw = 'my_pw' - # repo.py --init --*_pw: The user is prompted for a password.
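For orientation, init_repo() with the defaults yields the following on-disk layout, built from the directory constants defined at the top of the script (a sketch; only the notable entries are shown):

# .
# |-- tufrepo/
# |   |-- metadata.staged/  # written by repository_tool (STAGED_METADATA_DIR)
# |   |-- metadata/         # "live" copy produced by write_to_live_repo()
# |   `-- targets/
# |-- tufkeystore/          # root_key(.pub), targets_key(.pub), ...
# `-- tufclient/
#     `-- tufrepo/          # bootstrap copy from create_tuf_client_directory()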
- - sslib_interface._generate_and_write_ed25519_keypair( - password=parsed_arguments.root_pw, - filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, ROOT_KEY_NAME), - prompt=(not parsed_arguments.root_pw)) - sslib_interface._generate_and_write_ed25519_keypair( - password=parsed_arguments.targets_pw, - filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), - prompt=(not parsed_arguments.targets_pw)) - sslib_interface._generate_and_write_ed25519_keypair( - password=parsed_arguments.snapshot_pw, - filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - prompt=(not parsed_arguments.snapshot_pw)) - sslib_interface._generate_and_write_ed25519_keypair( - password=parsed_arguments.timestamp_pw, - filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, TIMESTAMP_KEY_NAME), - prompt=(not parsed_arguments.timestamp_pw)) - - # Import the private keys. They are needed to generate the signatures - # included in metadata. - root_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - ROOT_KEY_NAME), parsed_arguments.root_pw) - targets_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TARGETS_KEY_NAME), parsed_arguments.targets_pw) - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - SNAPSHOT_KEY_NAME), parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - # Import the public keys. They are needed so that metadata roles are - # assigned verification keys, which clients need in order to verify the - # signatures created by the corresponding private keys. - root_public = import_publickey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - ROOT_KEY_NAME) + '.pub') - targets_public = import_publickey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TARGETS_KEY_NAME) + '.pub') - snapshot_public = import_publickey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - SNAPSHOT_KEY_NAME) + '.pub') - timestamp_public = import_publickey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME) + '.pub') - - # Add the verification keys to the top-level roles. - repository.root.add_verification_key(root_public) - repository.targets.add_verification_key(targets_public) - repository.snapshot.add_verification_key(snapshot_public) - repository.timestamp.add_verification_key(timestamp_public) - - # Load the previously imported signing keys for the top-level roles so that - # valid metadata can be written. - repository.root.load_signing_key(root_private) - repository.targets.load_signing_key(targets_private) - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - - -def parse_arguments(): - """ - <Purpose> - Parse the command-line arguments. Also set the logging level, as specified - via the --verbose argument (2, by default). - - Example: - # Create a TUF repository in the current working directory. The - # top-level roles are created, each containing one key. - $ repo.py --init - - $ repo.py --init --bare --consistent --verbose 3 - - If a required argument is unset, a parser error is printed and the script - exits. - - <Arguments> - None. - - <Exceptions> - None. - - <Side Effects> - Sets the logging level for TUF logging. - - <Returns> - The parsed_args Namespace, as returned by argparse's parse_args().
- """ - - parser = argparse.ArgumentParser( - description='Create or modify a TUF repository.') - - parser.add_argument('-i', '--init', action='store_true', - help='Create a repository. The "tufrepo", "tufkeystore", and' - ' "tufclient" directories are created in the current working' - ' directory, unless --path is specified.') - - parser.add_argument('-p', '--path', nargs='?', default='.', - metavar='</path/to/repo>', help='Specify a repository path. If used' - ' with --init, the initialized repository is saved to the given' - ' path.') - - parser.add_argument('-b', '--bare', action='store_true', - help='If initializing a repository, neither create nor set keys' - ' for any of the top-level roles. False, by default.') - - parser.add_argument('--no_release', action='store_true', - help='Do not automatically sign Snapshot and Timestamp metadata.' - ' False, by default.') - - parser.add_argument('--consistent', action='store_true', - help='Set consistent snapshots for an initialized repository.' - ' Consistent snapshot is False by default.') - - parser.add_argument('-c', '--clean', type=str, nargs='?', const='.', - metavar='</path/to/repo>', help='Delete the repo files from the' - ' specified directory. If a directory is not specified, the current' - ' working directory is cleaned.') - - parser.add_argument('-a', '--add', type=str, nargs='+', - metavar='</path/to/file>', help='Add one or more target files to the' - ' "targets" role (or the role specified in --role). If a directory' - ' is given, all files in the directory are added.') - - parser.add_argument('--remove', type=str, nargs='+', - metavar='<glob pattern>', help='Remove one or more target files from the' - ' "targets" role (or the role specified in --role).') - - parser.add_argument('--role', nargs='?', type=str, const='targets', - default='targets', metavar='<rolename>', help='Specify a rolename.' - ' The rolename "targets" is used by default.') - - parser.add_argument('-r', '--recursive', action='store_true', - help='By setting -r, any directory specified with --add is processed' - ' recursively. If unset, the default behavior is to not add target' - ' files in subdirectories.') - - parser.add_argument('-k', '--key', type=str, nargs='?', const=ED25519_KEYTYPE, - default=None, choices=[ECDSA_KEYTYPE, ED25519_KEYTYPE, RSA_KEYTYPE], - help='Generate an ECDSA, Ed25519, or RSA key. An Ed25519 key is' - ' created if the key type is unspecified.') - - parser.add_argument('--filename', nargs='?', default=None, const=None, - metavar='<filename>', help='Specify a filename. This option can' - ' be used to name a generated key file. The top-level keys should' - ' be named "root_key", "targets_key", "snapshot_key", "timestamp_key."') - - parser.add_argument('--trust', action='store_true', - help='Indicate the trusted key(s) (via --pubkeys) for the role in --role.' - ' This action modifies Root metadata with the trusted key(s).') - - parser.add_argument('--distrust', action='store_true', - help='Discontinue trust of key(s) (via --pubkeys) for the role in --role.' - ' This action modifies Root metadata by removing trusted key(s).') - - parser.add_argument('--sign', nargs='+', type=str, - metavar='</path/to/privkey>', help='Sign the "targets"' - ' metadata (or the one for --role) with the specified key(s).') - - parser.add_argument('--pw', nargs='?', default='pw', metavar='<password>', - help='Specify a password. "pw" is used if --pw is unset, or a' - ' password can be entered via a prompt by specifying --pw by itself.'
- ' This option can be used with --sign and --key.') - - parser.add_argument('--root_pw', nargs='?', default='pw', metavar='<password>', - help='Specify a Root password. "pw" is used if --root_pw is unset, or a' - ' password can be entered via a prompt by specifying --root_pw by itself.') - - parser.add_argument('--targets_pw', nargs='?', default='pw', metavar='<password>', - help='Specify a Targets password. "pw" is used if --targets_pw is unset, or a' - ' password can be entered via a prompt by specifying --targets_pw by itself.') - - parser.add_argument('--snapshot_pw', nargs='?', default='pw', metavar='<password>', - help='Specify a Snapshot password. "pw" is used if --snapshot_pw is unset, or a' - ' password can be entered via a prompt by specifying --snapshot_pw by itself.') - - parser.add_argument('--timestamp_pw', nargs='?', default='pw', metavar='<password>', - help='Specify a Timestamp password. "pw" is used if --timestamp_pw is unset, or a' - ' password can be entered via a prompt by specifying --timestamp_pw by itself.') - - parser.add_argument('-d', '--delegate', type=str, nargs='+', - metavar='<glob pattern>', help='Delegate trust of target files' - ' from the "targets" role (or --role) to some other role (--delegatee).' - ' The named delegatee is trusted to sign for the target files that' - ' match the glob pattern(s).') - - parser.add_argument('--delegatee', nargs='?', type=str, const=None, - default=None, metavar='<rolename>', help='Specify the rolename' - ' of the delegated role. Can be used with --delegate.') - - parser.add_argument('-t', '--terminating', action='store_true', - help='Set the terminating flag to True. Can be used with --delegate.') - - parser.add_argument('--threshold', type=int, default=1, metavar='<threshold>', - help='Set the threshold number of signatures' - ' needed to validate a metadata file. Can be used with --delegate.') - - parser.add_argument('--pubkeys', type=str, nargs='+', - metavar='</path/to/pubkey>', help='Specify one or more public keys' - ' for the delegated role. Can be used with --delegate.') - - parser.add_argument('--revoke', action='store_true', - help='Revoke trust of target files from a delegated role.') - - # Add the parser arguments supported by PROG_NAME. - parser.add_argument('-v', '--verbose', type=int, default=2, - choices=range(0, 6), help='Set the verbosity level of logging messages.' - ' The lower the setting, the greater the verbosity. Supported logging' - ' levels: 0=UNSET, 1=DEBUG, 2=INFO, 3=WARNING, 4=ERROR,' - ' 5=CRITICAL') - - # Should we include usage examples in the help output? - - parsed_args = parser.parse_args() - - # Set the logging level. - logging_levels = [logging.NOTSET, logging.DEBUG, - logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL] - - log.set_log_level(logging_levels[parsed_args.verbose]) - - return parsed_args - - - -if __name__ == '__main__': - - # Parse the arguments and set the logging level. - arguments = parse_arguments() - - # Create or modify the repository depending on the option specified on the - # command line. For example, the following adds 'foo.bar.gz' to the - # default repository and updates the relevant metadata (i.e., Targets, - # Snapshot, and Timestamp metadata are updated): - # $ repo.py --add foo.bar.gz - - try: - process_command_line_arguments(arguments) - - except exceptions.Error as e: - sys.stderr.write('Error: ' + str(e) + '\n') - sys.exit(1) - - # Successfully created or updated the TUF repository.
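Since the script communicates success strictly through its exit status (0 on success, 1 with a message on stderr), it can be smoke-tested with subprocess; a sketch, assuming repo.py sits in the current directory:

import subprocess
import sys

result = subprocess.run(
    [sys.executable, 'repo.py', '--init', '--consistent'],
    capture_output=True, text=True)
assert result.returncode == 0, result.stderr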
- sys.exit(0) diff --git a/tuf/settings.py b/tuf/settings.py deleted file mode 100755 index f07c4d961a..0000000000 --- a/tuf/settings.py +++ /dev/null @@ -1,100 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - settings.py - - - Vladimir Diaz - - - January 11, 2017 - - - See LICENSE-MIT OR LICENSE for licensing information. - - - A central location for TUF configuration settings. Example options include - setting the destination of temporary files and downloaded content, the maximum - length of downloaded metadata (unknown file attributes), and download - behavior. -""" - - -# Set a directory that should be used for all temporary files. If this -# is None, then the system default will be used. The system default -# will also be used if a directory path set here is invalid or -# unusable. -temporary_directory = None - -# Set a local directory to store metadata that is requested from mirrors. This -# directory contains subdirectories for different repositories, where each -# subdirectory contains a different set of metadata. For example: -# tuf.settings.repositories_directory = /tmp/repositories. The root file for a -# repository named 'django_repo' can be found at: -# /tmp/repositories/django_repo/metadata/current/root.METADATA_EXTENSION -repositories_directory = None - -# The 'log.py' module manages TUF's logging system. Users have the option to -# enable/disable logging to a file via 'ENABLE_FILE_LOGGING', or -# tuf.log.enable_file_logging() and tuf.log.disable_file_logging(). -ENABLE_FILE_LOGGING = False - -# If file logging is enabled via 'ENABLE_FILE_LOGGING', TUF log messages will -# be saved to 'LOG_FILENAME' -LOG_FILENAME = 'tuf.log' - -# Since the timestamp role does not have signed metadata about itself, we set a -# default but sane upper bound for the number of bytes required to download it. -DEFAULT_TIMESTAMP_REQUIRED_LENGTH = 16384 #bytes - -# The Root role may be updated without knowing its version if top-level -# metadata cannot be safely downloaded (e.g., keys may have been revoked, thus -# requiring a new Root file that includes the updated keys). Set a default -# upper bound for the maximum total bytes that may be downloaded for Root -# metadata. -DEFAULT_ROOT_REQUIRED_LENGTH = 512000 #bytes - -# Set a default, but sane, upper bound for the number of bytes required to -# download Snapshot metadata. -DEFAULT_SNAPSHOT_REQUIRED_LENGTH = 2000000 #bytes - -# Set a default, but sane, upper bound for the number of bytes required to -# download Targets metadata. -DEFAULT_TARGETS_REQUIRED_LENGTH = 5000000 #bytes - -# Set a timeout value in seconds (float) for non-blocking socket operations. -SOCKET_TIMEOUT = 4 #seconds - -# The maximum chunk of data, in bytes, we would download in every round. -CHUNK_SIZE = 400000 #bytes - -# The minimum average download speed (bytes/second) that must be met to -# avoid being considered as a slow retrieval attack. -MIN_AVERAGE_DOWNLOAD_SPEED = 50 #bytes/second - -# By default, limit number of delegatees we visit for any target. -MAX_NUMBER_OF_DELEGATIONS = 2**5 - -# A setting for the instances where a default hashing algorithm is needed. -# This setting is currently used to calculate the path hash prefixes of hashed -# bin delegations, and digests of targets filepaths. The other instances -# (e.g., digest of files) that require a hashing algorithm rely on settings in -# the securesystemslib external library. 
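Because settings.py held plain module attributes, integrators tuned them by simple assignment before starting an update; a sketch using constants defined in this file:

import tuf.settings

# Loosen the slow-retrieval threshold and allow a deeper delegation graph.
tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED = 100  # bytes/second
tuf.settings.MAX_NUMBER_OF_DELEGATIONS = 2**6
tuf.settings.SOCKET_TIMEOUT = 10               # seconds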
-DEFAULT_HASH_ALGORITHM = 'sha256' - -# The hashing algorithms used to compute file hashes -FILE_HASH_ALGORITHMS = ['sha256', 'sha512'] - -# The client's update procedure (contained within a while-loop) can potentially -# hog the CPU. The following setting can be used to force the update sequence -# to suspend execution for a specified amount of time. See -# theupdateframework/tuf/issue#338. -SLEEP_BEFORE_ROUND = None - -# Maximum number of root metadata file rotations we should perform in order to -# prevent a denial-of-service (DoS) attack. -MAX_NUMBER_ROOT_ROTATIONS = 2**5 diff --git a/tuf/sig.py b/tuf/sig.py deleted file mode 100755 index 4e1f05fc2a..0000000000 --- a/tuf/sig.py +++ /dev/null @@ -1,395 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - sig.py - - - Vladimir Diaz - - - February 28, 2012. Based on a previous version by Geremy Condra. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Survivable key compromise is one feature of a secure update system - incorporated into TUF's design. Responsibility separation through - the use of multiple roles, multi-signature trust, and explicit and - implicit key revocation are some of the mechanisms employed towards - this goal of survivability. These mechanisms can all be seen in - play by the functions available in this module. - - The signed metadata files utilized by TUF to download target files - securely are used and represented here as the 'signable' object. - More precisely, the signature structures contained within these metadata - files are packaged into 'signable' dictionaries. This module makes it - possible to capture the states of these signatures by organizing the - keys into different categories. As keys are added and removed, the - system must securely and efficiently verify the status of these signatures. - For instance, a bunch of keys have recently expired. How many valid keys - are now available to the Snapshot role? This question can be answered by - get_signature_status(), which will return a full 'status report' of these - 'signable' dicts. This module also provides a convenient verify() function - that will determine if a role still has a sufficient number of valid keys. - If a caller needs to update the signatures of a 'signable' object, there - is also a function for that. -""" - -import logging - -import securesystemslib # pylint: disable=unused-import -from securesystemslib import exceptions as sslib_exceptions -from securesystemslib import formats as sslib_formats -from securesystemslib import keys as sslib_keys - -from tuf import exceptions -from tuf import formats -from tuf import keydb -from tuf import roledb - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - -def get_signature_status(signable, role=None, repository_name='default', - threshold=None, keyids=None): - """ - - Return a dictionary representing the status of the signatures listed in - 'signable'. Signatures in the returned dictionary are identified by the - signature keyid and can have a status of either: - - * bad -- Invalid signature - * good -- Valid signature from key that is available in 'tuf.keydb', and is - authorized for the passed role as per 'roledb' (authorization may be - overwritten by passed 'keyids'). - * unknown -- Signature from key that is not available in 'tuf.keydb', or if - 'role' is None. 
- * unknown signing schemes -- Signature from key with unknown signing - scheme. - * untrusted -- Valid signature from key that is available in 'tuf.keydb', - but is not trusted for the passed role as per 'roledb' or the passed - 'keyids'. - - NOTE: The result may contain duplicate keyids or keyids that reference the - same key, if 'signable' lists multiple signatures from the same key. - - - signable: - A dictionary containing a list of signatures and a 'signed' identifier. - signable = {'signed': 'signer', - 'signatures': [{'keyid': keyid, - 'sig': sig}]} - - Conformant to tuf.formats.SIGNABLE_SCHEMA. - - role: - TUF role string (e.g. 'root', 'targets', 'snapshot' or timestamp). - - threshold: - Rather than reference the role's threshold as set in roledb, use - the given 'threshold' to calculate the signature status of 'signable'. - 'threshold' is an integer value that sets the role's threshold value, or - the minimum number of signatures needed for metadata to be considered - fully signed. - - keyids: - Similar to the 'threshold' argument, use the supplied list of 'keyids' - to calculate the signature status, instead of referencing the keyids - in roledb for 'role'. - - - securesystemslib.exceptions.FormatError, if 'signable' does not have the - correct format. - - tuf.exceptions.UnknownRoleError, if 'role' is not recognized. - - - None. - - - A dictionary representing the status of the signatures in 'signable'. - Conformant to tuf.formats.SIGNATURESTATUS_SCHEMA. - """ - - # Do the arguments have the correct format? This check will ensure that - # arguments have the appropriate number of objects and object types, and that - # all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if the check fails. - formats.SIGNABLE_SCHEMA.check_match(signable) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - if role is not None: - formats.ROLENAME_SCHEMA.check_match(role) - - if threshold is not None: - formats.THRESHOLD_SCHEMA.check_match(threshold) - - if keyids is not None: - sslib_formats.KEYIDS_SCHEMA.check_match(keyids) - - # The signature status dictionary returned. - signature_status = {} - good_sigs = [] - bad_sigs = [] - unknown_sigs = [] - untrusted_sigs = [] - unknown_signing_schemes = [] - - # Extract the relevant fields from 'signable' that will allow us to identify - # the different classes of keys (i.e., good_sigs, bad_sigs, etc.). - signed = sslib_formats.encode_canonical(signable['signed']).encode('utf-8') - signatures = signable['signatures'] - - # Iterate the signatures and enumerate the signature_status fields. - # (i.e., good_sigs, bad_sigs, etc.). - for signature in signatures: - keyid = signature['keyid'] - - # Does the signature use an unrecognized key? - try: - key = keydb.get_key(keyid, repository_name) - - except exceptions.UnknownKeyError: - unknown_sigs.append(keyid) - continue - - # Does the signature use an unknown/unsupported signing scheme? - try: - valid_sig = sslib_keys.verify_signature(key, signature, signed) - - except sslib_exceptions.UnsupportedAlgorithmError: - unknown_signing_schemes.append(keyid) - continue - - # We are now dealing with either a trusted or untrusted key... - if valid_sig: - if role is not None: - - # Is this an unauthorized key? (a keyid associated with 'role') - # Note that if the role is not known, tuf.exceptions.UnknownRoleError - # is raised here. 
- if keyids is None: - keyids = roledb.get_role_keyids(role, repository_name) - - if keyid not in keyids: - untrusted_sigs.append(keyid) - continue - - # This is an unset role, thus an unknown signature. - else: - unknown_sigs.append(keyid) - continue - - # Identify good/authorized key. - good_sigs.append(keyid) - - else: - # This is a bad signature for a trusted key. - bad_sigs.append(keyid) - - # Retrieve the threshold value for 'role'. Raise - # tuf.exceptions.UnknownRoleError if we were given an invalid role. - if role is not None: - if threshold is None: - # Note that if the role is not known, tuf.exceptions.UnknownRoleError is - # raised here. - threshold = roledb.get_role_threshold( - role, repository_name=repository_name) - - else: - logger.debug('Not using roledb.py\'s threshold for ' + repr(role)) - - else: - threshold = 0 - - # Build the signature_status dict. - signature_status['threshold'] = threshold - signature_status['good_sigs'] = good_sigs - signature_status['bad_sigs'] = bad_sigs - signature_status['unknown_sigs'] = unknown_sigs - signature_status['untrusted_sigs'] = untrusted_sigs - signature_status['unknown_signing_schemes'] = unknown_signing_schemes - - return signature_status - - - - - -def verify(signable, role, repository_name='default', threshold=None, - keyids=None): - """ - - Verify that 'signable' has a valid threshold of authorized signatures - identified by unique keyids. The threshold and whether a keyid is - authorized is determined by querying the 'threshold' and 'keyids' info for - the passed 'role' in 'roledb'. Both values can be overwritten by - passing the 'threshold' or 'keyids' arguments. - - NOTE: - - Signatures with identical authorized keyids only count towards the - threshold once. - - Signatures with the same key only count toward the threshold once. - - - signable: - A dictionary containing a list of signatures and a 'signed' identifier - that conforms to SIGNABLE_SCHEMA, e.g.: - signable = {'signed':, 'signatures': [{'keyid':, 'method':, 'sig':}]} - - role: - TUF role string (e.g. 'root', 'targets', 'snapshot' or timestamp). - - threshold: - Rather than reference the role's threshold as set in roledb, use - the given 'threshold' to calculate the signature status of 'signable'. - 'threshold' is an integer value that sets the role's threshold value, or - the minimum number of signatures needed for metadata to be considered - fully signed. - - keyids: - Similar to the 'threshold' argument, use the supplied list of 'keyids' - to calculate the signature status, instead of referencing the keyids - in roledb for 'role'. - - - tuf.exceptions.UnknownRoleError, if 'role' is not recognized. - - securesystemslib.exceptions.FormatError, if 'signable' is not formatted - correctly. - - securesystemslib.exceptions.Error, if an invalid threshold is encountered. - - - tuf.sig.get_signature_status() called. Any exceptions thrown by - get_signature_status() will be caught here and re-raised. - - - Boolean. True if the number of good unique (by keyid) signatures >= the - role's threshold, False otherwise. - """ - - formats.SIGNABLE_SCHEMA.check_match(signable) - formats.ROLENAME_SCHEMA.check_match(role) - sslib_formats.NAME_SCHEMA.check_match(repository_name) - - # Retrieve the signature status. tuf.sig.get_signature_status() raises: - # tuf.exceptions.UnknownRoleError - # securesystemslib.exceptions.FormatError. 'threshold' and 'keyids' are also - # validated. 
- status = get_signature_status(signable, role, repository_name, threshold, keyids) - - # Retrieve the role's threshold and the authorized keys of 'status' - threshold = status['threshold'] - good_sigs = status['good_sigs'] - - # Does 'status' have the required threshold of signatures? - # First check for invalid threshold values before returning result. - # Note: get_signature_status() is expected to verify that 'threshold' is - # not None or <= 0. - if threshold is None or threshold <= 0: #pragma: no cover - raise sslib_exceptions.Error("Invalid threshold: " + repr(threshold)) - - unique_keys = set() - for keyid in good_sigs: - key = keydb.get_key(keyid, repository_name) - unique_keys.add(key['keyval']['public']) - - return len(unique_keys) >= threshold - - - - - -def may_need_new_keys(signature_status): - """ - - Return true iff downloading a new set of keys might tip this - signature status over to valid. This is determined by checking - if either the number of unknown or untrusted keys is > 0. - - - signature_status: - The dictionary returned by tuf.sig.get_signature_status(). - - - securesystemslib.exceptions.FormatError, if 'signature_status does not have - the correct format. - - - None. - - - Boolean. - """ - - # Does 'signature_status' have the correct format? - # This check will ensure 'signature_status' has the appropriate number - # of objects and object types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - formats.SIGNATURESTATUS_SCHEMA.check_match(signature_status) - - unknown = signature_status['unknown_sigs'] - untrusted = signature_status['untrusted_sigs'] - - return len(unknown) or len(untrusted) - - - - - -def generate_rsa_signature(signed, rsakey_dict): - """ - - Generate a new signature dict presumably to be added to the 'signatures' - field of 'signable'. The 'signable' dict is of the form: - - {'signed': 'signer', - 'signatures': [{'keyid': keyid, - 'method': 'evp', - 'sig': sig}]} - - The 'signed' argument is needed here for the signing process. - The 'rsakey_dict' argument is used to generate 'keyid', 'method', and 'sig'. - - The caller should ensure the returned signature is not already in - 'signable'. - - - signed: - The data used by 'securesystemslib.keys.create_signature()' to generate - signatures. It is stored in the 'signed' field of 'signable'. - - rsakey_dict: - The RSA key, a 'securesystemslib.formats.RSAKEY_SCHEMA' dictionary. - Used here to produce 'keyid', 'method', and 'sig'. - - - securesystemslib.exceptions.FormatError, if 'rsakey_dict' does not have the - correct format. - - TypeError, if a private key is not defined for 'rsakey_dict'. - - - None. - - - Signature dictionary conformant to securesystemslib.formats.SIGNATURE_SCHEMA. - Has the form: - {'keyid': keyid, 'method': 'evp', 'sig': sig} - """ - - # We need 'signed' in canonical JSON format to generate - # the 'method' and 'sig' fields of the signature. - signed = sslib_formats.encode_canonical(signed).encode('utf-8') - - # Generate the RSA signature. - # Raises securesystemslib.exceptions.FormatError and TypeError. 
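In practice, callers pair get_signature_status() with verify() and consult may_need_new_keys() on failure. A sketch; here 'signable' stands for a metadata dict conformant to SIGNABLE_SCHEMA, with keydb and roledb already populated, both of which are assumptions of this example:

from tuf import sig

status = sig.get_signature_status(signable, role='snapshot')
print(len(status['good_sigs']), 'good signature(s) of threshold',
    status['threshold'])

if not sig.verify(signable, 'snapshot') and sig.may_need_new_keys(status):
    # Unknown or untrusted keyids hint that fetching newer root metadata
    # (with rotated keys) could make this signable verifiable.
    print('consider updating root metadata and retrying')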
- signature = sslib_keys.create_signature(rsakey_dict, signed) - - return signature diff --git a/tuf/unittest_toolbox.py b/tuf/unittest_toolbox.py deleted file mode 100755 index 063bec8df6..0000000000 --- a/tuf/unittest_toolbox.py +++ /dev/null @@ -1,146 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - unittest_toolbox.py - - - Konstantin Andrianov. - - - March 26, 2012. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Provides an array of various methods for unit testing. Use it instead of - actual unittest module. This module builds on unittest module. - Specifically, Modified_TestCase is a derived class from unittest.TestCase. -""" - -import os -import shutil -import unittest -import tempfile -import random -import string - - -class Modified_TestCase(unittest.TestCase): - """ - - Provide additional test-setup methods to make testing - of module's methods-under-test as independent as possible. - - If you want to modify setUp()/tearDown() do: - class Your_Test_Class(modified_TestCase): - def setUp(): - your setup modification - your setup modification - ... - modified_TestCase.setUp(self) - - - make_temp_directory(self, directory=None): - Creates and returns an absolute path of a temporary directory. - - make_temp_file(self, suffix='.txt', directory=None): - Creates and returns an absolute path of an empty temp file. - - make_temp_data_file(self, suffix='', directory=None, data = junk_data): - Returns an absolute path of a temp file containing some data. - - random_path(self, length = 7): - Generate a 'random' path consisting of n-length strings of random chars. - - - Static Methods: - -------------- - Following methods are static because they technically don't operate - on any instances of the class, what they do is: they modify class variables - (dictionaries) that are shared among all instances of the class. So - it is possible to call them without instantiating the class. - - random_string(length=7): - Generate a 'length' long string of random characters. - """ - - - def setUp(self): - self._cleanup = [] - - - - def tearDown(self): - for cleanup_function in self._cleanup: - # Perform clean up by executing clean-up functions. - try: - # OSError will occur if the directory was already removed. 
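A test module subclasses Modified_TestCase to get the self-cleaning temp-file helpers listed in the class docstring above; a minimal sketch:

import unittest

from tuf import unittest_toolbox

class TestTempFixtures(unittest_toolbox.Modified_TestCase):

  def test_temp_data_file(self):
    temp_dir = self.make_temp_directory()
    data_path = self.make_temp_data_file(directory=temp_dir, data='hello')

    with open(data_path, encoding='utf8') as data_file:
      self.assertEqual(data_file.read(), 'hello')
    # No manual cleanup: tearDown() runs everything queued in self._cleanup.

if __name__ == '__main__':
  unittest.main()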
- cleanup_function() - - except OSError: - pass - - - - def make_temp_directory(self, directory=None): - """Creates and returns an absolute path of a directory.""" - - prefix = self.__class__.__name__+'_' - temp_directory = tempfile.mkdtemp(prefix=prefix, dir=directory) - - def _destroy_temp_directory(): - shutil.rmtree(temp_directory) - - self._cleanup.append(_destroy_temp_directory) - - return temp_directory - - - - def make_temp_file(self, suffix='.txt', directory=None): - """Creates and returns an absolute path of an empty file.""" - prefix='tmp_file_'+self.__class__.__name__+'_' - temp_file = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory) - def _destroy_temp_file(): - os.unlink(temp_file[1]) - self._cleanup.append(_destroy_temp_file) - return temp_file[1] - - - - def make_temp_data_file(self, suffix='', directory=None, data = 'junk data'): - """Returns an absolute path of a temp file containing data.""" - temp_file_path = self.make_temp_file(suffix=suffix, directory=directory) - temp_file = open(temp_file_path, 'wt', encoding='utf8') - temp_file.write(data) - temp_file.close() - return temp_file_path - - - - def random_path(self, length = 7): - """Generate a 'random' path consisting of random n-length strings.""" - - rand_path = '/' + self.random_string(length) - - for junk in range(2): - rand_path = os.path.join(rand_path, self.random_string(length)) - - return rand_path - - - - @staticmethod - def random_string(length=15): - """Generate a random string of specified length.""" - - rand_str = '' - for junk in range(length): - rand_str += random.SystemRandom().choice('abcdefABCDEF' + string.digits) - - return rand_str diff --git a/verify_release b/verify_release new file mode 100755 index 0000000000..7809eb7a71 --- /dev/null +++ b/verify_release @@ -0,0 +1,230 @@ +#!/usr/bin/env python + +# Copyright 2022, TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +"""verify_release - verify that published release matches a locally built one + +Builds a release from current commit and verifies that the release artifacts +on GitHub and PyPI match the built release artifacts. +""" + +import argparse +import json +import os +import subprocess +import sys +from filecmp import dircmp +from tempfile import TemporaryDirectory + +try: + import build as _ # type: ignore + import requests +except ImportError: + print("Error: verify_release requires modules 'requests' and 'build':") + print(" pip install requests build") + sys.exit(1) + +# Project variables +# Note that only these project artifacts are supported: +# [f"{PYPI_PROJECT}-{VER}-none-any.whl", f"{PYPI_PROJECT}-{VER}.tar.gz"] +GITHUB_ORG = "theupdateframework" +GITHUB_PROJECT = "python-tuf" +PYPI_PROJECT = "tuf" + + +def build(build_dir: str) -> str: + """Build release locally. 
Return version as string""" + orig_dir = os.path.dirname(os.path.abspath(__file__)) + + with TemporaryDirectory() as src_dir: + # fresh git clone: this prevents uncommitted files from affecting build + git_cmd = ["git", "clone", "--quiet", orig_dir, src_dir] + subprocess.run(git_cmd, stdout=subprocess.DEVNULL, check=True) + + build_cmd = ["python3", "-m", "build", "--outdir", build_dir, src_dir] + subprocess.run(build_cmd, stdout=subprocess.DEVNULL, check=True) + + build_version = None + for filename in os.listdir(build_dir): + prefix, postfix = f"{PYPI_PROJECT}-", ".tar.gz" + if filename.startswith(prefix) and filename.endswith(postfix): + build_version = filename[len(prefix) : -len(postfix)] + + assert build_version + return build_version + + +def get_git_version() -> str: + """Return version string from git describe""" + cmd = ["git", "describe"] + process = subprocess.run(cmd, text=True, capture_output=True, check=True) + assert process.stdout.startswith("v") and process.stdout.endswith("\n") + return process.stdout[1:-1] + + +def get_github_version() -> str: + """Return version string of latest GitHub release""" + release_json = f"https://api.github.com/repos/{GITHUB_ORG}/{GITHUB_PROJECT}/releases/latest" + releases = json.loads(requests.get(release_json).content) + return releases["tag_name"][1:] + + +def get_pypi_pip_version() -> str: + """Return latest version string available on PyPI according to pip""" + # pip can't tell us what the newest available version is... So we download + # newest tarball and figure out the version from the filename + with TemporaryDirectory() as pypi_dir: + cmd = ["pip", "download", "--no-deps", "--dest", pypi_dir] + source_download = cmd + ["--no-binary", PYPI_PROJECT, PYPI_PROJECT] + subprocess.run(source_download, stdout=subprocess.DEVNULL, check=True) + for filename in os.listdir(pypi_dir): + prefix, postfix = f"{PYPI_PROJECT}-", ".tar.gz" + if filename.startswith(prefix) and filename.endswith(postfix): + return filename[len(prefix) : -len(postfix)] + assert False + + +def verify_github_release(version: str, compare_dir: str) -> bool: + """Verify that given GitHub version artifacts match expected artifacts""" + base_url = ( + f"https://github.com/{GITHUB_ORG}/{GITHUB_PROJECT}/releases/download" + ) + tar = f"{PYPI_PROJECT}-{version}.tar.gz" + wheel = f"{PYPI_PROJECT}-{version}-py3-none-any.whl" + with TemporaryDirectory() as github_dir: + for filename in [tar, wheel]: + url = f"{base_url}/v{version}/{filename}" + response = requests.get(url, stream=True) + with open(os.path.join(github_dir, filename), "wb") as f: + for data in response.iter_content(): + f.write(data) + + same = dircmp(github_dir, compare_dir).same_files + return sorted(same) == [wheel, tar] + + +def verify_pypi_release(version: str, compare_dir: str) -> bool: + """Verify that given PyPI version artifacts match expected artifacts""" + tar = f"{PYPI_PROJECT}-{version}.tar.gz" + wheel = f"{PYPI_PROJECT}-{version}-py3-none-any.whl" + + with TemporaryDirectory() as pypi_dir: + cmd = ["pip", "download", "--no-deps", "--dest", pypi_dir] + target = f"{PYPI_PROJECT}=={version}" + binary_download = cmd + [target] + source_download = cmd + ["--no-binary", PYPI_PROJECT, target] + + subprocess.run(binary_download, stdout=subprocess.DEVNULL, check=True) + subprocess.run(source_download, stdout=subprocess.DEVNULL, check=True) + + same = dircmp(pypi_dir, compare_dir).same_files + return sorted(same) == [wheel, tar] + + +def sign_release_artifacts( + version: str, build_dir: str, key_id: str = None 
+) -> None:
+    """Sign built release artifacts with gpg and write signature files to cwd"""
+    sdist = f"{PYPI_PROJECT}-{version}.tar.gz"
+    wheel = f"{PYPI_PROJECT}-{version}-py3-none-any.whl"
+    cmd = ["gpg", "--detach-sign", "--armor"]
+
+    if key_id is not None:
+        cmd += ["--local-user", key_id]
+
+    for filename in [sdist, wheel]:
+        artifact_path = os.path.join(build_dir, filename)
+        signature_path = f"{filename}.asc"
+        subprocess.run(
+            cmd + ["--output", signature_path, artifact_path], check=True
+        )
+        assert os.path.exists(signature_path)
+
+
+def finished(s: str) -> None:
+    # clear line
+    sys.stdout.write("\033[K")
+    print(f"* {s}")
+
+
+def progress(s: str) -> None:
+    # clear line
+    sys.stdout.write("\033[K")
+    # carriage return but no newline: next print will overwrite this one
+    print(f"  {s}...", end="\r", flush=True)
+
+
+def main() -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--skip-pypi",
+        action="store_true",
+        dest="skip_pypi",
+        help="Skip PyPI release check.",
+    )
+    parser.add_argument(
+        "--sign",
+        nargs="?",
+        const=True,
+        metavar="<key id>",
+        dest="sign",
+        help="Sign release artifacts with 'gpg'. If no <key id> is passed, the default "
+        "signing key is used. Resulting '*.asc' files are written to CWD.",
+    )
+    args = parser.parse_args()
+
+    success = True
+    with TemporaryDirectory() as build_dir:
+        progress("Building release")
+        build_version = build(build_dir)
+        finished(f"Built release {build_version}")
+
+        git_version = get_git_version()
+        assert git_version.startswith(build_version)
+        if git_version != build_version:
+            finished(f"WARNING: Git describes version as {git_version}")
+
+        progress("Checking GitHub latest version")
+        github_version = get_github_version()
+        if github_version != build_version:
+            finished(f"WARNING: GitHub latest version is {github_version}")
+
+        if not args.skip_pypi:
+            progress("Checking PyPI latest version")
+            pypi_version = get_pypi_pip_version()
+            if pypi_version != build_version:
+                finished(f"WARNING: PyPI latest version is {pypi_version}")
+
+            progress("Downloading release from PyPI")
+            if not verify_pypi_release(build_version, build_dir):
+                # This is expected while build is not reproducible
+                finished("ERROR: PyPI artifacts do not match built release")
+                success = False
+            else:
+                finished("PyPI artifacts match the built release")
+
+        progress("Downloading release from GitHub")
+        if not verify_github_release(build_version, build_dir):
+            # This is expected while build is not reproducible
+            finished("ERROR: GitHub artifacts do not match built release")
+            success = False
+        else:
+            finished("GitHub artifacts match the built release")
+
+        # NOTE: 'gpg' might prompt for password or ask if it should override files...
+        if args.sign:
+            progress("Signing built release with gpg")
+            if success:
+                key_id = args.sign if args.sign is not True else None
+
+                sign_release_artifacts(build_version, build_dir, key_id)
+                finished("Created signatures in cwd (see '*.asc' files)")
+            else:
+                finished("WARNING: Skipped signing of non-matching artifacts")
+
+    return 0 if success else 1
+
+
+if __name__ == "__main__":
+    sys.exit(main())

JPB!~Qˆc=퓶虐FφӌU| -=$N̒KjS/'/ OW[L6/"$p<ØSt?ȸRI9u)k=nJ3Ia*igpYM5_;Y{N|JCRE_,.8F@U}-j,tްc+.j)9}Xt\'+׵M\5vXW/иѺ!TgXD؟vwA½NQ[a>!J.ћ#eu v% X; BA憍ه+mZ#Ď6V7H-v%lp3)kb5cvEl˺-~S_Yc6zG)zyqI|e(BSI(#vZ>7kյÖK{?PVV3,8QJzZ]k.0պכ]VS\'@ELfЎR3ggazomlT(UgUG -,cC$XUifWiQ|9ݛH3;-YO mpxȣ`Mð(eg N:j%9MyB"TM{7̤ `i\Ĵ#O͢C-[D5t -M1 dQ>.ڶFˡC |n5jcJvQ0e񹏋(h8~+e>˲ wBkbejaa-A65ͭ!OG}T4Xcܭ (ONJ& j\˾&e^V/d%OF!.tqgY6~Md2e;K<ѕDXqZ05Zgޢ|L'Pp\v4MeK=Fu]ȫˉ pirGj91iR#胷nsZZ/]1U]귵eȲJ8Ho9>/p8=24I\DQ_9Z*?x渍ARm~?1K OT-2&hʭUJmLXt"Mtr^ez؎UZV{_{ IA@5!PYކE_H'qM\,mwSF, ;]/J~ :GQ9tDpMQZFz7Td5ZQuC̓%uUkjl!ba;UI&; ;-J1hshq5VAOTC*fb`5(&SkwXW0.֦ z&%MXc7,:/yvoX(zO6 ʇx;f(0}N= -J/y4 -UqwCg>Gi $ ;yeYNea!s7ñfͻC;0^oM/>snuqpe˙Z}Φi5E-PnXVw-ꬓ -<7e,*yW6槴Ւ.SV' j_*j Iͦg$(o>Ғx<%cޛE:@ػ@ś(d/(]'_ R{\œaE5- kF͋ٵ'yK(L{HG䣄<7rrh(~ Kڝ&kkK?o>з[0\bK"6t ol d`Z cޭ&I~=k阖\^4)]COZH-f2qҹ^|K0q\`0c8e9]wYF}'7͛~Q u2NhѴkPA4( \m:SKb9iӶ{n\ -_V=́bO:03!M:A~Ӊ?iSkJUpt߶Ve+X:=uf[q{V'&۪Wk p߶ӞQ@[`V+* {2e z8iuv;bKD5ۀي鳒4kbEm{}%p`n%5Y,?QwLB0¹i"6%:=t=`ldzX<:,  ۄ_mº.cS:ΪXΐ-!=Sժ:mi$KSF ,^ô[[lĝћ?"mŋ ͒%>oE\% ^EsQy{fCLxڠOy -֐5M+ϻO21}za#q{A ƎJ́  @40G8"fogd3dQ"Bm z[Cuiv3<`^͓=Tg3} q' ˴ ҙdpp-wS 3A㈗*f>wIl@]'scY5Z8,Kd,)@o -R0LgWFI8e86o{[kbRm7:NdQH exzkio.6Cs}yGΊtq|{-ErwZۙaUC}cq\jT'-vk^Vz !i +(dUIv-jɐ_I9Cy6BN=- iU;\Lvd&7D%V `WbvϠiFei'-C= -);~}+"55[vaZ%dY ;6*s/KV6TvzX5L_T!r+iY.u>QqtM !Ek&pdS$eOk+_|c|Y%K?c'auw*yyY=J"vpZN6`mE=o( - ޠh6'QFuXH+RAH4輬jn(V}lQ?_>Z  ZCw9԰7K`yX*~y# n,JJBtgXjiGO~DCx-sMI|5Vºj* q -dnq#Z}EcR_0nJ>*/xcI%BP2-[U!yέݾ>TȚ1p6 cUO$9*NZ%Y`1;PN¢ - $58369`Q -"7@Tْq AdbA7<\-U,8sQńq>]6ڛ۴?Ak8>٘-^tB޻Gx7MLԛ=S.?TdxVk'V5ziirSwbE5o5B4Y4pOPg`WH#[ͤO|m -)GZ{Ϝh6k2Y}Ԓu1!"wfH?J6*-pU!*uDgiЌ0zXl;f$tmٿu0r9E/{I6Opa&}ae`,C.͒!ajeۣ>X)exMؼ -/c2n(u]/ջ`8 1v)6u3ƇksYΚ?U4H:Mq4pnYc>}ws~&Z vH6\VA d n#~\es[]5K:LRk1OyϨ<\]Se85F<(M6Gwϕ4%|tX:Gŕ\kM -N1i_){E[K?.rbIla3Y8Y2Dզ( !qZz;FCef=:aa\ 7QW"DB$QVFoLe=oX[޻:W7qnS)畾[{Le2PݽBzZδzbTVy'l WvJL͘/˪hSwp*xĀH˾b!VxVⳍ '.52>m!0͐hS!Rho*nԷhonabY,Wx<hZ -p;ms+a,RӥRNޭjP,9Z1pXm3g{{d7W3T~oKw hF Jlz lL6#hXӪP[40^>.Y'˄si -\3%Rźƌd:#jchW!\_PeryF+dY-8,˱h*f%}B`6@nǐ;^8p\umF7 -xUTZxvV^ľIKRj9lH+šߵYS&̠Q@?aI͓+_/=VmpnkqQS]]X3(aeLM -8#{[LgU_Zrn^?[@%T$E6ŊJB3!T7wTJ+Vm5D ҳSuFñA6h]KR"e$fqLuр`OLN`JTR.DyѪY4f)25WC`0`AuԀj8taγR>:\Ō„"A\(u0,hohӫ&>n7\o%j~(% Y >J*Nn 3NHj6O~du5>jvlp|Dd9:N5!yzŸ{$b wQd'@24]fO -"U.|ylsMj|TO}%D/f[b{ PD2)(^ﮆ(U,g+jY)A_M}5oY7QMXr#al:,^a4Fo{H_<$:y:C %KP"?d8wv `~Re@cbZ\8nW&лFiYz˴׻NB|OH"ǜEZL"ɴDZYToTahxswI:,gG<fxTZ\# {}Q;jGچ\7g TIh{q!Ev+ZUTn҄啈o,fk3\敔l-􅪉J͜R=4.Y4񖻬isT7m:_{]w % V*fb2Z|0DȧցHi\5]ќXH;͹I@0R $4DBײ/'#7Bxذ:A#hߧ v _ﳘB}RȱXY-+X]qz`Y_:=XU?g]vqZzw,K$ ЬR۪O=|CWa[diވrVX-;/a &:B5"Z@'" m/DʒhT!{8qR}#v\8+0Փ@YեBim MxZAYTF0(SV=M>~EGyu׾O}{ϕ?Lت&۶fw(R· DahΫmk_MS[y'QHg,X!'>?{5cV]wU}UzꃬrQJcv 6СunRomkbj(XB`TT×[DYRթ.8B^vZ4P]m[us[0΁ҦyѬ-&M]$n:FTRj\*{f(RwTGm QuhP? 
,HAZ~uaBQXZH-JWU*ndӜ;v׾9ឹuKQ U8yIJXf- EpkTz:ئyyYwOP:wJG["zQ؏ܬ\VYMm)3J茛AtދAd_Xb{9n1)6 ʍ mW<쒔z!nd=1 4SBd,'agUG"JD;|5UJhVײ?%"sH?t;8 Do_yg׺7u:ʺ~r -e0e4ۉbg/ѩd]c{حڢZ ># {6m6g -aQw߲ #~A&>[P9¶KK4ȩ (4[DCDl=Lm }J4&Ӭtl"ݥ)"::LA<]sM-R!e_^c': -5@)6:bOA2DbYkÄFn[)Ga *)]1ГKE/bG1uZ!QEt:eZ`>'J9e4=SEZHCs]+r8_,T [6o.wX@8R ꘵Yo}x~  O$ב[H7^+؜b1૔xZ#jݵ<DuMeB -)oiiVU:#.?MJ0mwC&&D=jQ6Z2:FQMܒ;O{;i~$asXI䛇@UF <膤PX\Z?FkCKj"h![H|SHK1kS -n,w?a  -cn۵!yQ$+#^(R`HͧO1z+?͋c)(k}V>)NEMdoաݾ15/tGnU6wUDH @Đ$TGGPS=rUVzxWQ*HӬZ+~if㶖cu؟,m2|Bb!kS??m?o1FK(:>bUҒ6U)¦&e7ZqޑOP[w:v/R"@}D6W 1EKQ7SR> r$zMd8GZpy!W30vnQ u8+Sj][\iS#w7pM#B*"YS_OnޛRKqɅՌhOCBяmdӚM -mIYx=4&gI[A v%n̿%ar+MNA29(s:J~CH0=}kWuVǽcuAZJwS~+}7o'U$:'x/PbM*,p` -XY.j~W,,fT>ŰT7X)`& ),؀",,֔ XyŰVmX s>b,6Y[+`2el,F,^g8`2a&Őz ,FH {\Ubq~d]ŰbX X Cay%gQ,6 X,WW7X&gتN,6,剤b*XgMQb*WLpd]$6qd=+rŬ+J劭\1/X[C;&0 WqaxnX v+(/\zu xXRb]U\NAJa\1,oX?/\ޮX&7WKju5\ۋ+&[RbX\+rŰb\.៉+~_9w?b WLG\1 /Pbl5Wp5}p53W5p\1} 3WlW Ck +\FW!$+Lvp0sņ]-@A c*)\&1b0bis~k)}rZ+WLN55O\&+z\ˉ+6 W, W k\Fz报a报1bM*iW+bJ+|r+ka,\r1sb W Cu)p0 3WLXS`1k”WcQ}Sbhb WLR@3WLA\1gNbO{c]bdR_\1~JqZioӞz&;b+&a>b W, W,mc_bWkbVÙ+qT 1[bbwp0s\1\"3Wr:Y++6EgWK(\+&O3WL+a,\1 +U -WL@+f+ַrŌw0L\4WksŰb\Z5i+6Y&ذWW`כ++FиpWIqmTAWmWmWkҭ\2qņ5bP+Wb!sY&,/+*\1W`{6?eMkJ bOXpZ+W;bX\fJmp]db\FpŘT\1cCQb2sņ5bו+FD#VbD -Wś+F0r8#RbVo WDfXZfX++&Og텅+ -GDrz X_+\OXpVpŰT7WLϱpŰe/çp:Y#\~X?bDj*WǛ+ptZb -7WLY+WL+FXr@b}ю\b]Oɕ+_4sŸ/Pb$q^+\1Y -WLW k助GpŸ9oMp@hbt2V +}}qňUBم+u\1Bfo-\.OpdyqŔb\Dq+\+Ɣ<9q4{Y+<6sz.+Vb -WLOW\1~q4\pf1+Ƽib]WLOaOnJW}+8+&w&:?b+ , >83\bT7W)-5s/R+\1vO\1Yk kaxrl6sňbbTk\\bl+WLS#BPb-k™|r(\1,+&ˋ+rdbI\1rŸc+8Ë+FrEbM|ϣrb*WLb抑xs #Sb|+\1>rW+\1f_:sd(\1,/X;z횹bX\1rŚT4&{!fW W fq+&àa8\1vaa,\&k-mkBZb*bT!;1Pyq0\&߉+&(a>hpdrYNSኑzqŘ -Wt+FpXf+d3sڽVfOISjm⊑|qb r$WG)7sŚc'WL](Wi኱࿸bUbLK+'W cqI+&'WLp촉+b-k -"'W+u5'X%+$y+rgN |)F9sŚ8q/Bኑ-\1R/fЋ+uX 3'Wmniڝbo3WLc\ -r0Xb.?+6:I*ك%b1Üba#i>s ;W C#N\1 r}gXsE+fI\&*V\1(g bqJUkb -!|rŬWlpŸ+֔)/WhpK\1gX#4sŚt?b,!+$[ኙ'kK~+ٕ+]b+VzhwqXpd)\1,okpbX\1SƿpX\1Y W Kaxq0V3W Û+cQbX*W k­L\1?+F؋+#I_bo#5SbSzb3b]$+&KW kabQZbX!Zb]+Xb+&'W+9qHUXشrTFXbS‹+bYb]\nP+F]XoW3+WÇ{q:\+\1'_\1+f+VK+rŰb=+֗`WWS)g* -WfS#S}ѕc(\1\Ӿd抵hx Xwy+ gn\1Mb䊩p,pŸ7W̬+Frr+޻WpIpń䊑..\1=ٙ+}qx -WkM\1WL5>WcMQQTb]sW WpȻGb8+֔bJ~rھVASWQ)0q^2/Se+ʀSbbsXb2\1;+Ϛb -W Ë+ַ;b}*Wc/\1c/\1jH -WWSaS8sdbO+Fmh*IKq6 ÄFIם3ä!c謚bRp6sdb^\1+4s6y+C抍'X+:BC]1+&v;uE+r\1oƫyIX/`Hod skAbkDQ`/d1 fEa -YLOq!!b(|d-d1,,>zO")IkʇOd1bf,U,|La&Oƙ,LS_'YL,FC!Q"MQb-b$8db bꓛbJ|ŨbB{=/?PE%t\!ayd-d1E/ Y$dNH^zԋ[b],&Mźdg7YLm,g!P.d1 Y]@%yźD9gs,,6W'!##Y+2T7X,f?g&1ib,,M^qVD?_d7v& d2M kJFb(d1Żf0da Yo!>8<+d0'Y`!d1"oⲅ,W!)"ZbX*Y ˛,i!Rbv'Yq%qd\C.d1j YYN7Y k%5N3Yd1YgN,d1},fy,0>Ś(Nd1,FRZgU -YLBO,&LWbTņe&u&5sFYLe<,&=,F,&-BSqDEK9<d1TXڂ,BMZř,SUb2&A MkyJ>b2,,Ek -}dL&)4ŤWb6>>bMb3YQbe MV'Ũ{Zb -bibi _b7Y!Pb*YlXfYg5,IȻZ$YLB/g YLͨ/JRb:Ek} Zۿ,Wb*d2&b|R%oŰT,/Yg,,f,֤;Xl+YlLda XS0a&5,>bz&,f -3Yڽ7Yפ$UZbo,FYH% K dv?d1$LC7q&~Lh&EjpJ+YKa&$&u&e&,\b,/rY+Y ,FJ% IEZbX*YL[O YXb_d1,FL#JIcTbX*YlXfY'Ť*IS9D}d1Ԙ_d1,&,F'Y [!a(d1IL#cVbTʼb Y C!axdb:k&s?b]։,F]!Iˣ,ֵ%5LbdN}"1mTXZfX,$[ItfXf0:YU! 
Ld1fX_BcATb2Ld1I Yd^A7C!2d1A*d1 -YB,FM# \b -XZf'8gvb Y Lߟd1,&L%~xr3Y/, )O>:'YLLSyDOd4F PDc>{4fo~!}0 YLg}db -Y Ë,% z'{"q"Ls")Ib&IAv&u~Ť:p -Y,b*J$.d1 -Y, 3Yl,,o )=>0 d1g>g"eL#[b*b0ņb=rH #"j&v/dʂd1>Md{"-b]M3Ya"_JV/$ -/'lf]f#mƀWnخSpig6`(0H-& sȆxg—^jf>jʿ'W؂O]n:Ll/h lAgW؂EOJu-@TLƙ^ -ig^DWjd[D/s2[D/e pu z{"za 6 NJC,@/lX - AI sUIbP2^+V/ -BT8HIP"^T((D/tY30K"xxiG\H{0w:R9jp5i9^*^*IZә -L{ޅw]X:se+,Euc0^2gR~%v"L.^wNo5MN?/]Qw!ѪҠw.gߛ.FJpMb#à]3A:<.a̮bc.H= \;],v늿g\WؒuXHE8̴.I#@l.ZV[ %kW5ԦZ7VUnK -4uxRH]%뒰uL \X.}޳]Q*\2c&cJX.U-KV6‰bѷraٖU,r˲>`WKڷ*w֪:(TZ(~Cq*W -kXeL .;(WuH蜶gA<R쏪r@ՠ\Ƞ\bE,P.UyK]g'hNB$esCLtM5A*J(*J-Pa\P.ބ޷QWr٪V-H;\a ]GcTRи4ҔQ+q\zgp\X}C8.Yvt)P3&^c8., -k%KzDw ŵ1e7U2)p\eq kP,0[G_ -^/k3%`qV`yFqcDcHz e%UR.1ZV^vbpIMp&I o9N^m 憎5[HVQ7)[BH,ْޒq}-jB]Iaԭa3qK9Q-61Q智uz@U"|fnJvOц#=A m]m,m̸ְa Fbɉb\- - %oKrFjx[D4?o3_x[9T{-zCJ?]q[-v0i ZfVf[mI+q[2p[ܩ.p[n{/nK7&gKj38[Rpgf\if)E"fKȂ61J/I[XLNҖDϤ-xIڒ#I[ho1*<- a &EɊ%)JJRh۝-Q-F[am Y[M -2b}0(Ojjj%™-XڒRڒdڒ8*jKXEh -YUČڢ'GmCa4gY[6I}4|ᰭ۲I Ʈ<`[mj4- -WlKbV3lKFàmQ- fږRyжx,*s ږU\km1kږ AZdr|9}mꩽm@um0 ߷}`[:0 GZWhfALz=/? "yBf~ũ:G¾"O-Kl*d Wa`lhG`T%'"|l!$Zג D,էilp%%Ň/ kW޽0,:ҥܺ<`j.H-Bb圑ZR5Rkt",H-skBڍRKM&gO\ ƻ!}]fԒ}ԫjdQՈZ:M#c&jIC_it"B\q \Ijij8$x'Z<] q:23P ޶u;\(pZw|nlҺ(PZ,($I ?%1UKv Ѻn CbTLѺ#-fK*0BЊ"h8Kk|ͫγIҳ}_+ j2؀rI"y+eOZ8+yd'%jmļڢzԑW -nW6ÎQ♥RS"D<-XWјC먯l{A]mWKՊጹB`7ۤ%τ\uk8W AbčKB7T򭰩Q1%ݪ_~p+׀nt{`[u/l+iיh+JKlERsFyYl\+}ZYL/ - ␍"*8#Ծt쉴E6V[Y,$s-em^dYG`J^ރe$RY(YVneeXsq&Y)$fx]W պZWHV*56eF]RAYɪddѢ(+a˟PVrxeeJg띔e,6#R񇢂3JVEdA9A 4vj^jI`rLrZ'D P$IVX,~|a"Yzd<$h@A @4g\NoYiqBd -]e[J'gXaS0h7ZDdXgRP0kDiz0O~Xaz9_B}vXa]Ԟ+*uY#sZ4}XiEm*csJUtJOJ{c*ƽc%#}Q`Dma1VR2$KJ+ڭPKo"3JґV[sI+m rU FGl,*JwQrbbd++i|+,1V}hh9Fbx?f*5uJW!+kzX)VlYCX*|(eiX0VL*0 4wU$+Ex~3J[*cFG`ViI{Xi +^OJIV*$+,mX& -n?͖)%ou`dNTJ'ƊcOtXY ꉱR k_`HGYnXIO S`\+%Ji03Ɗdq`REѝce+Q茱Ri޻XDV`}RVH90V'Zҙ3Ɗ}E3JcƣzE,0VjHPw`SXƌҙԣfXytB2% UvރbdmĊ̖J RQ|@+ -b;5 {9+rRgUpnb}VSTdͺʤ8VݺsI -d c>1:JZbA(-yĊ +%wiF93Ữ5$a؄>3dULjYZ+FmbS: ŊīN{ XU -13`("e\]/waX8pv0_["V5iGAXI]7ŠIY`"X]YmQ3!W~v_4dૈ(kϨW)ޜJ*c+j%G^%'ՙ^DW1t]Rm駼)- tW,*}v0M tժ_Otzy<F7YVə]l/!U -I4KeN厀0-jnrgyU0p̏ZHWتefWi'*Uep]W[tU_\F>"%ʷB1"FtL"WNĆϷ*'tAi84UX+Uh]2U4grUǿh\UZo$誎u[Ejdk6Jv]%®ZB! 7EaWaUD3 -!_vZbWӟU>*=.]F&TX3Ԍ@t$U=BPORB(Uߡ;OsN*\ ( W19ZtB \ſI:X%pD &n@@]WY\)-r< wpUB?\7[W\|UEA*DB8LMF/hZCgҖҺBR)icLȈ+=1 tV(U]%lPzSUI tMK%b7Nl($*mAtU{= U*FКUI9,Js=xv Y U& 7/Z~U?#P~ -Mط*i}~q_Eޞ0U^JJ*aj'OJ]:LQ._Wm!㫶*n~#>*+ fT -|62covia'f(nlXUc)4 -z.A"UTVȬηǭJօUDL;̂# ]eGퟂrz] rw7ň\5c'3Ă[uQVnU:43aYDg]cUe^Yf>nZ` &JBgU((h1^'* 󧈵>)*t`wP^ŨSNj Nxo62Zx"SFAQSeRf*I-u'B$I]G: -gDZZT#B,TiMGEx8Lਸ਼qTIuDkStk>9/QE4>q@KAQ=n 2̡TbrR;%B z"S Ըx<T*5D$&XB"lJ]LѶx{BO-!MeRuGB"xܪ Om%LmLVR=ʁ(%7'}ǥQSײrLԩk)ݣ37Ot%ƀN拿kpS lrŚR*7)N7 -Y97@S[H-:jQdKQL VfM -&”0u(ۥ35+w^t).TO[h --RfQ9QiaQefH)mVxPJa'x6%JE &ņ΂*w51V-%[ ^%eMnVXRk3fI*EۖV&eF:n `HY,5'e6Tye=yqn lS%`RL -LD0cӤ@`-\̓`<):83xRl+LI51ܣ^s}'nK?;pk[ -N(0<^44) wmӷ#'ES7-pRRl8dKwKD pRa -I4)(Th=l4uԢI5)7q3gKnfc+cT0%㤖8)-#(vvh 7AT;( T~BI"Iٺ|$BQ&Ow"%O eQZ T"IwvE`QRK } Kg%Er!Wrƶi()R<1" FIT)m<3BUT+7^C -e 9xИul~bglE)U_y͐hDŽPEˬ:f5tt׀R}}ˌ_VPM4^H =QMH/?^ #n}gF^s`Q_;xMC&W,:8"+m{!|_řz?D|\-m8,0Б,eKb PJJ=lAׂ6j)vY L︜b -b)WmUKl'֏ʳ4,IUgGY[0Bl' ~|Px(Iɫq@RAS8aIE:^=@g§^өcI=nV+ۿ 8?&"Ihq&)%sgu+4[7xi©VbLU(<m3o:N)7P'+'׬jmIUQTĥ4aTبk jన -K&V)wcӳNLLo3kHظ -w}Ttjo bɧr[3;;qqoKJت ?{, -'vo:N\m!JJ2OpeWk>}Yx3nsn*!jg1Uuj"V̍a`,O OPbJT8p+fLN!Zu{1 )~ljpd-c%KsZŴo;h/줿~~b)itH~l+{Κy/WJuLwofH[|8Rʁ0YX?צm endstream endobj 116 0 obj <>stream -;0'ʪZu`+Znibbec&_VFh][z f'ی<Я2BXwRH?NWj|syH48/NEP36ϭj3EEOm*xLM=.ʹ T썕 m)"&Yns"ci=pM4sZ.%Zw)2AV+տuE QLTκBd(ҵU\( 5NzRϢR_\U^=6'rߤl<ׄDc)wyЋا)(]RmeɼL^S7"$Йbgf+SӬ⯝E%b?W#]t͌TbHf=90')v*Zr?Ͻw@p1mpՉړKcǵ]R/2Qu=DRO4iFViS x2k*@-ڋ]u+_rMY.;aDZk2G7?aK\vA ̀~|}+!] 
5o܏7nPo5J'cMM C 6`qgPKz#T'7іBo>NQK[fERfzmUj:hY>¡-B}U2ɰU!Vdb} UWEY๤:e787bO^8U"_,_ -(K}oA{]`o,ECO/9)J,7qR&aMPiM4(ub/>7k騉q\=O;hՊԇԣqϩǽ<'ھ=nx/ᰤgjr4Ԥ:J"el? wPB/ ܮF~q2d+޷2%YC1561:k'x)Y<w}T pB=8Dl7GR.-,u߼_~vߐyqa~KSBy2Vb jIdpR#SiF]K a -ZW"Y|lOR΂yzTF4X_{r"/b$wWot[8'A7L+)Ojai63_N,iV%-r~i-HLry\W&`X|:xen K(%[t˷%X[lVh8H,q%(U<۾;uQ*=JkI}p P5 -:SJZwii#R_L8.1t⫕aWaE0N굗&]ߧіm#{9imBg,pCrUZtł6(/A`Bz16iɭiCIU&ߨvĻ,06ʎ BCg=Z)F6S@\ -)RH衬ti flu)E\,0TVpTM&RE[s!&A5&MBi`z9uJ06'+s#!lhC۩,͔O;E|ɮύS0z9}o$~n$؉,$YZd?.*湑`y`I!~ӽQxLrzL 9CjSEiB^[(rCtȿkW,j5UR!2eUBdfm&yzY_êTw9%L R'`'E~;'P<Ϯ&V=TjPk -@0j z ԗK>9-KXb9_×ZQ4tϠyPߴx`;\mKge<_N!@#)ƦB-f+10$ ->.oBnMYq`R}4UXE80)Йj->O# -Դ^ N Kv+'} ރFz9m5F;{٬֫{32/X2q'L@$i$o`ԉ$lb2tL+ZlsL/$jm ê6`ҕ$ -kuKoђߤs3j( Tu&eT g~ϋr0Z3-PؠwRJUo㔕2>}PjBi,EQs)'FTd'HX&DSiDOoqHFT}]2X V§+dAä6c `bܼ^aLaXۉ9LP%aFNnFmK~tC9|j=a|mkg mŜڹLl4˪bb3BeĴL1#8N]U/*湟kEˋ#w,}J'I_,mP gxhw;oo 'tXA"MFxTEd!죟S,hB 9/ߗcJyk̀/i(@q0x(u7HJ @p^ԇt}mUa 1=cf`8gZZ'6t wM:uKm MBO;1QMSotZ5ޑu*8|vk>6@EzX\f$XP5#N@}SK< b\\T2ElRXQ Di<+jIU|;B(Mҹ=|i+~%&;8Z8p!Ӟ{>͘ݑXigGٰvEt։*tUF5Q?9vg:FJ~d_2hp{ab,`f+XSǭ1 tezBԽ#/kN)*\'vg@YPGһE4RrϟV0(f"BSx"zW^[0hW}L:xh>&Ns$)]i!5⢉S F՞> -4J7lbqҏ.~G)\:N=>YLL.ݼR˱el _h IS8vN -~/^ -b18a~eZYBrå vYbb$㾽F~ -jdr,SQ->J9u+(4MK6q990P'Nc%SxoVf1i: 15BE -`УN*]3f=HR2ʓ1|FҮ -(jPBf#RRA)=Nӄmp8VgHRr]6$҆e g:z&ţRzzwj޵k])A=IS V*T -{yj@oZol٢Iˁ2B j,q(IPOdwEcki ăîXHU^B|sn66~;&oAݵ@It_5?ɸ>BUS]Q,۪aS4x a>Rv- jv:Ef fR&K+ kWƶWâViylrgaP.]͉bI!D'ΓdҲ -yvf 5 -ײ=zj"%lgsM۫pYd;y4Ɏڸ;zwkfEa"ՆZc]qבJL]|S)Va/կ*;U -<~qhg1~K?k8Sޙx-ԧ$dOA,۪Vbcü0 v:dKGGwF!`m'}ˠCCڴӸBzqh> akb ^bV+'{ӎ"!x4J d1iZ,)0$_@UY9J(ª}ܘ0eFfлeHjmR,qUaߝPmD7m4CJ41+jŔb,fehcVs-et>n빼f=E& &aM:HYKx4fk4n0VA}Na.wg,UJ|nI5ȰM"Lu*0BY9^^EJ2h @7վԂ 8FM>m!ab43A9;uň|tL/a]MRܜf00vFp/=xj{G9֗3 j1sC% Ad5>@jss:.r稸^]a'*s\f[O)ϭ-Ap uO~sV^L7Uxښo<ŧ V_RMuTJS("ocQgY2N|LƇHD0%f  (#E;lm GAxS݈7*pY7OfocId]Kjḇ!c6ZeZ*FᄊBi d3\vm8S.FOKUԜ&TŻi3o@7he6 ˶"lp4a +b؅& ct\mzMQCŠn"dRhw<&Nf7VO7 Jd'ۑ7 ~{ă܏|?tü2[i =bx0bCR&G%2tJqx|8TW5lK/lXcK LӖ~sS'KGͱS2 .T޼&;=S)_Qݭb wma4E?qm|= pf]Fۄ8q?TZؔ3w9 S5 oaiۺlrPqt=qWh?ȑ׬BfG=EN.bȩ Ocȝkt}!wmƈw/EsX X&>7G" 3"47EN 7cVv{a)kE-E"gmFO:ܢBQD]a~lp/箑ށ&#yHrR6gFlY2:$L%zgm&)Vo0.R*x^-bdT.[fI:CӎT9쐲0>+OQ^'N|xx{[Za[x̰pkWǩmf8sKv+۶vSS?FO'%`LmU}ȂX+t6 jWpkEy9}ʓ y*(fn,HUJe'%#UFy{/9UC|W舰)}tDT Qdwb]뷿*%7Wd5AVL)rK,oPUD0?@nI*R;p2I}v"m&s -%[X.[~i)^JZKUz_vnIf==8giz -6wke0f\A[i=CH:! 
/Z1.4R/%uҍWFnZKwx}{^8d>mA -uMi@VX -.xO\L&k'óbD6'~Uv*\eZ;*86UN=o= -e -A e:+wC{)B_2TǩR۸[aֽti9p7tQ r5*_;A2QjE3ʽ6ïfTyQRڵ1uI= mwos[碜Xp GyxRr~\^ޣ~vsL _+"k¹*8wssמp4bWV2U-ιS8w>;Cs9~55[\ĹC);)_Ν¹P9wR&¹c9w[H%Nr4}9w bjٌ9wIwdƹ]0w0wsgEO}ߘ -KCsǭ*;sǃW1w#*ΞΌGsWbUʘ;sZJ샹#1w<s/`P -Ns'bT%F̝s7bJ}d ?;sqs'勹 `%*ch`: TGf̝ֈ_Y1w^s'Oܡf̝Կ\us8֬`T)R3aX]Rn 8:N#MJ$] sO*;o)s2΄Nju0 s'ዹV`0+d]RsP1wZ}1wt]=}m%c;WT]?c̝/NA銹q``Ȼli^bV̝XWbVJܡb:ӐT1w2&LU|1wʣV̝*NM_̝Ԋݩ`p`P 3 O`;+N/nL1w {~csZ0w(s_W̝|m%a;*NI/N _ܩ싹asV` p0wZmNb1w[ F5s7µ`W1w*dVsGAJܩ7q/jܡ8Z1wR*NspA.;{X~1w;cg]Rjg1w_̝]0wII>Fܱ"` [̘NS1w|0wIO'c̚b0*;)s'勹#Y0wR*c~1wV̝ܜ*NCsqb1wIu̝ϊӟb"s'`0C`L͘;N,;;MLsɷb1wIu`0w@b#s1W ]1wY̝2勹c#W0wLUV0Q]Y0w探܍EΘ;b"s=/ӤX1w0~1wsgJܙ򋹣l`P -價`̝Ζ s`1]_\9`ا}0w$ ΔLBBsGWܩ}bsM)^1wII؜1wн~1wzX*N+s'b0W+N/Njau0wQ^R0wA̝ԊR1w*CbP 33` -NU5sj`:\a)aKT0wV*n+si3Wv_̝˘1wIIGp`=jI;)_iDQܡ|0wR+肹/0w{^ɧ`1w[ ̝ӋU0wQ NЂ#Z0w[ɘ ssGq=W0w=\ -k3댹3_̝F`1w[ ̝^ ل1w -&UV6.iR*btG;6cx??;sR0w1wR*N_bTG\1w[ɘNks'^1wz -NSs'b4W̝]f嘟1w?;ɘ$VsPkO~0w H;` Ӓ1wfQ/َKm1wa#dS0w}sEssg֭o΂KJm50wat?eV2n*c0`h-;uV7!nDf܍v`8`p`x Nm1wsBIbs'`}0wz -Nc"`!;0T̝n_̝sO)BI-;˘; _j);)_ 꾄}0w72Nws` ^0wd/Ns/ʘ;]NbI;?;busR0w(̝Ԋ=`tq; -bG ܩ{bz̝shZ vH s;i;ܾ;s/^1w`P -價#K1ws;VU4sǨ񋹳q2aJ ;a,5T0wcCT@/nrI;5c찄1̝nOY/Nz1%sgVs7hΘ;ZC);`;}B̝> sG0wxYV傹S`|0wXiT0wzo+;EJ謘;ĂOIܩblc1w_~0w -U̝?ssC{/̝v_sGp`~0wD -/v`bk̝C` ,;OaW0wbsG⃹`7f/K;b̝sC1wJj|1wK) T1wD>;=csGt`M*SQܡfBY+h˕1wR;}ӊS1^}Gs􃹣 sG_WQܡ|L;`C -*ƢxάF3V+CϘ;*ܘ;`DO ;NQ/ PA]0w(o+NW_l*s`4%s9b4b/;Ϙ;~1wʙT̝bsGs $c~0wFܑ+;russG`Trܙ/bܙA)Ѱ1wVU̝N;}n`ȶ̝Zܡ|0w=\̘;}s׃0w4'kU0w\~1w#A`(G~z(O-J;}/\)W1w>; -NgV̝T_fsA sG9ci.c(`P j s'Θ;ʻ aIyU~?;VMQi;0w; `Fܡ;)]iҺ`lZI; ,aLq^laFQ1wuzƙ0w|NGsX1w];kK;:3 -_]~$]م:苹 cX+sܑ+;[{b)sGAi(~殪w49GyYܡJܱ`X];ԂR1wR;:O -Rs)ZW1wlU9sGT)aY0w90wJ:~1w3V̝sg_̝!3Bo~1w-IsǷ`Ȫ̝23?;*~ ݓ1w?;kɘ;z;,; Y3#?;},2ls`ܑ-;򭿘;* N_bbcEZg}i̝u%̝I͘; -價#^0w˘;ZV~1wsGn=cb{aQsܙO M;=sGsua\0wN승mnܑ>(;_Is5`40wڄT̝ޕ/ʧc_0w~_ӂbt;&e;1wkUኌƳh  sGB!|0wӳY1wT }0w -`_W0wR*n]; -^ -Ncsɡb42T̝ؾ;-*N0s1D&c|);)_jܡ܉SP1w*n ascOq%]RXm`$.b`p8ߌsX0w(s'勹M}̝nbxm9mb6Vvhӽ@wRXН&Nz|@w+莵FьݡН -) -tGcQ@wVtg^PtR@wI-;btGhLw)ZAwں_НtGhP  @wPeNi!'ݩ8|AwtG|AwڳUНttGssQ[@wy|Awt'|Aw4^@w -2 سfН - n tEJ?>;mw+Z;9t7ݡJI ݸ -t%%Ul5@wt Q@w2n tT;PaY@w^>;Ϡ;Bwtt`D(tGxyhv,;)_!C);;tt)Xoۚ; -\V2_p);b -j H ~(Mtgn`t̠;BtR@wݡJ'?;a -NR@wf93BtG}RYyG)a%Q@wfНֲ_vqXΔB[;jj -莾Ϊ53莨tm%;sq>;dНHtG/P -Ntϯ;)t' #AwȠ;{H~Aw*a;] -KJb[%AwНپ; 7 [B݅@wݑ(;*n)tgj~,pT^Aw=:"_ ij R@w|@w}M t8tg{ @w[5Н>:п;GQ.W@w(НtR@w+Aw 莨Hm%N!0b'tgtckRAw}dݩRPNoVݙ/ 2莚Aw@wf Aw.;etZ -StgjݙAwn;CH'(t7-;<>KI;vx; (4stniȠ;@w4Н -KJݙAwxp+Ft71~@wR+I;޺sAwAwR+k)C͠;N,;>O;6Ktt7Z@l^Aw3Fm%;-F*χss@w[ 6P$b2XUJ!|@w -|ziuݭ:uڂ n+tH;꿠;0 tt߮V2 , #tqKݡnНne*.aA~ r B-uuN)UAw{W\QR@wÌ@wFcɠ; 0);C [BݙAwfΚQt~@wZAwXН9 -dНb_Н+;BtGtxݍm%;{ -莿 #_@wR*C-;>+VIYdQ@wD]R.+;;* -N -yt*莕f݅P@wK НWt)X?;NK -3taX@wz>;ZE -N3uѓi!SAwtG -NStG7U)ZAw[ɠRAwt}@w4ZeНv}#f<Ơ`w/:զl,qS4]J ^٥:NeuBSqҚslŝ[F=ֽtuP}}Ngdި}]u&֝ aeyuS8Y_:t(`̰qۈXu&icB'${lNsS]R7ײ!uLuqsHWB`a86 Жt*~7uN)æ-XfG,_-f閊e) JaaYsX4\]H:ُ>, -$tZ>-:[b73wZ[eH<6W'sGKxwQ]l_9d3uxy9CSH 0 8ȉ͛^psY-i^Bح IxEcݕ4w0QwМ( gȊ|ݱ?ck7̵kGg^BH$8s>+gN9Ia98sRʙkZU \go6~™kW.͵+hi!9 0QQӔMY9)DhNe\|@s }oĭ!WHsR /Y㸓G -hN"*sڰ™k˄!@sĹqMI}^̙S!ʌl}S.A]ΧB.w^9|d9ĽO)ar%rF.f \ΛŖ< rk,'*晅nB-1r7/Z^{ 3Tz D0a#H%$ (zr`zOwz~$D<}tDVPrh\X4+ ZQ-y;."ȼWT8GNuf ޕc䖐)r[t6v6}|/!D(&D-hs2?nSjBÐ.l6܈ר6v#,jZ {b\#/p4~h 'װ+Єa}SqZ8ơЈq|°-@ (b -s ہcǩX(ר8q%/p åc,|Ä?:潹qnMHnuzw=bM8ڷ/l -V:C q,jԽ]8sc8PGk2+[KP㶒q[ j6 >ŦPi'#/( עzaS88b@w7j/lV26n5\pUqMVqḆqKи%3N&1 w?3N_>6q?J9\}!Pmsd\ Džkw=dT 2&J m ̭)q@1=) :|IIF<[|lӗg Pw'j =zO\ˉLS`luM ?32nq -NP_qhGi<;9q5oi6lq:k;)"xsýqcܴռq -(PpfPd&}K m%8Fm.1Cv#c1ܿ﵁q6h:r`B3/Dԃylg/xqdؘm%꼸~[u)g]̋~(" ޸8\Pŏ=cFp m3.ݘ)XhdRƀ.<rS5\\.aY mr 
ר.>L\#Tŵ}Q*!TYšݡ$ŊC!8$AK!,~4=$XjĉwC::ސB0dtn|s]qfk)EXbmpWp@Kh8-E08 2Rh &s_/.fRm!QP8 -4ipz$ؚ/(hk -w -'bfkG 2N*uT8B㲱j8"x=gT` -ey - - 'ebµv 3P]f΄Yf&\c7s܋ טfv[km33,Q2| Ā5]8u7,J8]-)ՠ5.^7?Qqo߃&⢿I\Lko(FR:ߤPcukԫisu,m+Հ_ uf-v|:Nq7+h육#e Qš97m+UcCR0~ 㸓Qc_oR}ps;M$ԚZ?-̵oGo~jJ l֚Wͮ*AUH~o[{sߖ&UPk07y$4u@MYG'T3]88y8{}QP K 'oR on~Sߚp$˂L}jPUV^ -{g#ߞUloXDnO,ޛMjq Dςi'}4 O.ޮ#Ohy[b`ޔ.0o W+7[rȨS!kqy|A]n0o"qivM+Yx7qZ6mivXFYo`ۮG`uDv8:f;v8MvzJ춅u[b`ݰC{(¥Nh2&QTdVꦑEuSծL)f9)d )qDº)Pf=PuәWHX7mШ__X7m%٠-\luT7$msDGln[uޖ(~\M|LuKAYy 7!Ս=_;JukӃqM -Bukjli4wQ.w13 u uk)nGq31_u#t+t#d)<Һәд87 Z.pn6/87禐y' f9/=M!l6zܛF|IJNs#8%he?BX u-<: -͈Tu: l:F2 s2ܨU[u2[inX47~Ts/ynQg@7rЍ=;F$ TP;6m" - tv3*#ȄH7R:ШQMS  ZFI%ndҨ5#2ҍ?k@BtCJt#>nF.LtxèAtk$n  f87Ӎ"n!8FfUF3@7*ƻyn&qIƻan<ݳܴ6&H(7뮨9$qcp0n*:[)n75a]Wpy<&ۮǠ> ߦъ -ؠi&}vn2 5%h-6;E|۴7Lnk`7!rLvkAn#j6pE"5cBMnSۈJФ6)6nh=nC^mƓmbpۦED'mDQ|86 -vӹmMQvۦ(6spZYdn*0wlviNm{L3C^oX̶wxk ^V -M8no6(FXibk]?BAk{}Ihm*=4V6U֞oםq -Ith~=x H:w7g`uXRBmM81;mzyIdt6XsN/.lfOGg_ ˭keBE!aW*=D)!d%dds!p&Ix@6<6P+<[IO=s<0ilc(l*3eQTfW—TaS-ؚ -M -`5--#k`k5{#ȌYۣ!؄l5=}״I9.KV+`wkW,ʂ5ِ@U&Lk;ko7U3vM(}yAOC#-@9Tq(rA\p7wͬfx'g=Y)B=Q5pex鰄WL5?y\^Laȳ*@ .d<2bM-@ =AXSlb櫵#\LLx5_r/4`76ZmZ'i5NvFcP$\~o Cjt 0ql4v*O Z9fVFxQLSʖTmO6r& i, -7p - BCFMc{1VV0Ԇ7.>so2[G7@ݻW~1 +vxAx0M}zJ MfrbY,6}ഁqŦU jkb|^33<82C5rjK4M"6 2cX l;+1 il Lkc;uJ]|-ҭVZXb*A \ bZf.ELjș 5_c*ڣ21"d;j -LPϾi% -`ٖ:6祑&Ţvֆr0Mr=֋F9,,RC @G 7G 0 -T* jF K}J6Xi6**_nҸ4ե8(6PHiZٺ$Him/XmTʂ~4[#GF B5OA5jO,Yo>x R*4-o2Hil)!3S=k# }ǚ+0ibdH~&(i? J'o@v-H5F;$ xp3iH@&I5xLPBsyɪr/R, 4)BZ{¾@;"M#5gDZpeBt2H4JD1.} ^F%-Rh. zE)gfZ4>kf4hG󸆧x! BH#2!-)ɵ - i4B񜐆4QDHC-i(4L($ TVHSDHbLH;ཟHYQT3ZA)Ea$_i "m+v,5ifi~i, "Zi(F/" 1#@=DFDHӍ4׼@)4;53i>MPOAHi;Oi ΰUrL @s- FlGCЊ8QKGJ2hN|4f>0'ᡝGSE)Gom䂏F9∶1GCe|454:΀4%c,4:b"| ZFHU Ҿ?Ey9ZB2"s7kʟ9L轄EFH-D%` rx$8r qendZBHC+iRx!M^&B 7wB\1AHf+!0 ,Bpsv̈́4tJ mTr=OԾ=:%oNVqԳdVz><03 -D7 yuE!gOd<{? ->)?UpgJ[-SK-3:o?έ-arD-.f€(VedZ&LƂ5s޳bTbie3n`p63-j)IXfASܷ51fMxfͰR۬)PTkvvPp NMW*,O*=q{p[:WlD*O58ַkҒXx -KqP -暥T+?eG>q7)s1Ӓ$xX;Ԟ}̓8Eȵ6/%>vRa&m=3!GERD/3# duɴ%jZ]n<<ǢikE /'|2ԙt*{2)>x=\( fw {Z#bE!^?VJ$\ ϝavߥL.bw)uVbNYW -3a|걵*/-qj:oYbR?rE艿WT<\V^oR.{yi9%&%OP8&M) >q(vf[&PZb9:5BK{1a|lTvī:6R &b[(<.7J! dv-I3'w w%'5FJQ1B*-Mv&z{33w;xht?#bN@I?ʽUr\פ.ENvfیC:^) }5b ؞r *sn9EYz},nwmbNe[]SaNJ^3[KHr/}GIpc7 ȫءċ"K]."?6Uypȍ;|=g.eUp^2%ʎ8Ԓ:r6ol蚩cg#2W#J.)lsD{NMOERJsF_+bBOz%rMT'h4;$9 JӇPpB,Nvr/.Nr~MC=,Ib;Xݹ|y\γNGcy_f)Y<~**܇ -Ms;:C5>YKJ͠ѱX@TYd^&ueEK)KcOR;b;}ek""cө5wV0OxlCi?P3| g=-eB38bV)eݮP p4SMյ2KݓҴ8nD<\9- u7GUٻlLO{6rdw)( EIbQoPkǙlkegx˶NP024]:3:sˍ,p>(mX bݧ rFhYw b9׿Hm;>/~82w,P쮩[S2GZb -61ZAlUi -=ak"< C88=9A ϊYTOΑQvOcU$\v "pmrӴ*#]t+;\UoM}*Pf*ME>gL~b'_f[]b$4R M]pHrױ{z6>ymR[y{JC\ -);鶼B{(NIVfwki7{es6jf72$ l\~$mR_DGi6ňK! -=%G kmynv@qyؕ,w|۸n6krzNe]xNی]lP$6= !˫H+&ߌ -pmQ'_8&k61h -w mzskKL̮j64mBBslFQj]0ۺeGq0ۺ[ 4T/I^}u_URG8(pU򦱨=,5tIsȶNza˭(*wc ۤ;Q-.m\l dT<;F)ZHa-r#r8oމJ'AnrGh(qRL , bԐ@8i9-fj([s,4rNbަ߸/^D QJLb)7(X"ZHiYmvXv -ꏬJTNO|J&Q@ƢXYM:ޡP$7?0Ojd[c&sdm"C*5nKU,E1I{^)KmuKi֭Yޟ"U F6`585i 3e;jnd$O6TV*x`yͥh##k_Z`,n5VKIg[U&M6 T/Ų ypCI պ)ݖbRIi -M%b Ys H"P #\ZeB s3NV$"|,BAހimݘcr }X`RgUJظUfKҬWf7dplܷ)ŭ!y%1 kj۽f; 3{T0/21}zqDءĹ~oR Aa\05@s~,UE$wH)Ȫ:'VH~pi]KeZEAO&mt -=={;'M85OuLKN,$,ҮRQ`IT+;&dI,6L1}RON[]X6TIcv7ԜLZR04ia뒌쥰1IPG8Õ>.wPYkҹbgOxPE -[-W>FV)hd^yTɤA>FeqY+4B΀!t{B ?i=zݣ=7! 
-*ɋEf 16d he ygALg_N;ZbͦtS:,lykt $V27 L{!Ȟ$FnAG>'NWʔgL' ]a<+`ʵ3%>^DMC;2`Wtlw[V '(|rwTi [|,)cyxvF2x,$H\f - -zhr=-zOݰW' -XwKLa$?;J@Jݖ[NFdiRɤO @)I:*n}COx?NƦjmYr ֈFŲ@2K^#lȎ_,9+m0t KÝ+ˌ\a&fq,.8,T)P6 AfZ ӊt.3= -JE 9ӳT {[ZJ:21r$; ËzyI&(jQ=Tgk z&WKxigY_P}22k( 7I`iÒ{qaqv*G!S5q -_ggqI˪>|Sd*G~)Q$qVՌỻ~Q(ITHP Yr=B\)zP9MLj֞tv 30V[{L.Rmr$vIL{`~m_ik=]ܒCQ EB -[(Bk6K)"7r8dQ%(g@uEciœY,%SӁn^ Ah"΢SvV`9\5w 'nzH#Omw /OfczX -7G1Z6iUHT1\jVV;K@6ԪgPP2qsTQ!Z6WLC{2iY v@a|J޺n6`ҷ@L`n&X<j봅$.ws{hgY,}xʧ) -XNFlp Z)n.9#}AAȓONI>Nw+i Hyk#PmP&LYi)(ѭy]hc9-M=W9r[hpz;aJO5rrkܐJJ{,%ZW=Mdw[ wd9*JW5ex w**rYui%P9[q\wk.1JH'J)yu6/\jpi@Zc~Xg[C eS]w9ڑpB}5.v@6@V15|氒tZtOU[Ns뇧mx}rl9R.(5NA֚rG/m}SVF*K eCO%JG!?p+ kЈ붘WsQ}۪T P V-ɟ= -9RF,DA#=T58峡N1S1N>?nZb-1.oGꖐHXU0dcKi#Z5g 5=۩rȜ2Xl+c͛jL(x1 %J42: -;-fUKƜwSa:;= ̕51%G D)2J֝~ױ4+aRN:Gqఞ_2-oQ;k -|͋Ї"fo(4[gf!5LS+Jgtw3$Ddf#ustFL)##=yŚv}RzOJ&)pѻ)]a;s/LZZaLT ujTbZk(-byŊ ȜTx@ 8FH ȜŹil9U@_0IkȜdo/ Wt5""̡jxHPG1i`Q/{DojWTWF vfvEqlǓ"VOœ!nrA6 -vzTQb(AC5-'jsgz@H3j R _@a6oJnDd(rzO*mŔq kl[!QThEL* ܭDAQX5rSwg7R޽wXydmjKjmxzNpӞCp<0ŮXCo ]FuKHkf,[G=?rkc-_NW5ST&-)͊_%}5oQٗ3/ *~@j/oAjs*=(C=v~:ŖylvVRaQC?Àϱ{ۚ-cn}fi5ϚTK润bTYf%U7ÞW1aH= 煉A5hn&EجNjs4 {6X309:?sdwLrY0G -9S(q=QIΡt?INJh cEHysLXESݣՒnLR| ύ w7op6$wB'oSq\#}[bm  MBMڍEg%C^Mbv2by.-Ws`~kp{DzzSJ*knFYmtWIy$9T D![|-k4=XŽ _n*".S7q~7{ -^ju^3+OnfXY F lY݇×EJPdA'6]q[)ԉ".uER:iCWr^c;#%қrQg0jmyʇyeJ-DСع4;5fI*m=okzAh - 5YuRA{O*}٬66c]WoAb>hva8j6ȋC3mrשO_48_yX`GZTW1,-|aס`+4jJ(DUͯXױ"dZ,vDPrB[#V,zYLqЪ *fudke2GT0XVy8b8avLE^;-efuUf;z(?G#P`0Tj]bFux̳hg5WhGS%zZkYG(oA.CgMk -U$pͦXLGvggj u?}\9C9C"抾ݗ -5v5+(QVDO$݊A㋱hv_$wt޼H1m;/[4khF2&KJr-ɳB׺o(8Nx72qvS; zXi!@g(GdK5c;k?MY#PuhQmRV\5rZ2udR:3FG7n!c}h~WVN7 YwŞc3U\½C%<;\Rz<*}/GiWY^ϭ{3ׁACzU*947\:#E8˲Bݗf2ỳ":vVz-jzNYbepUH,nqk?=["/=LMTS=J2"JΚ~Zt.҉_OoDBJo7еj-Sr+W?pv40WAEK+r 8҉dF5ŝ۳lY>qĪwFgugYw%_^f ;e;nh5fH,LA9T+bTgFFAP<,; -EH.ue8vC0M?F̪z7(w{c^,XO):Н $oJw7НXtW@w"3?AwU t@wMqAwƂ~݉#AwEfНH;NAw[&]%j*+>t'5 ֽOНm835P^'c:Nגʹ7]}PҸeM5@wEVJ} &Н+p+iѳT/'+\Q{-B wJȠ;)]cA$^AwCXAwStН: -\wНptgt7DjrFz+2FOs8w(sWJa÷pP4qʵ}+V]ir}pj^>V NCs8wR^NsұH]S~ܵ2t5qZd~J59wM2s'%s<9wbf]]%ɦJs8w(sι9wR2Nʓsƹk4cvq?pf%]ێ8wR3NJܵX8wȕs's'ri̜ףWs'¹CInuΝJnIɜ;箪,b;'N$̹mr:p`P'TVݢzdT;4sSױper8w\RU%W=8wR3sWs'5s*`sWi}ιqϜEY8wS Ν~!sd=9wUĹ2n*+nQsWI;1ßJʹ8wlw]%WiUO%]EʹJV]UGĹkNj5+FwΝ̹CI;w]%r*}EVs Ν>̹SrQxpfݝO˃s8w8w;9wxof*qp%{s8w(s/s+N'9wCY9wS Ν| s'\֓su;#WΝ'sW9ql{LN;݌̹ʹѨ&qZMĹkb$TVTs'TܩDɹәd]sG5qPNjJ;)OΝ-s@s'Ν,;2+nQբ^TĹI<9wz24wMe up)+ -s!qX8wĹjpt̹ɹS#sZe=NJ1i<8w82NJܱ;N9w[gΝ2qhΔ8wp;ĹO>9wH4phܙr1q*9wNJMeM58w¹BzDVĹ*qM; ?9w,N̹JBĞĹkkƹk[{ܵ@H']%/zU%oJ3sWH8wT;Ĺ3eܙι9w(sUA(NJIxpj?8wUN'箎\9w;sgʹcO\B+LIi6q~rI;3nQTsWZ9w,OΝ^̹{&o}9wR2R_ι8wV<8w{+箾̢̹r<8weʹ9w/+jܱʹ]eɳp^n*+nLp+^z7Ν̹ʹjp*jWe7=8wUIsgq-¹jpjd ypPNJIyr8w(sI͜;l+Wv(H~aiɿ9w8QN.'NĹÇ8wSY9wS5]zI͜;)sWGWs'5sP=z7ΝbMe upQd]='&s'gRMer6(U -fΝ'.q%ΝOΝ̹CI;|pP箒3r<9w:̹&PnƹOɹ/sO*Ws'5sixL::毜Z7ΝB֙s7sι5q*yI9mܡ<8wsDZ玫|p:Ȝ;-EE4]%i<UeQ%]mw)yɹp;*iݙ8wCI箪?\sI;sȜ$Ts:s}p˜;NΑ'N̹ӎOΝ~s5jܩsGhY㧕s7s7UYV¹!sGh>q^w"箍*͕sJܱ]IyrjVZsܡ&]. V;gII;sΝޢĹ%sZ OKO;9wR/; }_8wR^58w(dc9wˉs,ĹɹV]eX9wJ[I-pHJ;z!&?] 
NJ+N(sg+sW{s̹}-փs'g~ΝO]Rr&~Rr@hN.'FIɜ;Njܡ,;;;Ng9wcs#d;+**ι~s9w>9w;d/ʹSTɹ&Ν@sWNXXsW~pt&si'z̹CH;)ΝĹ۵r*;i)/qJU --*ʹ&OjUb¹r=Յs8w}~J -~U^U}^wyܑL8wʾp4>~p&]ʹC+sjGs'{z8dҕs'OXiʜ;E͞;sT̹SfރsIٜ9wE;NcQջqJ[sGn)JvLΝs1qʜ;:;*vVΝs'ɹӎsQ,s>8wR3N9w(]6Asg{;ɹA`*jU*;FĹW]ι|.;֛{rp&][q,;sy%s5Μ;;N{fΝl'NW9w,ƅsSV1']jܡeI)2Njd3N;~mQuIBp}}{*A%q_Z}ܡ¹R';)։sWp$lܙƹcz/`|ܩ)qTtp -7Ν*2N9w럜;[8wљsM -}[8w5Ν'玬sW2$];NRK;9w5hs'ɹ3_9w -2箞A7Z9wY?9w55qP箞;sW_9wRj's'ɹCM;)s'n^9wJ[s'%sd<9wUʹS,*qQ8wȜ;>9we UNO]ÓrP=;s'e'ĹÝ8wš<9wR3IIyp$&71qP9w@¹S|/s̹#2q*-V]%rg5*9w7Νj3N5O]N{f]k&ΝqIJ;#8wX9wU&q(XHĹn8wVsrZ/p3qÅs׈st"s0VNޫ̹Z"qƹ#%?qts'Ós$q-; -;+s[8wxEΝԕsGVܡ<8wp+玚wΝ;sE$ΝµO@xΝ8w:'Os'`MϜ;;玮͉sg~f6qp=8w"2N_i rHZ9wJj͜;MΝR2N)PO]#)H9wɹM9wZueΝ<;&fΝ[,qιCL;ĹM;3Nʃso q3paM58wꎷerNhVƹpD*KG[i 58w;sp)B v8 -C=2Iw(uNAC,e vHVҝ^&N1];~tHwTc>HwtgVs)_D UI+N^Lc}%J"ݱt'5&ZDX+NOҝ:dwtG܃tHw(tG>;NќtG1J# Ȥ;=L{>m!& U+NʓtW-Hp&ҝ'N.Lk@+Vҝn=IwxtA3xPNJ&IyO;tעyz"HftIcDO;e&tGDkp=/;&Ɍ ~#MIwv9+NtHw(t'IcHw83鎦]]IwtNY";d&ݱxhʑHwVVݢ:فDk{Vҝ̤;\wtt芿Τ;9;Ét/IwL2tIcHwJSɤ`AwU&YwҝuhZIwywҝZIwN;٦t\LSI3u%ݙLy'nΤ;DCy/;IwIwʢɤ;-2鎦rҝLIwIw4wLPώҝ]Hw*O_Iw򿿓ʤ;)t( ԕt{Iw^y0ZIwo;u~fNҝLIwl ѝ&ˤ;eHWdHVgt}({D: ܷDCD:)NN -_HWaDBۤ'(?ǣV\ HWDRz抂8W T3DuNݡQ_Fqj=`t4ɢ;, 3Hi08tdžNE K:B!t_tHMM a!)z@P;ߕsG%B룳0sS3,cw0$f 4%Di=vڍS^s M8?S`J㻢A2xsSXpsC ڜ#wԺQ5 mGuQ憰973wSU)AȲ1aHkpR1\VœTlA+RbZj4+-8A+t>?%D`/>s(xV\!̹G|0b -TaBt/ucޘ ̩;*-J_&aJV9/zZ s>\ZX}="洒i10\s[bp\Xbot˱LcPa%{0Vw`Pj/ өs^n(+^nӂ¨ -l\WEˊ+ hXrڎg8'F—U\aaMe uhCop -r5c;Fi冰p'|~}Jp9y'vJtZ0rl9T-QvmbI%0rRlBmیD:5: 4l83ޔO+e3pۇ?0i34 f\!ÿ^fѶrSYrClB-׀a=6[nrS d7:qk+>싨O}YΚr *d~p5;s}а{ożh95P`ƩEW\-u[-7@i< -::7&\B>&Yh?s˙X˱5rR XNBӜkP妲`z[{黸/7 NX4L2\9GQ\9 u^\9UgʱNrl'PTƀPW(w[pX;c W-WP$lB\9x#މqgG9(Rl1%0^ոb^+ ~}P9Lǁ3 Ip4Q<9n89ݒI:&L>G,W 9EJ'GNv^;~xDNYr7B9ͥG- wyc#04Z㦰8eHx\tJ4pyvW?IB^ڠj-+>)gǕ^ |\)OB;qxCm~8Sqll `x}Ol 0'*0aɇRgR\Vͼ#g+&nAӂd+0K0qE tksEY0qS L\ޣ+0q\L-`P\8)x&NZ1qR&ġȔN8V~8+ǃVk]sC77ՠ/ Ni%0ku;5nGÆܓ?[7C⦲Bj*VlWHԋ8)$g$HT)rıl+k@tm1H>  q6K|O@+Dw+A}ӘG:x݈/wax8fyN*áta28nᤰMx8x0\ pCIxlk})pfFZNz 'R wFP'pjpN˃T8rL雹0-O͝A ->xpZGI88ŕ`m'SheI& -Na - NX c/ZP¤M p'ߍ"waVݷڝpj>Ѷs"4UjB 1oDA ҚR&c*sm( 6i$ɍjwf)fɥd&f M2=0$\)@\7%(ߨ4S+6iʊߨ2N Ц\}I_oTSZ:xߞ.pF"5+{8VI!~oEab6ǿi%{]7}OqYo{7  [ld _AuɀB]9o2M;k7u&d4oUd';Jor{790 %[lྱpz*Y -X4ܱoRp+'Q fn1oU3 =obJfy6WDMeѤwƾNqEM,&&W A~CZ#ߤi7ʵe7nW"?Ej߀X\;tB{]7[޶~Ë`h 7 kLbI3oh:oR#M*`h1o(Ǒo04:NBc@aW[n \Di8ħo6H>o 6^89o;Ji47Kۀ-| M7 el7 -7ko g9H7 4pKDAcFQ7HI7 [97\75pOcGZ;70]7wz>}QN2M gOZ[{S@27e&M"j`$7r=Cd$ fpsW_rߙ{qo2{c- v3M߮IzS}g[M 2pǼ]̈́+̃8eS4!Ws [@'oZ*2ox0\7xh ފѣDo%뚈7)x JsNxc>3M'&itp WyɑCMz3oE57g qX^T|e`}k`x7jSݮ wMF&voVOѩZ⦦[qnQ ~J󰥘mS`ˋM>uL:VH+mG3[mJ,ź~Ӣ۲$Յp2a.+Q~US+)lX(SYж צ⪁kSw\ܠM/6/̦ -ZbRN4X[:24j!re) -btj#4fjS[vZiS)}Ro[v&JVـ鈄CѦTMIM'8M)3HhLjAo]_;lW޿NgXCiDԁCh6wŸk]m̢e7ۈDeSc,E&eS+-|́dL(H -d zؔ{h>dxn=NG cObaG)k Ŧ-ĕF|Pش%: Z̹ܵɄ]Smok$z͊[ ՅȵNA\k5EoMecȜmNZ̼M5| <fz>Ư5V/da-5XiZHBsdXNS<jUZ -Xۛ&bb1/&触AXkH|5Z^AWnWQy}EV+rlBf&V#hӗjffsd1[hz%i_YMYAV+ŧDV˪eS(4qZ VCe @I0VJ@6jR>jEi@hNnB{d5 $YM^FȱW6WZO_V:$Iq_d\' V4ZbqǪ)nFXjdX5"LX5r ˭X=-!j%V=jôXJLV`mbմU+$VZ!L?jTMݹP$`0j@`iǁUCd : Z Zª2aI4IVM_kŪic8Nyc -7U~PcȿǯX53[jX5 :{ l_j,LZU+ۃ$YVӇzYn1d5T&.\5ΖjT\ܾX2j%8 d{wV!wŪojF -ʈ&M JV[ׁUΘb.X5Y ^K+VX5,V Vr>Q4X55cjV[.Qը/Rլ+JViՉUGZjR-Ұj)ӱj(n !kfkBը`\jlKs>6p +RiD@tNFJˍ:AUF0R@hlmHVySMb RK@5gT@@3$jVW,K72@1RMS=9""VoL5! 
b ZO92RMưNRM-srRu4r[jnC>< gSp>W$P6hj$ JJNq -NIK j B(5v!a^B2Ҝ51PjR (5)'lU v`i$)`i<7J4dsYjJ/5i@`~ $I!/P` JDq,ǴMVR>IjJ}iZھؓ@R#VTVxעzMWQjyA̍(5ZN>aA!"Pj(+r &ђVӡQjZoAgJO@IX*+︢ԨH'/?Pj,rKkV;w(5JFnԴ9%ւR+48(5YdeM5hj A405-s-i1Tr0hj&qju|4ZSC%=xj(8 &~婱8{tJ)QS[xj5y,65gcQӂ[M5ЊS1l4tVڢ:>shДhj#ZKB+[ijLZrksrO45–c45vEF,a!h#ŜS+!p^uH &V2JMj삤&_5 s+HM `2 ZNV UAQê: Q^$jcX#^AiK4 4H\in`ZБcǚik4 p8c4 *өidfHL^Zg;piCXiidFA;9Ii盹$g5Ni2o2_3gZ\iIA浌AH;9BHZҔB.iW,5:{x4N - hSYhS <}ES4T4U"9h`b[hW8ȃvY˕E3MՠJ-єv3 ub@mba%g#@4 Fy - iACG::L&'b+ o+STylLڂBb.3gB$6C-ų% m*+ DABS΄%DL -7$b ͓V -a`э[ҔZD@c NԿ;h1,:Lr#Hg:RBuo2D-|6{6Egݛr ꙼f3]&_K,3 X+ 'LBqVjт8'l:Ws/@{֪)> -㌂*ge 24WH9f÷kRTmaW ;L1qTO{7+yͬN{ u fh5A7+-D7D='$?n z%57 PPwPn@~YPXfD۝n&HƠj&5͔bie}hmfmtTo_|3)߬xoFou͒~^.=fK$Xyo&eL'o&k7[of\zuOBhry6c{t.b7Sg7j7l1|ɕo&jAKa:ތM fx3ڪ Tx3e،J*7B"@;PĂoƞ嚽$9xt&}4A7SeZ7N~o_-g:o~yoo姿ï_w~_?޷}}ߺ-7?^_8k࿟=͟ۿw߿/_/^~п\/o?~?^_?t`}^?~?/s~:tk5r//~_~-'`'U>?U9[W;PȞ;/orweTƮ!~*䷟"{{[8ۏ6O Pcy7m.f ( y~m'6l~mɛ!wsC~[|v; {}U^95sWo8Ί,TJ* -My9ŖW9T;Ì'uATS{Z-1ywūM%?zYR/#ĵG k6R{ɇ7sXH&⧈sTkie&kwU;f~V+JV̧&jlju?&מNK2؛IndXz -x?odۣyoDew҉)Owjwlկ.VP`#knk&:Tq괫%~*"G]AtdﻛJ'CVsAoMOxl/ƻca*5*ֲOŌIHvٺ¾8]܊}wtrK|:=c(ckQC{>ex{P32{Mnkf,dX{2˧?? -f=zL3wۇk/|UX񢀯oryGt{P')"-}T*z]Gy&}ENGC÷Gǎ-"M?ZCu}Y~qlvoo:tU^~c}3xENG-oOH?eHxhA -Ŷ[} _}?|F&:C0`ԗ=DLHZ[q*r:7 -g):j{YYu49ݺrmNCinH谭~ _JmkMyz74R$m~d|<΃K:2NFScswY5!?Ĕ?<߸M/k~AsBtEʜ7{.w> Zmo=@qSKcbzxoW0/-?`m6]~˾u^Y[O3z6N s8۪͸6?Zgmgs[Yt^ b}I[0s|תҏmJ?q㸯w+ ~^=!6)O+<јqV}iu"kf1^g|۔#A;i4kW֯|gAbmgm?υXV>~'vƶw,6OJcD,[gp,Guk?*gYVk/yYvϝl]e<=x[R`<Ӎ&iWnaIG&Dq{'eD7?K}p Ϸ?ض1L|}R1rNݼDr"bMش~ #ie1&cNE~[\7nElYΌY1hAM˦wL~^15_d*߇ -ZĀ2}duKNG'gWB;gFd vƉ;ѷ۟'}pqo= }}^A:헏2Ǽ>io<Ƙwh~zZүXˆSKݶcG+TwG7eW߶O ӟ}VVq]x"*DtKbVgAߊmob$6q14oMv\uxUς`{vR8zS`% ~ǍlwL>.juM{v+BVL~D@x +&{|Ϫjװm5&mJ?nuУTZ|v-Mq`kr[Ũijxw [2ҴqX"⩕(f?216,[]p1FY^Ƽ=ހw[ּft;5Pniq:'pf~8@: 7z :98=/![&uRC^D~l.d>%j  T>,=Vj<SXir𨒜/_ė1ĝ%zc{-7|Մ"aud6/ʔ|U_Ƚ>6ƒq!eX \-p=^ s\0P>.?gXV:/zeϷFqMiw\q {~$m;!Ra+{=lj} vu>t[|o˒C[G S,'R5=_2o˔':Yk(w3pm?OW޹۪}G}T>x:긂v1n.~+D-sPK;BG}7gsکuc.w܇?O=N sXÐbYG%=Qa̫1/kˏ/m+ݯ":{+54/ oaJ-˺8Wu8n=VQdA|sza̭O -r,ӷ>e=B+oˍ$?J8,>'_>>HLj4>HnK8~DLėbG?t0ӱ};@˽w9;?x3f ܋3 F:bp#>;=.JJSLKj_cz uL8;ۏyR1e+ -} ZM4^d0/w]'@n)]lb~V?-6AoTͮyHT [l5ch:N2BN#moM#"%0ک u\ҘwyƇwRWa Wj)n3(t-O1 R5(-DALp"*# Tc#6u3F4W3Lӳ=OA@슯v9p9P boa3=G -QΟ?4s|!:^vs mO`gyxz;FX.?x_fo֐?71$KDW})6Ƒ/.p&3j9~d/|{NƻBΘ87ϖ|%Gc-&z"VV/.ZW -n% -nk=٥qbW4 O:sA9-=}/vD$3.QĴ7Eָ^-%K>}_g͝D ^kMr>4{H7?Q6@Ri DD.T| ̃[2ڵi/G xR%7ӄ.e\+hobVqeh9\RRNBU=i3#O1$.J%9#e{oQU A J]~oI}Q%N:샮AYoTe/gZVJTx&uoqԂy(5{@rHNBeiOͺ4! . -*l U_5fs?CU%qj;L J1[gJN 7oEX>QRI9k^2ʚȆdMjȞz[LYr33jv֝Ѽ)xO0*kc:E%M&Ϛx]\l(\l= I {LImL^.*QIl^Mur/n! mPssl3-!na|@VihyN Mr bKbZB V-qE2 ű&n͘/%Z} bߙAT =qOgzK7#0z%9yl%vPX9d0}3I?5n8}jsH9gEK!Wloqic9',JT}|{J`M`T! &2*=M KXG4Fd,rRy{*<okh39pm/GYj@bL1 -:B;m*s'-?Êi /ME= Pq*qh6þfZ/ *REܗNK- xJj)*Gw%A@^QMtTP\e`YB ^@VT e~$,0bԭ%v11Vds!,"H4jMٙb` -#ך:E89X)z:u4!6c*NRIw1)u2MX+϶z"q2U76aF*9DrB-4CEM~q8<-hph@*po_`IFJ03h>+4alŤD`;llh F`+>u*]G„I9㣶93H } ܱU >u0s]ϔl'ġ%p y2$/ml,*_JvA2 -{%Rrf%sjXifc}gJjV&qCz<2HF`Q=H4i T&#IM7`eYjSrnj屖-SwZTɂcPG'Ms18b0?\ ۥՊؕk o9l5)LU+7Mhc#B{5F9lf r> {LXhh dS'VQ1WZխC"U MwUlz"q3#G3uou{ _bY,<>&i*M9Z^ڇG8iL3#gC{sThso;Xj+z3!ILWhŶ2<4UOY2b҅zXR8&Dd9'brSȍs@9hgfLJsy4,.{ֈӐl%Fo0fWVfU9mV{6 KhA]0n3o6 sӄĕI: @ߋ FC9h(!rh@,+㌢VkV4e*+( VQ+[`fKX}fA뵹#[t)By ʹxX͠a;,ZDl֮Ւ\&cohxV=9$ [0m 0Ŧ&OmjpmZh xT;⢷Jt x ~VU7v`SH^Ğr4WVrÜsf|i2Tėh5 apvm6&-81jtF M© -)[, t,H{bϺVL~So9楙a͌m# FF]S3a17&ݏ`ߠ2[u%?ڕwJHk;@ 8YX2TDz J9!+f+R{ԮCB a8zߊfa^E+ckfg"s^abL͛.e9%1n<'͡.ҬܜhZ%mRyMC t@}Jd&ϔ)/yJC>/ٴjdѵMkXZE乖4;粐ԕh{s R0yJA!Υ( -1itcӬ:B*19K*[feOt!x~Ь\=jHGC~'6tbPم71ʡXu~y0oք6RQMc+irW}j&)p#v Y7&;l2^{{`Hw?ٺ%D7? 
Jhř"E4Yf@ <"dqijZj$z@ -Crl0X KzS19 PhV&\b.BlJf0.5qxVu+"-SS"E` gĚkOG' *16?)nY bihZ[|*8Sn ^@ݳ&31!#-bU{ ҕeђ(rnulۓsfG&'I%-d:B;h7%O>n7pI8E +g+c3!Aۦzzy^1v]@tm X)C5څ;c¨TZd+F:4`֓@!4Du|nq-/wBV$b {gx6bk.x3qToZI{6:.,ZKMx#JVNH&Q˭2eyMcEeé)9BZ$˱a,-o%)ZE\+ D=@+^_:LfMY[oms|ɢǰ18C%2$`^asdO  0i4D̋3Y9LjM%%\gf|b<tl%{8.r]gg'SWU\Bk)B\ mnA"qښv5)Xfp#OtQ{<#t' $mUQs*qrdPSíjYIqŠ*I"A8}8NW0%f")@ↀ FA bH&WaشGiJ-Aerku2 Zp``FUYY\,ď2k%o/?_CU &(ɹg$TSdPZ JiaX -A:?HO\2b/l<]-Ak%Ч3R o$ޢU-7wU۶~U@%-I#1G\r%|6Vԃd5a$Dvtr Ab}} Ҫ - u&+y3²3%d+kyI%y0DYM#!5K[SDʅ~2YLP\G׫%̇*X&m,s!7Y1ŪS:sК|rT+X ӕ4#͚W Vo1!{F p\;d3Qo 'Oe!~mqhHѵ&_8s򢳎&^b)M"LRe=UI/Ő|!}YK@M[NRX;rv!` -ZѰnn rhFndʇzxd-z=QRr,5Cۊqk״Cj;q>c1طiQђ {jp`M[?Yb5H] 5$Al#lTD|n$P='*v:kg`Z2QlZd -`eP%rk ?DhBլ[g РG!=V{kub .Yhҟ gyn3ZuZhNW]LІG0Kl(9̼5uhfJsXlV1rXSd@,:E2 -wS [,?c%؝i0, ˣA V`=7l uym 626>ƵCd^,ƨ bu$Z˭gfI5rCޏ-McV$ kGЩd -tt#cac_Ѫ2J y%U뵄he?XB -qfIluPX}AG ږI Sb"n%}rG»M*"U;`r n=ZE`jh:$Qłw~аNR3#`,-=(b48_ m9oL)R9iw5Xhs@vQB $ډvtұaDZu>uZ8KZ68&fR ! -FK Ќe3 -dR.vv\2m\܋[ 1/İeaъ3"Ac`Ox/;m][3c~V)Q,Pi%1c}<?s5vdw'dω6)Oqq67lGZLCh7Y~@4[ Mv.S+!Ӷo5z`oηw|.lL+V[R|zGIySCo;֙ 1&[z;['˾c[/vYޗMv?T^u6QXg)-qVku{"c&ɺakCHdcY79rڳFɮ#MJ 8es캯gYm8ݶfB6qE06bUZ_:l(xhD_iW@Sؚ) -FeF^=ZP1ZG܉|,5e-ԕgQBCXsQdH9XQK Q{Iuŷ~p.ۻxz:޾j562\#65 RxVf9,Jl}8иVFɓ#u95>1gyofFV!oh< >cߪyMWϧUkl}.k=[{-1Rqf䖮,R?CiX9H`W5Miݯ:(B|6֩u`]S: }h%[2H-|s,aС4>B/x:ov?xrVGWaqDRdu*)+gK2=G$x]#TWnA^R1nԍ߫c6/q< ދ&+P#g mPKgG_ 7a >!w[S} 6_t<,(}O*wk>w8G\K{Ci+@ Q[.k"?b4]]>*v䜖W< @k|L\|AOn_;?r.=8$,Dt63[{iOj7@ǵ!oTB\cv{rC~y̽cKNrb5ԗCrCA\cHjl9FQqԋ#,f'D[p:?.7)B<yҋ뾷= @~#?=('6|q$dyRb#0$M#˚GYX$F݅MCrmj1"S&.vǮ/\ L^?BbH rHpvY ϥ$CBqyj>_zNNL/n?}?u}߷p޾/.]ݾ>׏_Yoy/ꊋo_|w_% 7 We}?-/\՟}a-v}cw=$GxX{`ϯ_z'k}$ˋw}e_~b${R-{Hw|#9/վys.1ջOy\oy/g=>B^Ż0WCwlߜ7}ݓ]H }d }'9/8?#qϫ7o.V^׏\^\ ?h>5Xn|sox۷Ww~)'?;xqLqix,yz>! xp.< 螛 xrUzNIz>^\=}=.˫O)bo-pw˫]~Cڛ랊6z6zon|Ƿ;ȴL!=Lix!;Dtf0wÂ>-W^=g?T8 'qohK9͍?Rr4-".ίIzA)6yo沧`ѝ={^|{חwMmoމ:zw9餛}OW?ǛnsQ&]cW{힌Z X/nofަw/tw1c{{D?x,՛;@.آXח矰`U7q~Ylwo|w{[zs{>>7t˗W>5p &z~_>@ktq9*u1A;])͑ߠY߿}{uWol摹B6JHa0;pc̓͸ O7s~{*)gFp稖?pg8s3wZp\3w]!?;#z*8A3w.pf=9#Kym@K7 >=~@a1n ?w<7y=ڟoOӎ?C?oA'Ap{A?}˷lg'.~*4%;H FA= wn 8~|vσ9t(|zP>x(|L僇'#>&_W_^zg{i/ϧwhbtib6 xm>9;N?z'<_O]j~- ->5?RlB/no}IWOi w!tw__חo[3CaVx + endstream endobj 117 0 obj <>stream -Ar{\}c]=YC}F᱘R[h;'k^.c6vy+wa^Xw?!~1tbE.`'!o=B 'ړz X~ CY&ջoo.oшcvsc{Lw9vw&ѻ|:է -=zl*z 4衿ճoue=lz[}믿y}ڻסq0qi] W|Bx `!NNW^xsNIq?+/3vOv{N ˧^.Lzy^}#:qiog=uo>;?m|mv#v鞚幄?!<&C1CxZ; - Gw ׯt}<dL!:(CtZp!:ps?ZTA_Czr޳nJbcɽ)sS͟Y7{&uPG},XM:4>z5ѓ)6/cdԡ[á[Ï6C 行Ǯ9**w$?ccY9Y董g>Ø`|ןlW>w{=znY׷_zw qp[.ޝܿ8(|s\uOVw===cq"4{$Q97 l9O |Z׿#xp]v~<?o30 e$w矰^b^-<1=#qhq -W|%dM=Նoo_j7o/ٱn;zCi4XۦӠʞ'0Rw[gӇ){o_sy?Bq᛻Zs {jݩڲC\v9}ݫW޿sS}/osS\wO{y]ޗ7N{N {Wԏs"n㰃 d=XCwCwOivW=XOOz:O?z*~2Q=2yrh|C]cŏ^{گTNL?vz`T`}NA4SѠO'TG(bh]̟Òt?z'ZecOӡ7Q{~x -]e!;.:k4D)׷o~|gYوZ -@i9y6=EN6{S{ Y~w$j;;[>ϗ>yz>?\};[=Apc.|j[i[Nz(eߝ??;u➇kPyvv:?v=(%o˕?dq'~uN`S{_c\Dq_|0.+l⾋GukE}Y/.] ]]9K/Jzgg;/9@|uF#wK7n*0zIn8NaTH.zq*,yɕT4UN*#~t_ƺO͚cS*nJJ\lPu;~: ~\u>GC\}K|*\X !bR 9\ش>NX cJ9EbOFUe^LZ]J.bLc{$P? iXYNI5ʁT6Vì\ hzX:46cPP~ '(pĺ*ƉmQ U7A1X.<̉ҴrP&ETl]<1y*uT!`SՕ#Z36Q]*? T?rcm_ս^Ȟu%BHH,`."=me4~. 
3Vip2h#Je>&U -ɱZ+Mx³nqX3bVU9UYV?]B19U;BaT^ -OWAoLǪՉMcX|ꂔ4R˪T ] RyN4pCRÞ/NXo\@81E8AUK9y"sIP0,FK -\*Ua EYa\u }Q%(6`68rsurNXjso#[Q?C@ՏL$z@ (6O:#$ne,?VAƪ!}dqUxǧb*+T92 & j0TM_ ->[$b3Y4 u-(ac$(̗hJa֩r4(F7kDi$3%Xb(sOh`\Qe|4>Ib.u Gb@SW~N+ʎ) O K׌ 1@f֩Ȳ:w,!*aD.PN}b]F"2]lLX?GurSWWTW&߁]75x$Bɤ;XuSQ^5nJpNŒ6qq4obPÍJܘ"l2R'W]I~;6UU~ uYaܪu*Gc®()s$̡`IgC & +u0cY/3TX#dEDFu'XDPz؉sftH -uj++'N4JIY5(JT$dpQu#4Eas.^@Ĥr{ӽ:"!@*Pn XʃubG)(Tdz`!G^s]B^u q…ej!Rh+:HuO/ -rU}IwR{]Sp|f$wTȰ[5Uo -vl4> -Ry l8Lh|SVUIFJ0<؍,eYtܴ18}.֧nj:e^CKSJbWn8~q"-BUH$B${.Wc jp0r5a”RooBJ4r+ aiW<Sw3UE܈vvQPeaMq${AYSմM58!d[+a8 3Jhi,@qnPI~I8Lk,_z1J1jf=\yý87~xTJz(^UcG]8""}Pg;fL(a+b]xu(+ prȠ}4rPFzbbe5zة^HNP&UnAMLMΆaEA"Ll:cu╖mk2J0 `RLudǐU:XjAOU.IJ %a$(,(F5`avJkbD2\e.8zLպM9)@ۥZ.a0o\?c8 yÖ|GfhIRU`Il|TQHyje/q ^)XAĨjQ ꊌ]JjOp"æB nqU"gAZT@W5>g:՜ȔP'1Pf1e\I5׏dTJzM$FO)0RƪvpbKU괋l׉N#7 V^f.8N0GYqkl4XDѫ;7!gư ьDfِtt f#~C7aMo![uHuQ"U>x*aD=pw^ފhˁ52ebI`>sA{c|{ KDjq#vCL9Yl~eE4E;tNg0m#S_Dը$B/tJqW)#b^{2kv1zHn FZc`y -k =Tk>b(eDj 4D֬RК\` 9ܦQ<: #j -cIBywj3/4>δcSƱ-,DyXGqe{V;#jLnm3V|28篘gK8>c91'91wQ>/W9']ٹ7\9_v܎\s'DJNYQY(gBlom5?w%#`J3-(i7 -ΪRجVX (eIU=[%): ;_pfؔ%m4"8%@ewE1Lۑ8z5OS1U7@DOH/tHz:N,G /sPh*/cEHuX3m<7.1#Vhkl>cRV5U?f:XFcntc0EڶPPw}틣Q1r*wC!ƾX )0+N#Pb] -/ë2RKx܋t;xVP# Zmh)#VE0q-TBr~Su -uP/p}#Œ3WrAMc:/lTÛn 4xMcHo /Fn"ӧXCdOHLQкു:8#ϹMR&Ք ' ~H1@V -) FDݟ\K ,N\P}A@{C4a u-5[7; _1 |O }x2c{iXlNW=rpu)&f8#I(:GGf=G7y$8aD̯0F\fI1֖?-}N5k -}*ҮZsL^8Qyr>j0,{ ˉ<#tm0g -e1'88u d}]N) Ɓ *S^";?xг# Mxb!&skF7Z.< 8  )$ڬd=Cʍte]e11 Z #6vN Fb"% vEH .iL{;@-C)3B/^Y{e@Hil*~