diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7950ce85bc..0cdadf05e9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -98,11 +98,12 @@ jobs: runs-on: ubuntu-latest container: python:3-slim steps: + - name: Install dependencies + run: | + python3 -m pip install --upgrade pip + python3 -m pip install --upgrade coveralls - name: Finalize publishing on coveralls.io continue-on-error: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - python3 -m pip install --upgrade pip - python3 -m pip install --upgrade coveralls - coveralls --finish + run: coveralls --finish diff --git a/docs/CONTRIBUTORS.rst b/docs/CONTRIBUTORS.rst index 302c8c205b..54ef8a8d40 100644 --- a/docs/CONTRIBUTORS.rst +++ b/docs/CONTRIBUTORS.rst @@ -113,9 +113,8 @@ CI/CD will check that new TUF code is formatted with `black Auto-formatting can be done on the command line: :: - $ # TODO: configure black and isort args in pyproject.toml (see #1161) - $ black --line-length 80 tuf/api - $ isort --line-length 80 --profile black -p tuf tuf/api + $ black + $ isort or via source code editor plugin [`black `__, diff --git a/docs/ROADMAP.md b/docs/ROADMAP.md deleted file mode 100644 index cf6baf2f3d..0000000000 --- a/docs/ROADMAP.md +++ /dev/null @@ -1,80 +0,0 @@ -# ROADMAP - -This is the roadmap for the project. - -## Release schedule -A new release of the project is expected every 3 months. The release cycle, -upcoming tasks, and any stated goals are subject to change. - -Releases are available both on [GitHub](https://github.com/theupdateframework/python-tuf/releases) -and on [PyPI](https://pypi.org/project/tuf/#history). The GitHub listing -includes release notes. - - -## Latest release -Please consult the repository's -[releases page on GitHub](https://github.com/theupdateframework/python-tuf/releases) -for information about the latest releases. - -As of the last editing of this document, the latest release was: -Pre-release v0.11.2.dev3, January 10, 2019. -* [Release notes and Download](https://github.com/theupdateframework/python-tuf/releases/tag/v0.11.1) -* [PyPI release](https://pypi.org/project/tuf/) -* Packaged by Sebastien Awwad -* PGP fingerprint: C2FB 9C91 0758 B682 7BC4 3233 BC0C 6DED D5E5 CC03 - -A number of older releases were packaged by Vladimir V Diaz: -* Vladimir Diaz -* PGP fingerprint: 3E87 BB33 9378 BC7B 3DD0 E5B2 5DEE 9B97 B0E2 289A - - -## Tasks for upcoming releases - -In no particular order... - -- [ ] Provide protection against a class of slow retrieval attacks using long -inter-byte delays, without sacrificing the use of clean, modern, -production-quality HTTP libraries (requests currently). - -- [ ] Support ASN.1 metadata: loading, writing, signing, and verification. - -- [x] [CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/1351) badge. - - [x] silver badge - - [ ] gold badge (currently at 74%) - -- [ ] [Support graph of delegations](https://github.com/theupdateframework/python-tuf/issues/660) -(requires refactor of API and client code). - -- [ ] [TAP 3: Multi-role delegations](https://github.com/theupdateframework/taps/blob/master/tap3.md). - -- [x] [TAP 4: Multiple repository consensus on entrusted targets](https://github.com/theupdateframework/taps/blob/master/tap4.md). - -- [ ] [TAP 5: Setting URLs for roles in the Root metadata file](https://github.com/theupdateframework/taps/blob/master/tap5.md). - -- [ ] [TAP 8: Key rotation and explicit self-revocation](https://github.com/theupdateframework/taps/blob/master/tap8.md). 
- -- [x] CLI tool and quickstart guide. - -- [x] Improve update speed. - -- [x] Fully support Windows. - -- [ ] Generalize metadata format in specification. - -- [ ] Support post quantum resilient crypto. - -- [ ] Resolve TODOs in the code. - -- [ ] Support Python's multilingual internationalization and localization -services. - -- [ ] Improved regression and attack testing. - -- [ ] Automated tutorial and instructions testing to enforce doc maintenance. - -- [ ] Continue resolution of outstanding tickets on the issue tracker. - -- [ ] Generalize encrypted key files. Allow different forms of encryption, key derivation functions, etc. - -- [ ] Speed up loading and saving of metadata. Support option to save metadata to memory. - diff --git a/examples/repo_example/basic_repo.py b/examples/repo_example/basic_repo.py new file mode 100644 index 0000000000..480a109053 --- /dev/null +++ b/examples/repo_example/basic_repo.py @@ -0,0 +1,401 @@ +""" +A TUF repository example using the low-level TUF Metadata API. + + +As 'repository_tool' and 'repository_lib' are being deprecated, repository +metadata must be created and maintained *manually* using the low-level +Metadata API. The example code in this file demonstrates how to +implement similar functionality to that of the legacy 'repository_tool' +and 'repository_lib' until a new repository implementation is available. + + +Contents: + * creation of top-level metadata + * target file handling + * consistent snapshots + * key management + * top-level delegation and signing thresholds + * target delegation + * in-band and out-of-band metadata signing + * writing and reading metadata files + * root key rotation + +NOTE: Metadata files will be written to a 'tmp*'-directory in CWD. + +""" +import os +import tempfile +from collections import OrderedDict +from datetime import datetime, timedelta +from pathlib import Path + +from securesystemslib.keys import generate_ed25519_key +from securesystemslib.signer import SSlibSigner + +from tuf.api.metadata import ( + DelegatedRole, + Delegations, + Key, + Metadata, + MetaFile, + Role, + Root, + Snapshot, + TargetFile, + Targets, + Timestamp, +) +from tuf.api.serialization.json import JSONSerializer + + +def _in(days): + """Adds 'days' to now and returns datetime object w/o microseconds.""" + return datetime.utcnow().replace(microsecond=0) + timedelta(days=days) + + +# Create top-level metadata +# ========================= +# Every TUF repository has at least four roles, i.e. the top-level roles +# 'targets', 'snapshot', 'timestamp' and 'root'. Below we will discuss their +# purpose, show how to create the corresponding metadata, and how to use them +# to provide integrity, consistency and freshness for the files TUF aims to +# protect, i.e. target files. + +# Common fields +# ------------- +# All roles have the same metadata container format, for which the metadata API +# provides a generic 'Metadata' class. This class has two fields, one for +# cryptographic signatures, i.e. 'signatures', and one for the payload over +# which signatures are generated, i.e. 'signed'. The payload must be an +# instance of either 'Targets', 'Snapshot', 'Timestamp' or 'Root' class. Common +# fields in all of these 'Signed' classes are: +# +# spec_version -- The supported TUF specification version number. +# version -- The metadata version number. +# expires -- The metadata expiry date. 
+# +# The 'version', which is incremented on each metadata change, is used to +# reference metadata from within other metadata, and thus allows for repository +# consistency in addition to protecting against rollback attacks. +# +# The date the metadata 'expires' protects against freeze attacks and allows +# for implicit key revocation. Choosing an appropriate expiration interval +# depends on the volatility of a role and how easy it is to re-sign them. +# Highly volatile roles (timestamp, snapshot, targets), usually have shorter +# expiration intervals, whereas roles that change less and might use offline +# keys (root, delegating targets) may have longer expiration intervals. + +SPEC_VERSION = "1.0.19" + +# Define containers for role objects and cryptographic keys created below. This +# allows us to sign and write metadata in a batch more easily. +roles = {} +keys = {} + + +# Targets (integrity) +# ------------------- +# The targets role guarantees integrity for the files that TUF aims to protect, +# i.e. target files. It does so by listing the relevant target files, along +# with their hash and length. +roles["targets"] = Metadata[Targets]( + signed=Targets( + version=1, spec_version=SPEC_VERSION, expires=_in(7), targets={} + ), + signatures=OrderedDict(), +) + +# For the purpose of this example we use the top-level targets role to protect +# the integrity of this very example script. The metadata entry contains the +# hash and length of this file at the local path. In addition, it specifies the +# 'target path', which a client uses to locate the target file relative to a +# configured mirror base URL. +# +# |----base URL---||-------target path-------| +# e.g. tuf-examples.org/repo_example/basic_repo.py + +local_path = Path(__file__).resolve() +target_path = f"{local_path.parts[-2]}/{local_path.parts[-1]}" + +target_file_info = TargetFile.from_file(target_path, local_path) +roles["targets"].signed.targets[target_path] = target_file_info + +# Snapshot (consistency) +# ---------------------- +# The snapshot role guarantees consistency of the entire repository. It does so +# by listing all available targets metadata files at their latest version. This +# becomes relevant, when there are multiple targets metadata files in a +# repository and we want to protect the client against mix-and-match attacks. +roles["snapshot"] = Metadata[Snapshot]( + Snapshot( + version=1, + spec_version=SPEC_VERSION, + expires=_in(7), + meta={"targets.json": MetaFile(version=1)}, + ), + OrderedDict(), +) + +# Timestamp (freshness) +# --------------------- +# The timestamp role guarantees freshness of the repository metadata. It does +# so by listing the latest snapshot (which in turn lists all the latest +# targets) metadata. A short expiration interval requires the repository to +# regularly issue new timestamp metadata and thus protects the client against +# freeze attacks. +# +# Note that snapshot and timestamp use the same generic wireline metadata +# format. But given that timestamp metadata always has only one entry in its +# 'meta' field, i.e. for the latest snapshot file, the timestamp object +# provides the shortcut 'snapshot_meta'. +roles["timestamp"] = Metadata[Timestamp]( + Timestamp( + version=1, + spec_version=SPEC_VERSION, + expires=_in(1), + snapshot_meta=MetaFile(version=1), + ), + OrderedDict(), +) + +# Root (root of trust) +# -------------------- +# The root role serves as root of trust for all top-level roles, including +# itself. It does so by mapping cryptographic keys to roles, i.e. 
the keys that +# are authorized to sign any top-level role metadata, and signing thresholds, +# i.e. how many authorized keys are required for a given role (see 'roles' +# field). This is called top-level delegation. +# +# In addition, root provides all public keys to verify these signatures (see +# 'keys' field), and a configuration parameter that describes whether a +# repository uses consistent snapshots (see section 'Persist metadata' below +# for more details). +# +# For this example, we generate one 'ed25519' key pair for each top-level role +# using python-tuf's in-house crypto library. +# See https://github.com/secure-systems-lab/securesystemslib for more details +# about key handling, and don't forget to password-encrypt your private keys! +for name in ["targets", "snapshot", "timestamp", "root"]: + keys[name] = generate_ed25519_key() + +# Create root metadata object +roles["root"] = Metadata[Root]( + signed=Root( + version=1, + spec_version=SPEC_VERSION, + expires=_in(365), + keys={ + key["keyid"]: Key.from_securesystemslib_key(key) + for key in keys.values() + }, + roles={ + role: Role([key["keyid"]], threshold=1) + for role, key in keys.items() + }, + consistent_snapshot=True, + ), + signatures=OrderedDict(), +) + +# NOTE: We only need the public part to populate root, so it is possible to use +# out-of-band mechanisms to generate key pairs and only expose the public part +# to whoever maintains the root role. As a matter of fact, the very purpose of +# signature thresholds is to avoid having private keys all in one place. + +# Signature thresholds +# -------------------- +# Given the importance of the root role, it is highly recommended to require a +# threshold of multiple keys to sign root metadata. For this example we +# generate another root key (you can pretend it's out-of-band) and increase the +# required signature threshold. +another_root_key = generate_ed25519_key() +roles["root"].signed.add_key( + "root", Key.from_securesystemslib_key(another_root_key) +) +roles["root"].signed.roles["root"].threshold = 2 + + +# Sign top-level metadata (in-band) +# ================================= +# In this example we have access to all top-level signing keys, so we can use +# them to create and add a signature for each role metadata. +for name in ["targets", "snapshot", "timestamp", "root"]: + key = keys[roles[name].signed.type] + signer = SSlibSigner(key) + roles[name].sign(signer) + + +# Persist metadata (consistent snapshot) +# ====================================== +# It is time to publish the first set of metadata for a client to safely +# download the target file that we have registered for this example repository. +# +# For the purpose of this example we will follow the consistent snapshot naming +# convention for all metadata. This means that each metadata file, must be +# prefixed with its version number, except for timestamp. The naming convention +# also affects the target files, but we don't cover this in the example. See +# the TUF specification for more details: +# https://theupdateframework.github.io/specification/latest/#writing-consistent-snapshots +# +# Also note that the TUF specification does not mandate a wireline format. In +# this demo we use a non-compact JSON format and store all metadata in +# temporary directory at CWD for review. 
+PRETTY = JSONSerializer(compact=False) +TMP_DIR = tempfile.mkdtemp(dir=os.getcwd()) + +for name in ["root", "targets", "snapshot"]: + filename = f"{roles[name].signed.version}.{roles[name].signed.type}.json" + path = os.path.join(TMP_DIR, filename) + roles[name].to_file(path, serializer=PRETTY) + +roles["timestamp"].to_file( + os.path.join(TMP_DIR, "timestamp.json"), serializer=PRETTY +) + + +# Threshold signing (out-of-band) +# =============================== +# As mentioned above, using signature thresholds usually entails that not all +# signing keys for a given role are in the same place. Let's briefly pretend +# this is the case for the second root key we registered above, and we are now +# on that key owner's computer. All the owner has to do is read the metadata +# file, sign it, and write it back to the same file, and this can be repeated +# until the threshold is satisfied. +root_path = os.path.join(TMP_DIR, "1.root.json") +roles["root"].from_file(root_path) +roles["root"].sign(SSlibSigner(another_root_key), append=True) +roles["root"].to_file(root_path, serializer=PRETTY) + + +# Targets Delegation +# ================== +# Similar to how the root role delegates responsibilities about integrity, +# consistency and freshness to the corresponding top-level roles, a targets +# role may further delegate its responsibility for target files (or a subset +# thereof) to other targets roles. This allows creation of a granular trust +# hierarchy, and further reduces the impact of a single role compromise. +# +# In this example the top-level targets role trusts a new "python-scripts" +# targets role to provide integrity for any target file that ends with ".py". +delegatee_name = "python-scripts" +keys[delegatee_name] = generate_ed25519_key() + +# Delegatee +# --------- +# Create a new targets role, akin to how we created top-level targets above, and +# add target file info from above according to the delegatee's responsibility. +roles[delegatee_name] = Metadata[Targets]( + signed=Targets( + version=1, + spec_version=SPEC_VERSION, + expires=_in(7), + targets={target_path: target_file_info}, + ), + signatures=OrderedDict(), +) + + +# Delegator +# --------- +# Akin to top-level delegation, the delegator expresses its trust in the +# delegatee by authorizing a threshold of cryptographic keys to provide +# signatures for the delegatee metadata. It also provides the corresponding +# public key store. +# The delegation info defined by the delegator further requires the provision +# of a unique delegatee name and constraints about the target files the +# delegatee is responsible for, e.g. a list of path patterns. 
For details about +# all configuration parameters see +# https://theupdateframework.github.io/specification/latest/#delegations +roles["targets"].signed.delegations = Delegations( + keys={ + keys[delegatee_name]["keyid"]: Key.from_securesystemslib_key( + keys[delegatee_name] + ) + }, + roles=OrderedDict( + [ + ( + delegatee_name, + DelegatedRole( + name=delegatee_name, + keyids=[keys[delegatee_name]["keyid"]], + threshold=1, + terminating=True, + paths=["*.py"], + ), + ) + ] + ), +) + +# Remove target file info from top-level targets (delegatee is now responsible) +del roles["targets"].signed.targets[target_path] + +# Increase expiry (delegators should be less volatile) +roles["targets"].expires = _in(365) + + +# Snapshot + Timestamp + Sign + Persist +# ------------------------------------- +# In order to publish a new consistent set of metadata, we need to update +# dependent roles (snapshot, timestamp) accordingly, bumping versions of all +# changed metadata. + +# Bump targets version +roles["targets"].signed.version += 1 + +# Update snapshot to account for changed and new targets metadata +roles["snapshot"].signed.meta["targets.json"].version = roles[ + "targets" +].signed.version +roles["snapshot"].signed.meta[f"{delegatee_name}.json"] = MetaFile(version=1) +roles["snapshot"].signed.version += 1 + +# Update timestamp to account for changed snapshot metadata +roles["timestamp"].signed.snapshot_meta.version = roles[ + "snapshot" +].signed.version +roles["timestamp"].signed.version += 1 + +# Sign and write metadata for all changed roles, i.e. all but root +for role_name in ["targets", "python-scripts", "snapshot", "timestamp"]: + signer = SSlibSigner(keys[role_name]) + roles[role_name].sign(signer) + + # Prefix all but timestamp with version number (see consistent snapshot) + filename = f"{role_name}.json" + if role_name != "timestamp": + filename = f"{roles[role_name].signed.version}.{filename}" + + roles[role_name].to_file(os.path.join(TMP_DIR, filename), serializer=PRETTY) + + +# Root key rotation (recover from a compromise / key loss) +# ======================================================== +# TUF makes it easy to recover from a key compromise in-band. Given the trust +# hierarchy through top-level and targets delegation you can easily +# replace compromised or lost keys for any role using the delegating role, even +# for the root role. +# However, since root authorizes its own keys, it always has to be signed with +# both the threshold of keys from the previous version and the threshold of +# keys from the new version. This establishes a trusted line of continuity. +# +# In this example we will replace a root key, and sign a new version of root +# with the threshold of old and new keys. Since one of the previous root keys +# remains in place, it can be used to count towards the old and new threshold. 
+new_root_key = generate_ed25519_key() + +roles["root"].signed.remove_key("root", keys["root"]["keyid"]) +roles["root"].signed.add_key( + "root", Key.from_securesystemslib_key(new_root_key) +) +roles["root"].signed.version += 1 + +roles["root"].signatures.clear() +for key in [keys["root"], another_root_key, new_root_key]: + roles["root"].sign(SSlibSigner(key), append=True) + +roles["root"].to_file( + os.path.join(TMP_DIR, f"{roles['root'].signed.version}.root.json"), + serializer=PRETTY, +) diff --git a/pyproject.toml b/pyproject.toml index 2f21011953..c54700dcc2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,91 @@ +# Build-system section [build-system] requires = ["setuptools>=40.8.0", "wheel"] build-backend = "setuptools.build_meta" + +# Black section +# Read more here: https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-via-a-file +[tool.black] +line-length=80 + +# Isort section +# Read more here: https://pycqa.github.io/isort/docs/configuration/config_files.html +[tool.isort] +profile="black" +line_length=80 +known_first_party = ["tuf"] + +# Pylint section + +# Minimal pylint configuration file for Secure Systems Lab Python Style Guide: +# https://github.com/secure-systems-lab/code-style-guidelines +# +# Based on Google Python Style Guide pylintrc and pylint defaults: +# https://google.github.io/styleguide/pylintrc +# http://pylint.pycqa.org/en/latest/technical_reference/features.html + +[tool.pylint.message_control] +# Disable the message, report, category or checker with the given id(s). +# NOTE: To keep this config as short as possible we only disable checks that +# are currently in conflict with our code. If new code displeases the linter +# (for good reasons) consider updating this config file, or disable checks with +# an inline comment. +disable=[ + "fixme", + "too-few-public-methods", + "too-many-arguments", + "format", + "duplicate-code" ] + +[tool.pylint.basic] +good-names = ["i","j","k","v","e","f","fn","fp","_type","_"] +# Regexes for allowed names are copied from the Google pylintrc +# NOTE: Pylint captures regex name groups such as 'snake_case' or 'camel_case'. +# If there are multiple groups it enforces the prevalent naming style inside +# each module. Names in the exempt capturing group are ignored.
+function-rgx="^(?:(?PsetUp|tearDown|setUpModule|tearDownModule)|(?P_?[A-Z][a-zA-Z0-9]*)|(?P_?[a-z][a-z0-9_]*))$" +method-rgx="(?x)^(?:(?P_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P_{0,2}[a-z][a-z0-9_]*))$" +argument-rgx="^[a-z][a-z0-9_]*$" +attr-rgx="^_{0,2}[a-z][a-z0-9_]*$" +class-attribute-rgx="^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$" +class-rgx="^_?[A-Z][a-zA-Z0-9]*$" +const-rgx="^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$" +inlinevar-rgx="^[a-z][a-z0-9_]*$" +module-rgx="^(_?[a-z][a-z0-9_]*|__init__)$" +no-docstring-rgx="(__.*__|main|test.*|.*test|.*Test)$" +variable-rgx="^[a-z][a-z0-9_]*$" +docstring-min-length=10 + +[tool.pylint.logging] +logging-format-style="old" + +[tool.pylint.miscellaneous] +notes="TODO" + +[tool.pylint.STRING] +check-quote-consistency="yes" + +# mypy section +# Read more here: https://mypy.readthedocs.io/en/stable/config_file.html#using-a-pyproject-toml-file +[tool.mypy] +warn_unused_configs = "True" +warn_redundant_casts = "True" +warn_unused_ignores = "True" +warn_unreachable = "True" +strict_equality = "True" +disallow_untyped_defs = "True" +disallow_untyped_calls = "True" +show_error_codes = "True" +files = [ + "examples/client_example/", + "tuf/api/", + "tuf/ngclient", + "tuf/exceptions.py" +] + +[[tool.mypy.overrides]] +module = [ + "securesystemslib.*", + "urllib3.*" +] +ignore_missing_imports = "True" diff --git a/setup.cfg b/setup.cfg index 1a7608caf8..73a975b948 100644 --- a/setup.cfg +++ b/setup.cfg @@ -45,27 +45,10 @@ install_requires = [options.packages.find] exclude = tests +[options.package_data] +tuf = py.typed + [check-manifest] ignore = .fossa.yml .readthedocs.yaml - -[mypy] -warn_unused_configs = True -warn_redundant_casts = True -warn_unused_ignores = True -warn_unreachable = True -strict_equality = True -disallow_untyped_defs = True -disallow_untyped_calls = True -files = - tuf/api/, - tuf/ngclient, - tuf/exceptions.py, - examples/client_example/client_example.py - -[mypy-securesystemslib.*] -ignore_missing_imports = True - -[mypy-urllib3.*] -ignore_missing_imports = True diff --git a/tests/repository_simulator.py b/tests/repository_simulator.py index cf74748bf3..bb86b59cb7 100644 --- a/tests/repository_simulator.py +++ b/tests/repository_simulator.py @@ -93,11 +93,7 @@ class RepositorySimulator(FetcherInterface): """Simulates a repository that can be used for testing.""" # pylint: disable=too-many-instance-attributes - def __init__(self): - self.md_root: Metadata[Root] = None - self.md_timestamp: Metadata[Timestamp] = None - self.md_snapshot: Metadata[Snapshot] = None - self.md_targets: Metadata[Targets] = None + def __init__(self) -> None: self.md_delegates: Dict[str, Metadata[Targets]] = {} # other metadata is signed on-demand (when fetched) but roots must be @@ -111,10 +107,13 @@ def __init__(self): # target downloads are served from this dict self.target_files: Dict[str, RepositoryTarget] = {} - # Whether to compute hashes and legth for meta in snapshot/timestamp + # Whether to compute hashes and length for meta in snapshot/timestamp self.compute_metafile_hashes_length = False - self.dump_dir = None + # Enable hash-prefixed target file names + self.prefix_targets_with_hash = True + + self.dump_dir: Optional[str] = None self.dump_version = 0 now = datetime.utcnow() @@ -140,7 +139,7 @@ def targets(self) -> Targets: def all_targets(self) -> Iterator[Tuple[str, Targets]]: """Yield 
role name and signed portion of targets one by one.""" - yield "targets", self.md_targets.signed + yield Targets.type, self.md_targets.signed for role, md in self.md_delegates.items(): yield role, md.signed @@ -149,12 +148,12 @@ def create_key() -> Tuple[Key, SSlibSigner]: sslib_key = generate_ed25519_key() return Key.from_securesystemslib_key(sslib_key), SSlibSigner(sslib_key) - def add_signer(self, role: str, signer: SSlibSigner): + def add_signer(self, role: str, signer: SSlibSigner) -> None: if role not in self.signers: self.signers[role] = {} self.signers[role][signer.key_dict["keyid"]] = signer - def _initialize(self): + def _initialize(self) -> None: """Setup a minimal valid repository.""" targets = Targets(1, SPEC_VER, self.safe_expiry, {}, None) @@ -179,10 +178,10 @@ def _initialize(self): self.md_root = Metadata(root, OrderedDict()) self.publish_root() - def publish_root(self): + def publish_root(self) -> None: """Sign and store a new serialized version of root.""" self.md_root.signatures.clear() - for signer in self.signers["root"].values(): + for signer in self.signers[Root.type].values(): self.md_root.sign(signer, append=True) self.signed_roots.append(self.md_root.to_bytes(JSONSerializer())) @@ -192,24 +191,31 @@ def fetch(self, url: str) -> Iterator[bytes]: """Fetches data from the given url and returns an Iterator (or yields bytes). """ - if not self.root.consistent_snapshot: - raise NotImplementedError("non-consistent snapshot not supported") path = parse.urlparse(url).path if path.startswith("/metadata/") and path.endswith(".json"): + # figure out rolename and version ver_and_name = path[len("/metadata/") :][: -len(".json")] - # only consistent_snapshot supported ATM: timestamp is special case - if ver_and_name == "timestamp": - version = None - role = "timestamp" + version_str, _, role = ver_and_name.partition(".") + # root is always version-prefixed while timestamp is always NOT + if role == Root.type or ( + self.root.consistent_snapshot and ver_and_name != Timestamp.type + ): + version: Optional[int] = int(version_str) else: - version, _, role = ver_and_name.partition(".") - version = int(version) + # the file is not version-prefixed + role = ver_and_name + version = None + yield self._fetch_metadata(role, version) elif path.startswith("/targets/"): # figure out target path and hash prefix target_path = path[len("/targets/") :] dir_parts, sep, prefixed_filename = target_path.rpartition("/") - prefix, _, filename = prefixed_filename.partition(".") + # extract the hash prefix, if any + prefix: Optional[str] = None + filename = prefixed_filename + if self.root.consistent_snapshot and self.prefix_targets_with_hash: + prefix, _, filename = prefixed_filename.partition(".") target_path = f"{dir_parts}{sep}{filename}" yield self._fetch_target(target_path, prefix) @@ -242,7 +248,7 @@ def _fetch_metadata( If version is None, non-versioned metadata is being requested. 
""" - if role == "root": + if role == Root.type: # return a version previously serialized in publish_root() if version is None or version > len(self.signed_roots): raise FetcherHTTPError(f"Unknown root version {version}", 404) @@ -250,14 +256,15 @@ def _fetch_metadata( return self.signed_roots[version - 1] # sign and serialize the requested metadata - if role == "timestamp": - md: Metadata = self.md_timestamp - elif role == "snapshot": + md: Optional[Metadata] + if role == Timestamp.type: + md = self.md_timestamp + elif role == Snapshot.type: md = self.md_snapshot - elif role == "targets": + elif role == Targets.type: md = self.md_targets else: - md = self.md_delegates[role] + md = self.md_delegates.get(role) if md is None: raise FetcherHTTPError(f"Unknown role {role}", 404) @@ -283,20 +290,20 @@ def _compute_hashes_and_length( hashes = {sslib_hash.DEFAULT_HASH_ALGORITHM: digest_object.hexdigest()} return hashes, len(data) - def update_timestamp(self): + def update_timestamp(self) -> None: """Update timestamp and assign snapshot version to snapshot_meta version. """ self.timestamp.snapshot_meta.version = self.snapshot.version if self.compute_metafile_hashes_length: - hashes, length = self._compute_hashes_and_length("snapshot") + hashes, length = self._compute_hashes_and_length(Snapshot.type) self.timestamp.snapshot_meta.hashes = hashes self.timestamp.snapshot_meta.length = length self.timestamp.version += 1 - def update_snapshot(self): + def update_snapshot(self) -> None: """Update snapshot, assign targets versions and update timestamp.""" for role, delegate in self.all_targets(): hashes = None @@ -311,9 +318,9 @@ def update_snapshot(self): self.snapshot.version += 1 self.update_timestamp() - def add_target(self, role: str, data: bytes, path: str): + def add_target(self, role: str, data: bytes, path: str) -> None: """Create a target from data and add it to the target_files.""" - if role == "targets": + if role == Targets.type: targets = self.targets else: targets = self.md_delegates[role].signed @@ -330,9 +337,9 @@ def add_delegation( terminating: bool, paths: Optional[List[str]], hash_prefixes: Optional[List[str]], - ): + ) -> None: """Add delegated target role to the repository.""" - if delegator_name == "targets": + if delegator_name == Targets.type: delegator = self.targets else: delegator = self.md_delegates[delegator_name].signed @@ -340,7 +347,7 @@ def add_delegation( # Create delegation role = DelegatedRole(name, [], 1, terminating, paths, hash_prefixes) if delegator.delegations is None: - delegator.delegations = Delegations({}, {}) + delegator.delegations = Delegations({}, OrderedDict()) # put delegation last by default delegator.delegations.roles[role.name] = role @@ -352,7 +359,7 @@ def add_delegation( # Add metadata for the role self.md_delegates[role.name] = Metadata(targets, OrderedDict()) - def write(self): + def write(self) -> None: """Dump current repository metadata to self.dump_dir This is a debugging tool: dumping repository state before running @@ -368,9 +375,9 @@ def write(self): for ver in range(1, len(self.signed_roots) + 1): with open(os.path.join(dest_dir, f"{ver}.root.json"), "wb") as f: - f.write(self._fetch_metadata("root", ver)) + f.write(self._fetch_metadata(Root.type, ver)) - for role in ["timestamp", "snapshot", "targets"]: + for role in [Timestamp.type, Snapshot.type, Targets.type]: with open(os.path.join(dest_dir, f"{role}.json"), "wb") as f: f.write(self._fetch_metadata(role)) diff --git a/tests/test_api.py b/tests/test_api.py index 
2b0b202d59..8bd69c9b32 100755 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -14,6 +14,7 @@ import tempfile import unittest from datetime import datetime, timedelta +from typing import ClassVar, Dict from dateutil.relativedelta import relativedelta from securesystemslib import hash as sslib_hash @@ -28,6 +29,7 @@ from tuf import exceptions from tuf.api.metadata import ( DelegatedRole, + Delegations, Key, Metadata, MetaFile, @@ -43,11 +45,17 @@ logger = logging.getLogger(__name__) +# pylint: disable=too-many-public-methods class TestMetadata(unittest.TestCase): """Tests for public API of all classes in 'tuf/api/metadata.py'.""" + temporary_directory: ClassVar[str] + repo_dir: ClassVar[str] + keystore_dir: ClassVar[str] + keystore: ClassVar[Dict[str, str]] + @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: # Create a temporary directory to store the repository, metadata, and # target files. 'temporary_directory' must be deleted in # TearDownClass() so that temporary files are always removed, even when @@ -70,24 +78,24 @@ def setUpClass(cls): # Load keys into memory cls.keystore = {} - for role in ["delegation", "snapshot", "targets", "timestamp"]: + for role in ["delegation", Snapshot.type, Targets.type, Timestamp.type]: cls.keystore[role] = import_ed25519_privatekey_from_file( os.path.join(cls.keystore_dir, role + "_key"), password="password", ) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: # Remove the temporary repository directory, which should contain all # the metadata, targets, and key files generated for the test cases. shutil.rmtree(cls.temporary_directory) - def test_generic_read(self): + def test_generic_read(self) -> None: for metadata, inner_metadata_cls in [ - ("root", Root), - ("snapshot", Snapshot), - ("timestamp", Timestamp), - ("targets", Targets), + (Root.type, Root), + (Snapshot.type, Snapshot), + (Timestamp.type, Timestamp), + (Targets.type, Targets), ]: # Load JSON-formatted metdata of each supported type from file @@ -119,7 +127,7 @@ def test_generic_read(self): os.remove(bad_metadata_path) - def test_compact_json(self): + def test_compact_json(self) -> None: path = os.path.join(self.repo_dir, "metadata", "targets.json") md_obj = Metadata.from_file(path) self.assertTrue( @@ -127,8 +135,8 @@ def test_compact_json(self): < len(JSONSerializer().serialize(md_obj)) ) - def test_read_write_read_compare(self): - for metadata in ["root", "snapshot", "timestamp", "targets"]: + def test_read_write_read_compare(self) -> None: + for metadata in [Root.type, Snapshot.type, Timestamp.type, Targets.type]: path = os.path.join(self.repo_dir, "metadata", metadata + ".json") md_obj = Metadata.from_file(path) @@ -139,8 +147,8 @@ def test_read_write_read_compare(self): os.remove(path_2) - def test_to_from_bytes(self): - for metadata in ["root", "snapshot", "timestamp", "targets"]: + def test_to_from_bytes(self) -> None: + for metadata in [Root.type, Snapshot.type, Timestamp.type, Targets.type]: path = os.path.join(self.repo_dir, "metadata", metadata + ".json") with open(path, "rb") as f: metadata_bytes = f.read() @@ -156,16 +164,16 @@ def test_to_from_bytes(self): metadata_obj_2 = Metadata.from_bytes(obj_bytes) self.assertEqual(metadata_obj_2.to_bytes(), obj_bytes) - def test_sign_verify(self): + def test_sign_verify(self) -> None: root_path = os.path.join(self.repo_dir, "metadata", "root.json") root = Metadata[Root].from_file(root_path).signed # Locate the public keys we need from root - targets_keyid = 
next(iter(root.roles["targets"].keyids)) + targets_keyid = next(iter(root.roles[Targets.type].keyids)) targets_key = root.keys[targets_keyid] - snapshot_keyid = next(iter(root.roles["snapshot"].keyids)) + snapshot_keyid = next(iter(root.roles[Snapshot.type].keyids)) snapshot_key = root.keys[snapshot_keyid] - timestamp_keyid = next(iter(root.roles["timestamp"].keyids)) + timestamp_keyid = next(iter(root.roles[Timestamp.type].keyids)) timestamp_key = root.keys[timestamp_keyid] # Load sample metadata (targets) and assert ... @@ -182,9 +190,9 @@ def test_sign_verify(self): # Test verifying with explicitly set serializer targets_key.verify_signature(md_obj, CanonicalJSONSerializer()) with self.assertRaises(exceptions.UnsignedMetadataError): - targets_key.verify_signature(md_obj, JSONSerializer()) + targets_key.verify_signature(md_obj, JSONSerializer()) # type: ignore[arg-type] - sslib_signer = SSlibSigner(self.keystore["snapshot"]) + sslib_signer = SSlibSigner(self.keystore[Snapshot.type]) # Append a new signature with the unrelated key and assert that ... sig = md_obj.sign(sslib_signer, append=True) # ... there are now two signatures, and @@ -195,7 +203,7 @@ def test_sign_verify(self): # ... the returned (appended) signature is for snapshot key self.assertEqual(sig.keyid, snapshot_keyid) - sslib_signer = SSlibSigner(self.keystore["timestamp"]) + sslib_signer = SSlibSigner(self.keystore[Timestamp.type]) # Create and assign (don't append) a new signature and assert that ... md_obj.sign(sslib_signer, append=False) # ... there now is only one signature, @@ -205,12 +213,12 @@ def test_sign_verify(self): with self.assertRaises(exceptions.UnsignedMetadataError): targets_key.verify_signature(md_obj) - def test_verify_failures(self): + def test_verify_failures(self) -> None: root_path = os.path.join(self.repo_dir, "metadata", "root.json") root = Metadata[Root].from_file(root_path).signed # Locate the timestamp public key we need from root - timestamp_keyid = next(iter(root.roles["timestamp"].keyids)) + timestamp_keyid = next(iter(root.roles[Timestamp.type].keyids)) timestamp_key = root.keys[timestamp_keyid] # Load sample metadata (timestamp) @@ -247,7 +255,7 @@ def test_verify_failures(self): timestamp_key.verify_signature(md_obj) sig.signature = correct_sig - def test_metadata_base(self): + def test_metadata_base(self) -> None: # Use of Snapshot is arbitrary, we're just testing the base class # features with real data snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json") @@ -289,7 +297,7 @@ def test_metadata_base(self): with self.assertRaises(ValueError): Metadata.from_dict(data) - def test_metadata_snapshot(self): + def test_metadata_snapshot(self) -> None: snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json") snapshot = Metadata[Snapshot].from_file(snapshot_path) @@ -308,7 +316,7 @@ def test_metadata_snapshot(self): snapshot.signed.meta["role1.json"].to_dict(), fileinfo.to_dict() ) - def test_metadata_timestamp(self): + def test_metadata_timestamp(self) -> None: timestamp_path = os.path.join( self.repo_dir, "metadata", "timestamp.json" ) @@ -348,7 +356,7 @@ def test_metadata_timestamp(self): timestamp.signed.snapshot_meta.to_dict(), fileinfo.to_dict() ) - def test_metadata_verify_delegate(self): + def test_metadata_verify_delegate(self) -> None: root_path = os.path.join(self.repo_dir, "metadata", "root.json") root = Metadata[Root].from_file(root_path) snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json") @@ -361,20 +369,20 @@ def 
test_metadata_verify_delegate(self): role2 = Metadata[Targets].from_file(role2_path) # test the expected delegation tree - root.verify_delegate("root", root) - root.verify_delegate("snapshot", snapshot) - root.verify_delegate("targets", targets) + root.verify_delegate(Root.type, root) + root.verify_delegate(Snapshot.type, snapshot) + root.verify_delegate(Targets.type, targets) targets.verify_delegate("role1", role1) role1.verify_delegate("role2", role2) # only root and targets can verify delegates with self.assertRaises(TypeError): - snapshot.verify_delegate("snapshot", snapshot) + snapshot.verify_delegate(Snapshot.type, snapshot) # verify fails for roles that are not delegated by delegator with self.assertRaises(ValueError): root.verify_delegate("role1", role1) with self.assertRaises(ValueError): - targets.verify_delegate("targets", targets) + targets.verify_delegate(Targets.type, targets) # verify fails when delegator has no delegations with self.assertRaises(ValueError): role2.verify_delegate("role1", role1) @@ -383,40 +391,40 @@ def test_metadata_verify_delegate(self): expires = snapshot.signed.expires snapshot.signed.bump_expiration() with self.assertRaises(exceptions.UnsignedMetadataError): - root.verify_delegate("snapshot", snapshot) + root.verify_delegate(Snapshot.type, snapshot) snapshot.signed.expires = expires # verify fails if roles keys do not sign the metadata with self.assertRaises(exceptions.UnsignedMetadataError): - root.verify_delegate("timestamp", snapshot) + root.verify_delegate(Timestamp.type, snapshot) # Add a key to snapshot role, make sure the new sig fails to verify - ts_keyid = next(iter(root.signed.roles["timestamp"].keyids)) - root.signed.add_key("snapshot", root.signed.keys[ts_keyid]) + ts_keyid = next(iter(root.signed.roles[Timestamp.type].keyids)) + root.signed.add_key(Snapshot.type, root.signed.keys[ts_keyid]) snapshot.signatures[ts_keyid] = Signature(ts_keyid, "ff" * 64) # verify succeeds if threshold is reached even if some signatures # fail to verify - root.verify_delegate("snapshot", snapshot) + root.verify_delegate(Snapshot.type, snapshot) # verify fails if threshold of signatures is not reached - root.signed.roles["snapshot"].threshold = 2 + root.signed.roles[Snapshot.type].threshold = 2 with self.assertRaises(exceptions.UnsignedMetadataError): - root.verify_delegate("snapshot", snapshot) + root.verify_delegate(Snapshot.type, snapshot) # verify succeeds when we correct the new signature and reach the # threshold of 2 keys - snapshot.sign(SSlibSigner(self.keystore["timestamp"]), append=True) - root.verify_delegate("snapshot", snapshot) + snapshot.sign(SSlibSigner(self.keystore[Timestamp.type]), append=True) + root.verify_delegate(Snapshot.type, snapshot) - def test_key_class(self): + def test_key_class(self) -> None: # Test if from_securesystemslib_key removes the private key from keyval # of a securesystemslib key dictionary. 
sslib_key = generate_ed25519_key() key = Key.from_securesystemslib_key(sslib_key) self.assertFalse("private" in key.keyval.keys()) - def test_root_add_key_and_remove_key(self): + def test_root_add_key_and_remove_key(self) -> None: root_path = os.path.join(self.repo_dir, "metadata", "root.json") root = Metadata[Root].from_file(root_path) @@ -433,14 +441,14 @@ def test_root_add_key_and_remove_key(self): ) # Assert that root does not contain the new key - self.assertNotIn(keyid, root.signed.roles["root"].keyids) + self.assertNotIn(keyid, root.signed.roles[Root.type].keyids) self.assertNotIn(keyid, root.signed.keys) # Add new root key - root.signed.add_key("root", key_metadata) + root.signed.add_key(Root.type, key_metadata) # Assert that key is added - self.assertIn(keyid, root.signed.roles["root"].keyids) + self.assertIn(keyid, root.signed.roles[Root.type].keyids) self.assertIn(keyid, root.signed.keys) # Confirm that the newly added key does not break @@ -448,33 +456,33 @@ def test_root_add_key_and_remove_key(self): root.to_dict() # Try adding the same key again and assert its ignored. - pre_add_keyid = root.signed.roles["root"].keyids.copy() - root.signed.add_key("root", key_metadata) - self.assertEqual(pre_add_keyid, root.signed.roles["root"].keyids) + pre_add_keyid = root.signed.roles[Root.type].keyids.copy() + root.signed.add_key(Root.type, key_metadata) + self.assertEqual(pre_add_keyid, root.signed.roles[Root.type].keyids) # Add the same key to targets role as well - root.signed.add_key("targets", key_metadata) + root.signed.add_key(Targets.type, key_metadata) # Add the same key to a nonexistent role. with self.assertRaises(ValueError): root.signed.add_key("nosuchrole", key_metadata) # Remove the key from root role (targets role still uses it) - root.signed.remove_key("root", keyid) - self.assertNotIn(keyid, root.signed.roles["root"].keyids) + root.signed.remove_key(Root.type, keyid) + self.assertNotIn(keyid, root.signed.roles[Root.type].keyids) self.assertIn(keyid, root.signed.keys) # Remove the key from targets as well - root.signed.remove_key("targets", keyid) - self.assertNotIn(keyid, root.signed.roles["targets"].keyids) + root.signed.remove_key(Targets.type, keyid) + self.assertNotIn(keyid, root.signed.roles[Targets.type].keyids) self.assertNotIn(keyid, root.signed.keys) with self.assertRaises(ValueError): - root.signed.remove_key("root", "nosuchkey") + root.signed.remove_key(Root.type, "nosuchkey") with self.assertRaises(ValueError): root.signed.remove_key("nosuchrole", keyid) - def test_is_target_in_pathpattern(self): + def test_is_target_in_pathpattern(self) -> None: # pylint: disable=protected-access supported_use_cases = [ ("foo.tgz", "foo.tgz"), @@ -506,7 +514,7 @@ def test_is_target_in_pathpattern(self): DelegatedRole._is_target_in_pathpattern(targetpath, pathpattern) ) - def test_metadata_targets(self): + def test_metadata_targets(self) -> None: targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") targets = Metadata[Targets].from_file(targets_path) @@ -530,7 +538,7 @@ def test_metadata_targets(self): targets.signed.targets[filename].to_dict(), fileinfo.to_dict() ) - def test_targets_key_api(self): + def test_targets_key_api(self) -> None: targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") targets: Targets = Metadata[Targets].from_file(targets_path).signed @@ -544,6 +552,7 @@ def test_targets_key_api(self): "threshold": 1, } ) + assert isinstance(targets.delegations, Delegations) targets.delegations.roles["role2"] = delegated_role 
key_dict = { @@ -607,7 +616,7 @@ def test_targets_key_api(self): targets.remove_key("role1", key.keyid) self.assertTrue(targets.delegations is None) - def test_length_and_hash_validation(self): + def test_length_and_hash_validation(self) -> None: # Test metadata files' hash and length verification. # Use timestamp to get a MetaFile object and snapshot @@ -647,7 +656,7 @@ def test_length_and_hash_validation(self): # Test wrong algorithm format (sslib.FormatError) snapshot_metafile.hashes = { - 256: "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" + 256: "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" # type: ignore[dict-item] } with self.assertRaises(exceptions.LengthOrHashMismatchError): snapshot_metafile.verify_length_and_hashes(data) @@ -661,7 +670,7 @@ def test_length_and_hash_validation(self): targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") targets = Metadata[Targets].from_file(targets_path) file1_targetfile = targets.signed.targets["file1.txt"] - filepath = os.path.join(self.repo_dir, "targets", "file1.txt") + filepath = os.path.join(self.repo_dir, Targets.type, "file1.txt") with open(filepath, "rb") as file1: file1_targetfile.verify_length_and_hashes(file1) @@ -677,9 +686,9 @@ def test_length_and_hash_validation(self): with self.assertRaises(exceptions.LengthOrHashMismatchError): file1_targetfile.verify_length_and_hashes(file1) - def test_targetfile_from_file(self): + def test_targetfile_from_file(self) -> None: # Test with an existing file and valid hash algorithm - file_path = os.path.join(self.repo_dir, "targets", "file1.txt") + file_path = os.path.join(self.repo_dir, Targets.type, "file1.txt") targetfile_from_file = TargetFile.from_file( file_path, file_path, ["sha256"] ) @@ -688,20 +697,20 @@ def test_targetfile_from_file(self): targetfile_from_file.verify_length_and_hashes(file) # Test with a non-existing file - file_path = os.path.join(self.repo_dir, "targets", "file123.txt") + file_path = os.path.join(self.repo_dir, Targets.type, "file123.txt") with self.assertRaises(FileNotFoundError): TargetFile.from_file( file_path, file_path, [sslib_hash.DEFAULT_HASH_ALGORITHM] ) # Test with an unsupported algorithm - file_path = os.path.join(self.repo_dir, "targets", "file1.txt") + file_path = os.path.join(self.repo_dir, Targets.type, "file1.txt") with self.assertRaises(exceptions.UnsupportedAlgorithmError): TargetFile.from_file(file_path, file_path, ["123"]) - def test_targetfile_from_data(self): + def test_targetfile_from_data(self) -> None: data = b"Inline test content" - target_file_path = os.path.join(self.repo_dir, "targets", "file1.txt") + target_file_path = os.path.join(self.repo_dir, Targets.type, "file1.txt") # Test with a valid hash algorithm targetfile_from_data = TargetFile.from_data( @@ -713,7 +722,7 @@ def test_targetfile_from_data(self): targetfile_from_data = TargetFile.from_data(target_file_path, data) targetfile_from_data.verify_length_and_hashes(data) - def test_is_delegated_role(self): + def test_is_delegated_role(self) -> None: # test path matches # see more extensive tests in test_is_target_in_pathpattern() for paths in [ diff --git a/tests/test_examples.py b/tests/test_examples.py new file mode 100644 index 0000000000..2d3c480462 --- /dev/null +++ b/tests/test_examples.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python +# Copyright 2020, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 +""" Unit tests for 'examples' scripts. 
+ +""" +import glob +import os +import shutil +import tempfile +import unittest +from pathlib import Path + + +class TestRepoExamples(unittest.TestCase): + """Unit test class for 'repo_example' scripts. + + Provides a '_run_example_script' method to run (exec) a script located in + the 'repo_example' directory. + + """ + + @classmethod + def setUpClass(cls): + """Locate and cache 'repo_example' dir.""" + base = Path(__file__).resolve().parents[1] + cls.repo_examples_dir = base / "examples" / "repo_example" + + def setUp(self): + """Create and change into test dir. + NOTE: Test scripts are expected to create dirs/files in new CWD.""" + self.original_cwd = os.getcwd() + self.base_test_dir = os.path.realpath(tempfile.mkdtemp()) + os.chdir(self.base_test_dir) + + def tearDown(self): + """Change back to original dir and remove test dir, which may contain + dirs/files the test created at test-time CWD.""" + os.chdir(self.original_cwd) + shutil.rmtree(self.base_test_dir) + + def _run_script_and_assert_files(self, script_name, filenames_created): + """Run script in 'repo_example' dir and assert that it created the + files corresponding to the passed filenames inside a 'tmp*' test dir at + CWD.""" + script_path = str(self.repo_examples_dir / script_name) + with open(script_path, "rb") as f: + # pylint: disable=exec-used + exec( + compile(f.read(), script_path, "exec"), + {"__file__": script_path}, + ) + + test_dirs = glob.glob("tmp*") + self.assertTrue( + len(test_dirs) == 1, f"expected 1 'tmp*' test dir, got {test_dirs}" + ) + + test_dir = test_dirs.pop() + for name in filenames_created: + metadata_path = Path(test_dir) / f"{name}" + self.assertTrue( + metadata_path.exists(), f"missing '{metadata_path}' file" + ) + + def test_basic_repo(self): + """Run 'basic_repo.py' and assert creation of metadata files.""" + self._run_script_and_assert_files( + "basic_repo.py", + [ + "1.python-scripts.json", + "1.root.json", + "1.snapshot.json", + "1.targets.json", + "2.root.json", + "2.snapshot.json", + "2.targets.json", + "timestamp.json", + ], + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_fetcher_ng.py b/tests/test_fetcher_ng.py index 55dec1e301..4958da9f15 100644 --- a/tests/test_fetcher_ng.py +++ b/tests/test_fetcher_ng.py @@ -13,6 +13,7 @@ import sys import tempfile import unittest +from typing import Any, ClassVar, Iterator from unittest.mock import Mock, patch import requests @@ -28,17 +29,19 @@ class TestFetcher(unittest_toolbox.Modified_TestCase): """Test RequestsFetcher class.""" + server_process_handler: ClassVar[utils.TestServerProcess] + @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: # Launch a SimpleHTTPServer (serves files in the current dir). cls.server_process_handler = utils.TestServerProcess(log=logger) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: # Stop server process and perform clean up. cls.server_process_handler.clean() - def setUp(self): + def setUp(self) -> None: """ Create a temporary file and launch a simple server in the current working directory. @@ -64,12 +67,12 @@ def setUp(self): # Instantiate a concrete instance of FetcherInterface self.fetcher = RequestsFetcher() - def tearDown(self): + def tearDown(self) -> None: # Remove temporary directory unittest_toolbox.Modified_TestCase.tearDown(self) # Simple fetch. 
- def test_fetch(self): + def test_fetch(self) -> None: with tempfile.TemporaryFile() as temp_file: for chunk in self.fetcher.fetch(self.url): temp_file.write(chunk) @@ -80,7 +83,7 @@ def test_fetch(self): ) # URL data downloaded in more than one chunk - def test_fetch_in_chunks(self): + def test_fetch_in_chunks(self) -> None: # Set a smaller chunk size to ensure that the file will be downloaded # in more than one chunk self.fetcher.chunk_size = 4 @@ -105,12 +108,12 @@ def test_fetch_in_chunks(self): self.assertEqual(chunks_count, expected_chunks_count) # Incorrect URL parsing - def test_url_parsing(self): + def test_url_parsing(self) -> None: with self.assertRaises(exceptions.URLParsingError): self.fetcher.fetch(self.random_string()) # File not found error - def test_http_error(self): + def test_http_error(self) -> None: with self.assertRaises(exceptions.FetcherHTTPError) as cm: self.url = f"{self.url_prefix}/non-existing-path" self.fetcher.fetch(self.url) @@ -118,7 +121,7 @@ def test_http_error(self): # Response read timeout error @patch.object(requests.Session, "get") - def test_response_read_timeout(self, mock_session_get): + def test_response_read_timeout(self, mock_session_get: Any) -> None: mock_response = Mock() attr = { "raw.read.side_effect": urllib3.exceptions.ReadTimeoutError( @@ -136,28 +139,28 @@ def test_response_read_timeout(self, mock_session_get): @patch.object( requests.Session, "get", side_effect=urllib3.exceptions.TimeoutError ) - def test_session_get_timeout(self, mock_session_get): + def test_session_get_timeout(self, mock_session_get: Any) -> None: with self.assertRaises(exceptions.SlowRetrievalError): self.fetcher.fetch(self.url) mock_session_get.assert_called_once() # Simple bytes download - def test_download_bytes(self): + def test_download_bytes(self) -> None: data = self.fetcher.download_bytes(self.url, self.file_length) self.assertEqual(self.file_contents, data.decode("utf-8")) # Download file smaller than required max_length - def test_download_bytes_upper_length(self): + def test_download_bytes_upper_length(self) -> None: data = self.fetcher.download_bytes(self.url, self.file_length + 4) self.assertEqual(self.file_contents, data.decode("utf-8")) # Download a file bigger than expected - def test_download_bytes_length_mismatch(self): + def test_download_bytes_length_mismatch(self) -> None: with self.assertRaises(exceptions.DownloadLengthMismatchError): self.fetcher.download_bytes(self.url, self.file_length - 4) # Simple file download - def test_download_file(self): + def test_download_file(self) -> None: with self.fetcher.download_file( self.url, self.file_length ) as temp_file: @@ -165,7 +168,7 @@ def test_download_file(self): self.assertEqual(self.file_length, temp_file.tell()) # Download file smaller than required max_length - def test_download_file_upper_length(self): + def test_download_file_upper_length(self) -> None: with self.fetcher.download_file( self.url, self.file_length + 4 ) as temp_file: @@ -173,8 +176,10 @@ def test_download_file_upper_length(self): self.assertEqual(self.file_length, temp_file.tell()) # Download a file bigger than expected - def test_download_file_length_mismatch(self): + def test_download_file_length_mismatch(self) -> Iterator[Any]: with self.assertRaises(exceptions.DownloadLengthMismatchError): + # Force download_file to execute and raise the error since it is a + # context manager and returns Iterator[IO] yield self.fetcher.download_file(self.url, self.file_length - 4) diff --git a/tests/test_metadata_serialization.py 
b/tests/test_metadata_serialization.py index 9275b8b821..149a5ec52e 100644 --- a/tests/test_metadata_serialization.py +++ b/tests/test_metadata_serialization.py @@ -11,7 +11,6 @@ import logging import sys import unittest -from typing import Dict from tests import utils from tuf.api.metadata import ( @@ -54,7 +53,7 @@ class TestSerialization(unittest.TestCase): } @utils.run_sub_tests_with_dataset(invalid_signed) - def test_invalid_signed_serialization(self, test_case_data: Dict[str, str]): + def test_invalid_signed_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((KeyError, ValueError, TypeError)): Snapshot.from_dict(copy.deepcopy(case_dict)) @@ -69,7 +68,7 @@ def test_invalid_signed_serialization(self, test_case_data: Dict[str, str]): } @utils.run_sub_tests_with_dataset(valid_keys) - def test_valid_key_serialization(self, test_case_data: str): + def test_valid_key_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) key = Key.from_dict("id", copy.copy(case_dict)) self.assertDictEqual(case_dict, key.to_dict()) @@ -86,7 +85,7 @@ def test_valid_key_serialization(self, test_case_data: str): } @utils.run_sub_tests_with_dataset(invalid_keys) - def test_invalid_key_serialization(self, test_case_data: Dict[str, str]): + def test_invalid_key_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((TypeError, KeyError)): keyid = case_dict.pop("keyid") @@ -101,7 +100,7 @@ def test_invalid_key_serialization(self, test_case_data: Dict[str, str]): } @utils.run_sub_tests_with_dataset(invalid_roles) - def test_invalid_role_serialization(self, test_case_data: Dict[str, str]): + def test_invalid_role_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((KeyError, TypeError, ValueError)): Role.from_dict(copy.deepcopy(case_dict)) @@ -114,7 +113,7 @@ def test_invalid_role_serialization(self, test_case_data: Dict[str, str]): } @utils.run_sub_tests_with_dataset(valid_roles) - def test_role_serialization(self, test_case_data: str): + def test_role_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) role = Role.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, role.to_dict()) @@ -162,7 +161,7 @@ def test_role_serialization(self, test_case_data: str): } @utils.run_sub_tests_with_dataset(valid_roots) - def test_root_serialization(self, test_case_data: str): + def test_root_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) root = Root.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, root.to_dict()) @@ -204,7 +203,7 @@ def test_root_serialization(self, test_case_data: str): } @utils.run_sub_tests_with_dataset(invalid_roots) - def test_invalid_root_serialization(self, test_case_data: Dict[str, str]): + def test_invalid_root_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises(ValueError): Root.from_dict(copy.deepcopy(case_dict)) @@ -219,9 +218,7 @@ def test_invalid_root_serialization(self, test_case_data: Dict[str, str]): } @utils.run_sub_tests_with_dataset(invalid_metafiles) - def test_invalid_metafile_serialization( - self, test_case_data: Dict[str, str] - ): + def test_invalid_metafile_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((TypeError, ValueError, AttributeError)): 
MetaFile.from_dict(copy.deepcopy(case_dict)) @@ -235,7 +232,7 @@ def test_invalid_metafile_serialization( } @utils.run_sub_tests_with_dataset(valid_metafiles) - def test_metafile_serialization(self, test_case_data: str): + def test_metafile_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) metafile = MetaFile.from_dict(copy.copy(case_dict)) self.assertDictEqual(case_dict, metafile.to_dict()) @@ -245,9 +242,7 @@ def test_metafile_serialization(self, test_case_data: str): } @utils.run_sub_tests_with_dataset(invalid_timestamps) - def test_invalid_timestamp_serialization( - self, test_case_data: Dict[str, str] - ): + def test_invalid_timestamp_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((ValueError, KeyError)): Timestamp.from_dict(copy.deepcopy(case_dict)) @@ -260,7 +255,7 @@ def test_invalid_timestamp_serialization( } @utils.run_sub_tests_with_dataset(valid_timestamps) - def test_timestamp_serialization(self, test_case_data: str): + def test_timestamp_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) timestamp = Timestamp.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, timestamp.to_dict()) @@ -279,7 +274,7 @@ def test_timestamp_serialization(self, test_case_data: str): } @utils.run_sub_tests_with_dataset(valid_snapshots) - def test_snapshot_serialization(self, test_case_data: str): + def test_snapshot_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) snapshot = Snapshot.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, snapshot.to_dict()) @@ -300,7 +295,7 @@ def test_snapshot_serialization(self, test_case_data: str): } @utils.run_sub_tests_with_dataset(valid_delegated_roles) - def test_delegated_role_serialization(self, test_case_data: str): + def test_delegated_role_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) deserialized_role = DelegatedRole.from_dict(copy.copy(case_dict)) self.assertDictEqual(case_dict, deserialized_role.to_dict()) @@ -310,16 +305,29 @@ def test_delegated_role_serialization(self, test_case_data: str): "missing hash prefixes and paths": '{"name": "a", "keyids": ["keyid"], "threshold": 1, "terminating": false}', "both hash prefixes and paths": '{"name": "a", "keyids": ["keyid"], "threshold": 1, "terminating": false, \ "paths": ["fn1", "fn2"], "path_hash_prefixes": ["h1", "h2"]}', + "invalid path type": '{"keyids": ["keyid"], "name": "a", "paths": [1,2,3], \ + "terminating": false, "threshold": 1}', + "invalid path_hash_prefixes type": '{"keyids": ["keyid"], "name": "a", "path_hash_prefixes": [1,2,3], \ + "terminating": false, "threshold": 1}', } @utils.run_sub_tests_with_dataset(invalid_delegated_roles) - def test_invalid_delegated_role_serialization(self, test_case_data: str): + def test_invalid_delegated_role_serialization( + self, test_case_data: str + ) -> None: case_dict = json.loads(test_case_data) with self.assertRaises(ValueError): DelegatedRole.from_dict(copy.copy(case_dict)) invalid_delegations: utils.DataSet = { "empty delegations": "{}", + "missing keys": '{ "roles": [ \ + {"keyids": ["keyid"], "name": "a", "terminating": true, "paths": ["fn1"], "threshold": 3}, \ + {"keyids": ["keyid2"], "name": "b", "terminating": true, "paths": ["fn2"], "threshold": 4} ] \ + }', + "missing roles": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ + 
"keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}}', "bad keys": '{"keys": "foo", \ "roles": [{"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], "terminating": false, "threshold": 3}]}', "bad roles": '{"keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ @@ -330,10 +338,44 @@ def test_invalid_delegated_role_serialization(self, test_case_data: str): {"keyids": ["keyid2"], "name": "a", "paths": ["fn3"], "terminating": false, "threshold": 2} \ ] \ }', + "using empty string role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ + }', + "using root as delegate role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "root", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ + }', + "using snapshot as delegate role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "snapshot", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ + }', + "using targets as delegate role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "targets", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ + }', + "using timestamp as delegate role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "timestamp", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ + }', + "using valid and top-level role name": '{"keys": { \ + "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ + "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ + "roles": [ \ + {"keyids": ["keyid1"], "name": "b", "terminating": true, "paths": ["fn1"], "threshold": 3}, \ + {"keyids": ["keyid2"], "name": "root", "terminating": true, "paths": ["fn2"], "threshold": 4} ] \ + }', } @utils.run_sub_tests_with_dataset(invalid_delegations) - def test_invalid_delegation_serialization(self, test_case_data: str): + def test_invalid_delegation_serialization( + self, test_case_data: str + ) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((ValueError, KeyError, AttributeError)): Delegations.from_dict(copy.deepcopy(case_dict)) @@ -355,7 +397,7 @@ def test_invalid_delegation_serialization(self, test_case_data: str): } @utils.run_sub_tests_with_dataset(valid_delegations) - def test_delegation_serialization(self, test_case_data: str): + def test_delegation_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) delegation = Delegations.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, delegation.to_dict()) @@ -369,8 +411,8 @@ def test_delegation_serialization(self, test_case_data: str): @utils.run_sub_tests_with_dataset(invalid_targetfiles) def test_invalid_targetfile_serialization( - self, test_case_data: Dict[str, str] - ): + self, test_case_data: str + ) -> None: case_dict = json.loads(test_case_data) with self.assertRaises(KeyError): TargetFile.from_dict(copy.deepcopy(case_dict), 
"file1.txt") @@ -384,7 +426,7 @@ def test_invalid_targetfile_serialization( } @utils.run_sub_tests_with_dataset(valid_targetfiles) - def test_targetfile_serialization(self, test_case_data: str): + def test_targetfile_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) target_file = TargetFile.from_dict(copy.copy(case_dict), "file1.txt") self.assertDictEqual(case_dict, target_file.to_dict()) @@ -416,7 +458,7 @@ def test_targetfile_serialization(self, test_case_data: str): } @utils.run_sub_tests_with_dataset(valid_targets) - def test_targets_serialization(self, test_case_data): + def test_targets_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) targets = Targets.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, targets.to_dict()) diff --git a/tests/test_trusted_metadata_set.py b/tests/test_trusted_metadata_set.py index bdc2c78cc3..6a3142c916 100644 --- a/tests/test_trusted_metadata_set.py +++ b/tests/test_trusted_metadata_set.py @@ -4,7 +4,7 @@ import sys import unittest from datetime import datetime -from typing import Callable, Optional +from typing import Callable, ClassVar, Dict, List, Optional, Tuple from securesystemslib.interface import ( import_ed25519_privatekey_from_file, @@ -18,7 +18,6 @@ Metadata, MetaFile, Root, - Signed, Snapshot, Targets, Timestamp, @@ -31,8 +30,13 @@ class TestTrustedMetadataSet(unittest.TestCase): """Tests for all public API of the TrustedMetadataSet class.""" + keystore: ClassVar[Dict[str, SSlibSigner]] + metadata: ClassVar[Dict[str, bytes]] + repo_dir: ClassVar[str] + + @classmethod def modify_metadata( - self, rolename: str, modification_func: Callable[[Signed], None] + cls, rolename: str, modification_func: Callable ) -> bytes: """Instantiate metadata from rolename type, call modification_func and sign it again with self.keystore[rolename] signer. @@ -42,22 +46,22 @@ def modify_metadata( modification_func: Function that will be called to modify the signed portion of metadata bytes. 
""" - metadata = Metadata.from_bytes(self.metadata[rolename]) + metadata = Metadata.from_bytes(cls.metadata[rolename]) modification_func(metadata.signed) - metadata.sign(self.keystore[rolename]) + metadata.sign(cls.keystore[rolename]) return metadata.to_bytes() @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: cls.repo_dir = os.path.join( os.getcwd(), "repository_data", "repository", "metadata" ) cls.metadata = {} for md in [ - "root", - "timestamp", - "snapshot", - "targets", + Root.type, + Timestamp.type, + Snapshot.type, + Targets.type, "role1", "role2", ]: @@ -67,10 +71,10 @@ def setUpClass(cls): keystore_dir = os.path.join(os.getcwd(), "repository_data", "keystore") cls.keystore = {} root_key_dict = import_rsa_privatekey_from_file( - os.path.join(keystore_dir, "root" + "_key"), password="password" + os.path.join(keystore_dir, Root.type + "_key"), password="password" ) - cls.keystore["root"] = SSlibSigner(root_key_dict) - for role in ["delegation", "snapshot", "targets", "timestamp"]: + cls.keystore[Root.type] = SSlibSigner(root_key_dict) + for role in ["delegation", Snapshot.type, Targets.type, Timestamp.type]: key_dict = import_ed25519_privatekey_from_file( os.path.join(keystore_dir, role + "_key"), password="password" ) @@ -80,41 +84,41 @@ def hashes_length_modifier(timestamp: Timestamp) -> None: timestamp.snapshot_meta.hashes = None timestamp.snapshot_meta.length = None - cls.metadata["timestamp"] = cls.modify_metadata( - cls, "timestamp", hashes_length_modifier + cls.metadata[Timestamp.type] = cls.modify_metadata( + Timestamp.type, hashes_length_modifier ) def setUp(self) -> None: - self.trusted_set = TrustedMetadataSet(self.metadata["root"]) + self.trusted_set = TrustedMetadataSet(self.metadata[Root.type]) def _update_all_besides_targets( self, timestamp_bytes: Optional[bytes] = None, snapshot_bytes: Optional[bytes] = None, - ): + ) -> None: """Update all metadata roles besides targets. Args: timestamp_bytes: Bytes used when calling trusted_set.update_timestamp(). - Default self.metadata["timestamp"]. + Default self.metadata[Timestamp.type]. snapshot_bytes: Bytes used when calling trusted_set.update_snapshot(). - Default self.metadata["snapshot"]. + Default self.metadata[Snapshot.type]. 
""" - timestamp_bytes = timestamp_bytes or self.metadata["timestamp"] + timestamp_bytes = timestamp_bytes or self.metadata[Timestamp.type] self.trusted_set.update_timestamp(timestamp_bytes) - snapshot_bytes = snapshot_bytes or self.metadata["snapshot"] + snapshot_bytes = snapshot_bytes or self.metadata[Snapshot.type] self.trusted_set.update_snapshot(snapshot_bytes) - def test_update(self): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) - self.trusted_set.update_snapshot(self.metadata["snapshot"]) - self.trusted_set.update_targets(self.metadata["targets"]) + def test_update(self) -> None: + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) + self.trusted_set.update_targets(self.metadata[Targets.type]) self.trusted_set.update_delegated_targets( - self.metadata["role1"], "role1", "targets" + self.metadata["role1"], "role1", Targets.type ) self.trusted_set.update_delegated_targets( self.metadata["role2"], "role2", "role1" @@ -129,44 +133,62 @@ def test_update(self): self.assertTrue(count, 6) - def test_out_of_order_ops(self): + def test_update_metadata_output(self) -> None: + timestamp = self.trusted_set.update_timestamp( + self.metadata["timestamp"] + ) + snapshot = self.trusted_set.update_snapshot(self.metadata["snapshot"]) + targets = self.trusted_set.update_targets(self.metadata["targets"]) + delegeted_targets_1 = self.trusted_set.update_delegated_targets( + self.metadata["role1"], "role1", "targets" + ) + delegeted_targets_2 = self.trusted_set.update_delegated_targets( + self.metadata["role2"], "role2", "role1" + ) + self.assertIsInstance(timestamp.signed, Timestamp) + self.assertIsInstance(snapshot.signed, Snapshot) + self.assertIsInstance(targets.signed, Targets) + self.assertIsInstance(delegeted_targets_1.signed, Targets) + self.assertIsInstance(delegeted_targets_2.signed, Targets) + + def test_out_of_order_ops(self) -> None: # Update snapshot before timestamp with self.assertRaises(RuntimeError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) # Update root after timestamp with self.assertRaises(RuntimeError): - self.trusted_set.update_root(self.metadata["root"]) + self.trusted_set.update_root(self.metadata[Root.type]) # Update targets before snapshot with self.assertRaises(RuntimeError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) # update timestamp after snapshot with self.assertRaises(RuntimeError): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) # Update delegated targets before targets with self.assertRaises(RuntimeError): self.trusted_set.update_delegated_targets( - self.metadata["role1"], "role1", "targets" + self.metadata["role1"], "role1", Targets.type ) - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) # Update snapshot after sucessful targets update with self.assertRaises(RuntimeError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) 
self.trusted_set.update_delegated_targets( - self.metadata["role1"], "role1", "targets" + self.metadata["role1"], "role1", Targets.type ) - def test_root_with_invalid_json(self): + def test_root_with_invalid_json(self) -> None: # Test loading initial root and root update for test_func in [TrustedMetadataSet, self.trusted_set.update_root]: # root is not json @@ -174,20 +196,20 @@ def test_root_with_invalid_json(self): test_func(b"") # root is invalid - root = Metadata.from_bytes(self.metadata["root"]) + root = Metadata.from_bytes(self.metadata[Root.type]) root.signed.version += 1 with self.assertRaises(exceptions.UnsignedMetadataError): test_func(root.to_bytes()) # metadata is of wrong type with self.assertRaises(exceptions.RepositoryError): - test_func(self.metadata["snapshot"]) + test_func(self.metadata[Snapshot.type]) - def test_top_level_md_with_invalid_json(self): - top_level_md = [ - (self.metadata["timestamp"], self.trusted_set.update_timestamp), - (self.metadata["snapshot"], self.trusted_set.update_snapshot), - (self.metadata["targets"], self.trusted_set.update_targets), + def test_top_level_md_with_invalid_json(self) -> None: + top_level_md: List[Tuple[bytes, Callable[[bytes], Metadata]]] = [ + (self.metadata[Timestamp.type], self.trusted_set.update_timestamp), + (self.metadata[Snapshot.type], self.trusted_set.update_snapshot), + (self.metadata[Targets.type], self.trusted_set.update_targets), ] for metadata, update_func in top_level_md: md = Metadata.from_bytes(metadata) @@ -202,153 +224,155 @@ def test_top_level_md_with_invalid_json(self): # metadata is of wrong type with self.assertRaises(exceptions.RepositoryError): - update_func(self.metadata["root"]) + update_func(self.metadata[Root.type]) update_func(metadata) - def test_update_root_new_root(self): + def test_update_root_new_root(self) -> None: # test that root can be updated with a new valid version def root_new_version_modifier(root: Root) -> None: root.version += 1 - root = self.modify_metadata("root", root_new_version_modifier) + root = self.modify_metadata(Root.type, root_new_version_modifier) self.trusted_set.update_root(root) - def test_update_root_new_root_cannot_be_verified_with_threshold(self): + def test_update_root_new_root_fail_threshold_verification(self) -> None: # new_root data with threshold which cannot be verified. 
- root = Metadata.from_bytes(self.metadata["root"]) + root = Metadata.from_bytes(self.metadata[Root.type]) # remove root role keyids representing root signatures - root.signed.roles["root"].keyids = [] + root.signed.roles[Root.type].keyids = set() with self.assertRaises(exceptions.UnsignedMetadataError): self.trusted_set.update_root(root.to_bytes()) - def test_update_root_new_root_ver_same_as_trusted_root_ver(self): + def test_update_root_new_root_ver_same_as_trusted_root_ver(self) -> None: with self.assertRaises(exceptions.ReplayedMetadataError): - self.trusted_set.update_root(self.metadata["root"]) + self.trusted_set.update_root(self.metadata[Root.type]) - def test_root_expired_final_root(self): + def test_root_expired_final_root(self) -> None: def root_expired_modifier(root: Root) -> None: root.expires = datetime(1970, 1, 1) # intermediate root can be expired - root = self.modify_metadata("root", root_expired_modifier) + root = self.modify_metadata(Root.type, root_expired_modifier) tmp_trusted_set = TrustedMetadataSet(root) # update timestamp to trigger final root expiry check with self.assertRaises(exceptions.ExpiredMetadataError): - tmp_trusted_set.update_timestamp(self.metadata["timestamp"]) + tmp_trusted_set.update_timestamp(self.metadata[Timestamp.type]) - def test_update_timestamp_new_timestamp_ver_below_trusted_ver(self): + def test_update_timestamp_new_timestamp_ver_below_trusted_ver(self) -> None: # new_timestamp.version < trusted_timestamp.version def version_modifier(timestamp: Timestamp) -> None: timestamp.version = 3 - timestamp = self.modify_metadata("timestamp", version_modifier) + timestamp = self.modify_metadata(Timestamp.type, version_modifier) self.trusted_set.update_timestamp(timestamp) with self.assertRaises(exceptions.ReplayedMetadataError): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) - def test_update_timestamp_snapshot_ver_below_current(self): + def test_update_timestamp_snapshot_ver_below_current(self) -> None: def bump_snapshot_version(timestamp: Timestamp) -> None: timestamp.snapshot_meta.version = 2 # set current known snapshot.json version to 2 - timestamp = self.modify_metadata("timestamp", bump_snapshot_version) + timestamp = self.modify_metadata(Timestamp.type, bump_snapshot_version) self.trusted_set.update_timestamp(timestamp) # newtimestamp.meta.version < trusted_timestamp.meta.version with self.assertRaises(exceptions.ReplayedMetadataError): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) - def test_update_timestamp_expired(self): + def test_update_timestamp_expired(self) -> None: # new_timestamp has expired def timestamp_expired_modifier(timestamp: Timestamp) -> None: timestamp.expires = datetime(1970, 1, 1) # expired intermediate timestamp is loaded but raises timestamp = self.modify_metadata( - "timestamp", timestamp_expired_modifier + Timestamp.type, timestamp_expired_modifier ) with self.assertRaises(exceptions.ExpiredMetadataError): self.trusted_set.update_timestamp(timestamp) # snapshot update does start but fails because timestamp is expired with self.assertRaises(exceptions.ExpiredMetadataError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) - def test_update_snapshot_length_or_hash_mismatch(self): + def test_update_snapshot_length_or_hash_mismatch(self) -> None: def 
modify_snapshot_length(timestamp: Timestamp) -> None: timestamp.snapshot_meta.length = 1 # set known snapshot.json length to 1 - timestamp = self.modify_metadata("timestamp", modify_snapshot_length) + timestamp = self.modify_metadata(Timestamp.type, modify_snapshot_length) self.trusted_set.update_timestamp(timestamp) with self.assertRaises(exceptions.RepositoryError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) - def test_update_snapshot_cannot_verify_snapshot_with_threshold(self): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) - snapshot = Metadata.from_bytes(self.metadata["snapshot"]) + def test_update_snapshot_fail_threshold_verification(self) -> None: + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) + snapshot = Metadata.from_bytes(self.metadata[Snapshot.type]) snapshot.signatures.clear() with self.assertRaises(exceptions.UnsignedMetadataError): self.trusted_set.update_snapshot(snapshot.to_bytes()) - def test_update_snapshot_version_different_timestamp_snapshot_version(self): + def test_update_snapshot_version_diverge_timestamp_snapshot_version( + self, + ) -> None: def timestamp_version_modifier(timestamp: Timestamp) -> None: timestamp.snapshot_meta.version = 2 timestamp = self.modify_metadata( - "timestamp", timestamp_version_modifier + Timestamp.type, timestamp_version_modifier ) self.trusted_set.update_timestamp(timestamp) # if intermediate snapshot version is incorrect, load it but also raise with self.assertRaises(exceptions.BadVersionNumberError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) # targets update starts but fails if snapshot version does not match with self.assertRaises(exceptions.BadVersionNumberError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_snapshot_file_removed_from_meta(self): - self._update_all_besides_targets(self.metadata["timestamp"]) + def test_update_snapshot_file_removed_from_meta(self) -> None: + self._update_all_besides_targets(self.metadata[Timestamp.type]) def remove_file_from_meta(snapshot: Snapshot) -> None: del snapshot.meta["targets.json"] # Test removing a meta_file in new_snapshot compared to the old snapshot - snapshot = self.modify_metadata("snapshot", remove_file_from_meta) + snapshot = self.modify_metadata(Snapshot.type, remove_file_from_meta) with self.assertRaises(exceptions.RepositoryError): self.trusted_set.update_snapshot(snapshot) - def test_update_snapshot_meta_version_decreases(self): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + def test_update_snapshot_meta_version_decreases(self) -> None: + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) def version_meta_modifier(snapshot: Snapshot) -> None: snapshot.meta["targets.json"].version += 1 - snapshot = self.modify_metadata("snapshot", version_meta_modifier) + snapshot = self.modify_metadata(Snapshot.type, version_meta_modifier) self.trusted_set.update_snapshot(snapshot) with self.assertRaises(exceptions.BadVersionNumberError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) - def test_update_snapshot_expired_new_snapshot(self): - self.trusted_set.update_timestamp(self.metadata["timestamp"]) + def test_update_snapshot_expired_new_snapshot(self) -> None: + 
self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) def snapshot_expired_modifier(snapshot: Snapshot) -> None: snapshot.expires = datetime(1970, 1, 1) # expired intermediate snapshot is loaded but will raise - snapshot = self.modify_metadata("snapshot", snapshot_expired_modifier) + snapshot = self.modify_metadata(Snapshot.type, snapshot_expired_modifier) with self.assertRaises(exceptions.ExpiredMetadataError): self.trusted_set.update_snapshot(snapshot) # targets update does start but fails because snapshot is expired with self.assertRaises(exceptions.ExpiredMetadataError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_snapshot_successful_rollback_checks(self): + def test_update_snapshot_successful_rollback_checks(self) -> None: def meta_version_bump(timestamp: Timestamp) -> None: timestamp.snapshot_meta.version += 1 @@ -356,59 +380,59 @@ def version_bump(snapshot: Snapshot) -> None: snapshot.version += 1 # load a "local" timestamp, then update to newer one: - self.trusted_set.update_timestamp(self.metadata["timestamp"]) - new_timestamp = self.modify_metadata("timestamp", meta_version_bump) + self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) + new_timestamp = self.modify_metadata(Timestamp.type, meta_version_bump) self.trusted_set.update_timestamp(new_timestamp) # load a "local" snapshot with mismatching version (loading happens but # BadVersionNumberError is raised), then update to newer one: with self.assertRaises(exceptions.BadVersionNumberError): - self.trusted_set.update_snapshot(self.metadata["snapshot"]) - new_snapshot = self.modify_metadata("snapshot", version_bump) + self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) + new_snapshot = self.modify_metadata(Snapshot.type, version_bump) self.trusted_set.update_snapshot(new_snapshot) # update targets to trigger final snapshot meta version check - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_targets_no_meta_in_snapshot(self): + def test_update_targets_no_meta_in_snapshot(self) -> None: def no_meta_modifier(snapshot: Snapshot) -> None: snapshot.meta = {} - snapshot = self.modify_metadata("snapshot", no_meta_modifier) - self._update_all_besides_targets(self.metadata["timestamp"], snapshot) + snapshot = self.modify_metadata(Snapshot.type, no_meta_modifier) + self._update_all_besides_targets(self.metadata[Timestamp.type], snapshot) # remove meta information with information about targets from snapshot with self.assertRaises(exceptions.RepositoryError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_targets_hash_different_than_snapshot_meta_hash(self): + def test_update_targets_hash_diverge_from_snapshot_meta_hash(self) -> None: def meta_length_modifier(snapshot: Snapshot) -> None: for metafile_path in snapshot.meta: snapshot.meta[metafile_path] = MetaFile(version=1, length=1) - snapshot = self.modify_metadata("snapshot", meta_length_modifier) - self._update_all_besides_targets(self.metadata["timestamp"], snapshot) + snapshot = self.modify_metadata(Snapshot.type, meta_length_modifier) + self._update_all_besides_targets(self.metadata[Timestamp.type], snapshot) # observed_hash != stored hash in snapshot meta for targets with self.assertRaises(exceptions.RepositoryError): - 
self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_targets_version_different_snapshot_meta_version(self): + def test_update_targets_version_diverge_snapshot_meta_version(self) -> None: def meta_modifier(snapshot: Snapshot) -> None: for metafile_path in snapshot.meta: snapshot.meta[metafile_path] = MetaFile(version=2) - snapshot = self.modify_metadata("snapshot", meta_modifier) - self._update_all_besides_targets(self.metadata["timestamp"], snapshot) + snapshot = self.modify_metadata(Snapshot.type, meta_modifier) + self._update_all_besides_targets(self.metadata[Timestamp.type], snapshot) # new_delegate.signed.version != meta.version stored in snapshot with self.assertRaises(exceptions.BadVersionNumberError): - self.trusted_set.update_targets(self.metadata["targets"]) + self.trusted_set.update_targets(self.metadata[Targets.type]) - def test_update_targets_expired_new_target(self): + def test_update_targets_expired_new_target(self) -> None: self._update_all_besides_targets() # new_delegated_target has expired def target_expired_modifier(target: Targets) -> None: target.expires = datetime(1970, 1, 1) - targets = self.modify_metadata("targets", target_expired_modifier) + targets = self.modify_metadata(Targets.type, target_expired_modifier) with self.assertRaises(exceptions.ExpiredMetadataError): self.trusted_set.update_targets(targets) diff --git a/tests/test_updater_consistent_snapshot.py b/tests/test_updater_consistent_snapshot.py new file mode 100644 index 0000000000..4289d7b860 --- /dev/null +++ b/tests/test_updater_consistent_snapshot.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python + +# Copyright 2021, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +"""Test ngclient Updater toggling consistent snapshot""" + +import os +import sys +import tempfile +import unittest +from typing import Any, Dict, Iterable, List, Optional +from unittest.mock import call, patch + +from tests import utils +from tests.repository_simulator import RepositorySimulator +from tuf.api.metadata import ( + SPECIFICATION_VERSION, + TOP_LEVEL_ROLE_NAMES, + Targets, +) +from tuf.ngclient import Updater + + +class TestConsistentSnapshot(unittest.TestCase): + """Test different combinations of 'consistent_snapshot' and + 'prefix_targets_with_hash' and verify that the correct URLs + are formed for each combination""" + + def setUp(self) -> None: + self.temp_dir = tempfile.TemporaryDirectory() + self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") + self.targets_dir = os.path.join(self.temp_dir.name, "targets") + os.mkdir(self.metadata_dir) + os.mkdir(self.targets_dir) + + def tearDown(self) -> None: + self.temp_dir.cleanup() + + def _init_repo( + self, consistent_snapshot: bool, prefix_targets: bool = True + ) -> RepositorySimulator: + """Create a new RepositorySimulator instance""" + sim = RepositorySimulator() + sim.root.consistent_snapshot = consistent_snapshot + sim.root.version += 1 + sim.publish_root() + sim.prefix_targets_with_hash = prefix_targets + + # Init trusted root with the latest consistent_snapshot + with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: + root = sim.download_bytes( + "https://example.com/metadata/2.root.json", 100000 + ) + f.write(root) + + return sim + + def _init_updater(self, sim: RepositorySimulator) -> Updater: + """Create a new Updater instance""" + return Updater( + self.metadata_dir, + "https://example.com/metadata/", + 
self.targets_dir, + "https://example.com/targets/", + sim, + ) + + @staticmethod + def _cleanup_dir(path: str) -> None: + """Delete all files inside a directory""" + for filepath in [ + os.path.join(path, filename) for filename in os.listdir(path) + ]: + os.remove(filepath) + + def _assert_metadata_files_exist(self, roles: Iterable[str]) -> None: + """Assert that local metadata files exist for 'roles'""" + local_metadata_files = os.listdir(self.metadata_dir) + for role in roles: + self.assertIn(f"{role}.json", local_metadata_files) + + def _assert_targets_files_exist(self, filenames: Iterable[str]) -> None: + """Assert that local files with 'filenames' exist""" + local_target_files = os.listdir(self.targets_dir) + for filename in filenames: + self.assertIn(filename, local_target_files) + + top_level_roles_data: utils.DataSet = { + "consistent_snapshot disabled": { + "consistent_snapshot": False, + "calls": [ + call("root", 3), + call("timestamp", None), + call("snapshot", None), + call("targets", None), + ], + }, + "consistent_snapshot enabled": { + "consistent_snapshot": True, + "calls": [ + call("root", 3), + call("timestamp", None), + call("snapshot", 1), + call("targets", 1), + ], + }, + } + + @utils.run_sub_tests_with_dataset(top_level_roles_data) + def test_top_level_roles_update(self, test_case_data: Dict[str, Any]) -> None: + # Test if the client fetches and stores metadata files with the + # correct version prefix, depending on 'consistent_snapshot' config + consistent_snapshot: bool = test_case_data["consistent_snapshot"] + expected_calls: List[Any] = test_case_data["calls"] + + sim = self._init_repo(consistent_snapshot) + updater = self._init_updater(sim) + + with patch.object( + sim, "_fetch_metadata", wraps=sim._fetch_metadata + ) as wrapped_fetch: + updater.refresh() + + # metadata files are fetched with the expected version (or None) + self.assertListEqual(wrapped_fetch.call_args_list, expected_calls) + # metadata files are always persisted without a version prefix + self._assert_metadata_files_exist(TOP_LEVEL_ROLE_NAMES) + + self._cleanup_dir(self.metadata_dir) + + delegated_roles_data: utils.DataSet = { + "consistent_snapshot disabled": { + "consistent_snapshot": False, + "expected_version": None, + }, + "consistent_snapshot enabled": { + "consistent_snapshot": True, + "expected_version": 1, + }, + } + + @utils.run_sub_tests_with_dataset(delegated_roles_data) + def test_delegated_roles_update(self, test_case_data: Dict[str, Any]) -> None: + # Test if the client fetches and stores delegated metadata files with + # the correct version prefix, depending on 'consistent_snapshot' config + consistent_snapshot: bool = test_case_data["consistent_snapshot"] + expected_version: Optional[int] = test_case_data["expected_version"] + rolenames = ["role1", "..", "."] + expected_calls = [call(role, expected_version) for role in rolenames] + + sim = self._init_repo(consistent_snapshot) + # Add new delegated targets + spec_version = ".".join(SPECIFICATION_VERSION) + targets = Targets(1, spec_version, sim.safe_expiry, {}, None) + for role in rolenames: + sim.add_delegation("targets", role, targets, False, ["*"], None) + sim.update_snapshot() + updater = self._init_updater(sim) + updater.refresh() + + with patch.object( + sim, "_fetch_metadata", wraps=sim._fetch_metadata + ) as wrapped_fetch: + # trigger updater to fetch the delegated metadata + updater.get_targetinfo("anything") + # metadata files are fetched with the expected version (or None) + self.assertListEqual(wrapped_fetch.call_args_list,
expected_calls) + # metadata files are always persisted without a version prefix + self._assert_metadata_files_exist(rolenames) + + self._cleanup_dir(self.metadata_dir) + + targets_download_data: utils.DataSet = { + "consistent_snapshot disabled": { + "consistent_snapshot": False, + "prefix_targets": True, + "hash_algo": None, + }, + "consistent_snapshot enabled without prefixed targets": { + "consistent_snapshot": True, + "prefix_targets": False, + "hash_algo": None, + }, + "consistent_snapshot enabled with prefixed targets": { + "consistent_snapshot": True, + "prefix_targets": True, + "hash_algo": "sha256", + }, + } + + @utils.run_sub_tests_with_dataset(targets_download_data) + def test_download_targets(self, test_case_data: Dict[str, Any]) -> None: + # Test if the client fetches and stores target files with + # the correct hash prefix, depending on 'consistent_snapshot' + # and 'prefix_targets_with_hash' config + consistent_snapshot: bool = test_case_data["consistent_snapshot"] + prefix_targets_with_hash: bool = test_case_data["prefix_targets"] + hash_algo: Optional[str] = test_case_data["hash_algo"] + targetpaths = ["file", "file.txt", "..file.ext", "f.le"] + + sim = self._init_repo(consistent_snapshot, prefix_targets_with_hash) + # Add targets to repository + for targetpath in targetpaths: + sim.targets.version += 1 + sim.add_target("targets", b"content", targetpath) + sim.update_snapshot() + + updater = self._init_updater(sim) + updater.config.prefix_targets_with_hash = prefix_targets_with_hash + updater.refresh() + + with patch.object( + sim, "_fetch_target", wraps=sim._fetch_target + ) as wrapped_fetch_target: + + for targetpath in targetpaths: + info = updater.get_targetinfo(targetpath) + updater.download_target(info) + expected_prefix = ( + None if not hash_algo else info.hashes[hash_algo] + ) + # files are fetched with the expected hash prefix (or None) + wrapped_fetch_target.assert_called_once_with( + info.path, expected_prefix + ) + # target files are always persisted without hash prefix + self._assert_targets_files_exist([info.path]) + wrapped_fetch_target.reset_mock() + + self._cleanup_dir(self.targets_dir) + + +if __name__ == "__main__": + + utils.configure_test_logging(sys.argv) + unittest.main() diff --git a/tests/test_updater_key_rotations.py b/tests/test_updater_key_rotations.py index 2a07fc6761..2aa93b33a7 100644 --- a/tests/test_updater_key_rotations.py +++ b/tests/test_updater_key_rotations.py @@ -17,7 +17,7 @@ from tests import utils from tests.repository_simulator import RepositorySimulator from tests.utils import run_sub_tests_with_dataset -from tuf.api.metadata import Key +from tuf.api.metadata import Key, Root from tuf.exceptions import UnsignedMetadataError from tuf.ngclient import Updater @@ -37,8 +37,8 @@ class TestUpdaterKeyRotations(unittest.TestCase): dump_dir: Optional[str] = None def setUp(self) -> None: - self.sim = None - self.metadata_dir = None + self.sim: RepositorySimulator + self.metadata_dir: str self.subtest_count = 0 # pylint: disable-next=consider-using-with self.temp_dir = tempfile.TemporaryDirectory() @@ -176,14 +176,14 @@ def test_root_rotation(self, root_versions: List[RootVersion]) -> None: # Publish all remote root versions defined in root_versions for rootver in root_versions: # clear root keys, signers - self.sim.root.roles["root"].keyids.clear() - self.sim.signers["root"].clear() + self.sim.root.roles[Root.type].keyids.clear() + self.sim.signers[Root.type].clear() - self.sim.root.roles["root"].threshold = rootver.threshold +
self.sim.root.roles[Root.type].threshold = rootver.threshold for i in rootver.keys: - self.sim.root.add_key("root", self.keys[i]) + self.sim.root.add_key(Root.type, self.keys[i]) for i in rootver.sigs: - self.sim.add_signer("root", self.signers[i]) + self.sim.add_signer(Root.type, self.signers[i]) self.sim.root.version += 1 self.sim.publish_root() diff --git a/tests/test_updater_ng.py b/tests/test_updater_ng.py index 607dd87580..57907fe795 100644 --- a/tests/test_updater_ng.py +++ b/tests/test_updater_ng.py @@ -12,14 +12,14 @@ import sys import tempfile import unittest -from typing import List +from typing import Callable, ClassVar, List from securesystemslib.interface import import_rsa_privatekey_from_file from securesystemslib.signer import SSlibSigner from tests import utils from tuf import exceptions, ngclient, unittest_toolbox -from tuf.api.metadata import Metadata, TargetFile +from tuf.api.metadata import Metadata, Root, Snapshot, TargetFile, Targets, Timestamp logger = logging.getLogger(__name__) @@ -27,8 +27,11 @@ class TestUpdater(unittest_toolbox.Modified_TestCase): """Test the Updater class from 'tuf/ngclient/updater.py'.""" + temporary_directory: ClassVar[str] + server_process_handler: ClassVar[utils.TestServerProcess] + @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: # Create a temporary directory to store the repository, metadata, and # target files. 'temporary_directory' must be deleted in # TearDownModule() so that temporary files are always removed, even when @@ -38,18 +41,18 @@ def setUpClass(cls): # Needed because in some tests simple_server.py cannot be found. # The reason is that the current working directory # has been changed when executing a subprocess. - cls.SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), "simple_server.py") + SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), "simple_server.py") # Launch a SimpleHTTPServer (serves files in the current directory). # Test cases will request metadata and target files that have been # pre-generated in 'tuf/tests/repository_data', which will be served # by the SimpleHTTPServer launched here. cls.server_process_handler = utils.TestServerProcess( - log=logger, server=cls.SIMPLE_SERVER_PATH + log=logger, server=SIMPLE_SERVER_PATH ) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: # Cleans the resources and flush the logged lines (if any). cls.server_process_handler.clean() @@ -57,7 +60,7 @@ def tearDownClass(cls): # the metadata, targets, and key files generated for the test cases shutil.rmtree(cls.temporary_directory) - def setUp(self): + def setUp(self) -> None: # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) @@ -109,7 +112,7 @@ def setUp(self): + utils.TEST_HOST_ADDRESS + ":" + str(self.server_process_handler.port) - + repository_basepath + + repository_basepath.replace("\\","/") ) self.metadata_url = f"{url_prefix}/metadata/" @@ -124,35 +127,17 @@ def setUp(self): target_base_url=self.targets_url, ) - def tearDown(self): + def tearDown(self) -> None: # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.tearDown(self) # Logs stdout and stderr from the sever subprocess. self.server_process_handler.flush_log() - def _create_consistent_target( - self, targetname: str, target_hash: str - ) -> None: - """Create consistent targets copies of their non-consistent counterparts - inside the repository directory. - - Args: - targetname: A string denoting the name of the target file. 
- target_hash: A string denoting the hash of the target. - - """ - consistent_target_name = f"{target_hash}.{targetname}" - source_path = os.path.join( - self.repository_directory, "targets", targetname - ) - destination_path = os.path.join( - self.repository_directory, "targets", consistent_target_name - ) - shutil.copy(source_path, destination_path) - def _modify_repository_root( - self, modification_func, bump_version=False + self, + modification_func: Callable[[Metadata], None], + bump_version: bool = False, ) -> None: """Apply 'modification_func' to root and persist it.""" role_path = os.path.join( @@ -179,58 +164,13 @@ def _modify_repository_root( ) ) - def _assert_files(self, roles: List[str]): + def _assert_files(self, roles: List[str]) -> None: """Assert that local metadata files exist for 'roles'""" expected_files = [f"{role}.json" for role in roles] client_files = sorted(os.listdir(self.client_directory)) self.assertEqual(client_files, expected_files) - # pylint: disable=protected-access - def test_refresh_on_consistent_targets(self): - # Generate a new root version where consistent_snapshot is set to true - def consistent_snapshot_modifier(root): - root.signed.consistent_snapshot = True - - self._modify_repository_root( - consistent_snapshot_modifier, bump_version=True - ) - updater = ngclient.Updater( - self.client_directory, - self.metadata_url, - self.dl_dir, - self.targets_url, - ) - - # All metadata is in local directory already - updater.refresh() - # Make sure that consistent snapshot is enabled - self.assertTrue(updater._trusted_set.root.signed.consistent_snapshot) - - # Get targetinfos, assert cache does not contain the files - info1 = updater.get_targetinfo("file1.txt") - info3 = updater.get_targetinfo("file3.txt") - self.assertIsNone(updater.find_cached_target(info1)) - self.assertIsNone(updater.find_cached_target(info3)) - - # Create consistent targets with file path HASH.FILENAME.EXT - target1_hash = list(info1.hashes.values())[0] - target3_hash = list(info3.hashes.values())[0] - self._create_consistent_target("file1.txt", target1_hash) - self._create_consistent_target("file3.txt", target3_hash) - - # Download files, assert that cache has correct files - updater.download_target(info1) - path = updater.find_cached_target(info1) - self.assertEqual(path, os.path.join(self.dl_dir, info1.path)) - self.assertIsNone(updater.find_cached_target(info3)) - - updater.download_target(info3) - path = updater.find_cached_target(info1) - self.assertEqual(path, os.path.join(self.dl_dir, info1.path)) - path = updater.find_cached_target(info3) - self.assertEqual(path, os.path.join(self.dl_dir, info3.path)) - - def test_refresh_and_download(self): + def test_refresh_and_download(self) -> None: # Test refresh without consistent targets - targets without hash prefix. 
# top-level targets are already in local cache (but remove others) @@ -240,15 +180,17 @@ def test_refresh_and_download(self): # top-level metadata is in local directory already self.updater.refresh() - self._assert_files(["root", "snapshot", "targets", "timestamp"]) + self._assert_files([Root.type, Snapshot.type, Targets.type, Timestamp.type]) # Get targetinfos, assert that cache does not contain files info1 = self.updater.get_targetinfo("file1.txt") - self._assert_files(["root", "snapshot", "targets", "timestamp"]) + assert isinstance(info1, TargetFile) + self._assert_files([Root.type, Snapshot.type, Targets.type, Timestamp.type]) # Get targetinfo for 'file3.txt' listed in the delegated role1 info3 = self.updater.get_targetinfo("file3.txt") - expected_files = ["role1", "root", "snapshot", "targets", "timestamp"] + assert isinstance(info3, TargetFile) + expected_files = ["role1", Root.type, Snapshot.type, Targets.type, Timestamp.type] self._assert_files(expected_files) self.assertIsNone(self.updater.find_cached_target(info1)) self.assertIsNone(self.updater.find_cached_target(info3)) @@ -265,24 +207,38 @@ def test_refresh_and_download(self): path = self.updater.find_cached_target(info3) self.assertEqual(path, os.path.join(self.dl_dir, info3.path)) - def test_refresh_with_only_local_root(self): + def test_refresh_with_only_local_root(self) -> None: os.remove(os.path.join(self.client_directory, "timestamp.json")) os.remove(os.path.join(self.client_directory, "snapshot.json")) os.remove(os.path.join(self.client_directory, "targets.json")) os.remove(os.path.join(self.client_directory, "role1.json")) os.remove(os.path.join(self.client_directory, "role2.json")) os.remove(os.path.join(self.client_directory, "1.root.json")) - self._assert_files(["root"]) + self._assert_files([Root.type]) self.updater.refresh() - self._assert_files(["root", "snapshot", "targets", "timestamp"]) + self._assert_files([Root.type, Snapshot.type, Targets.type, Timestamp.type]) # Get targetinfo for 'file3.txt' listed in the delegated role1 self.updater.get_targetinfo("file3.txt") + expected_files = ["role1", Root.type, Snapshot.type, Targets.type, Timestamp.type] + self._assert_files(expected_files) + + def test_implicit_refresh_with_only_local_root(self) -> None: + os.remove(os.path.join(self.client_directory, "timestamp.json")) + os.remove(os.path.join(self.client_directory, "snapshot.json")) + os.remove(os.path.join(self.client_directory, "targets.json")) + os.remove(os.path.join(self.client_directory, "role1.json")) + os.remove(os.path.join(self.client_directory, "role2.json")) + os.remove(os.path.join(self.client_directory, "1.root.json")) + self._assert_files(["root"]) + + # Get targetinfo for 'file3.txt' listed in the delegated role1 + targetinfo3 = self.updater.get_targetinfo("file3.txt") expected_files = ["role1", "root", "snapshot", "targets", "timestamp"] self._assert_files(expected_files) - def test_both_target_urls_not_set(self): + def test_both_target_urls_not_set(self) -> None: # target_base_url = None and Updater._target_base_url = None updater = ngclient.Updater( self.client_directory, self.metadata_url, self.dl_dir @@ -291,7 +247,7 @@ def test_both_target_urls_not_set(self): with self.assertRaises(ValueError): updater.download_target(info) - def test_no_target_dir_no_filepath(self): + def test_no_target_dir_no_filepath(self) -> None: # filepath = None and Updater.target_dir = None updater = ngclient.Updater(self.client_directory, self.metadata_url) info = TargetFile(1, {"sha256": ""}, "targetpath") @@ 
-300,15 +256,17 @@ def test_no_target_dir_no_filepath(self): with self.assertRaises(ValueError): updater.download_target(info) - def test_external_targets_url(self): + def test_external_targets_url(self) -> None: self.updater.refresh() info = self.updater.get_targetinfo("file1.txt") + assert isinstance(info, TargetFile) self.updater.download_target(info, target_base_url=self.targets_url) - def test_length_hash_mismatch(self): + def test_length_hash_mismatch(self) -> None: self.updater.refresh() targetinfo = self.updater.get_targetinfo("file1.txt") + assert isinstance(targetinfo, TargetFile) length = targetinfo.length with self.assertRaises(exceptions.RepositoryError): @@ -321,13 +279,13 @@ def test_length_hash_mismatch(self): self.updater.download_target(targetinfo) # pylint: disable=protected-access - def test_updating_root(self): + def test_updating_root(self) -> None: # Bump root version, resign and refresh self._modify_repository_root(lambda root: None, bump_version=True) self.updater.refresh() self.assertEqual(self.updater._trusted_set.root.signed.version, 2) - def test_missing_targetinfo(self): + def test_missing_targetinfo(self) -> None: self.updater.refresh() # Get targetinfo for non-existing file diff --git a/tests/test_updater_top_level_update.py b/tests/test_updater_top_level_update.py index 2dbb973979..56e09f16c5 100644 --- a/tests/test_updater_top_level_update.py +++ b/tests/test_updater_top_level_update.py @@ -14,7 +14,7 @@ from tests import utils from tests.repository_simulator import RepositorySimulator -from tuf.api.metadata import TOP_LEVEL_ROLE_NAMES, Metadata +from tuf.api.metadata import TOP_LEVEL_ROLE_NAMES, Metadata, Root, Snapshot, Targets, Timestamp from tuf.exceptions import ( BadVersionNumberError, ExpiredMetadataError, @@ -25,6 +25,7 @@ from tuf.ngclient import Updater +# pylint: disable=too-many-public-methods class TestRefresh(unittest.TestCase): """Test update of top-level metadata following 'Detailed client workflow' in the specification.""" @@ -81,6 +82,7 @@ def _assert_content_equals( self, role: str, version: Optional[int] = None ) -> None: """Assert that local file content is the expected""" + # pylint: disable=protected-access expected_content = self.sim._fetch_metadata(role, version) with open(os.path.join(self.metadata_dir, f"{role}.json"), "rb") as f: self.assertEqual(f.read(), expected_content) @@ -92,7 +94,7 @@ def _assert_version_equals(self, role: str, expected_version: int) -> None: def test_first_time_refresh(self) -> None: # Metadata dir contains only the mandatory initial root.json - self._assert_files_exist(["root"]) + self._assert_files_exist([Root.type]) # Add one more root version to repository so that # refresh() updates from local trusted root (v1) to @@ -104,7 +106,7 @@ def test_first_time_refresh(self) -> None: self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) for role in TOP_LEVEL_ROLE_NAMES: - version = 2 if role == "root" else None + version = 2 if role == Root.type else None self._assert_content_equals(role, version) def test_trusted_root_missing(self) -> None: @@ -127,8 +129,8 @@ def test_trusted_root_expired(self) -> None: with self.assertRaises(ExpiredMetadataError): updater.refresh() - self._assert_files_exist(["root"]) - self._assert_content_equals("root", 2) + self._assert_files_exist([Root.type]) + self._assert_content_equals(Root.type, 2) # Local root metadata can be loaded even if expired updater = self._init_updater() @@ -141,7 +143,7 @@ def test_trusted_root_expired(self) -> None: # Root is successfully updated to latest 
version self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) - self._assert_content_equals("root", 3) + self._assert_content_equals(Root.type, 3) def test_trusted_root_unsigned(self) -> None: # Local trusted root is not signed @@ -154,7 +156,7 @@ def test_trusted_root_unsigned(self) -> None: self._run_refresh() # The update failed, no changes in metadata - self._assert_files_exist(["root"]) + self._assert_files_exist([Root.type]) md_root_after = Metadata.from_file(root_path) self.assertEqual(md_root.to_bytes(), md_root_after.to_bytes()) @@ -179,7 +181,7 @@ def test_max_root_rotations(self) -> None: # Assert that root version was increased with no more # than 'max_root_rotations' self._assert_version_equals( - "root", initial_root_version + updater.config.max_root_rotations + Root.type, initial_root_version + updater.config.max_root_rotations ) def test_intermediate_root_incorrectly_signed(self) -> None: @@ -187,13 +189,13 @@ def test_intermediate_root_incorrectly_signed(self) -> None: # Intermediate root v2 is unsigned self.sim.root.version += 1 - root_signers = self.sim.signers["root"].copy() - self.sim.signers["root"].clear() + root_signers = self.sim.signers[Root.type].copy() + self.sim.signers[Root.type].clear() self.sim.publish_root() # Final root v3 is correctly signed self.sim.root.version += 1 - self.sim.signers["root"] = root_signers + self.sim.signers[Root.type] = root_signers self.sim.publish_root() # Incorrectly signed intermediate root is detected @@ -201,8 +203,8 @@ def test_intermediate_root_incorrectly_signed(self) -> None: self._run_refresh() # The update failed, latest root version is v1 - self._assert_files_exist(["root"]) - self._assert_content_equals("root", 1) + self._assert_files_exist([Root.type]) + self._assert_content_equals(Root.type, 1) def test_intermediate_root_expired(self) -> None: # The expiration of the new (intermediate) root metadata file @@ -222,20 +224,20 @@ def test_intermediate_root_expired(self) -> None: # Successfully updated to root v3 self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) - self._assert_content_equals("root", 3) + self._assert_content_equals(Root.type, 3) def test_final_root_incorrectly_signed(self) -> None: # Check for an arbitrary software attack self.sim.root.version += 1 # root v2 - self.sim.signers["root"].clear() + self.sim.signers[Root.type].clear() self.sim.publish_root() with self.assertRaises(UnsignedMetadataError): self._run_refresh() # The update failed, latest root version is v1 - self._assert_files_exist(["root"]) - self._assert_content_equals("root", 1) + self._assert_files_exist([Root.type]) + self._assert_content_equals(Root.type, 1) def test_new_root_same_version(self) -> None: # Check for a rollback_attack @@ -245,8 +247,8 @@ def test_new_root_same_version(self) -> None: self._run_refresh() # The update failed, latest root version is v1 - self._assert_files_exist(["root"]) - self._assert_content_equals("root", 1) + self._assert_files_exist([Root.type]) + self._assert_content_equals(Root.type, 1) def test_new_root_nonconsecutive_version(self) -> None: # Repository serves non-consecutive root version @@ -256,8 +258,8 @@ def test_new_root_nonconsecutive_version(self) -> None: self._run_refresh() # The update failed, latest root version is v1 - self._assert_files_exist(["root"]) - self._assert_content_equals("root", 1) + self._assert_files_exist([Root.type]) + self._assert_content_equals(Root.type, 1) def test_final_root_expired(self) -> None: # Check for a freeze attack @@ -270,16 +272,16 @@ def test_final_root_expired(self) -> 
None: self._run_refresh() # The update failed but final root is persisted on the file system - self._assert_files_exist(["root"]) - self._assert_content_equals("root", 2) + self._assert_files_exist([Root.type]) + self._assert_content_equals(Root.type, 2) def test_new_timestamp_unsigned(self) -> None: # Check for an arbitrary software attack - self.sim.signers["timestamp"].clear() + self.sim.signers[Timestamp.type].clear() with self.assertRaises(UnsignedMetadataError): self._run_refresh() - self._assert_files_exist(["root"]) + self._assert_files_exist([Root.type]) def test_new_timestamp_version_rollback(self) -> None: # Check for a rollback attack @@ -290,7 +292,7 @@ def test_new_timestamp_version_rollback(self) -> None: with self.assertRaises(ReplayedMetadataError): self._run_refresh() - self._assert_version_equals("timestamp", 2) + self._assert_version_equals(Timestamp.type, 2) def test_new_timestamp_snapshot_rollback(self) -> None: # Check for a rollback attack. @@ -305,7 +307,7 @@ def test_new_timestamp_snapshot_rollback(self) -> None: with self.assertRaises(ReplayedMetadataError): self._run_refresh() - self._assert_version_equals("timestamp", 2) + self._assert_version_equals(Timestamp.type, 2) def test_new_timestamp_expired(self) -> None: # Check for a freeze attack @@ -315,7 +317,7 @@ def test_new_timestamp_expired(self) -> None: with self.assertRaises(ExpiredMetadataError): self._run_refresh() - self._assert_files_exist(["root"]) + self._assert_files_exist([Root.type]) def test_new_snapshot_hash_mismatch(self) -> None: # Check against timestamp role’s snapshot hash @@ -336,18 +338,18 @@ def test_new_snapshot_hash_mismatch(self) -> None: with self.assertRaises(RepositoryError): self._run_refresh() - self._assert_version_equals("timestamp", 3) - self._assert_version_equals("snapshot", 1) + self._assert_version_equals(Timestamp.type, 3) + self._assert_version_equals(Snapshot.type, 1) def test_new_snapshot_unsigned(self) -> None: # Check for an arbitrary software attack - self.sim.signers["snapshot"].clear() + self.sim.signers[Snapshot.type].clear() with self.assertRaises(UnsignedMetadataError): self._run_refresh() - self._assert_files_exist(["root", "timestamp"]) + self._assert_files_exist([Root.type, Timestamp.type]) - def test_new_snapshot_version_mismatch(self): + def test_new_snapshot_version_mismatch(self) -> None: # Check against timestamp role’s snapshot version # Increase snapshot version without updating timestamp @@ -355,7 +357,7 @@ def test_new_snapshot_version_mismatch(self): with self.assertRaises(BadVersionNumberError): self._run_refresh() - self._assert_files_exist(["root", "timestamp"]) + self._assert_files_exist([Root.type, Timestamp.type]) def test_new_snapshot_version_rollback(self) -> None: # Check for a rollback attack @@ -369,7 +371,7 @@ def test_new_snapshot_version_rollback(self) -> None: with self.assertRaises(ReplayedMetadataError): self._run_refresh() - self._assert_version_equals("snapshot", 2) + self._assert_version_equals(Snapshot.type, 2) def test_new_snapshot_expired(self) -> None: # Check for a freeze attack @@ -379,7 +381,7 @@ def test_new_snapshot_expired(self) -> None: with self.assertRaises(ExpiredMetadataError): self._run_refresh() - self._assert_files_exist(["root", "timestamp"]) + self._assert_files_exist([Root.type, Timestamp.type]) def test_new_targets_hash_mismatch(self) -> None: # Check against snapshot role’s targets hashes @@ -401,18 +403,18 @@ def test_new_targets_hash_mismatch(self) -> None: with self.assertRaises(RepositoryError): 
self._run_refresh() - self._assert_version_equals("snapshot", 3) - self._assert_version_equals("targets", 1) + self._assert_version_equals(Snapshot.type, 3) + self._assert_version_equals(Targets.type, 1) def test_new_targets_unsigned(self) -> None: # Check for an arbitrary software attack - self.sim.signers["targets"].clear() + self.sim.signers[Targets.type].clear() with self.assertRaises(UnsignedMetadataError): self._run_refresh() - self._assert_files_exist(["root", "timestamp", "snapshot"]) + self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type]) - def test_new_targets_version_mismatch(self): + def test_new_targets_version_mismatch(self) -> None: # Check against snapshot role’s targets version # Increase targets version without updating snapshot @@ -420,7 +422,7 @@ def test_new_targets_version_mismatch(self): with self.assertRaises(BadVersionNumberError): self._run_refresh() - self._assert_files_exist(["root", "timestamp", "snapshot"]) + self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type]) def test_new_targets_expired(self) -> None: # Check for a freeze attack. @@ -430,7 +432,7 @@ def test_new_targets_expired(self) -> None: with self.assertRaises(ExpiredMetadataError): self._run_refresh() - self._assert_files_exist(["root", "timestamp", "snapshot"]) + self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type]) if __name__ == "__main__": diff --git a/tests/test_updater_with_simulator.py b/tests/test_updater_with_simulator.py index 25d536f456..72c16ee162 100644 --- a/tests/test_updater_with_simulator.py +++ b/tests/test_updater_with_simulator.py @@ -6,15 +6,17 @@ """Test ngclient Updater using the repository simulator. """ +import builtins import os import sys import tempfile import unittest from typing import Optional, Tuple +from unittest.mock import MagicMock, Mock, patch from tests import utils from tests.repository_simulator import RepositorySimulator -from tuf.api.metadata import SPECIFICATION_VERSION, Targets +from tuf.api.metadata import SPECIFICATION_VERSION, TargetFile, Targets from tuf.exceptions import BadVersionNumberError, UnsignedMetadataError from tuf.ngclient import Updater @@ -25,7 +27,7 @@ class TestUpdater(unittest.TestCase): # set dump_dir to trigger repository state dumps dump_dir: Optional[str] = None - def setUp(self): + def setUp(self) -> None: # pylint: disable-next=consider-using-with self.temp_dir = tempfile.TemporaryDirectory() self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") @@ -47,7 +49,7 @@ def setUp(self): self.sim.dump_dir = os.path.join(self.dump_dir, name) os.mkdir(self.sim.dump_dir) - def tearDown(self): + def tearDown(self) -> None: self.temp_dir.cleanup() def _run_refresh(self) -> Updater: @@ -65,7 +67,7 @@ def _run_refresh(self) -> Updater: updater.refresh() return updater - def test_refresh(self): + def test_refresh(self) -> None: # Update top level metadata self._run_refresh() @@ -97,7 +99,7 @@ def test_refresh(self): } @utils.run_sub_tests_with_dataset(targets) - def test_targets(self, test_case_data: Tuple[str, bytes, str]): + def test_targets(self, test_case_data: Tuple[str, bytes, str]) -> None: targetpath, content, encoded_path = test_case_data path = os.path.join(self.targets_dir, encoded_path) @@ -115,7 +117,7 @@ def test_targets(self, test_case_data: Tuple[str, bytes, str]): updater = self._run_refresh() # target now exists, is not in cache yet info = updater.get_targetinfo(targetpath) - self.assertIsNotNone(info) + assert info is not None # Test without and with explicit local filepath 
self.assertIsNone(updater.find_cached_target(info)) self.assertIsNone(updater.find_cached_target(info, path)) @@ -134,10 +136,9 @@ def test_targets(self, test_case_data: Tuple[str, bytes, str]): self.assertEqual(path, updater.find_cached_target(info)) self.assertEqual(path, updater.find_cached_target(info, path)) - def test_fishy_rolenames(self): + def test_fishy_rolenames(self) -> None: roles_to_filenames = { "../a": "..%2Fa.json", - "": ".json", ".": "..json", "/": "%2F.json", "ö": "%C3%B6.json", @@ -160,7 +161,7 @@ def test_fishy_rolenames(self): for fname in roles_to_filenames.values(): self.assertTrue(fname in local_metadata) - def test_keys_and_signatures(self): + def test_keys_and_signatures(self) -> None: """Example of the two trickiest test areas: keys and root updates""" # Update top level metadata @@ -200,7 +201,7 @@ def test_keys_and_signatures(self): self._run_refresh() - def test_snapshot_rollback_with_local_snapshot_hash_mismatch(self): + def test_snapshot_rollback_with_local_snapshot_hash_mismatch(self) -> None: # Test triggering snapshot rollback check on a newly downloaded snapshot # when the local snapshot is loaded even when there is a hash mismatch # with timestamp.snapshot_meta. @@ -230,6 +231,32 @@ def test_snapshot_rollback_with_local_snapshot_hash_mismatch(self): with self.assertRaises(BadVersionNumberError): self._run_refresh() + @patch.object(builtins, "open", wraps=builtins.open) + def test_not_loading_targets_twice(self, wrapped_open: MagicMock) -> None: + # Do not load targets roles more than once when traversing + # the delegations tree + + # Add new delegated targets, update the snapshot + spec_version = ".".join(SPECIFICATION_VERSION) + targets = Targets(1, spec_version, self.sim.safe_expiry, {}, None) + self.sim.add_delegation("targets", "role1", targets, False, ["*"], None) + self.sim.update_snapshot() + + # Run refresh, top-level roles are loaded + updater = self._run_refresh() + # Clean up calls to open during refresh() + wrapped_open.reset_mock() + + # First time looking for "somepath", only 'role1' must be loaded + updater.get_targetinfo("somepath") + wrapped_open.assert_called_once_with( + os.path.join(self.metadata_dir, "role1.json"), "rb" + ) + wrapped_open.reset_mock() + # Second call to get_targetinfo, all metadata is already loaded + updater.get_targetinfo("somepath") + wrapped_open.assert_not_called() + if __name__ == "__main__": if "--dump" in sys.argv: diff --git a/tests/utils.py b/tests/utils.py index 15f2892414..e5c251d0f7 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -21,7 +21,7 @@ """ from contextlib import contextmanager -from typing import Dict, Any, Callable +from typing import Any, Callable, Dict, IO, Optional, List, Iterator import unittest import argparse import errno @@ -48,9 +48,13 @@ # Test runner decorator: Runs the test as a set of N SubTests, # (where N is number of items in dataset), feeding the actual test # function one test case at a time -def run_sub_tests_with_dataset(dataset: DataSet): - def real_decorator(function: Callable[[unittest.TestCase, Any], None]): - def wrapper(test_cls: unittest.TestCase): +def run_sub_tests_with_dataset( + dataset: DataSet +) -> Callable[[Callable], Callable]: + def real_decorator( + function: Callable[[unittest.TestCase, Any], None] + ) -> Callable[[unittest.TestCase], None]: + def wrapper(test_cls: unittest.TestCase) -> None: for case, data in dataset.items(): with test_cls.subTest(case=case): function(test_cls, data) @@ -60,15 +64,15 @@ def wrapper(test_cls:
unittest.TestCase): class TestServerProcessError(Exception): - def __init__(self, value="TestServerProcess"): + def __init__(self, value: str="TestServerProcess") -> None: self.value = value - def __str__(self): + def __str__(self) -> str: return repr(self.value) @contextmanager -def ignore_deprecation_warnings(module): +def ignore_deprecation_warnings(module: str) -> Iterator[None]: with warnings.catch_warnings(): warnings.filterwarnings('ignore', category=DeprecationWarning, @@ -82,13 +86,16 @@ def ignore_deprecation_warnings(module): # but the current blocking connect() seems to work fast on Linux and seems # to at least work on Windows (ECONNREFUSED unfortunately has a 2 second # timeout on Windows) -def wait_for_server(host, server, port, timeout=10): +def wait_for_server(host: str, server: str, port: int, timeout: int=10) -> None: start = time.time() remaining_timeout = timeout succeeded = False while not succeeded and remaining_timeout > 0: try: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock: Optional[socket.socket] = socket.socket( + socket.AF_INET, socket.SOCK_STREAM + ) + assert sock is not None sock.settimeout(remaining_timeout) sock.connect((host, port)) succeeded = True @@ -104,14 +111,14 @@ def wait_for_server(host, server, port, timeout=10): if sock: sock.close() sock = None - remaining_timeout = timeout - (time.time() - start) + remaining_timeout = int(timeout - (time.time() - start)) if not succeeded: raise TimeoutError("Could not connect to the " + server \ + " on port " + str(port) + "!") -def configure_test_logging(argv): +def configure_test_logging(argv: List[str]) -> None: # parse arguments but only handle '-v': argv may contain # other things meant for unittest argument parser parser = argparse.ArgumentParser(add_help=False) @@ -165,13 +172,14 @@ class TestServerProcess(): """ - def __init__(self, log, server='simple_server.py', - timeout=10, popen_cwd=".", extra_cmd_args=None): + def __init__(self, log: logging.Logger, server: str='simple_server.py', + timeout: int=10, popen_cwd: str=".", extra_cmd_args: Optional[List[str]]=None + ): self.server = server self.__logger = log # Stores popped messages from the queue. - self.__logged_messages = [] + self.__logged_messages: List[str] = [] if extra_cmd_args is None: extra_cmd_args = [] @@ -185,7 +193,9 @@ def __init__(self, log, server='simple_server.py', - def _start_server(self, timeout, extra_cmd_args, popen_cwd): + def _start_server( + self, timeout: int, extra_cmd_args: List[str], popen_cwd: str + ) -> None: """ Start the server subprocess and a thread responsible to redirect stdout/stderr to the Queue. @@ -201,7 +211,7 @@ def _start_server(self, timeout, extra_cmd_args, popen_cwd): - def _start_process(self, extra_cmd_args, popen_cwd): + def _start_process(self, extra_cmd_args: List[str], popen_cwd: str) -> None: """Starts the process running the server.""" # The "-u" option forces stdin, stdout and stderr to be unbuffered. @@ -213,7 +223,7 @@ def _start_process(self, extra_cmd_args, popen_cwd): - def _start_redirect_thread(self): + def _start_redirect_thread(self) -> None: """Starts a thread responsible to redirect stdout/stderr to the Queue.""" # Run log_queue_worker() in a thread. @@ -228,7 +238,7 @@ def _start_redirect_thread(self): @staticmethod - def _log_queue_worker(stream, line_queue): + def _log_queue_worker(stream: IO, line_queue: queue.Queue) -> None: """ Worker function to run in a seprate thread. Reads from 'stream', puts lines in a Queue (Queue is thread-safe). 
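The `wraps=builtins.open` pattern used by `test_not_loading_targets_twice` above is a general way to assert which files a code path actually reads. A minimal, self-contained sketch of the same technique; the temporary directory and the file name `demo.txt` are purely illustrative::

    import builtins
    import os
    import tempfile
    import unittest
    from unittest.mock import MagicMock, patch


    class WrappedOpenExample(unittest.TestCase):
        @patch.object(builtins, "open", wraps=builtins.open)
        def test_file_read_exactly_once(self, wrapped_open: MagicMock) -> None:
            # 'wraps' keeps the real open() behaviour but records every call,
            # so the code under test still works while the mock counts reads.
            path = os.path.join(tempfile.mkdtemp(), "demo.txt")
            with open(path, "w", encoding="utf8") as f:
                f.write("hello")
            wrapped_open.reset_mock()  # ignore the setup write above

            with open(path, "rb") as f:
                f.read()
            wrapped_open.assert_called_once_with(path, "rb")

            wrapped_open.reset_mock()
            wrapped_open.assert_not_called()  # nothing opened since the reset


    if __name__ == "__main__":
        unittest.main()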
@@ -247,7 +257,7 @@ def _log_queue_worker(stream, line_queue): - def _wait_for_port(self, timeout): + def _wait_for_port(self, timeout: int) -> None: """ Validates the first item from the Queue against the port message. If validation is successful, self.port is set. @@ -279,7 +289,7 @@ def _wait_for_port(self, timeout): - def _kill_server_process(self): + def _kill_server_process(self) -> None: """Kills the server subprocess if it's running.""" if self.is_process_running(): @@ -290,7 +300,7 @@ def _kill_server_process(self): - def flush_log(self): + def flush_log(self) -> None: """Flushes the log lines from the logging queue.""" while True: @@ -311,7 +321,7 @@ def flush_log(self): - def clean(self): + def clean(self) -> None: """ Kills the subprocess and closes the TempFile. Calls flush_log to check for logged information, but not yet flushed. @@ -324,5 +334,5 @@ def clean(self): - def is_process_running(self): + def is_process_running(self) -> bool: return True if self.__server_process.poll() is None else False diff --git a/tox.ini b/tox.ini index 981aec34c1..f93466dc92 100644 --- a/tox.ini +++ b/tox.ini @@ -40,10 +40,9 @@ commands = changedir = {toxinidir} commands = # Use different configs for new (tuf/api/*) and legacy code - # TODO: configure black and isort args in pyproject.toml (see #1161) - black --check --diff --line-length 80 tuf/api tuf/ngclient examples - isort --check --diff --line-length 80 --profile black -p tuf tuf/api tuf/ngclient examples - pylint -j 0 tuf/api tuf/ngclient examples --rcfile=tuf/api/pylintrc + black --check --diff examples tuf/api tuf/ngclient + isort --check --diff examples tuf/api tuf/ngclient + pylint -j 0 examples tuf/api tuf/ngclient --rcfile=pyproject.toml # NOTE: Contrary to what the pylint docs suggest, ignoring full paths does # work, unfortunately each subdirectory has to be ignored explicitly. diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 25f14fe772..6adcf412da 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -61,6 +61,11 @@ SignedSerializer, ) +_ROOT = "root" +_SNAPSHOT = "snapshot" +_TARGETS = "targets" +_TIMESTAMP = "timestamp" + # pylint: disable=too-many-lines logger = logging.getLogger(__name__) @@ -68,7 +73,7 @@ # We aim to support SPECIFICATION_VERSION and require the input metadata # files to have the same major version (the first number) as ours. SPECIFICATION_VERSION = ["1", "0", "19"] -TOP_LEVEL_ROLE_NAMES = {"root", "timestamp", "snapshot", "targets"} +TOP_LEVEL_ROLE_NAMES = {_ROOT, _TIMESTAMP, _SNAPSHOT, _TARGETS} # T is a Generic type constraint for Metadata.signed T = TypeVar("T", "Root", "Timestamp", "Snapshot", "Targets") @@ -130,13 +135,13 @@ def from_dict(cls, metadata: Dict[str, Any]) -> "Metadata[T]": # Dispatch to contained metadata class on metadata _type field. _type = metadata["signed"]["_type"] - if _type == "targets": + if _type == _TARGETS: inner_cls: Type[Signed] = Targets - elif _type == "snapshot": + elif _type == _SNAPSHOT: inner_cls = Snapshot - elif _type == "timestamp": + elif _type == _TIMESTAMP: inner_cls = Timestamp - elif _type == "root": + elif _type == _ROOT: inner_cls = Root else: raise ValueError(f'unrecognized metadata type "{_type}"') @@ -394,18 +399,13 @@ class Signed(metaclass=abc.ABCMeta): unrecognized_fields: Dictionary of all unrecognized fields. 
""" - # Signed implementations are expected to override this - _signed_type: ClassVar[str] = "signed" + # type is required for static reference without changing the API + type: ClassVar[str] = "signed" # _type and type are identical: 1st replicates file format, 2nd passes lint @property def _type(self) -> str: - return self._signed_type - - @property - def type(self) -> str: - """Metadata type as string.""" - return self._signed_type + return self.type # NOTE: Signed is a stupid name, because this might not be signed yet, but # we keep it to match spec terminology (I often refer to this as "payload", @@ -458,8 +458,8 @@ def _common_fields_from_dict( """ _type = signed_dict.pop("_type") - if _type != cls._signed_type: - raise ValueError(f"Expected type {cls._signed_type}, got {_type}") + if _type != cls.type: + raise ValueError(f"Expected type {cls.type}, got {_type}") version = signed_dict.pop("version") spec_version = signed_dict.pop("spec_version") @@ -539,7 +539,7 @@ def __init__( ): if not all( isinstance(at, str) for at in [keyid, keytype, scheme] - ) or not isinstance(keyval, Dict): + ) or not isinstance(keyval, dict): raise TypeError("Unexpected Key attributes types!") self.keyid = keyid self.keytype = keytype @@ -712,7 +712,7 @@ class Root(Signed): unrecognized_fields: Dictionary of all unrecognized fields. """ - _signed_type = "root" + type = _ROOT # TODO: determine an appropriate value for max-args # pylint: disable=too-many-arguments @@ -722,7 +722,7 @@ def __init__( spec_version: str, expires: datetime, keys: Dict[str, Key], - roles: Dict[str, Role], + roles: Mapping[str, Role], consistent_snapshot: Optional[bool] = None, unrecognized_fields: Optional[Mapping[str, Any]] = None, ): @@ -965,7 +965,7 @@ class Timestamp(Signed): snapshot_meta: Meta information for snapshot metadata. """ - _signed_type = "timestamp" + type = _TIMESTAMP def __init__( self, @@ -1015,7 +1015,7 @@ class Snapshot(Signed): meta: A dictionary of target metadata filenames to MetaFile objects. """ - _signed_type = "snapshot" + type = _SNAPSHOT def __init__( self, @@ -1100,6 +1100,13 @@ def __init__( if paths is None and path_hash_prefixes is None: raise ValueError("One of paths or path_hash_prefixes must be set") + if paths is not None and any(not isinstance(p, str) for p in paths): + raise ValueError("Paths must be strings") + if path_hash_prefixes is not None and any( + not isinstance(p, str) for p in path_hash_prefixes + ): + raise ValueError("Path_hash_prefixes must be strings") + self.paths = paths self.path_hash_prefixes = path_hash_prefixes @@ -1213,6 +1220,13 @@ def __init__( unrecognized_fields: Optional[Mapping[str, Any]] = None, ): self.keys = keys + + for role in roles: + if not role or role in TOP_LEVEL_ROLE_NAMES: + raise ValueError( + "Delegated roles cannot be empty or use top-level role names" + ) + self.roles = roles self.unrecognized_fields = unrecognized_fields or {} @@ -1402,7 +1416,7 @@ class Targets(Signed): unrecognized_fields: Dictionary of all unrecognized fields. 
""" - _signed_type = "targets" + type = _TARGETS # TODO: determine an appropriate value for max-args # pylint: disable=too-many-arguments @@ -1423,7 +1437,7 @@ def __init__( def from_dict(cls, signed_dict: Dict[str, Any]) -> "Targets": """Creates Targets object from its dict representation.""" common_args = cls._common_fields_from_dict(signed_dict) - targets = signed_dict.pop("targets") + targets = signed_dict.pop(_TARGETS) try: delegations_dict = signed_dict.pop("delegations") except KeyError: @@ -1444,7 +1458,7 @@ def to_dict(self) -> Dict[str, Any]: targets = {} for target_path, target_file_obj in self.targets.items(): targets[target_path] = target_file_obj.to_dict() - targets_dict["targets"] = targets + targets_dict[_TARGETS] = targets if self.delegations is not None: targets_dict["delegations"] = self.delegations.to_dict() return targets_dict diff --git a/tuf/api/pylintrc b/tuf/api/pylintrc deleted file mode 100644 index d9b1da754a..0000000000 --- a/tuf/api/pylintrc +++ /dev/null @@ -1,46 +0,0 @@ -# Minimal pylint configuration file for Secure Systems Lab Python Style Guide: -# https://github.com/secure-systems-lab/code-style-guidelines -# -# Based on Google Python Style Guide pylintrc and pylint defaults: -# https://google.github.io/styleguide/pylintrc -# http://pylint.pycqa.org/en/latest/technical_reference/features.html - -[MESSAGES CONTROL] -# Disable the message, report, category or checker with the given id(s). -# NOTE: To keep this config as short as possible we only disable checks that -# are currently in conflict with our code. If new code displeases the linter -# (for good reasons) consider updating this config file, or disable checks with -# 'pylint: disable=XYZ' comments. -disable=fixme, - too-few-public-methods, - too-many-arguments, - format, - duplicate-code, - -[BASIC] -good-names=i,j,k,v,e,f,fn,fp,_type,_ -# Regexes for allowed names are copied from the Google pylintrc -# NOTE: Pylint captures regex name groups such as 'snake_case' or 'camel_case'. -# If there are multiple groups it enfoces the prevalent naming style inside -# each modules. Names in the exempt capturing group are ignored. -function-rgx=^(?:(?PsetUp|tearDown|setUpModule|tearDownModule)|(?P_?[A-Z][a-zA-Z0-9]*)|(?P_?[a-z][a-z0-9_]*))$ -method-rgx=(?x)^(?:(?P_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P_{0,2}[a-z][a-z0-9_]*))$ -argument-rgx=^[a-z][a-z0-9_]*$ -attr-rgx=^_{0,2}[a-z][a-z0-9_]*$ -class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ -class-rgx=^_?[A-Z][a-zA-Z0-9]*$ -const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ -inlinevar-rgx=^[a-z][a-z0-9_]*$ -module-rgx=^(_?[a-z][a-z0-9_]*|__init__)$ -no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$ -variable-rgx=^[a-z][a-z0-9_]*$ -docstring-min-length=10 - -[LOGGING] -logging-format-style=old - -[MISCELLANEOUS] -notes=TODO - -[STRING] -check-quote-consistency=yes diff --git a/tuf/log.py b/tuf/log.py index 9865b04369..f9ae6c7721 100755 --- a/tuf/log.py +++ b/tuf/log.py @@ -182,7 +182,7 @@ def filter(self, record): -def set_log_level(log_level=_DEFAULT_LOG_LEVEL): +def set_log_level(log_level: int=_DEFAULT_LOG_LEVEL): """ Allow the default log level to be overridden. 
If 'log_level' is not diff --git a/tuf/ngclient/_internal/trusted_metadata_set.py b/tuf/ngclient/_internal/trusted_metadata_set.py index 59fe32a8f6..e502609cd0 100644 --- a/tuf/ngclient/_internal/trusted_metadata_set.py +++ b/tuf/ngclient/_internal/trusted_metadata_set.py @@ -10,7 +10,7 @@ network IO, which are not handled here. Loaded metadata can be accessed via index access with rolename as key -(trusted_set["root"]) or, in the case of top-level metadata, using the helper +(trusted_set[Root.type]) or, in the case of top-level metadata, using the helper properties (trusted_set.root). The rules that TrustedMetadataSet follows for top-level metadata are @@ -35,7 +35,7 @@ >>> trusted_set = TrustedMetadataSet(f.read()) >>> >>> # update root from remote until no more are available ->>> with download("root", trusted_set.root.signed.version + 1) as f: +>>> with download(Root.type, trusted_set.root.signed.version + 1) as f: >>> trusted_set.update_root(f.read()) >>> >>> # load local timestamp, then update from remote @@ -45,7 +45,7 @@ >>> except (RepositoryError, OSError): >>> pass # failure to load a local file is ok >>> ->>> with download("timestamp") as f: +>>> with download(Timestamp.type) as f: >>> trusted_set.update_timestamp(f.read()) >>> >>> # load local snapshot, then update from remote if needed @@ -55,7 +55,7 @@ >>> except (RepositoryError, OSError): >>> # local snapshot is not valid, load from remote >>> # (RepositoryErrors here stop the update) ->>> with download("snapshot", version) as f: +>>> with download(Snapshot.type, version) as f: >>> trusted_set.update_snapshot(f.read()) TODO: @@ -123,25 +123,25 @@ def __iter__(self) -> Iterator[Metadata]: @property def root(self) -> Metadata[Root]: """Current root Metadata""" - return self._trusted_set["root"] + return self._trusted_set[Root.type] @property def timestamp(self) -> Optional[Metadata[Timestamp]]: """Current timestamp Metadata or None""" - return self._trusted_set.get("timestamp") + return self._trusted_set.get(Timestamp.type) @property def snapshot(self) -> Optional[Metadata[Snapshot]]: """Current snapshot Metadata or None""" - return self._trusted_set.get("snapshot") + return self._trusted_set.get(Snapshot.type) @property def targets(self) -> Optional[Metadata[Targets]]: """Current targets Metadata or None""" - return self._trusted_set.get("targets") + return self._trusted_set.get(Targets.type) # Methods for updating metadata - def update_root(self, data: bytes) -> None: + def update_root(self, data: bytes) -> Metadata[Root]: """Verifies and loads 'data' as new root metadata. Note that an expired intermediate root is considered valid: expiry is @@ -153,6 +153,9 @@ def update_root(self, data: bytes) -> None: Raises: RepositoryError: Metadata failed to load or verify. The actual error type and content will contain more details. 
+ + Returns: + Deserialized and verified root Metadata object """ if self.timestamp is not None: raise RuntimeError("Cannot update root after timestamp") @@ -163,26 +166,30 @@ def update_root(self, data: bytes) -> None: except DeserializationError as e: raise exceptions.RepositoryError("Failed to load root") from e - if new_root.signed.type != "root": + if new_root.signed.type != Root.type: raise exceptions.RepositoryError( f"Expected 'root', got '{new_root.signed.type}'" ) # Verify that new root is signed by trusted root - self.root.verify_delegate("root", new_root) + self.root.verify_delegate(Root.type, new_root) if new_root.signed.version != self.root.signed.version + 1: raise exceptions.ReplayedMetadataError( - "root", new_root.signed.version, self.root.signed.version + Root.type, + new_root.signed.version, + self.root.signed.version, ) # Verify that new root is signed by itself - new_root.verify_delegate("root", new_root) + new_root.verify_delegate(Root.type, new_root) - self._trusted_set["root"] = new_root + self._trusted_set[Root.type] = new_root logger.info("Updated root v%d", new_root.signed.version) - def update_timestamp(self, data: bytes) -> None: + return new_root + + def update_timestamp(self, data: bytes) -> Metadata[Timestamp]: """Verifies and loads 'data' as new timestamp metadata. Note that an intermediate timestamp is allowed to be expired: @@ -199,6 +206,9 @@ def update_timestamp(self, data: bytes) -> None: RepositoryError: Metadata failed to load or verify as final timestamp. The actual error type and content will contain more details. + + Returns: + Deserialized and verified timestamp Metadata object """ if self.snapshot is not None: raise RuntimeError("Cannot update timestamp after snapshot") @@ -214,12 +224,12 @@ def update_timestamp(self, data: bytes) -> None: except DeserializationError as e: raise exceptions.RepositoryError("Failed to load timestamp") from e - if new_timestamp.signed.type != "timestamp": + if new_timestamp.signed.type != Timestamp.type: raise exceptions.RepositoryError( f"Expected 'timestamp', got '{new_timestamp.signed.type}'" ) - self.root.verify_delegate("timestamp", new_timestamp) + self.root.verify_delegate(Timestamp.type, new_timestamp) # If an existing trusted timestamp is updated, # check for a rollback attack @@ -227,7 +237,7 @@ def update_timestamp(self, data: bytes) -> None: # Prevent rolling back timestamp version if new_timestamp.signed.version < self.timestamp.signed.version: raise exceptions.ReplayedMetadataError( - "timestamp", + Timestamp.type, new_timestamp.signed.version, self.timestamp.signed.version, ) @@ -237,7 +247,7 @@ def update_timestamp(self, data: bytes) -> None: < self.timestamp.signed.snapshot_meta.version ): raise exceptions.ReplayedMetadataError( - "snapshot", + Snapshot.type, new_timestamp.signed.snapshot_meta.version, self.timestamp.signed.snapshot_meta.version, ) @@ -245,12 +255,14 @@ def update_timestamp(self, data: bytes) -> None: # expiry not checked to allow old timestamp to be used for rollback # protection of new timestamp: expiry is checked in update_snapshot() - self._trusted_set["timestamp"] = new_timestamp + self._trusted_set[Timestamp.type] = new_timestamp logger.info("Updated timestamp v%d", new_timestamp.signed.version) # timestamp is loaded: raise if it is not valid _final_ timestamp self._check_final_timestamp() + return new_timestamp + def _check_final_timestamp(self) -> None: """Raise if timestamp is expired""" @@ -260,7 +272,7 @@ def _check_final_timestamp(self) -> None: def update_snapshot( 
self, data: bytes, trusted: Optional[bool] = False - ) -> None: + ) -> Metadata[Snapshot]: """Verifies and loads 'data' as new snapshot metadata. Note that an intermediate snapshot is allowed to be expired and version @@ -282,6 +294,9 @@ def update_snapshot( Raises: RepositoryError: data failed to load or verify as final snapshot. The actual error type and content will contain more details. + + Returns: + Deserialized and verified snapshot Metadata object """ if self.timestamp is None: @@ -310,12 +325,12 @@ def update_snapshot( except DeserializationError as e: raise exceptions.RepositoryError("Failed to load snapshot") from e - if new_snapshot.signed.type != "snapshot": + if new_snapshot.signed.type != Snapshot.type: raise exceptions.RepositoryError( f"Expected 'snapshot', got '{new_snapshot.signed.type}'" ) - self.root.verify_delegate("snapshot", new_snapshot) + self.root.verify_delegate(Snapshot.type, new_snapshot) # version not checked against meta version to allow old snapshot to be # used in rollback protection: it is checked when targets is updated @@ -341,12 +356,14 @@ def update_snapshot( # expiry not checked to allow old snapshot to be used for rollback # protection of new snapshot: it is checked when targets is updated - self._trusted_set["snapshot"] = new_snapshot + self._trusted_set[Snapshot.type] = new_snapshot logger.info("Updated snapshot v%d", new_snapshot.signed.version) # snapshot is loaded, but we raise if it's not valid _final_ snapshot self._check_final_snapshot() + return new_snapshot + def _check_final_snapshot(self) -> None: """Raise if snapshot is expired or meta version does not match""" @@ -361,7 +378,7 @@ def _check_final_snapshot(self) -> None: f"got {self.snapshot.signed.version}" ) - def update_targets(self, data: bytes) -> None: + def update_targets(self, data: bytes) -> Metadata[Targets]: """Verifies and loads 'data' as new top-level targets metadata. Args: @@ -370,12 +387,15 @@ def update_targets(self, data: bytes) -> None: Raises: RepositoryError: Metadata failed to load or verify. The actual error type and content will contain more details. + + Returns: + Deserialized and verified targets Metadata object """ - self.update_delegated_targets(data, "targets", "root") + return self.update_delegated_targets(data, Targets.type, Root.type) def update_delegated_targets( self, data: bytes, role_name: str, delegator_name: str - ) -> None: + ) -> Metadata[Targets]: """Verifies and loads 'data' as new metadata for target 'role_name'. Args: @@ -386,6 +406,9 @@ def update_delegated_targets( Raises: RepositoryError: Metadata failed to load or verify. The actual error type and content will contain more details. + + Returns: + Deserialized and verified targets Metadata object """ if self.snapshot is None: raise RuntimeError("Cannot load targets before snapshot") @@ -419,7 +442,7 @@ def update_delegated_targets( except DeserializationError as e: raise exceptions.RepositoryError("Failed to load snapshot") from e - if new_delegate.signed.type != "targets": + if new_delegate.signed.type != Targets.type: raise exceptions.RepositoryError( f"Expected 'targets', got '{new_delegate.signed.type}'" ) @@ -438,6 +461,8 @@ def update_delegated_targets( self._trusted_set[role_name] = new_delegate logger.info("Updated %s v%d", role_name, version) + return new_delegate + def _load_trusted_root(self, data: bytes) -> None: """Verifies and loads 'data' as trusted root metadata. 
@@ -449,12 +474,12 @@ def _load_trusted_root(self, data: bytes) -> None: except DeserializationError as e: raise exceptions.RepositoryError("Failed to load root") from e - if new_root.signed.type != "root": + if new_root.signed.type != Root.type: raise exceptions.RepositoryError( f"Expected 'root', got '{new_root.signed.type}'" ) - new_root.verify_delegate("root", new_root) + new_root.verify_delegate(Root.type, new_root) - self._trusted_set["root"] = new_root + self._trusted_set[Root.type] = new_root logger.info("Loaded trusted root v%d", new_root.signed.version) diff --git a/tuf/ngclient/fetcher.py b/tuf/ngclient/fetcher.py index 6e8f2df27d..f10c5156e2 100644 --- a/tuf/ngclient/fetcher.py +++ b/tuf/ngclient/fetcher.py @@ -10,7 +10,6 @@ import tempfile from contextlib import contextmanager from typing import IO, Iterator -from urllib import parse from tuf import exceptions @@ -61,13 +60,6 @@ def download_file(self, url: str, max_length: int) -> Iterator[IO]: Yields: A TemporaryFile object that points to the contents of 'url'. """ - # 'url.replace('\\', '/')' is needed for compatibility with - # Windows-based systems, because they might use back-slashes in place - # of forward-slashes. This converts it to the common format. - # unquote() replaces %xx escapes in a url with their single-character - # equivalent. A back-slash may beencoded as %5c in the url, which - # should also be replaced with a forward slash. - url = parse.unquote(url).replace("\\", "/") logger.debug("Downloading: %s", url) number_of_bytes_received = 0 diff --git a/tuf/ngclient/updater.py b/tuf/ngclient/updater.py index 5857be13d4..955d930df6 100644 --- a/tuf/ngclient/updater.py +++ b/tuf/ngclient/updater.py @@ -10,24 +10,23 @@ secure manner: All downloaded files are verified by signed metadata. High-level description of Updater functionality: - * Initializing an :class:`~tuf.ngclient.updater.Updater` loads and validates - the trusted local root metadata: This root metadata is used as the source - of trust for all other metadata. - * Calling :func:`~tuf.ngclient.updater.Updater.refresh()` will update root - metadata and load all other top-level metadata as described in the - specification, using both locally cached metadata and metadata downloaded - from the remote repository. - * When metadata is up-to-date, targets can be dowloaded. The repository - snapshot is consistent so multiple targets can be downloaded without - fear of repository content changing. For each target: - - * :func:`~tuf.ngclient.updater.Updater.get_targetinfo()` is - used to find information about a specific target. This will load new - targets metadata as needed (from local cache or remote repository). - * :func:`~tuf.ngclient.updater.Updater.find_cached_target()` can be used - to check if a target file is already locally cached. - * :func:`~tuf.ngclient.updater.Updater.download_target()` downloads a - target file and ensures it is verified correct by the metadata. + * Initializing an ``Updater`` loads and validates the trusted local root + metadata: This root metadata is used as the source of trust for all other + metadata. + * ``refresh()`` can optionally be called to update and load all top-level + metadata as described in the specification, using both locally cached + metadata and metadata downloaded from the remote repository. If refresh is + not done explicitly, it will happen automatically during the first target + info lookup. + * Updater can be used to download targets. 
For each target: + + * ``Updater.get_targetinfo()`` is first used to find information about a + specific target. This will load new targets metadata as needed (from + local cache or remote repository). + * ``Updater.find_cached_target()`` can optionally be used to check if a + target file is already locally cached. + * ``Updater.download_target()`` downloads a target file and ensures it is + verified correct by the metadata. Below is a simple example of using the Updater to download and verify "file.txt" from a remote repository. The required environment for this example @@ -52,9 +51,6 @@ target_base_url="http://localhost:8000/targets/", ) - # Update top-level metadata from remote - updater.refresh() - # Update metadata, then download target if needed info = updater.get_targetinfo("file.txt") path = updater.find_cached_target(info) @@ -66,13 +62,20 @@ import logging import os import tempfile -from typing import Optional, Set, Tuple +from typing import Optional, Set from urllib import parse from securesystemslib import util as sslib_util from tuf import exceptions -from tuf.api.metadata import TargetFile, Targets +from tuf.api.metadata import ( + Metadata, + Root, + Snapshot, + TargetFile, + Targets, + Timestamp, +) from tuf.ngclient._internal import requests_fetcher, trusted_metadata_set from tuf.ngclient.config import UpdaterConfig from tuf.ngclient.fetcher import FetcherInterface @@ -118,7 +121,7 @@ def __init__( self._target_base_url = _ensure_trailing_slash(target_base_url) # Read trusted local root metadata - data = self._load_local_metadata("root") + data = self._load_local_metadata(Root.type) self._trusted_set = trusted_metadata_set.TrustedMetadataSet(data) self._fetcher = fetcher or requests_fetcher.RequestsFetcher() self.config = config or UpdaterConfig() @@ -130,11 +133,16 @@ def refresh(self) -> None: specified order (root -> timestamp -> snapshot -> targets) implementing all the checks required in the TUF client workflow. - The metadata for delegated roles are not refreshed by this method as - that happens on demand during get_targetinfo(). + A ``refresh()`` can be done only once during the lifetime of an Updater. + If ``refresh()`` has not been explicitly called before the first + ``get_targetinfo()`` call, it will be done implicitly at that time. - The refresh() method should be called by the client before any other - method calls. + The metadata for delegated roles is not updated by ``refresh()``: + that happens on demand during ``get_targetinfo()``. However, if the + repository uses `consistent_snapshot + `_, + then all metadata downloaded by the Updater will use the same + consistent repository state. Raises: OSError: New metadata could not be written to disk @@ -145,7 +153,7 @@ def refresh(self) -> None: self._load_root() self._load_timestamp() self._load_snapshot() - self._load_targets("targets", "root") + self._load_targets(Targets.type, Root.type) def _generate_target_file_path(self, targetinfo: TargetFile) -> str: if self.target_dir is None: @@ -159,22 +167,18 @@ def get_targetinfo(self, target_path: str) -> Optional[TargetFile]: """Returns TargetFile instance with information for 'target_path'. The return value can be used as an argument to - :func:`download_target()` and :func:`find_cached_target()`. - :func:`refresh()` must be called before calling - `get_targetinfo()`.
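Put together, the rewritten docstring above describes a client flow where the explicit `refresh()` is optional. A sketch of that flow; the directory and URL values are illustrative, and the `metadata_dir`/`metadata_base_url` keyword names are assumed to match the constructor (only `target_dir` and `target_base_url` are visible in this diff)::

    from tuf.ngclient import Updater

    # metadata_dir must already contain a trusted root.json, as the
    # docstring's example repository setup describes.
    updater = Updater(
        metadata_dir="/tmp/tuf-example/metadata",
        metadata_base_url="http://localhost:8000/metadata/",
        target_dir="/tmp/tuf-example/downloads",
        target_base_url="http://localhost:8000/targets/",
    )

    # No explicit refresh(): the first get_targetinfo() call triggers it.
    info = updater.get_targetinfo("file.txt")
    if info is None:
        raise RuntimeError("file.txt not found in repository metadata")

    path = updater.find_cached_target(info)
    if path is None:
        # Not cached yet: download and verify against the trusted metadata.
        path = updater.download_target(info)
    print("verified target available at", path)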
Subsequent calls to - `get_targetinfo()` will use the same consistent repository - state: Changes that happen in the repository between calling - :func:`refresh()` and `get_targetinfo()` will not be - seen by the updater. + ``download_target()`` and ``find_cached_target()``. + + If ``refresh()`` has not been called before calling + ``get_targetinfo()``, the refresh will be done implicitly. As a side-effect this method downloads all the additional (delegated targets) metadata it needs to return the target information. Args: - target_path: A target identifier that is a path-relative-URL string - (https://url.spec.whatwg.org/#path-relative-url-string). - Typically this is also the unix file path of the eventually - downloaded file. + target_path: A `path-relative-URL string + `_ + that uniquely identifies the target within the repository. Raises: OSError: New metadata could not be written to disk @@ -184,6 +188,9 @@ def get_targetinfo(self, target_path: str) -> Optional[TargetFile]: Returns: A TargetFile instance or None. """ + + if self._trusted_set.targets is None: + self.refresh() return self._preorder_depth_first_walk(target_path) def find_cached_target( @@ -320,10 +327,12 @@ def _load_root(self) -> None: for next_version in range(lower_bound, upper_bound): try: data = self._download_metadata( - "root", self.config.root_max_length, next_version + Root.type, + self.config.root_max_length, + next_version, ) self._trusted_set.update_root(data) - self._persist_metadata("root", data) + self._persist_metadata(Root.type, data) except exceptions.FetcherHTTPError as exception: if exception.status_code not in {403, 404}: @@ -334,7 +343,7 @@ def _load_root(self) -> None: def _load_timestamp(self) -> None: """Load local and remote timestamp metadata""" try: - data = self._load_local_metadata("timestamp") + data = self._load_local_metadata(Timestamp.type) self._trusted_set.update_timestamp(data) except (OSError, exceptions.RepositoryError) as e: # Local timestamp does not exist or is invalid @@ -342,15 +351,15 @@ def _load_timestamp(self) -> None: # Load from remote (whether local load succeeded or not) data = self._download_metadata( - "timestamp", self.config.timestamp_max_length + Timestamp.type, self.config.timestamp_max_length ) self._trusted_set.update_timestamp(data) - self._persist_metadata("timestamp", data) + self._persist_metadata(Timestamp.type, data) def _load_snapshot(self) -> None: """Load local (and if needed remote) snapshot metadata""" try: - data = self._load_local_metadata("snapshot") + data = self._load_local_metadata(Snapshot.type) self._trusted_set.update_snapshot(data, trusted=True) logger.debug("Local snapshot is valid: not downloading new one") except (OSError, exceptions.RepositoryError) as e: @@ -364,16 +373,24 @@ def _load_snapshot(self) -> None: if self._trusted_set.root.signed.consistent_snapshot: version = snapshot_meta.version - data = self._download_metadata("snapshot", length, version) + data = self._download_metadata(Snapshot.type, length, version) self._trusted_set.update_snapshot(data) - self._persist_metadata("snapshot", data) + self._persist_metadata(Snapshot.type, data) - def _load_targets(self, role: str, parent_role: str) -> None: + def _load_targets(self, role: str, parent_role: str) -> Metadata[Targets]: """Load local (and if needed remote) metadata for 'role'.""" + + # Avoid loading 'role' more than once during "get_targetinfo" + if role in self._trusted_set: + return self._trusted_set[role] + try: data = self._load_local_metadata(role) - 
self._trusted_set.update_delegated_targets(data, role, parent_role) + delegated_targets = self._trusted_set.update_delegated_targets( + data, role, parent_role + ) logger.debug("Local %s is valid: not downloading new one", role) + return delegated_targets except (OSError, exceptions.RepositoryError) as e: # Local 'role' does not exist or is invalid: update from remote logger.debug("Failed to load local %s: %s", role, e) @@ -386,9 +403,13 @@ def _load_targets(self, role: str, parent_role: str) -> None: version = metainfo.version data = self._download_metadata(role, length, version) - self._trusted_set.update_delegated_targets(data, role, parent_role) + delegated_targets = self._trusted_set.update_delegated_targets( + data, role, parent_role + ) self._persist_metadata(role, data) + return delegated_targets + def _preorder_depth_first_walk( self, target_filepath: str ) -> Optional[TargetFile]: @@ -400,8 +421,8 @@ def _preorder_depth_first_walk( # List of delegations to be interrogated. A (role, parent role) pair # is needed to load and verify the delegated targets metadata. - delegations_to_visit = [("targets", "root")] - visited_role_names: Set[Tuple[str, str]] = set() + delegations_to_visit = [(Targets.type, Root.type)] + visited_role_names: Set[str] = set() number_of_delegations = self.config.max_delegations # Preorder depth-first traversal of the graph of target delegations. @@ -411,32 +432,31 @@ def _preorder_depth_first_walk( role_name, parent_role = delegations_to_visit.pop(-1) # Skip any visited current role to prevent cycles. - if (role_name, parent_role) in visited_role_names: + if role_name in visited_role_names: logger.debug("Skipping visited current role %s", role_name) continue # The metadata for 'role_name' must be downloaded/updated before # its targets, delegations, and child roles can be inspected. - self._load_targets(role_name, parent_role) + targets = self._load_targets(role_name, parent_role).signed - role_metadata: Targets = self._trusted_set[role_name].signed - target = role_metadata.targets.get(target_filepath) + target = targets.targets.get(target_filepath) if target is not None: logger.debug("Found target in current role %s", role_name) return target # After preorder check, add current role to set of visited roles. - visited_role_names.add((role_name, parent_role)) + visited_role_names.add(role_name) # And also decrement number of visited roles. number_of_delegations -= 1 - if role_metadata.delegations is not None: + if targets.delegations is not None: child_roles_to_visit = [] # NOTE: This may be a slow operation if there are many # delegated roles. - for child_role in role_metadata.delegations.roles.values(): + for child_role in targets.delegations.roles.values(): if child_role.is_delegated_path(target_filepath): logger.debug("Adding child role %s", child_role.name) diff --git a/tuf/py.typed b/tuf/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tuf/unittest_toolbox.py b/tuf/unittest_toolbox.py index 063bec8df6..ac1305918b 100755 --- a/tuf/unittest_toolbox.py +++ b/tuf/unittest_toolbox.py @@ -29,6 +29,7 @@ import random import string +from typing import Optional class Modified_TestCase(unittest.TestCase): """ @@ -70,12 +71,12 @@ def setUp(): """ - def setUp(self): + def setUp(self) -> None: self._cleanup = [] - def tearDown(self): + def tearDown(self) -> None: for cleanup_function in self._cleanup: # Perform clean up by executing clean-up functions. 
try: @@ -87,7 +88,7 @@ def tearDown(self): - def make_temp_directory(self, directory=None): + def make_temp_directory(self, directory: Optional[str]=None) -> str: """Creates and returns an absolute path of a directory.""" prefix = self.__class__.__name__+'_' @@ -102,7 +103,9 @@ def _destroy_temp_directory(): - def make_temp_file(self, suffix='.txt', directory=None): + def make_temp_file( + self, suffix: str='.txt', directory: Optional[str]=None + ) -> str: """Creates and returns an absolute path of an empty file.""" prefix='tmp_file_'+self.__class__.__name__+'_' temp_file = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory) @@ -113,7 +116,9 @@ def _destroy_temp_file(): - def make_temp_data_file(self, suffix='', directory=None, data = 'junk data'): + def make_temp_data_file( + self, suffix: str='', directory: Optional[str]=None, data: str = 'junk data' + ) -> str: """Returns an absolute path of a temp file containing data.""" temp_file_path = self.make_temp_file(suffix=suffix, directory=directory) temp_file = open(temp_file_path, 'wt', encoding='utf8') @@ -123,7 +128,7 @@ def make_temp_data_file(self, suffix='', directory=None, data = 'junk data'): - def random_path(self, length = 7): + def random_path(self, length: int = 7) -> str: """Generate a 'random' path consisting of random n-length strings.""" rand_path = '/' + self.random_string(length) @@ -136,7 +141,7 @@ def random_path(self, length = 7): @staticmethod - def random_string(length=15): + def random_string(length: int=15) -> str: """Generate a random string of specified length.""" rand_str = ''
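The traversal change earlier in this diff (visited roles tracked by name only, `Set[str]` instead of `Set[Tuple[str, str]]`) is what keeps a role that is reachable through two delegation paths from being loaded twice. A toy, self-contained sketch of that bookkeeping; `GRAPH` and `preorder_find` are hypothetical stand-ins for real delegated metadata::

    from typing import Dict, List, Optional, Set, Tuple

    # Toy delegation graph: role name -> (known targets, delegated child roles).
    # "role2" is reachable from both "targets" and "role1".
    GRAPH: Dict[str, Tuple[Dict[str, str], List[str]]] = {
        "targets": ({}, ["role1", "role2"]),
        "role1": ({}, ["role2"]),
        "role2": ({"somepath": "targetinfo-for-somepath"}, []),
    }


    def preorder_find(target_path: str, max_delegations: int = 32) -> Optional[str]:
        # Roles are marked visited by name alone, so a role delegated to by
        # two different parents is inspected (and would be loaded) only once.
        delegations_to_visit: List[Tuple[str, str]] = [("targets", "root")]
        visited_role_names: Set[str] = set()
        while delegations_to_visit and len(visited_role_names) < max_delegations:
            role_name, _parent = delegations_to_visit.pop(-1)
            if role_name in visited_role_names:
                continue
            targets, children = GRAPH[role_name]
            if target_path in targets:
                return targets[target_path]
            visited_role_names.add(role_name)
            # Push children in reverse so the first-listed child is visited first.
            for child in reversed(children):
                delegations_to_visit.append((child, role_name))
        return None


    assert preorder_find("somepath") == "targetinfo-for-somepath"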