diff --git a/tests/fast_server_exit_old.py b/tests/fast_server_exit_old.py
deleted file mode 100644
index b54b7b9230..0000000000
--- a/tests/fast_server_exit_old.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2020, TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  fast_server_exit.py
-
-<Author>
-  Martin Vrachev.
-
-<Started>
-  October 29, 2020.
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  Used for tests in tests/test_utils.py.
-"""
-
-import sys
-
-sys.exit(0)
diff --git a/tests/simple_https_server_old.py b/tests/simple_https_server_old.py
deleted file mode 100755
index bf29d0dac6..0000000000
--- a/tests/simple_https_server_old.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2014 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  simple_https_server_old.py
-
-<Author>
-  Vladimir Diaz.
-
-<Started>
-  June 17, 2014
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  Provide a simple https server that can be used by the unit tests.  For
-  example, 'download.py' can connect to the https server started by this
-  module to verify that https downloads are permitted.
-
-  ssl.SSLContext.wrap_socket:
-    https://docs.python.org/3/library/ssl.html#ssl.SSLContext.wrap_socket
-
-  SimpleHTTPServer:
-    http://docs.python.org/library/simplehttpserver.html#module-SimpleHTTPServer
-"""
-
-import sys
-import ssl
-import os
-import http.server
-
-keyfile = os.path.join('ssl_certs', 'ssl_cert.key')
-certfile = os.path.join('ssl_certs', 'ssl_cert.crt')
-
-
-if len(sys.argv) > 1 and os.path.exists(sys.argv[1]):
-  certfile = sys.argv[1]
-
-httpd = http.server.HTTPServer(('localhost', 0),
-    http.server.SimpleHTTPRequestHandler)
-
-context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
-context.load_cert_chain(certfile, keyfile)
-httpd.socket = context.wrap_socket(httpd.socket, server_side=True)
-
-port_message = 'bind succeeded, server port is: ' \
-    + str(httpd.server_address[1])
-print(port_message)
-
-if len(sys.argv) > 1 and certfile != sys.argv[1]:
-  print('simple_https_server_old: cert file was not found: ' + sys.argv[1] +
-      '; using default certfile: ' + certfile)
-
-httpd.serve_forever()
diff --git a/tests/slow_retrieval_server_old.py b/tests/slow_retrieval_server_old.py
deleted file mode 100755
index c2586f2ef4..0000000000
--- a/tests/slow_retrieval_server_old.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  slow_retrieval_server_old.py
-
-<Author>
-  Konstantin Andrianov.
-
-<Started>
-  March 13, 2012.
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  Server that throttles data delivery: before sending any data at all, it
-  stalls for a fixed interval ('DELAY').  The server is used in
-  'test_slow_retrieval_attack_old.py'.
-"""
-
-import os
-import time
-import http.server
-
-
-
-# HTTP request handler.
-class Handler(http.server.BaseHTTPRequestHandler):
-
-  # Overwrite do_GET.
-  def do_GET(self):
-    current_dir = os.getcwd()
-    try:
-      filepath = os.path.join(current_dir, self.path.lstrip('/'))
-      data = None
-      with open(filepath, 'r') as fileobj:
-        data = fileobj.read()
-
-      self.send_response(200)
-      self.send_header('Content-length', str(len(data)))
-      self.end_headers()
-
-      # Before sending any data, the server does nothing for a long time.
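(The handler above stalls once and then writes the whole payload in a single call.  A variant that actually trickles the response one byte at a time, which is the other common shape of a slow-retrieval attack, might look like the sketch below; the handler name, payload, and one-second interval are illustrative, not part of the deleted file.)

import time
import http.server

class TrickleHandler(http.server.BaseHTTPRequestHandler):
    def do_GET(self):
        data = b'example payload'
        self.send_response(200)
        self.send_header('Content-length', str(len(data)))
        self.end_headers()
        # One byte per second: a client without a slow-retrieval defense
        # blocks for len(data) seconds in total before it can verify anything.
        for i in range(len(data)):
            self.wfile.write(data[i:i + 1])
            self.wfile.flush()
            time.sleep(1)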
-      DELAY = 40
-      time.sleep(DELAY)
-      self.wfile.write((data.encode('utf-8')))
-
-    except IOError as e:
-      self.send_error(404, 'File Not Found!')
-
-
-
-if __name__ == '__main__':
-  server_address = ('localhost', 0)
-
-  httpd = http.server.HTTPServer(server_address, Handler)
-  port_message = 'bind succeeded, server port is: ' \
-      + str(httpd.server_address[1])
-  print(port_message)
-  httpd.serve_forever()
diff --git a/tests/ssl_certs/proxy_ca.crt b/tests/ssl_certs/proxy_ca.crt
deleted file mode 100644
index f079e58b7c..0000000000
--- a/tests/ssl_certs/proxy_ca.crt
+++ /dev/null
@@ -1,17 +0,0 @@
------BEGIN CERTIFICATE-----
-MIICpDCCAYwCCQCFr/EhHmzVajANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDDAlw
-cm94eTIgQ0EwHhcNMTgwOTIwMTkyOTQ2WhcNMjgwOTE3MTkyOTQ2WjAUMRIwEAYD
-VQQDDAlwcm94eTIgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC/
-rVOeqSzJb01Vyliw3dnfLJsWfDfs/Lq5HLn+Xqnzl6MqnYirDqHzTErD3vl8lo/o
-OJrziO0vYCWGXEylRQlZp+P37bLToSWiVqWZ8pH6CAh+AhA3WtegN5JwTgIUSP7A
-aDlxuZrXlJM50QVlXJIPkc74M8ALz0nu5zmyWkGFvmTYS8503T8cXs9Alr4Bo++9
-Ilixv6lW4QS7FKTeQXlI49K4TeGGGsfmEO6Uj4WTUkwMZym9wfiqtaWc6I9ZMese
-WmU3LuufY+pFCdjsdMWDJpYc+HabTSrbgXSF5Iq9a84Xuum39qhVpYhBwBtLk3ye
-cxZmIxde1vnkWAitJFETAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAKV09r/x3WyO
-McH0RU4WRVzvQN5F0e7swpDlLUX7YnfvpPEkavqQfmrL1cYyEDgsm/347Gvcs1Aa
-iaT77axYroXOvCEJ3DxZdzUErKH6Jr3MmHKcZ/L35u6ZXKnmx/edFjdWr6ENkjuZ
-NVvKbTrm4cl6Wy4bXkp6b24rBa9IFJncOouSkIvHENEcH//OD4xeTK8vSJTJ9nmw
-TiJ0TjCRujtJWC6yb03ZV32VbeiHa1zLlZhcyKqUtt81dLti5t5+L2hAAVCcnEgI
-DBWQdlRs/wilHGWVBo/9srOoMNsmvecTBpLH2JyC5VZ1+faYLPrNlgkWgHIFOTTi
-h4ByR95Wbi8=
------END CERTIFICATE-----
diff --git a/tests/ssl_certs/proxy_ca.key b/tests/ssl_certs/proxy_ca.key
deleted file mode 100644
index 0e08b82d76..0000000000
--- a/tests/ssl_certs/proxy_ca.key
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIEpQIBAAKCAQEAv61TnqksyW9NVcpYsN3Z3yybFnw37Py6uRy5/l6p85ejKp2I
-qw6h80xKw975fJaP6Dia84jtL2AlhlxMpUUJWafj9+2y06ElolalmfKR+ggIfgIQ
-N1rXoDeScE4CFEj+wGg5cbma15STOdEFZVySD5HO+DPAC89J7uc5slpBhb5k2EvO
-dN0/HF7PQJa+AaPvvSJYsb+pVuEEuxSk3kF5SOPSuE3hhhrH5hDulI+Fk1JMDGcp
-vcH4qrWlnOiPWTHrHlplNy7rn2PqRQnY7HTFgyaWHPh2m00q24F0heSKvWvOF7rp
-t/aoVaWIQcAbS5N8nnMWZiMXXtb55FgIrSRREwIDAQABAoIBACxJObbA064+3xlh
-RRioSXx86+BIFwvUYLgAYSDacl3rvTFNcJRFLznteKDE1dPpXZqD6Zk3G8YEauce
-UD8nMj/awJs5+kVXSEC30E8/cmbYkE284E5J2OQVsunrvCM/skx2SD90aMhCdbm4
-B40h1EVwpOdH3alc3XIrTnNc0yK5MWAu41qwkxYxXHmW9Y0L8AjZve9JBrnKsJMB
-ETEZFhHgi/IWtfh5PLbJO2dbSe7Nqo4ikyWo3r5b3yvuphFz1il88ZLjJ5nDmtlH
-is7sk7pd0tYNsK1Di5G1ku50XvcbOE4F7mOVCxICTwjN+sdyG8o+AVlgbTKBo/JF
-uEhthCECgYEA/3YXS9mAEujlstrV4VOksYWtySSrLHC56tLjj8cHVPJ1qkzT4OOC
-X9TsWReDG4J8/t0DOHn+5dnhnqGcYjMMAQx095KHU1bQGrcRdmi6cjnNLTvfEbge
-IcJTYG5P7NpLfLjB3DOGqFR4o0iz4K9ZLTYJc+BaCB9qJBEw6nuoP+sCgYEAwBTN
-WpRDrmch0+LFPQwboLwtEPiFscTj8SInV0KsI/MK8+5Sm+tXS8PQHYJYcECEQxQM
-2gfyM8vy33UP4yn4edJGWlaz7a4hyDxn944vv2fBQ3vjJTNz3X3skkhZ2/F+ZW9e
-SFxPj+Vbif8VTEU+wK0f5SUmpRec4E7y3fq+kXkCgYEAib8ZbLLI1mlygfBx51/8
-rCRSwuTcz8ew2CgCwGInV+ys+bkXfmnuwNHE531AGrNPxvVRaUCO602C1NB7zI+N
-53raDyyZf5yN9fnElr592l3EfqGL9Lf8t2NbJeIVgrdqgMP29E9sSpPRwOnQ5FRo
-l3JNwoe0xDB8QRpr7+PhoyUCgYEAp+GGmmR7wzLgnhDV00WB4DqYKP0N3RH5KAhx
-2hKr4b/LEuh5y00mP1Il06TZJ0M8VmRv1yCa0CqxXB00hZdpVRAz7UFagaJwZFJn
-jDb6BJDqmdDt9tXBrxUgb7pMz6+CiaWNAjGsWFheaX5JXyAmeMDX369Y13KL6oEW
-RG2jogECgYEA/1vLZcWNK/0yd4ClU+Xbu8xC29q82JUMsaazHtbgSNlOfo9LMQlH
-z6xBiMYfHZ/SiHCy9RsO8GD4caXiF0RsTVnhqjSRJf3EARamufelNsu2ApLclkSN
-fzSoB7ZHddGaYKYpXkGzcwFcKd/QjAlHm1yIsZu4B52AhCxC/WS2X54=
------END RSA PRIVATE KEY-----
diff --git a/tests/ssl_certs/proxy_cert.key b/tests/ssl_certs/proxy_cert.key
deleted file mode 100644
index 7693865610..0000000000
--- a/tests/ssl_certs/proxy_cert.key
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAzZO36nZvb9wLxBNB2cZyHqcX5poChJd1YnFBtxbtQwiISxid
-eGdiWImQE80vpUyTQbI7TxM+w1xZeEeu4PXuYrOgdTDRFEnjM2mteG+3WpHQBN4H
-xoah0msp3046fMkYqcEvhvHbsc5DAWgLK4JFHQPtG/+CIH0ZY+lBBPQhFIhBLYkt
-YxNVqwpsXOGreASSw6mO6cVehCuVFJQO5NnI1sCAvp3SeosMKeIcDZxpZWmZhSwH
-n3Rj6RMNM66C8zG4YlpvIniGzgV4UiW8XrTUG8HmzQ2295IcfB4No2DZeJDSR9oq
-jOkyqJXll+tSiAMuzBRtTQKvGZ5bpZWW4XELEQIDAQABAoIBAQCAfW2cjD4GimCI
-QwkLlq9JXWLg7S3ZtdjWmLdcOmY9WZ3mYhI6aVPcxs5Ysgyvonb/vui2+e5mqNf7
-B8LUNKK06lTGKqbjqXLqdYjJF/pgD3cXM7dkbE3EeNqJChogWIijwW11SMHqFmNn
-A6LHpPqRshyHPWIV8FroSagr8nKio5BjUEuUiQUUAmSJPGN5qUhdIWXcQu8R1JB8
-9qqqtwPR4FELbFVGI2vYHaSWGnf9V0boPOsfFXWbSq/Ksj3Lm3gAqMtlAeOFu84l
-fhP9RkgeXfaCXq0VaOM83UDgLqXm4Ni4wAMKRLwNs4LzumqMM/dfUTn+mGncj33q
-idp5qnDhAoGBAOXkwuf60F7aBbo98A0vWZli2CbkspsJz2J573pf+lVWI+ZHBZLI
-MOM2DgCOEIUfa2TIMkwFr2t9x6uXlACEwFbEtEBpM4J5qUHgGtXZIsnTsv3qUg/C
-L89cNrMddOuuRkxQbyK1QMYZZmZQjSKG2jW6m1KING+shtkOzQ/P9ildAoGBAOTs
-DLyyPeEZPj1UMqxVNmeYYRfWnt+YyTPulOIbSuFN0DhZPNLsjrhSxvDwe/3sYH/p
-nKdjnlFlx8frz9wtkCt0hWvY0pG2Zam4IBCvreFN7rSvpzHwUAK3oXic2TRKKu1m
-xUPZqMJwnWAPX+XxGFn0m7UJj+95VTEOJ2d12ClFAoGAdexXMgmM8uqg/3yf8xNz
-wWNbfu/W0gJBN8FWXw52aWmrNob9y+IWeaYTnqNAxBhuzR6H9kkAR4IYduNkzrNJ
-ufhigZu1CVuAv8LF4SXlW2PVL7wPZff08Efb4xrcC7y0YJbtuv8Af90tkpQFIU3N
-Brx2yeoGA7aa4SJfe5nwKh0CgYAo1yP+lh4MBqDf+CGCNUGbgcfwpM17PprGtQ3C
-uPPG9kbrhqAfUSy1Ha94VK8KQh2FNHxKMK+R/gKCXEOdGFPcLNGQyAHpFQ1WFg9C
-atUumOS5P40oj6L2mSQpjHIDrieyat9Ol4pQBh9Nf/Cv6S9a/RS6W5ZeNttIASpu
-fsutsQKBgQCq+BFeDYJH4f+C1233W3PXM0P1ivj+9TJMRUP63RRay6rv2ZTZXyPc
-Rx6Lv4OVWh9VMfv1kHRloJ1GKEBo/uD3nid1WqoNxpXv1iwxeGtjXkFHfvCB7Ruu
-vTyQhJQQ7WSCJJOfarstusIn0udOG3MLRgG4X1pPQghyS1AT8NUglw==
------END RSA PRIVATE KEY-----
diff --git a/tests/ssl_certs/ssl_cert.crt b/tests/ssl_certs/ssl_cert.crt
deleted file mode 100644
index 4812078bcf..0000000000
--- a/tests/ssl_certs/ssl_cert.crt
+++ /dev/null
@@ -1,28 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIE1TCCAz2gAwIBAgIJAKqz8ew7Z44mMA0GCSqGSIb3DQEBCwUAMIGAMQswCQYD
-VQQGEwJVUzERMA8GA1UECAwITmV3IFlvcmsxETAPBgNVBAcMCEJyb29rbHluMQww
-CgYDVQQKDANOWVUxKTAnBgNVBAsMIENvbXB1dGVyIFNjaWVuY2UgYW5kIEVuZ2lu
-ZWVyaW5nMRIwEAYDVQQDDAlsb2NhbGhvc3QwHhcNMTYwMTI3MjEyMTMxWhcNMjYw
-MTI0MjEyMTMxWjCBgDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5ldyBZb3JrMREw
-DwYDVQQHDAhCcm9va2x5bjEMMAoGA1UECgwDTllVMSkwJwYDVQQLDCBDb21wdXRl
-ciBTY2llbmNlIGFuZCBFbmdpbmVlcmluZzESMBAGA1UEAwwJbG9jYWxob3N0MIIB
-ojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAxyFVeRsWnb1UlCKBks2azM9W
-9K+J/ZkzdSb6eCxOIxv79M/Ug54CfWqkySSaQejsu0U/gJxkFYRvwQAy5lATrspY
-2kyiWYiggWXFDWz+i8ETPkL9zn59v13sNIpT/IXQj0S3Mr9ZnsUn1qCyEOOIxJxZ
-lyuV/M/XP1DP4tArhEvrex12V6MQIK+8fYzEjHG/W7vIIet+wTStIR8ArvVQi0Kv
-PbbGCfrZ+e+gq+UpBLBuAfMzM95TW+YJ5duMchie2n6LDmOeegA4jMEv2ppeOr8Q
-JJtZuKpXWVbJvLg81yrDjr1rAwJR/WQrnk8GQWPCyPLneAA4mJbi75LqjLxn0AoJ
-b3kzLfGEMJJEWXspxNg06bLQU948hB4L7nKARq6s7KoESjEV+/L4koMPWJoNq6fx
-OUVw2+S3ITNrDctecRQ1j3RGVPaj5l6bn03C7KV9uRrfqFY3OUjn7A0kDczvRnmr
-e1BZIpe+mfGFB+Uu7JiQoBv6I6fqyrdH9rX1LUKlAgMBAAGjUDBOMB0GA1UdDgQW
-BBT8LvRkvodP9bR/bBs/aI+AydRIvTAfBgNVHSMEGDAWgBT8LvRkvodP9bR/bBs/
-aI+AydRIvTAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBgQC6kwuSEF0Y
-5yLMf1TKfVkeBaZ4tOqR2kzpggzPPog+JcfIQgVmI2QTUDritHWFIM4YUwQ/00WU
-uol2BCUpgaLci5gNgyTw8p538Q5cZHXE3kK/CWJA4zKag+oHdmXzGjMalqzvPuVJ
-9VdtPrwHhB0Xntf72iWWhE2dIn1QZqVmJ/8hhIU8cQ91pIqTjYjhrYE/GhGH7HMW
-bRiRolt37VxbzfXjEBMqVH6fOQq0piTRxwTNPBFp6JO5mRakRmWRvN3dnR8J9qXi
-6tQhNNn2uQIpPlKlqVQnh5j5YxFrb50b0FCjDw+eNilXP93yjV4+lWK2QZychcGl
-6/7Wu8snZkJCImPbwmcT80XSKesf918zIkauekWiaJE02+ljNtbM7MUAE+XLsKJy
-NFGzpyZJ9LihGC/eeVl7K+xqC41jGVOXOOHtbDMbIQfaEZd1nPvy3+V/tublv+am
-jPSlj/FW3bLTkjF0OspFjHvJeCeAJdM9kJdYfZoahd6kcejGJc+vjXE=
------END CERTIFICATE-----
diff --git a/tests/ssl_certs/ssl_cert.key b/tests/ssl_certs/ssl_cert.key
deleted file mode 100644
index b483851d7b..0000000000
--- a/tests/ssl_certs/ssl_cert.key
+++ /dev/null
@@ -1,39 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIG4wIBAAKCAYEAxyFVeRsWnb1UlCKBks2azM9W9K+J/ZkzdSb6eCxOIxv79M/U
-g54CfWqkySSaQejsu0U/gJxkFYRvwQAy5lATrspY2kyiWYiggWXFDWz+i8ETPkL9
-zn59v13sNIpT/IXQj0S3Mr9ZnsUn1qCyEOOIxJxZlyuV/M/XP1DP4tArhEvrex12
-V6MQIK+8fYzEjHG/W7vIIet+wTStIR8ArvVQi0KvPbbGCfrZ+e+gq+UpBLBuAfMz
-M95TW+YJ5duMchie2n6LDmOeegA4jMEv2ppeOr8QJJtZuKpXWVbJvLg81yrDjr1r
-AwJR/WQrnk8GQWPCyPLneAA4mJbi75LqjLxn0AoJb3kzLfGEMJJEWXspxNg06bLQ
-U948hB4L7nKARq6s7KoESjEV+/L4koMPWJoNq6fxOUVw2+S3ITNrDctecRQ1j3RG
-VPaj5l6bn03C7KV9uRrfqFY3OUjn7A0kDczvRnmre1BZIpe+mfGFB+Uu7JiQoBv6
-I6fqyrdH9rX1LUKlAgMBAAECggGAEogMn0ehFC7xdxO7AUF3HYZSLlVDv0EJo+Zr
-utFMuEG7ce4Bdfo3exp4mWt5m5akqUzpevuS6Nm5WLm/AuYC3upf2Hj3RuPLJB+n
-dfdlvPXL56huXFAzPaLs/3q8FC0T2rFnZyadnYP1kCjGSYITUVDHmaTpwWxKOM85
-eX8r/ZTfJkb4o3E+Z/xSy1BVXkibqVrRZi63Th2r2wA6nQ2hYERlcJXY2kbpEDR3
-vGeIKLKOmknawwH2uf+vfh+vc1LNE7p9C5w16ex0OcmCo6G1ln7/dcwmXmcS3M0S
-Bax5Jzu5ozaJFL9G59o0AUGJoZj9Gj9leeKPZvShsGcA0JmBMQiLIdhgRwj0B83x
-HrYXTZ6P5BjJmwrIv4mGdv2bHV20pbWKAATUwo8EVBzylipexhhAtQJ5B6OsPDPS
-HTluaEC2niD6lE613uRnzzbjw4SlwkoMLE0aqOhQyWIPS9/8oRjTzQi4otL7Dt69
-oMrVhmSfxUqZhh2R3KMHDcMKt5nBAoHBAOXkDovYOhTMD3ei0WbKpbSB1sJp5t2d
-/9gVil4nWLa4ahw7/TsZi3Co+c9gD2UJku1L9JbOy6TVZ2LoXOybLvIJfeAjNdYH
-vi/ElG7498fgsSyw6bua/1VEd7VtbtpWJIQt1LdJG1+O3ZbJNTY6tbLbYVuy4FIO
-e/484F8kdZ9PtRsn+I0I7kfoYJ2IFoM0UWgwQETOBguBCua43ZnHoxrvyHKABAO+
-Iuvw4RBZKphGVxMCEjvTCB9S/CpGCRAkkQKBwQDdvu3reA/lVdFDN56VNUn0u3vr
-zPSoiOjojlHDyWVAWiLB9I0qaE61UMvVgChM8VkmjhHYQEW6Cj0XMZMkCnsfKDQn
-TYF16jt/sTteWSTcx0PTeiCGs3yM5wK4B8q9coOlzSqDd39mjDIFiUz4e+44OIcU
-+ISc8pGbwxw0W8qRwIUJPTSVoaUZDnupuR/IE48q8CTPT1Gf00sMLWuv3SYuFHKX
-djpcMLWVf4HclIY6y3BqNIZ0JaUAOd+OZT2kdtUCgcBLWPwLics/lcJcC9lmP3Ug
-PI4PGna4nFiGkkjPo0XIXZkpt9+/xxeUzU1TUsC49PJbJFH+O7kzRV6lZFNQmWxB
-mCrRk7jJdbA4J84esStFL7fiVfnFq3+UiuRRapSyqxk82WimyidWopSuHzR5mbSD
-8rNuQqqTOnwZUAqaJHEIzi8lv2wPjaXLm7ZO65O1XShxZZ8q7fu9OYZBKMY46N3k
-rkKchKjMMT1w53pcyVzUm/leGYewY/J9kc1kbZ/60oECgcEAj/qdzwt4/sa3BncB
-wA4GxCJL9zJwFVI4MG/gRUjqNluQP/GDC2sI2A/rGeiJwlPfN/p9ObWZ0I8/VWT6
-DifEA9n96xsXGTIKigHQ85TcK4Iy1whwQCYgk/iXOljM2i+VrT1HAm+/yBz1icS5
-ton5hoWlqAcpTCLwSnvoP1Lud67ScspL73Aym89cmjo6mZWhmxasP/NXo3f1PaXs
-SxdD6B2cvh2lDSEPdk+BSXEiquBXUI5kUtvyg/AP6Qxxdu01AoHAO05qTh9zokkT
-yg0sZf4Z5i01em2ys4ZhQjhhbw+I5lIO76e/ZyUWpEZusBVd9TV5BHgiATOHw4yr
-nbjEZKwLEb3SXoHl3/CD/l9vWk4gKAYDJdW+oPZttDlkp6dfPJVDupQwLhrxXYmE
-fgs4WFmY3Q5b1wut2pnSs1UEPDqJBvykt59gFgn7yVwyTy8VLihNVtH4mwVPYXha
-jz2T6BzRAPlYqx/FpkK2YHHNcyj+HFtnBUMMzacnSl/aXpJgHTKw
------END RSA PRIVATE KEY-----
diff --git a/tests/ssl_certs/ssl_cert_2.crt b/tests/ssl_certs/ssl_cert_2.crt
deleted file mode 100644
index 6d6fb63a53..0000000000
--- a/tests/ssl_certs/ssl_cert_2.crt
+++ /dev/null
@@ -1,30 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIFOTCCA6GgAwIBAgIJAO+bbero+zKtMA0GCSqGSIb3DQEBCwUAMIGAMQswCQYD
-VQQGEwJVUzERMA8GA1UECAwITmV3IFlvcmsxETAPBgNVBAcMCEJyb29rbHluMQww
-CgYDVQQKDANOWVUxKTAnBgNVBAsMIENvbXB1dGVyIFNjaWVuY2UgYW5kIEVuZ2lu
-ZWVyaW5nMRIwEAYDVQQDDAlsb2NhbGhvc3QwHhcNMTgwOTI2MTgwMDAzWhcNMzgw
-OTIxMTgwMDAzWjCBgDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5ldyBZb3JrMREw
-DwYDVQQHDAhCcm9va2x5bjEMMAoGA1UECgwDTllVMSkwJwYDVQQLDCBDb21wdXRl
-ciBTY2llbmNlIGFuZCBFbmdpbmVlcmluZzESMBAGA1UEAwwJbG9jYWxob3N0MIIB
-ojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAxyFVeRsWnb1UlCKBks2azM9W
-9K+J/ZkzdSb6eCxOIxv79M/Ug54CfWqkySSaQejsu0U/gJxkFYRvwQAy5lATrspY
-2kyiWYiggWXFDWz+i8ETPkL9zn59v13sNIpT/IXQj0S3Mr9ZnsUn1qCyEOOIxJxZ
-lyuV/M/XP1DP4tArhEvrex12V6MQIK+8fYzEjHG/W7vIIet+wTStIR8ArvVQi0Kv
-PbbGCfrZ+e+gq+UpBLBuAfMzM95TW+YJ5duMchie2n6LDmOeegA4jMEv2ppeOr8Q
-JJtZuKpXWVbJvLg81yrDjr1rAwJR/WQrnk8GQWPCyPLneAA4mJbi75LqjLxn0AoJ
-b3kzLfGEMJJEWXspxNg06bLQU948hB4L7nKARq6s7KoESjEV+/L4koMPWJoNq6fx
-OUVw2+S3ITNrDctecRQ1j3RGVPaj5l6bn03C7KV9uRrfqFY3OUjn7A0kDczvRnmr
-e1BZIpe+mfGFB+Uu7JiQoBv6I6fqyrdH9rX1LUKlAgMBAAGjgbMwgbAwgZ8GA1Ud
-IwSBlzCBlKGBhqSBgzCBgDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5ldyBZb3Jr
-MREwDwYDVQQHDAhCcm9va2x5bjEMMAoGA1UECgwDTllVMSkwJwYDVQQLDCBDb21w
-dXRlciBTY2llbmNlIGFuZCBFbmdpbmVlcmluZzESMBAGA1UEAwwJbG9jYWxob3N0
-ggkA75tt6uj7Mq0wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAYEAFWcl
-1tAmt/3DJDjk0ppF62jbwcEOu1N9Nono9a70ojAQYYuMC7Ditw6rLbeXS8tP8ae/
-drlci3VxlE5PpmAjuP67Uv2CuGu/2iMqa99AWZ4mVN+x4YL6awvYs8ea6I1Xe8tQ
-5+RqvNA+QtnjtfOeb6yWQBAGrc2eTX87IzqvV/EewkdKAs4GZUWG1Zjv3effqjTO
-qRX94ltW1GWud7fVcqpZLOaK9U+4IaI2nNHuCtWODoyQmMoVApXyig/YQqFe0eyj
-76m1T+2SZLRtn0xn1fTHuLZ2bdtTMZ7k5PTAKnBNEn1Rr9MAS+WEASN1ZyoQ3reL
-VYrgkMTrrXPO8bdDTvP7z1Jzv5Cq9WMHFvOLfnj/vN9ZPH6w4QT3Zb97SAAOSPK/
-gzOzRtIe+hqCYBh/cwMoeeoAzes/nJgorj3IOTu8JXmtZrZGrdLIhu2Q8U+yKasf
-+TUrr6xdcJI/fyVM5BVelpGhqHzzOQe1tO4VYQlAVaaVvFidDPHqTI2/S272
------END CERTIFICATE-----
diff --git a/tests/ssl_certs/ssl_cert_expired.crt b/tests/ssl_certs/ssl_cert_expired.crt
deleted file mode 100644
index f0b79cb95a..0000000000
--- a/tests/ssl_certs/ssl_cert_expired.crt
+++ /dev/null
@@ -1,30 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIFOTCCA6GgAwIBAgIJALtyUsChEIJpMA0GCSqGSIb3DQEBCwUAMIGAMQswCQYD
-VQQGEwJVUzERMA8GA1UECAwITmV3IFlvcmsxETAPBgNVBAcMCEJyb29rbHluMQww
-CgYDVQQKDANOWVUxKTAnBgNVBAsMIENvbXB1dGVyIFNjaWVuY2UgYW5kIEVuZ2lu
-ZWVyaW5nMRIwEAYDVQQDDAlsb2NhbGhvc3QwHhcNMTgwOTI2MTc0NTM2WhcNMTgw
-OTI1MTc0NTM2WjCBgDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5ldyBZb3JrMREw
-DwYDVQQHDAhCcm9va2x5bjEMMAoGA1UECgwDTllVMSkwJwYDVQQLDCBDb21wdXRl
-ciBTY2llbmNlIGFuZCBFbmdpbmVlcmluZzESMBAGA1UEAwwJbG9jYWxob3N0MIIB
-ojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAxyFVeRsWnb1UlCKBks2azM9W
-9K+J/ZkzdSb6eCxOIxv79M/Ug54CfWqkySSaQejsu0U/gJxkFYRvwQAy5lATrspY
-2kyiWYiggWXFDWz+i8ETPkL9zn59v13sNIpT/IXQj0S3Mr9ZnsUn1qCyEOOIxJxZ
-lyuV/M/XP1DP4tArhEvrex12V6MQIK+8fYzEjHG/W7vIIet+wTStIR8ArvVQi0Kv
-PbbGCfrZ+e+gq+UpBLBuAfMzM95TW+YJ5duMchie2n6LDmOeegA4jMEv2ppeOr8Q
-JJtZuKpXWVbJvLg81yrDjr1rAwJR/WQrnk8GQWPCyPLneAA4mJbi75LqjLxn0AoJ
-b3kzLfGEMJJEWXspxNg06bLQU948hB4L7nKARq6s7KoESjEV+/L4koMPWJoNq6fx
-OUVw2+S3ITNrDctecRQ1j3RGVPaj5l6bn03C7KV9uRrfqFY3OUjn7A0kDczvRnmr
-e1BZIpe+mfGFB+Uu7JiQoBv6I6fqyrdH9rX1LUKlAgMBAAGjgbMwgbAwgZ8GA1Ud
-IwSBlzCBlKGBhqSBgzCBgDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5ldyBZb3Jr
-MREwDwYDVQQHDAhCcm9va2x5bjEMMAoGA1UECgwDTllVMSkwJwYDVQQLDCBDb21w
-dXRlciBTY2llbmNlIGFuZCBFbmdpbmVlcmluZzESMBAGA1UEAwwJbG9jYWxob3N0
-ggkAu3JSwKEQgmkwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAYEAW4I1
-TacdFv3L9ENFkSLciPb7zFMckLUZfk/P+4VjdapWrfuydO4W/ogMxA4DK09thTsK
-N/BgcExyKjDldGUfUv57Tqv3v2E5kbygNcNtP53fwMz3y+7QourzkDE5HWciw1Lb
-hmbnCBTzt/UioSBdJnAH29GWpSS+Jzu745sRaI48AS/J5ApH2aVEnNQTCE7v1LNH
-2bTTPYl3eDXiD8yOhvyiW1F4y2BSFbQRH/3aE6Goe4A75m8sX50+JlOgjyyQnAMf
-vbfvZsjGfqdXv9Qpci50qKCFxHJLXXNAUbX3fDgKE+RoZUNZnmn2VDgJYnToz6on
-RcVnppV09kmSjHXZBT04XXUA0vG3p+oU0TO4puJlePVf4Oz23/DRCPHSfVWgMeB2
-c1PpKit4+Bz7mypnsWVw8kk//l0GJ1cHnkkZElKJtPEB7I587jgTCDcN811TGNBc
-rLLd/JwtYAvi1CPFt2ICGDvA4AKLY3rBNg5z1DrSE/iom1NTC00SFZJztYiX
------END CERTIFICATE-----
diff --git a/tests/ssl_certs/ssl_cert_wronghost.crt b/tests/ssl_certs/ssl_cert_wronghost.crt
deleted file mode 100644
index df7bfa37a6..0000000000
--- a/tests/ssl_certs/ssl_cert_wronghost.crt
+++ /dev/null
@@ -1,31 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIFRTCCA62gAwIBAgIJAKY6b706lpuDMA0GCSqGSIb3DQEBCwUAMIGEMQswCQYD
-VQQGEwJVUzERMA8GA1UECAwITmV3IFlvcmsxETAPBgNVBAcMCEJyb29rbHluMQww
-CgYDVQQKDANOWVUxKTAnBgNVBAsMIENvbXB1dGVyIFNjaWVuY2UgYW5kIEVuZ2lu
-ZWVyaW5nMRYwFAYDVQQDDA1ub3RteWhvc3RuYW1lMB4XDTE4MDkxMjE2NTkxN1oX
-DTM4MDkwNzE2NTkxN1owgYQxCzAJBgNVBAYTAlVTMREwDwYDVQQIDAhOZXcgWW9y
-azERMA8GA1UEBwwIQnJvb2tseW4xDDAKBgNVBAoMA05ZVTEpMCcGA1UECwwgQ29t
-cHV0ZXIgU2NpZW5jZSBhbmQgRW5naW5lZXJpbmcxFjAUBgNVBAMMDW5vdG15aG9z
-dG5hbWUwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDHIVV5GxadvVSU
-IoGSzZrMz1b0r4n9mTN1Jvp4LE4jG/v0z9SDngJ9aqTJJJpB6Oy7RT+AnGQVhG/B
-ADLmUBOuyljaTKJZiKCBZcUNbP6LwRM+Qv3Ofn2/Xew0ilP8hdCPRLcyv1mexSfW
-oLIQ44jEnFmXK5X8z9c/UM/i0CuES+t7HXZXoxAgr7x9jMSMcb9bu8gh637BNK0h
-HwCu9VCLQq89tsYJ+tn576Cr5SkEsG4B8zMz3lNb5gnl24xyGJ7afosOY556ADiM
-wS/aml46vxAkm1m4qldZVsm8uDzXKsOOvWsDAlH9ZCueTwZBY8LI8ud4ADiYluLv
-kuqMvGfQCglveTMt8YQwkkRZeynE2DTpstBT3jyEHgvucoBGrqzsqgRKMRX78viS
-gw9Ymg2rp/E5RXDb5LchM2sNy15xFDWPdEZU9qPmXpufTcLspX25Gt+oVjc5SOfs
-DSQNzO9Geat7UFkil76Z8YUH5S7smJCgG/ojp+rKt0f2tfUtQqUCAwEAAaOBtzCB
-tDCBowYDVR0jBIGbMIGYoYGKpIGHMIGEMQswCQYDVQQGEwJVUzERMA8GA1UECAwI
-TmV3IFlvcmsxETAPBgNVBAcMCEJyb29rbHluMQwwCgYDVQQKDANOWVUxKTAnBgNV
-BAsMIENvbXB1dGVyIFNjaWVuY2UgYW5kIEVuZ2luZWVyaW5nMRYwFAYDVQQDDA1u
-b3RteWhvc3RuYW1lggkApjpvvTqWm4MwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0B
-AQsFAAOCAYEAvpBMce3kxwo9W0o4RqezkSxnNyax0ezbUNodIkx5kbzX09qQLqhK
-SkhQY3CNmtrpsczUg1W2nldxioEouwfTlhi15H98E/8XytpGaHO7Rnbtq8nkOp3E
-N1+DMfFR95OynbHSd7bfK9UEmH1CmCnttvCuQkLTxDCpEsQNAxvmU/yDONoDr+cu
-jGo80XTnYTqHl5/UtGbCS4SAIdWgrXTIqVvY/eF+mR+3nQEYjBuqW0cNfXLyYLXH
-XMc6qtfGX1P+NRWtlrWgGQmc0fry+GczRHMJuKtJMV2xZzPJAJqwwvj3Fjz8HNGu
-ZX3kVdbkDjf8is2cWgyZqDecqPHDBW4Ey539s/5eurgOkEvhriS4/9RnVhgdzduj
-nRdXkD10ficrFcBQO0KaTWT+iFBc9duuYPuLRyRTye5p3t0liOikH2XrRXs4IBfz
-2mT4npXQl1liNixcCf/yUEUOSQAJDG6aRjDjD4SZBUPDLjfqKLid8M0BpLQrks9L
-5hAg1WZXorY6
------END CERTIFICATE-----
diff --git a/tests/test_arbitrary_package_attack_old.py b/tests/test_arbitrary_package_attack_old.py
deleted file mode 100755
index 0791751220..0000000000
--- a/tests/test_arbitrary_package_attack_old.py
+++ /dev/null
@@ -1,287 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  test_arbitrary_package_attack_old.py
-
-<Author>
-  Konstantin Andrianov.
-
-<Started>
-  February 22, 2012.
-
-  March 21, 2014.
-    Refactored to use the 'unittest' module (test conditions in code, rather
-    than verifying text output), use pre-generated repository files, and
-    discontinue use of the old repository tools.
-    -vladimir.v.diaz
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  Simulate an arbitrary package attack, where an updater client attempts to
-  download a malicious file.  TUF and non-TUF client scenarios are tested.
-
-  There is no difference between 'updates' and 'target' files.
-""" - -import os -import tempfile -import shutil -import json -import logging -import unittest -import sys -from urllib import request - -import tuf -import tuf.formats -import tuf.roledb -import tuf.keydb -import tuf.log -import tuf.client.updater as updater -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) - - -class TestArbitraryPackageAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.client_directory = os.path.join(temporary_repository_root, 'client') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - - # Set the url prefix required by the 'tuf/client/updater.py' updater. - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repository_directory' with the temporary client - # directory copied from the original repository files. 
-    tuf.settings.repositories_directory = self.client_directory
-    self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
-                                           'metadata_path': 'metadata',
-                                           'targets_path': 'targets'}}
-
-    # Create the repository instance.  The test cases will use this client
-    # updater to refresh metadata, fetch target files, etc.
-    self.repository_updater = updater.Updater(self.repository_name,
-                                              self.repository_mirrors)
-
-
-
-  def tearDown(self):
-    # updater.Updater() populates the roledb with the name "test_repository1"
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-
-    # Logs stdout and stderr from the server subprocess.
-    self.server_process_handler.flush_log()
-
-    # Remove the temporary directory.
-    unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
-
-  def test_without_tuf(self):
-    # Verify that a target file replaced with a malicious version is
-    # downloaded by a non-TUF client (i.e., a non-TUF client that does not
-    # verify hashes, detect mix-and-match attacks, etc.)  A TUF client, on
-    # the other hand, should detect that the downloaded target file is
-    # invalid.
-
-    # Test: Download a valid target file from the repository.
-    # Ensure the target file to be downloaded has not already been
-    # downloaded, and generate its file size and digest.  The file size and
-    # digest are needed to check that the malicious file was indeed
-    # downloaded.
-    target_path = os.path.join(self.repository_directory, 'targets', 'file1.txt')
-    client_target_path = os.path.join(self.client_directory, 'file1.txt')
-    self.assertFalse(os.path.exists(client_target_path))
-    length, hashes = securesystemslib.util.get_file_details(target_path)
-    fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
-    url_prefix = self.repository_mirrors['mirror1']['url_prefix']
-    url_file = os.path.join(url_prefix, 'targets', 'file1.txt')
-
-    # On Windows, the URL portion should not contain backslashes.
-    request.urlretrieve(url_file.replace('\\', '/'), client_target_path)
-
-    self.assertTrue(os.path.exists(client_target_path))
-    length, hashes = securesystemslib.util.get_file_details(client_target_path)
-    download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-    self.assertEqual(fileinfo, download_fileinfo)
-
-    # Test: Download a target file that has been modified by an attacker.
-    with open(target_path, 'wt') as file_object:
-      file_object.write('add malicious content.')
-    length, hashes = securesystemslib.util.get_file_details(target_path)
-    malicious_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
-    # On Windows, the URL portion should not contain backslashes.
-    request.urlretrieve(url_file.replace('\\', '/'), client_target_path)
-
-    length, hashes = securesystemslib.util.get_file_details(client_target_path)
-    download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
-    # Verify 'download_fileinfo' is unequal to the original trusted version.
-    self.assertNotEqual(download_fileinfo, fileinfo)
-
-    # Verify 'download_fileinfo' is equal to the malicious version.
-    self.assertEqual(download_fileinfo, malicious_fileinfo)
-
-
-
-  def test_with_tuf(self):
-    # Verify that a target file (on the remote repository) modified by an
-    # attacker is not downloaded by the TUF client.
-    # First test that the valid target file is successfully downloaded.
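(The fingerprint comparison that test_without_tuf() above performs with securesystemslib.util.get_file_details() and make_targets_fileinfo() boils down to a length-plus-digest pair.  Stripped of the TUF helpers, the check is just the sketch below; the function name is illustrative, and sha256 stands in for whatever digests securesystemslib computes by default.)

import hashlib

def file_fingerprint(path):
    # Length plus digest is what the targets fileinfo captures; if either
    # differs after a download, the fetched file is not the trusted one.
    with open(path, 'rb') as fileobj:
        data = fileobj.read()
    return len(data), hashlib.sha256(data).hexdigest()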
-    file1_fileinfo = self.repository_updater.get_one_valid_targetinfo('file1.txt')
-    destination = os.path.join(self.client_directory)
-    self.repository_updater.download_target(file1_fileinfo, destination)
-    client_target_path = os.path.join(destination, 'file1.txt')
-    self.assertTrue(os.path.exists(client_target_path))
-
-    # Modify 'file1.txt' and confirm that the TUF client rejects it.
-    target_path = os.path.join(self.repository_directory, 'targets', 'file1.txt')
-    with open(target_path, 'wt') as file_object:
-      file_object.write('malicious content, size 33 bytes.')
-
-    try:
-      self.repository_updater.download_target(file1_fileinfo, destination)
-
-    except tuf.exceptions.NoWorkingMirrorError as exception:
-      url_prefix = self.repository_mirrors['mirror1']['url_prefix']
-      url_file = os.path.join(url_prefix, 'targets', 'file1.txt')
-
-      # Verify that only one exception is raised for 'url_file'.
-      self.assertEqual(len(exception.mirror_errors), 1)
-
-      # Verify that the expected 'securesystemslib.exceptions.BadHashError'
-      # exception is raised for 'url_file'.
-      self.assertTrue(url_file.replace('\\', '/') in exception.mirror_errors)
-      self.assertTrue(
-          isinstance(exception.mirror_errors[url_file.replace('\\', '/')],
-          securesystemslib.exceptions.BadHashError))
-
-    else:
-      self.fail('TUF did not prevent an arbitrary package attack.')
-
-
-  def test_with_tuf_and_metadata_tampering(self):
-    # Test that a TUF client does not download a malicious target file, nor a
-    # 'targets.json' metadata file that has also been modified by the
-    # attacker.  The attacker does not attach a valid signature to
-    # 'targets.json'.
-
-    # An attacker modifies 'file1.txt'.
-    target_path = os.path.join(self.repository_directory, 'targets', 'file1.txt')
-    with open(target_path, 'wt') as file_object:
-      file_object.write('malicious content, size 33 bytes.')
-
-    # An attacker also tries to add the malicious target's length and digest
-    # to its metadata file.
-    length, hashes = securesystemslib.util.get_file_details(target_path)
-
-    metadata_path = \
-      os.path.join(self.repository_directory, 'metadata', 'targets.json')
-
-    metadata = securesystemslib.util.load_json_file(metadata_path)
-    metadata['signed']['targets']['file1.txt']['hashes'] = hashes
-    metadata['signed']['targets']['file1.txt']['length'] = length
-
-    tuf.formats.check_signable_object_format(metadata)
-
-    with open(metadata_path, 'wb') as file_object:
-      file_object.write(json.dumps(metadata, indent=1,
-          separators=(',', ': '), sort_keys=True).encode('utf-8'))
-
-    # Verify that the malicious 'targets.json' is not downloaded.  Perform
-    # a refresh of top-level metadata to demonstrate that the malicious
-    # 'targets.json' is not downloaded.
-    try:
-      self.repository_updater.refresh()
-      file1_fileinfo = self.repository_updater.get_one_valid_targetinfo('file1.txt')
-      destination = os.path.join(self.client_directory)
-      self.repository_updater.download_target(file1_fileinfo, destination)
-
-    except tuf.exceptions.NoWorkingMirrorError as exception:
-      url_prefix = self.repository_mirrors['mirror1']['url_prefix']
-      url_file = os.path.join(url_prefix, 'targets', 'file1.txt')
-
-      # Verify that an exception is raised only for the malicious 'url_file'.
-      self.assertEqual(len(exception.mirror_errors), 1)
-
-      # Verify that the specific and expected mirror exception is raised.
-      self.assertTrue(url_file.replace('\\', '/') in exception.mirror_errors)
-      self.assertTrue(
-          isinstance(exception.mirror_errors[url_file.replace('\\', '/')],
-          securesystemslib.exceptions.BadHashError))
-
-    else:
-      self.fail('TUF did not prevent an arbitrary package attack.')
-
-
-if __name__ == '__main__':
-  utils.configure_test_logging(sys.argv)
-  unittest.main()
diff --git a/tests/test_developer_tool_old.py b/tests/test_developer_tool_old.py
deleted file mode 100755
index bec0d62e8f..0000000000
--- a/tests/test_developer_tool_old.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2014 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  test_developer_tool_old.py.
-
-<Authors>
-  Santiago Torres Arias
-  Zane Fisher
-
-<Started>
-  January 22, 2014.
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  Unit test for the 'developer_tool.py' module.
-"""
-
-import os
-import unittest
-import logging
-import tempfile
-import shutil
-import sys
-
-import tuf
-import tuf.log
-import tuf.roledb
-import tuf.keydb
-import tuf.developer_tool as developer_tool
-import tuf.exceptions
-
-import securesystemslib
-import securesystemslib.exceptions
-
-from tuf.developer_tool import METADATA_DIRECTORY_NAME
-from tuf.developer_tool import TARGETS_DIRECTORY_NAME
-
-from tests import utils
-
-logger = logging.getLogger(__name__)
-
-developer_tool.disable_console_log_messages()
-
-class TestProject(unittest.TestCase):
-
-  tmp_dir = None
-
-  @classmethod
-  def setUpClass(cls):
-    cls.tmp_dir = tempfile.mkdtemp(dir = os.getcwd())
-
-
-  @classmethod
-  def tearDownClass(cls):
-    shutil.rmtree(cls.tmp_dir)
-
-
-  def tearDown(self):
-    # Called after every test case.
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-
-
-  def test_create_new_project(self):
-    # Test cases for the create_new_project function.  In this test we check
-    # input validation, correct file creation, and format.  We also check
-    # that a proper object is generated.  We will use the normal layout for
-    # this test suite.
-
-    # Create a local subfolder for this test.
-    local_tmp = tempfile.mkdtemp(dir = self.tmp_dir)
-
-    # These are the usual values we will be passing to the function; however,
-    # we will swap in null or malformed values now and then to test input
-    # validation.
-    project_name = 'test_suite'
-    metadata_directory = local_tmp
-    location_in_repository = '/prefix'
-    targets_directory = None
-    key = None
-
-    # Create a blank project.
-    project = developer_tool.create_new_project(project_name, metadata_directory,
-        location_in_repository)
-
-    self.assertTrue(isinstance(project, developer_tool.Project))
-    self.assertTrue(project.layout_type == 'repo-like')
-    self.assertTrue(project.prefix == location_in_repository)
-    self.assertTrue(project.project_name == project_name)
-    self.assertTrue(project.metadata_directory ==
-        os.path.join(metadata_directory, METADATA_DIRECTORY_NAME))
-    self.assertTrue(project.targets_directory ==
-        os.path.join(metadata_directory, TARGETS_DIRECTORY_NAME))
-
-    # Create a blank project without a prefix.
-    project = developer_tool.create_new_project(project_name, metadata_directory)
-    self.assertTrue(isinstance(project, developer_tool.Project))
-    self.assertTrue(project.layout_type == 'repo-like')
-    self.assertTrue(project.prefix == '')
-    self.assertTrue(project.project_name == project_name)
-    self.assertTrue(project.metadata_directory ==
-        os.path.join(metadata_directory, METADATA_DIRECTORY_NAME))
-    self.assertTrue(project.targets_directory ==
-        os.path.join(metadata_directory, TARGETS_DIRECTORY_NAME))
-
-    # Attempt to create a project with malformed arguments (the project name,
-    # metadata directory, and prefix, in turn).
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        developer_tool.create_new_project,
-        0, metadata_directory, location_in_repository)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        developer_tool.create_new_project,
-        project_name, 0, location_in_repository)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        developer_tool.create_new_project,
-        project_name, metadata_directory, 0)
-
-
-    # Create a new project with a flat layout.
-    targets_directory = tempfile.mkdtemp(dir = local_tmp)
-    metadata_directory = tempfile.mkdtemp(dir = local_tmp)
-    project = developer_tool.create_new_project(project_name, metadata_directory,
-        location_in_repository, targets_directory)
-    self.assertTrue(isinstance(project, developer_tool.Project))
-    self.assertTrue(project.layout_type == 'flat')
-    self.assertTrue(project.prefix == location_in_repository)
-    self.assertTrue(project.project_name == project_name)
-    self.assertTrue(project.metadata_directory == metadata_directory)
-    self.assertTrue(project.targets_directory == targets_directory)
-
-    # Finally, check that if targets_directory is set, it is valid.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        developer_tool.create_new_project,
-        project_name, metadata_directory, location_in_repository, 0)
-
-    # Copy a key to our workspace and create a new project with it.
-    keystore_path = os.path.join('repository_data', 'keystore')
-
-    # I will use the same key as the one provided in the repository
-    # tool tests for the root role, but this is not a root role...
-    root_key_path = os.path.join(keystore_path, 'root_key.pub')
-    project_key = developer_tool.import_rsa_publickey_from_file(root_key_path)
-
-    # Test create new project with a key added by default.
-    project = developer_tool.create_new_project(project_name, metadata_directory,
-        location_in_repository, targets_directory, project_key)
-
-    self.assertTrue(isinstance(project, developer_tool.Project))
-    self.assertTrue(project.layout_type == 'flat')
-    self.assertTrue(project.prefix == location_in_repository)
-    self.assertTrue(project.project_name == project_name)
-    self.assertTrue(project.metadata_directory == metadata_directory)
-    self.assertTrue(project.targets_directory == targets_directory)
-    self.assertTrue(len(project.keys) == 1)
-    self.assertTrue(project.keys[0] == project_key['keyid'])
-
-    # Try to write to an invalid location.  The OSError should be re-raised
-    # by create_new_project().
-    shutil.rmtree(targets_directory)
-    tuf.roledb.clear_roledb()
-    tuf.keydb.clear_keydb()
-
-    metadata_directory = '/'
-    valid_metadata_directory_name = developer_tool.METADATA_DIRECTORY_NAME
-    developer_tool.METADATA_DIRECTORY_NAME = '/'
-
-    try:
-      developer_tool.create_new_project(project_name, metadata_directory,
-          location_in_repository, targets_directory, project_key)
-
-    except (OSError, tuf.exceptions.RepositoryError):
-      pass
-
-    developer_tool.METADATA_DIRECTORY_NAME = valid_metadata_directory_name
-
-
-
-  def test_load_project(self):
-    # This test case will first try to load an existing project and verify
-    # the loaded object.  It will next try to load a nonexistent project and
-    # expect the correct error handling.  Finally, it will try to overwrite
-    # the existing prefix of the loaded project.
-
-    # Create a local subfolder for this test.
-    local_tmp = tempfile.mkdtemp(dir = self.tmp_dir)
-
-    # Test a non-existent project filepath.
-    nonexistent_path = os.path.join(local_tmp, 'nonexistent')
-    self.assertRaises(securesystemslib.exceptions.StorageError,
-        developer_tool.load_project, nonexistent_path)
-
-    # Copy the pregenerated metadata.
-    project_data_filepath = os.path.join('repository_data', 'project')
-    target_project_data_filepath = os.path.join(local_tmp, 'project')
-    shutil.copytree(project_data_filepath, target_project_data_filepath)
-
-    # Properly load a project.
-    repo_filepath = os.path.join(local_tmp, 'project', 'test-flat')
-    new_targets_path = os.path.join(local_tmp, 'project', 'targets')
-    project = developer_tool.load_project(repo_filepath,
-        new_targets_location = new_targets_path)
-    self.assertTrue(project._targets_directory == new_targets_path)
-    self.assertTrue(project.layout_type == 'flat')
-
-    # Load a project overwriting the prefix.
-    project = developer_tool.load_project(repo_filepath, prefix='new')
-    self.assertTrue(project.prefix == 'new')
-
-    # Load a project with a corrupted metadata file.
-    file_to_corrupt = os.path.join(repo_filepath, 'test-flat.json')
-    with open(file_to_corrupt, 'wt') as fp:
-      fp.write('this is not a json file')
-
-    self.assertRaises(securesystemslib.exceptions.Error,
-        developer_tool.load_project, repo_filepath)
-
-
-
-
-  def test_add_verification_keys(self):
-    # Create a new project instance.
-    project = developer_tool.Project('test_verification_keys', 'somepath',
-        'someotherpath', 'prefix')
-
-    # Add an invalid verification key.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        project.add_verification_key, 'invalid')
-
-    # Add a verification key.
-    # - load it first
-    keystore_path = os.path.join('repository_data', 'keystore')
-    first_verification_key_path = os.path.join(keystore_path, 'root_key.pub')
-    first_verification_key = \
-      developer_tool.import_rsa_publickey_from_file(first_verification_key_path)
-
-    project.add_verification_key(first_verification_key)
-
-
-    # Add another verification key (expect an exception).
-    second_verification_key_path = os.path.join(keystore_path, 'snapshot_key.pub')
-    second_verification_key = \
-      developer_tool.import_ed25519_publickey_from_file(second_verification_key_path)
-
-    self.assertRaises(securesystemslib.exceptions.Error,
-        project.add_verification_key, second_verification_key)
-
-
-
-    # Add a verification key for the delegation.
-    project.delegate('somedelegation', [], [])
-    project('somedelegation').add_verification_key(first_verification_key)
-    project('somedelegation').add_verification_key(second_verification_key)
-
-
-    # Add another delegation of the delegation.
-    project('somedelegation').delegate('somesubdelegation', [], [])
-    project('somesubdelegation').add_verification_key(first_verification_key)
-    project('somesubdelegation').add_verification_key(second_verification_key)
-
-
-  def test_write(self):
-
-    # Create a tmp directory.
-    local_tmp = tempfile.mkdtemp(dir=self.tmp_dir)
-
-    # Create a new project inside the tmp directory.
-    project = developer_tool.create_new_project('new_project', local_tmp,
-        'prefix')
-
-    # Create some target files inside the tmp directory.
-    target_filepath = os.path.join(local_tmp, 'targets', 'test_target')
-    with open(target_filepath, 'wt') as fp:
-      fp.write('testing file')
-
-
-    # Add the targets.
-    project.add_target(os.path.basename(target_filepath))
-
-    # Add verification keys.
-    keystore_path = os.path.join('repository_data', 'keystore')
-    project_key_path = os.path.join(keystore_path, 'root_key.pub')
-    project_key = \
-      developer_tool.import_rsa_publickey_from_file(project_key_path)
-
-
-    # Call status (for the sake of doing it and to improve test coverage by
-    # executing its statements).
-    project.status()
-
-    project.add_verification_key(project_key)
-
-
-    # Import a delegation key.
-    delegation_key_path = os.path.join(keystore_path, 'snapshot_key.pub')
-    delegation_key = \
-      developer_tool.import_ed25519_publickey_from_file(delegation_key_path)
-
-    # Import a subdelegation key.
-    subdelegation_key_path = os.path.join(keystore_path, 'timestamp_key.pub')
-    subdelegation_key = \
-      developer_tool.import_ed25519_publickey_from_file(subdelegation_key_path)
-
-    # Add a delegation.
-    project.delegate('delegation', [delegation_key], [])
-    project('delegation').delegate('subdelegation', [subdelegation_key], [])
-
-    # Call write (expect an exception: no signing keys are loaded yet).
-    self.assertRaises(securesystemslib.exceptions.Error, project.write, ())
-
-    # Call status (for the sake of doing it and executing its statements).
-    project.status()
-
-    # Load private keys.
-    project_private_key_path = os.path.join(keystore_path, 'root_key')
-    project_private_key = \
-      developer_tool.import_rsa_privatekey_from_file(project_private_key_path,
-          'password')
-
-    delegation_private_key_path = os.path.join(keystore_path, 'snapshot_key')
-    delegation_private_key = \
-      developer_tool.import_ed25519_privatekey_from_file(delegation_private_key_path,
-          'password')
-
-    subdelegation_private_key_path = \
-      os.path.join(keystore_path, 'timestamp_key')
-    subdelegation_private_key = \
-      developer_tool.import_ed25519_privatekey_from_file(subdelegation_private_key_path,
-          'password')
-
-    # Test partial write.
-    # - backup everything (again)
-    # + backup targets.
-    targets_backup = project.target_files
-
-    # + backup delegations.
-    delegations_backup = \
-      tuf.roledb.get_delegated_rolenames(project.project_name)
-
-    # + backup layout type.
-    layout_type_backup = project.layout_type
-
-    # + backup keyids.
-    keys_backup = project.keys
-    delegation_keys_backup = project('delegation').keys
-
-    # + backup the prefix.
-    prefix_backup = project.prefix
-
-    # + backup the name.
-    name_backup = project.project_name
-
-    # Write and reload.
-    self.assertRaises(securesystemslib.exceptions.Error, project.write)
-    project.write(write_partial=True)
-
-    project = developer_tool.load_project(local_tmp)
-
-    # Check against backup.
-    self.assertEqual(list(project.target_files.keys()), list(targets_backup.keys()))
-    new_delegations = tuf.roledb.get_delegated_rolenames(project.project_name)
-    self.assertEqual(new_delegations, delegations_backup)
-    self.assertEqual(project.layout_type, layout_type_backup)
-    self.assertEqual(project.keys, keys_backup)
-
-    self.assertEqual(project('delegation').keys, delegation_keys_backup)
-
-    self.assertEqual(project.prefix, prefix_backup)
-    self.assertEqual(project.project_name, name_backup)
-
-    roleinfo = tuf.roledb.get_roleinfo(project.project_name)
-
-    self.assertEqual(roleinfo['partial_loaded'], True)
-
-
-
-    # Load signing keys.
-    project('delegation').load_signing_key(delegation_private_key)
-
-    project.status()
-
-    project.load_signing_key(project_private_key)
-
-    # Backup everything.
-    # + backup targets.
-    targets_backup = project.target_files
-
-    # + backup delegations.
-    delegations_backup = \
-      tuf.roledb.get_delegated_rolenames(project.project_name)
-
-    # + backup layout type.
-    layout_type_backup = project.layout_type
-
-    # + backup keyids.
-    keys_backup = project.keys
-    delegation_keys_backup = project('delegation').keys
-
-    # + backup the prefix.
-    prefix_backup = project.prefix
-
-    # + backup the name.
-    name_backup = project.project_name
-
-    # Call status (for the sake of doing it).
-    project.status()
-
-    # Call write.
-    project.write()
-
-    # Call load.
-    project = developer_tool.load_project(local_tmp)
-
-
-    # Check against backup.
-    self.assertEqual(list(project.target_files.keys()), list(targets_backup.keys()))
-
-    new_delegations = tuf.roledb.get_delegated_rolenames(project.project_name)
-    self.assertEqual(new_delegations, delegations_backup)
-    self.assertEqual(project.layout_type, layout_type_backup)
-    self.assertEqual(project.keys, keys_backup)
-    self.assertEqual(project('delegation').keys, delegation_keys_backup)
-    self.assertEqual(project.prefix, prefix_backup)
-    self.assertEqual(project.project_name, name_backup)
-
-
-
-if __name__ == '__main__':
-  utils.configure_test_logging(sys.argv)
-  unittest.main()
diff --git a/tests/test_download_old.py b/tests/test_download_old.py
deleted file mode 100755
index 4af22738de..0000000000
--- a/tests/test_download_old.py
+++ /dev/null
@@ -1,392 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2014 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
-  test_download_old.py
-
-<Author>
-  Konstantin Andrianov.
-
-<Started>
-  March 26, 2012.
-
-<Copyright>
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
-  Unit test for 'download.py'.
-
-  NOTE: Make sure test_download_old.py is run in the 'tuf/tests/' directory.
-  Otherwise, the module that launches the simple server will not be found.
-
-  TODO: Adopt the environment variable management from test_proxy_use.py here.
-"""
-
-import hashlib
-import logging
-import os
-import sys
-import unittest
-import urllib3
-import warnings
-
-import tuf
-import tuf.download as download
-import tuf.requests_fetcher
-import tuf.log
-import tuf.unittest_toolbox as unittest_toolbox
-import tuf.exceptions
-
-from tests import utils
-
-import requests.exceptions
-
-import securesystemslib
-
-logger = logging.getLogger(__name__)
-
-
-class TestDownload(unittest_toolbox.Modified_TestCase):
-  def setUp(self):
-    """
-    Create a temporary file and launch a simple server in the
-    current working directory.
-    """
-
-    unittest_toolbox.Modified_TestCase.setUp(self)
-
-    # Make a temporary file.
-    current_dir = os.getcwd()
-    target_filepath = self.make_temp_data_file(directory=current_dir)
-    self.target_fileobj = open(target_filepath, 'r')
-    self.target_data = self.target_fileobj.read()
-    self.target_data_length = len(self.target_data)
-
-    # Launch a SimpleHTTPServer (serves files in the current dir).
-    self.server_process_handler = utils.TestServerProcess(log=logger)
-
-    rel_target_filepath = os.path.basename(target_filepath)
-    self.url = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
-        + str(self.server_process_handler.port) + '/' + rel_target_filepath
-
-    # Compute the hash of the target file data.
-    m = hashlib.md5()
-    m.update(self.target_data.encode('utf-8'))
-    digest = m.hexdigest()
-    self.target_hash = {'md5': digest}
-
-    # Initialize the default fetcher for the download.
-    self.fetcher = tuf.requests_fetcher.RequestsFetcher()
-
-
-
-  # Stop the server process and perform clean up.
-  def tearDown(self):
-    # Clean up resources and flush the logged lines (if any).
-    self.server_process_handler.clean()
-
-    self.target_fileobj.close()
-
-    # Remove the temp directory.
-    unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
-  # Test: Normal case.
-  def test_download_url_to_tempfileobj(self):
-
-    download_file = download.safe_download
-    with download_file(self.url, self.target_data_length, self.fetcher) as temp_fileobj:
-      temp_fileobj.seek(0)
-      temp_file_data = temp_fileobj.read().decode('utf-8')
-      self.assertEqual(self.target_data, temp_file_data)
-      self.assertEqual(self.target_data_length, len(temp_file_data))
-
-
-  # Test: Download url in more than one chunk.
-  def test_download_url_in_chunks(self):
-
-    # Set a smaller chunk size to ensure that the file will be downloaded
-    # in more than one chunk.
-    default_chunk_size = tuf.settings.CHUNK_SIZE
-    tuf.settings.CHUNK_SIZE = 4
-    # We don't have access to chunks from download_file(),
-    # so we just confirm that the expectation of more than one chunk is
-    # correct and verify that no errors are raised during the download.
-    chunks_count = self.target_data_length / tuf.settings.CHUNK_SIZE
-    self.assertGreater(chunks_count, 1)
-
-    download_file = download.safe_download
-    with download_file(self.url, self.target_data_length, self.fetcher) as temp_fileobj:
-      temp_fileobj.seek(0)
-      temp_file_data = temp_fileobj.read().decode('utf-8')
-      self.assertEqual(self.target_data, temp_file_data)
-      self.assertEqual(self.target_data_length, len(temp_file_data))
-
-    # Restore default settings.
-    tuf.settings.CHUNK_SIZE = default_chunk_size
-
-
-  # Test: Incorrect lengths.
-  def test_download_url_to_tempfileobj_and_lengths(self):
-    # We do *not* catch
-    # 'securesystemslib.exceptions.DownloadLengthMismatchError' in the
-    # following two calls because the file at 'self.url' contains enough bytes
-    # to satisfy the smaller number of required bytes requested.
-    # safe_download() and unsafe_download() will only log a warning when the
-    # server-reported length of the file does not match the required_length.
-    # 'updater.py' *does* verify the hashes of downloaded content.
-    download.safe_download(self.url, self.target_data_length - 4, self.fetcher).close()
-    download.unsafe_download(self.url, self.target_data_length - 4, self.fetcher).close()
-
-    # We catch 'tuf.exceptions.DownloadLengthMismatchError' for safe_download()
-    # because it will not download more bytes than requested (in this case, a
-    # length greater than the size of the target file).
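(The strict behavior that safe_download() is being tested for here reduces to a bounded read loop: never consume more than required_length bytes, and fail if the stream ends before required_length is reached.  A minimal sketch of that idea, using only the standard library; the function name and chunk size are illustrative, not TUF's actual implementation.)

def read_exactly(fileobj, required_length, chunk_size=8192):
    # Strict variant: never read past required_length, and raise if the
    # stream ends early -- roughly what safe_download() enforces, and what
    # unsafe_download() deliberately does not.
    received = b''
    while len(received) < required_length:
        chunk = fileobj.read(min(chunk_size, required_length - len(received)))
        if not chunk:
            raise ValueError('expected %d bytes, got only %d'
                             % (required_length, len(received)))
        received += chunk
    return received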
-    self.assertRaises(tuf.exceptions.DownloadLengthMismatchError,
-        download.safe_download, self.url, self.target_data_length + 1, self.fetcher)
-
-    # Calling unsafe_download() with a mismatched length should not raise an
-    # exception.
-    download.unsafe_download(self.url, self.target_data_length + 1, self.fetcher).close()
-
-
-
-  def test_download_url_to_tempfileobj_and_performance(self):
-
-    """
-    # Measuring performance of 'auto_flush = False' vs. 'auto_flush = True'
-    # in download._download_file() during write.  No change was observed.
-    star_cpu = time.clock()
-    star_real = time.time()
-
-    temp_fileobj = download_file(self.url,
-        self.target_data_length)
-
-    end_cpu = time.clock()
-    end_real = time.time()
-
-    self.assertEqual(self.target_data, temp_fileobj.read())
-    self.assertEqual(self.target_data_length, len(temp_fileobj.read()))
-    temp_fileobj.close()
-
-    print "Performance cpu time: "+str(end_cpu - star_cpu)
-    print "Performance real time: "+str(end_real - star_real)
-
-    # TODO: [Not urgent] Show the difference by setting write(auto_flush=False)
-    """
-
-
-  # Test: Incorrect/Unreachable URLs.
-  def test_download_url_to_tempfileobj_and_urls(self):
-
-    download_file = download.safe_download
-    unsafe_download_file = download.unsafe_download
-
-    with self.assertRaises(securesystemslib.exceptions.FormatError):
-      download_file(None, self.target_data_length, self.fetcher)
-
-    url = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
-        + str(self.server_process_handler.port) + '/' + self.random_string()
-    with self.assertRaises(tuf.exceptions.FetcherHTTPError) as cm:
-      download_file(url, self.target_data_length, self.fetcher)
-    self.assertEqual(cm.exception.status_code, 404)
-
-    url1 = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
-        + str(self.server_process_handler.port + 1) + '/' + self.random_string()
-    with self.assertRaises(requests.exceptions.ConnectionError):
-      download_file(url1, self.target_data_length, self.fetcher)
-
-    # Specify an unsupported URI scheme.
-    url_with_unsupported_uri = self.url.replace('http', 'file')
-    self.assertRaises(requests.exceptions.InvalidSchema, download_file,
-        url_with_unsupported_uri, self.target_data_length, self.fetcher)
-    self.assertRaises(requests.exceptions.InvalidSchema, unsafe_download_file,
-        url_with_unsupported_uri, self.target_data_length, self.fetcher)
-
-
-
-
-
-  '''
-  # This test uses sites on the internet, requiring a net connection to
-  # succeed.  Since this is the only such test in TUF, I'm not going to
-  # enable it... but it's here in case it's useful for diagnosis.
-  def test_https_validation(self):
-    """
-    Use some known URLs on the net to ensure that TUF download checks SSL
-    certificates appropriately.
-    """
-    # We should never get as far as the target file download itself, so the
-    # length we pass to safe_download and unsafe_download shouldn't matter.
-    irrelevant_length = 10
-
-    for bad_url in [
-        'https://expired.badssl.com/',  # expired certificate
-        'https://wrong.host.badssl.com/', ]:  # hostname verification fail
-
-      with self.assertRaises(requests.exceptions.SSLError):
-        download.safe_download(bad_url, irrelevant_length)
-
-      with self.assertRaises(requests.exceptions.SSLError):
-        download.unsafe_download(bad_url, irrelevant_length)
-  '''
-
-
-
-
-  def test_https_connection(self):
-    """
-    Try various HTTPS downloads using trusted and untrusted certificates with
-    and without the correct hostname listed in the SSL certificate.
-    """
-    # Make a temporary file to be served to the client.
-    current_directory = os.getcwd()
-    target_filepath = self.make_temp_data_file(directory=current_directory)
-
-    with open(target_filepath, 'r') as target_file_object:
-      target_data_length = len(target_file_object.read())
-
-    # These cert files provide various test cases:
-    # good:    A valid cert from an older generation of test_download.py
-    #          tests.
-    # good2:   A valid cert made simultaneously to the bad certs below, with
-    #          the same settings otherwise, tested here in case the difference
-    #          between the way the new bad certs and the old good cert were
-    #          generated turns out to matter at some point.
-    # bad:     An otherwise-valid cert with the wrong hostname.  The good
-    #          certs list "localhost", but this lists "notmyhostname".
-    # expired: An otherwise-valid cert which is expired (no valid dates
-    #          exist; in fact, startdate > enddate).
-    good_cert_fname = os.path.join('ssl_certs', 'ssl_cert.crt')
-    good2_cert_fname = os.path.join('ssl_certs', 'ssl_cert_2.crt')
-    bad_cert_fname = os.path.join('ssl_certs', 'ssl_cert_wronghost.crt')
-    expired_cert_fname = os.path.join('ssl_certs', 'ssl_cert_expired.crt')
-
-    # Launch four HTTPS servers (serve files in the current dir).
-    # 1: we expect to operate correctly
-    # 2: also good; uses a slightly different cert (controls for the cert
-    #    generation method used for the next two, in case it comes to matter)
-    # 3: run with an HTTPS certificate with an unexpected hostname
-    # 4: run with an HTTPS certificate that is expired
-    # Be sure to offset from the port used in setUp to avoid collision.
-
-
-    good_https_server_handler = utils.TestServerProcess(log=logger,
-        server='simple_https_server_old.py',
-        extra_cmd_args=[good_cert_fname])
-    good2_https_server_handler = utils.TestServerProcess(log=logger,
-        server='simple_https_server_old.py',
-        extra_cmd_args=[good2_cert_fname])
-    bad_https_server_handler = utils.TestServerProcess(log=logger,
-        server='simple_https_server_old.py',
-        extra_cmd_args=[bad_cert_fname])
-    expd_https_server_handler = utils.TestServerProcess(log=logger,
-        server='simple_https_server_old.py',
-        extra_cmd_args=[expired_cert_fname])
-
-    suffix = '/' + os.path.basename(target_filepath)
-    good_https_url = 'https://localhost:' \
-        + str(good_https_server_handler.port) + suffix
-    good2_https_url = 'https://localhost:' \
-        + str(good2_https_server_handler.port) + suffix
-    bad_https_url = 'https://localhost:' \
-        + str(bad_https_server_handler.port) + suffix
-    expired_https_url = 'https://localhost:' \
-        + str(expd_https_server_handler.port) + suffix
-
-    # Download the target file using an HTTPS connection.
-
-    # Use try-finally solely to ensure that the server processes are killed.
-    try:
-      # Trust the certfile that happens to use a different hostname than we
-      # will expect.
-      os.environ['REQUESTS_CA_BUNDLE'] = bad_cert_fname
-      # Clear sessions to ensure that the certificate we just specified is
-      # used.
-      # TODO: Confirm necessity of this session clearing and lay out mechanics.
-      self.fetcher._sessions = {}
-
-      # Try connecting to the server process with the bad cert while trusting
-      # the bad cert.  Expect failure because even though we trust it, the
-      # hostname we're connecting to does not match the hostname in the cert.
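(What the test exercises through requests can be reproduced with the standard library alone: a default SSL context enforces both chain validation and hostname matching, so a trusted certificate issued for 'notmyhostname' still fails a handshake against 'localhost'.  A minimal sketch under that assumption; the function name is illustrative.)

import socket
import ssl

def tls_handshake(host, port, ca_bundle):
    # check_hostname is enabled by default in create_default_context(), so a
    # trusted but wrong-host cert still fails with a certificate
    # verification error, just as requests raises SSLError below.
    context = ssl.create_default_context(cafile=ca_bundle)
    with socket.create_connection((host, port)) as sock:
        with context.wrap_socket(sock, server_hostname=host) as tls_sock:
            return tls_sock.version()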
- logger.info('Trying HTTPS download of target file: ' + bad_https_url) - with warnings.catch_warnings(): - # We're ok with a slightly fishy localhost cert - warnings.filterwarnings('ignore', - category=urllib3.exceptions.SubjectAltNameWarning) - - with self.assertRaises(requests.exceptions.SSLError): - download.safe_download(bad_https_url, target_data_length, self.fetcher) - with self.assertRaises(requests.exceptions.SSLError): - download.unsafe_download(bad_https_url, target_data_length, self.fetcher) - - # Try connecting to the server processes with the good certs while not - # trusting the good certs (trusting the bad cert instead). Expect failure - # because even though the server's cert file is otherwise OK, we don't - # trust it. - logger.info('Trying HTTPS download of target file: ' + good_https_url) - with self.assertRaises(requests.exceptions.SSLError): - download.safe_download(good_https_url, target_data_length, self.fetcher) - with self.assertRaises(requests.exceptions.SSLError): - download.unsafe_download(good_https_url, target_data_length, self.fetcher) - - logger.info('Trying HTTPS download of target file: ' + good2_https_url) - with self.assertRaises(requests.exceptions.SSLError): - download.safe_download(good2_https_url, target_data_length, self.fetcher) - with self.assertRaises(requests.exceptions.SSLError): - download.unsafe_download(good2_https_url, target_data_length, self.fetcher) - - - # Configure environment to now trust the certfile that is expired. - os.environ['REQUESTS_CA_BUNDLE'] = expired_cert_fname - # Clear sessions to ensure that the certificate we just specified is used. - # TODO: Confirm necessity of this session clearing and lay out mechanics. - self.fetcher._sessions = {} - - # Try connecting to the server process with the expired cert while - # trusting the expired cert. Expect failure because even though we trust - # it, it is expired. - logger.info('Trying HTTPS download of target file: ' + expired_https_url) - with self.assertRaises(requests.exceptions.SSLError): - download.safe_download(expired_https_url, target_data_length, self.fetcher) - with self.assertRaises(requests.exceptions.SSLError): - download.unsafe_download(expired_https_url, target_data_length, self.fetcher) - - - # Try connecting to the server processes with the good certs while - # trusting the appropriate good certs. Expect success. - # TODO: expand testing to switch expected certificates back and forth a - # bit more while clearing / not clearing sessions. - os.environ['REQUESTS_CA_BUNDLE'] = good_cert_fname - # Clear sessions to ensure that the certificate we just specified is used. - # TODO: Confirm necessity of this session clearing and lay out mechanics. - self.fetcher._sessions = {} - logger.info('Trying HTTPS download of target file: ' + good_https_url) - download.safe_download(good_https_url, target_data_length, self.fetcher).close() - download.unsafe_download(good_https_url, target_data_length,self.fetcher).close() - - os.environ['REQUESTS_CA_BUNDLE'] = good2_cert_fname - # Clear sessions to ensure that the certificate we just specified is used. - # TODO: Confirm necessity of this session clearing and lay out mechanics. 
- self.fetcher._sessions = {} - logger.info('Trying HTTPS download of target file: ' + good2_https_url) - download.safe_download(good2_https_url, target_data_length, self.fetcher).close() - download.unsafe_download(good2_https_url, target_data_length, self.fetcher).close() - - finally: - for proc_handler in [ - good_https_server_handler, - good2_https_server_handler, - bad_https_server_handler, - expd_https_server_handler]: - - # Cleans the resources and flush the logged lines (if any). - proc_handler.clean() - - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_endless_data_attack_old.py b/tests/test_endless_data_attack_old.py deleted file mode 100755 index aafed1a26c..0000000000 --- a/tests/test_endless_data_attack_old.py +++ /dev/null @@ -1,272 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_endless_data_attack_old.py - - - Konstantin Andrianov. - - - March 13, 2012. - - April 3, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. Minor edits to the test cases. - -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate an endless data attack, where an updater client tries to download a - target file modified by an attacker to contain a large amount of data (a TUF - client should only download up to the file's expected length). TUF and - non-TUF client scenarios are tested. - - There is no difference between 'updates' and 'target' files. -""" - -import os -import tempfile -import shutil -import json -import logging -import unittest -import sys -from urllib import request - -import tuf -import tuf.formats -import tuf.log -import tuf.client.updater as updater -import tuf.unittest_toolbox as unittest_toolbox -import tuf.roledb - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) - - -class TestEndlessDataAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. 
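The endless data attack described in the docstring above only succeeds against a client that reads to end-of-stream. The defense the tests below verify is a hard cap on the number of bytes read; a minimal stdlib sketch (tuf's own downloader additionally streams in chunks and verifies hashes):

import urllib.request

def capped_fetch(url, expected_length):
    # Read at most expected_length bytes, regardless of how much data the
    # (possibly malicious) server is prepared to send.
    with urllib.request.urlopen(url) as response:
        return response.read(expected_length)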
- unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.client_directory = os.path.join(temporary_repository_root, 'client') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - - # Set the url prefix required by the 'tuf/client/updater.py' updater. - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repository_directory' with the temporary client - # directory copied from the original repository files. - tuf.settings.repositories_directory = self.client_directory - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Create the repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Logs stdout and stderr from the server subprocess. - self.server_process_handler.flush_log() - - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - def test_without_tuf(self): - # Verify that a target file replaced with a larger malicious version (to - # simulate an endless data attack) is downloaded by a non-TUF client (i.e., - # a non-TUF client that does not verify hashes, detect mix-and-match attacks, - # etc.) A TUF client, on the other hand, should only download target files - # up to their expected lengths, as explicitly specified in metadata, or - # 'tuf.settings.py' (when retrieving 'timestamp.json' and 'root.json' unsafely.) - - # Test: Download a valid target file from the repository. - # Ensure the target file to be downloaded has not already been downloaded, - # and generate its file size and digest. The file size and digest are needed - # to verify that the malicious file was indeed downloaded.
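securesystemslib.util.get_file_details, used throughout the assertions below, returns a (length, hashes) pair. A rough stdlib equivalent for sha256 only, as a sketch rather than the securesystemslib implementation:

import hashlib

def sketch_file_details(filepath):
    # Length in bytes plus a digest map, mirroring the (length, hashes)
    # shape the tests compare with make_targets_fileinfo().
    with open(filepath, 'rb') as fileobj:
        data = fileobj.read()
    return len(data), {'sha256': hashlib.sha256(data).hexdigest()}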
- target_path = os.path.join(self.repository_directory, 'targets', 'file1.txt') - client_target_path = os.path.join(self.client_directory, 'file1.txt') - self.assertFalse(os.path.exists(client_target_path)) - length, hashes = securesystemslib.util.get_file_details(target_path) - fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'targets', 'file1.txt') - - # On Windows, the URL portion should not contain backslashes. - request.urlretrieve(url_file.replace('\\', '/'), client_target_path) - - self.assertTrue(os.path.exists(client_target_path)) - length, hashes = securesystemslib.util.get_file_details(client_target_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - self.assertEqual(fileinfo, download_fileinfo) - - # Test: Download a target file that has been modified by an attacker with - # extra data. - with open(target_path, 'a') as file_object: - file_object.write('append large amount of data' * 100000) - large_length, hashes = securesystemslib.util.get_file_details(target_path) - malicious_fileinfo = tuf.formats.make_targets_fileinfo(large_length, hashes) - - # Is the modified file actually larger? - self.assertTrue(large_length > length) - - # On Windows, the URL portion should not contain backslashes. - request.urlretrieve(url_file.replace('\\', '/'), client_target_path) - - length, hashes = securesystemslib.util.get_file_details(client_target_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - # Verify 'download_fileinfo' is unequal to the original trusted version. - self.assertNotEqual(download_fileinfo, fileinfo) - - # Verify 'download_fileinfo' is equal to the malicious version. - self.assertEqual(download_fileinfo, malicious_fileinfo) - - - - def test_with_tuf(self): - # Verify that a target file (on the remote repository) modified by an - # attacker, to contain a large amount of extra data, is not downloaded by - # the TUF client. First test that the valid target file is successfully - # downloaded. - file1_fileinfo = self.repository_updater.get_one_valid_targetinfo('file1.txt') - destination = os.path.join(self.client_directory) - self.repository_updater.download_target(file1_fileinfo, destination) - client_target_path = os.path.join(destination, 'file1.txt') - self.assertTrue(os.path.exists(client_target_path)) - - # Verify the client's downloaded file matches the repository's. - target_path = os.path.join(self.repository_directory, 'targets', 'file1.txt') - length, hashes = securesystemslib.util.get_file_details(client_target_path) - fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - length, hashes = securesystemslib.util.get_file_details(client_target_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - self.assertEqual(fileinfo, download_fileinfo) - - # Modify 'file1.txt' and confirm that the TUF client only downloads up to - # the expected file length. - with open(target_path, 'a') as file_object: - file_object.write('append large amount of data' * 10000) - - # Is the modified file actually larger? - large_length, hashes = securesystemslib.util.get_file_details(target_path) - self.assertTrue(large_length > length) - - os.remove(client_target_path) - self.repository_updater.download_target(file1_fileinfo, destination) - - # A large amount of data has been appended to the original content. 
The - # extra data appended should be discarded by the client, so the downloaded - # file size and hash should not have changed. - length, hashes = securesystemslib.util.get_file_details(client_target_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - self.assertEqual(fileinfo, download_fileinfo) - - # Test that the TUF client does not download large metadata files, as well. - timestamp_path = os.path.join(self.repository_directory, 'metadata', - 'timestamp.json') - - original_length, hashes = securesystemslib.util.get_file_details(timestamp_path) - - with open(timestamp_path, 'r+') as file_object: - timestamp_content = securesystemslib.util.load_json_file(timestamp_path) - large_data = 'LargeTimestamp' * 10000 - timestamp_content['signed']['_type'] = large_data - json.dump(timestamp_content, file_object, indent=1, sort_keys=True) - - - modified_length, hashes = securesystemslib.util.get_file_details(timestamp_path) - self.assertTrue(modified_length > original_length) - - # Does the TUF client download the upper limit of an unsafely fetched - # 'timestamp.json'? 'timestamp.json' must not be greater than - # 'tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH'. - try: - self.repository_updater.refresh() - - except tuf.exceptions.NoWorkingMirrorError as exception: - for mirror_url, mirror_error in exception.mirror_errors.items(): - self.assertTrue(isinstance(mirror_error, securesystemslib.exceptions.Error)) - - else: - self.fail('TUF did not prevent an endless data attack.') - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_extraneous_dependencies_attack_old.py b/tests/test_extraneous_dependencies_attack_old.py deleted file mode 100755 index f086e7e86f..0000000000 --- a/tests/test_extraneous_dependencies_attack_old.py +++ /dev/null @@ -1,214 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2013 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_extraneous_dependencies_attack_old.py - - - Zane Fisher. - - - August 19, 2013. - - April 6, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. Modify the previous scenario - simulated for the mix-and-match attack. The metadata that specified the - dependencies of a project modified (previously a text file.) - -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate an extraneous dependencies attack. The client attempts to download - a file, which lists all the target dependencies, with one legitimate - dependency, and one extraneous dependency. A client should not download a - target dependency even if it is found on the repository. Valid targets are - listed and verified by TUF metadata, such as 'targets.txt'. - - There is no difference between 'updates' and 'target' files. 
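A wrinkle worth spelling out from the timestamp test above: 'timestamp.json' is fetched before the client holds any trusted hashes or length for it, so the only possible cap is a fixed ceiling such as tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH. A sketch of that check, with illustrative names rather than the tuf implementation:

def check_unsafe_metadata_length(received, ceiling):
    # For unsafely fetched metadata the cap is an upper bound, not an exact
    # value; anything beyond it is treated as an endless data attack.
    if len(received) > ceiling:
        raise ValueError('metadata exceeds the %d-byte ceiling' % ceiling)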
-""" - -import os -import tempfile -import shutil -import json -import logging -import unittest -import sys - -import tuf.formats -import tuf.log -import tuf.client.updater as updater -import tuf.roledb -import tuf.keydb -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) - - - -class TestExtraneousDependenciesAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client') - original_keystore = os.path.join(original_repository_files, 'keystore') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.client_directory = os.path.join(temporary_repository_root, 'client') - self.keystore_directory = os.path.join(temporary_repository_root, 'keystore') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_keystore, self.keystore_directory) - - # Set the url prefix required by the 'tuf/client/updater.py' updater. - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. 
- repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repository_directory' with the temporary client - # directory copied from the original repository files. - tuf.settings.repositories_directory = self.client_directory - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Create the repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Logs stdout and stderr from the server subprocess. - self.server_process_handler.flush_log() - - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - def test_with_tuf(self): - # An attacker tries to trick a client into installing an extraneous target - # file (a valid file on the repository, in this case) by listing it in the - # project's metadata file. For the purposes of test_with_tuf(), - # 'role1.json' is treated as the metadata file that indicates all - # the files needed to install/update the 'role1' project. The attacker - # simply adds the extraneous target file to 'role1.json', which the TUF - # client should reject as improperly signed. - role1_filepath = os.path.join(self.repository_directory, 'metadata', - 'role1.json') - file1_filepath = os.path.join(self.repository_directory, 'targets', - 'file1.txt') - length, hashes = securesystemslib.util.get_file_details(file1_filepath) - - role1_metadata = securesystemslib.util.load_json_file(role1_filepath) - role1_metadata['signed']['targets']['/file2.txt'] = {} - role1_metadata['signed']['targets']['/file2.txt']['hashes'] = hashes - role1_metadata['signed']['targets']['/file2.txt']['length'] = length - - tuf.formats.check_signable_object_format(role1_metadata) - - with open(role1_filepath, 'wt') as file_object: - json.dump(role1_metadata, file_object, indent=1, sort_keys=True) - - # Un-install the metadata of the top-level roles so that the client can - # download and detect the invalid 'role1.json'. - os.remove(os.path.join(self.client_directory, self.repository_name, - 'metadata', 'current', 'snapshot.json')) - os.remove(os.path.join(self.client_directory, self.repository_name, - 'metadata', 'current', 'targets.json')) - os.remove(os.path.join(self.client_directory, self.repository_name, - 'metadata', 'current', 'timestamp.json')) - os.remove(os.path.join(self.client_directory, self.repository_name, - 'metadata', 'current', 'role1.json')) - - # Verify that the TUF client rejects the invalid metadata and refuses to - # continue the update process. - self.repository_updater.refresh() - - try: - with utils.ignore_deprecation_warnings('tuf.client.updater'): - self.repository_updater.targets_of_role('role1') - - # Verify that the specific 'securesystemslib.exceptions.BadSignatureError' - # exception is raised by each mirror. - except tuf.exceptions.NoWorkingMirrorError as exception: - for mirror_url, mirror_error in exception.mirror_errors.items(): - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'metadata', 'role1.json') - - # Verify that 'role1.json' is the culprit.
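As an aside on why the tampering in test_with_tuf is caught: signatures cover a canonical encoding of the 'signed' object, so adding '/file2.txt' changes the signed bytes and every pre-existing signature goes stale. A toy illustration with a hash standing in for a real signature (TUF canonicalizes via securesystemslib's encode_canonical and verifies actual signatures):

import hashlib
import json

def canonical_digest(signed):
    # Deterministic serialization stands in for canonical JSON here.
    canonical = json.dumps(signed, sort_keys=True, separators=(',', ':'))
    return hashlib.sha256(canonical.encode('utf-8')).hexdigest()

signed = {'targets': {'/file1.txt': {'length': 31}}}
before = canonical_digest(signed)
signed['targets']['/file2.txt'] = {'length': 99}  # the extraneous entry
assert canonical_digest(signed) != before  # old signatures no longer verify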
- self.assertEqual(url_file.replace('\\', '/'), mirror_url) - self.assertTrue(isinstance(mirror_error, securesystemslib.exceptions.BadSignatureError)) - - else: - self.fail('TUF did not prevent an extraneous dependencies attack.') - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_fetcher_old.py b/tests/test_fetcher_old.py deleted file mode 100644 index 10e43354bf..0000000000 --- a/tests/test_fetcher_old.py +++ /dev/null @@ -1,134 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2021, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -"""Unit test for RequestsFetcher. -""" - -import io -import logging -import math -import os -import sys -import tempfile -import unittest - -import tuf -import tuf.exceptions -import tuf.requests_fetcher -from tests import utils -from tuf import unittest_toolbox - -logger = logging.getLogger(__name__) - - -class TestFetcher(unittest_toolbox.Modified_TestCase): - """Unit tests for RequestFetcher.""" - - def setUp(self): - """ - Create a temporary file and launch a simple server in the - current working directory. - """ - - unittest_toolbox.Modified_TestCase.setUp(self) - - # Making a temporary file. - current_dir = os.getcwd() - target_filepath = self.make_temp_data_file(directory=current_dir) - with open(target_filepath, "r", encoding="utf8") as target_fileobj: - self.file_contents = target_fileobj.read() - self.file_length = len(self.file_contents) - - # Launch a SimpleHTTPServer (serves files in the current dir). - self.server_process_handler = utils.TestServerProcess(log=logger) - - rel_target_filepath = os.path.basename(target_filepath) - self.url = ( - "http://" - + utils.TEST_HOST_ADDRESS - + ":" - + str(self.server_process_handler.port) - + "/" - + rel_target_filepath - ) - - # Create a temporary file where the target file chunks are written - # during fetching - # pylint: disable-next=consider-using-with - self.temp_file = tempfile.TemporaryFile() - self.fetcher = tuf.requests_fetcher.RequestsFetcher() - - # Stop server process and perform clean up. - def tearDown(self): - # Cleans the resources and flush the logged lines (if any). - self.server_process_handler.clean() - - self.temp_file.close() - - # Remove temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - # Test: Normal case. 
- def test_fetch(self): - for chunk in self.fetcher.fetch(self.url, self.file_length): - self.temp_file.write(chunk) - - self.temp_file.seek(0) - temp_file_data = self.temp_file.read().decode("utf-8") - self.assertEqual(self.file_contents, temp_file_data) - - # Test if fetcher downloads file up to a required length - def test_fetch_restricted_length(self): - for chunk in self.fetcher.fetch(self.url, self.file_length - 4): - self.temp_file.write(chunk) - - self.temp_file.seek(0, io.SEEK_END) - self.assertEqual(self.temp_file.tell(), self.file_length - 4) - - # Test that fetcher does not download more than actual file length - def test_fetch_upper_length(self): - for chunk in self.fetcher.fetch(self.url, self.file_length + 4): - self.temp_file.write(chunk) - - self.temp_file.seek(0, io.SEEK_END) - self.assertEqual(self.temp_file.tell(), self.file_length) - - # Test incorrect URL parsing - def test_url_parsing(self): - with self.assertRaises(tuf.exceptions.URLParsingError): - self.fetcher.fetch(self.random_string(), self.file_length) - - # Test: Normal case with url data downloaded in more than one chunk - def test_fetch_in_chunks(self): - # Set smaller chunk size to ensure that the file will be downloaded - # in more than one chunk - default_chunk_size = tuf.settings.CHUNK_SIZE - tuf.settings.CHUNK_SIZE = 4 - - # expected_chunks_count: 3 - expected_chunks_count = math.ceil( - self.file_length / tuf.settings.CHUNK_SIZE - ) - self.assertEqual(expected_chunks_count, 3) - - chunks_count = 0 - for chunk in self.fetcher.fetch(self.url, self.file_length): - self.temp_file.write(chunk) - chunks_count += 1 - - self.temp_file.seek(0) - temp_file_data = self.temp_file.read().decode("utf-8") - self.assertEqual(self.file_contents, temp_file_data) - # Check that we calculate chunks as expected - self.assertEqual(chunks_count, expected_chunks_count) - - # Restore default settings - tuf.settings.CHUNK_SIZE = default_chunk_size - - -# Run unit test. -if __name__ == "__main__": - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_formats_old.py b/tests/test_formats_old.py deleted file mode 100755 index 498be2d107..0000000000 --- a/tests/test_formats_old.py +++ /dev/null @@ -1,971 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_formats_old.py - - - Vladimir Diaz - - - October 2012. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'formats.py' -""" - -import unittest -import datetime -import sys -import os - -import tuf -import tuf.formats - -from tests import utils - -import securesystemslib -import securesystemslib.util - - -class TestFormats(unittest.TestCase): - def setUp(self): - pass - - - - def tearDown(self): - pass - - - - def test_schemas(self): - # Test conditions for valid schemas. 
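The chunk arithmetic exercised by test_fetch_in_chunks above generalizes to any chunked reader. A stdlib sketch of such a generator, illustrative rather than tuf.requests_fetcher.RequestsFetcher:

import urllib.request

def fetch_in_chunks(url, required_length, chunk_size=4):
    # Yields at most ceil(required_length / chunk_size) chunks and never
    # reads past required_length (compare test_fetch_upper_length above).
    received = 0
    with urllib.request.urlopen(url) as response:
        while received < required_length:
            chunk = response.read(min(chunk_size, required_length - received))
            if not chunk:
                break
            received += len(chunk)
            yield chunk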
- valid_schemas = { - 'ISO8601_DATETIME_SCHEMA': (securesystemslib.formats.ISO8601_DATETIME_SCHEMA, - '1985-10-21T13:20:00Z'), - - 'UNIX_TIMESTAMP_SCHEMA': (securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA, 499137720), - - 'HASH_SCHEMA': (securesystemslib.formats.HASH_SCHEMA, 'A4582BCF323BCEF'), - - 'HASHDICT_SCHEMA': (securesystemslib.formats.HASHDICT_SCHEMA, - {'sha256': 'A4582BCF323BCEF'}), - - 'HEX_SCHEMA': (securesystemslib.formats.HEX_SCHEMA, 'A4582BCF323BCEF'), - - 'KEYID_SCHEMA': (securesystemslib.formats.KEYID_SCHEMA, '123456789abcdef'), - - 'KEYIDS_SCHEMA': (securesystemslib.formats.KEYIDS_SCHEMA, - ['123456789abcdef', '123456789abcdef']), - - 'SCHEME_SCHEMA': (securesystemslib.formats.SCHEME_SCHEMA, 'rsassa-pss-sha256'), - - 'RELPATH_SCHEMA': (tuf.formats.RELPATH_SCHEMA, 'metadata/root/'), - - 'RELPATHS_SCHEMA': (tuf.formats.RELPATHS_SCHEMA, - ['targets/role1/', 'targets/role2/']), - - 'PATH_SCHEMA': (securesystemslib.formats.PATH_SCHEMA, '/home/someuser/'), - - 'PATHS_SCHEMA': (securesystemslib.formats.PATHS_SCHEMA, - ['/home/McFly/', '/home/Tannen/']), - - 'URL_SCHEMA': (securesystemslib.formats.URL_SCHEMA, - 'https://www.updateframework.com/'), - - 'VERSION_SCHEMA': (tuf.formats.VERSION_SCHEMA, - {'major': 1, 'minor': 0, 'fix': 8}), - - 'LENGTH_SCHEMA': (tuf.formats.LENGTH_SCHEMA, 8), - - 'NAME_SCHEMA': (securesystemslib.formats.NAME_SCHEMA, 'Marty McFly'), - - 'BOOLEAN_SCHEMA': (securesystemslib.formats.BOOLEAN_SCHEMA, True), - - 'THRESHOLD_SCHEMA': (tuf.formats.THRESHOLD_SCHEMA, 1), - - 'ROLENAME_SCHEMA': (tuf.formats.ROLENAME_SCHEMA, 'Root'), - - 'RSAKEYBITS_SCHEMA': (securesystemslib.formats.RSAKEYBITS_SCHEMA, 4096), - - 'PASSWORD_SCHEMA': (securesystemslib.formats.PASSWORD_SCHEMA, 'secret'), - - 'PASSWORDS_SCHEMA': (securesystemslib.formats.PASSWORDS_SCHEMA, ['pass1', 'pass2']), - - 'KEYVAL_SCHEMA': (securesystemslib.formats.KEYVAL_SCHEMA, - {'public': 'pubkey', 'private': 'privkey'}), - - 'KEY_SCHEMA': (securesystemslib.formats.KEY_SCHEMA, - {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}), - - 'RSAKEY_SCHEMA': (securesystemslib.formats.RSAKEY_SCHEMA, - {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyid': '123456789abcdef', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}), - - 'TARGETS_FILEINFO_SCHEMA': (tuf.formats.TARGETS_FILEINFO_SCHEMA, - {'length': 1024, - 'hashes': {'sha256': 'A4582BCF323BCEF'}, - 'custom': {'type': 'paintjob'}}), - - 'METADATA_FILEINFO_SCHEMA': (tuf.formats.METADATA_FILEINFO_SCHEMA, - {'length': 1024, - 'hashes': {'sha256': 'A4582BCF323BCEF'}, - 'version': 1}), - - 'FILEDICT_SCHEMA': (tuf.formats.FILEDICT_SCHEMA, - {'metadata/root.json': {'length': 1024, - 'hashes': {'sha256': 'ABCD123'}, - 'custom': {'type': 'metadata'}}}), - - 'TARGETINFO_SCHEMA': (tuf.formats.TARGETINFO_SCHEMA, - {'filepath': 'targets/target1.gif', - 'fileinfo': {'length': 1024, - 'hashes': {'sha256': 'ABCD123'}, - 'custom': {'type': 'target'}}}), - - 'TARGETINFOS_SCHEMA': (tuf.formats.TARGETINFOS_SCHEMA, - [{'filepath': 'targets/target1.gif', - 'fileinfo': {'length': 1024, - 'hashes': {'sha256': 'ABCD123'}, - 'custom': {'type': 'target'}}}]), - - 'SIGNATURE_SCHEMA': (securesystemslib.formats.SIGNATURE_SCHEMA, - {'keyid': '123abc', - 'sig': 'A4582BCF323BCEF'}), - - 'SIGNATURESTATUS_SCHEMA': (tuf.formats.SIGNATURESTATUS_SCHEMA, - {'threshold': 1, - 'good_sigs': ['123abc'], - 'bad_sigs': ['123abc'], - 'unknown_sigs': ['123abc'], - 'untrusted_sigs': ['123abc'], - 'unknown_signing_schemes': 
['123abc']}), - - 'SIGNABLE_SCHEMA': (tuf.formats.SIGNABLE_SCHEMA, - {'signed': 'signer', - 'signatures': [{'keyid': '123abc', - 'sig': 'A4582BCF323BCEF'}]}), - - 'KEYDICT_SCHEMA': (securesystemslib.formats.KEYDICT_SCHEMA, - {'123abc': {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}}), - - 'KEYDB_SCHEMA': (tuf.formats.KEYDB_SCHEMA, - {'123abc': {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyid': '123456789abcdef', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}}), - - 'SCPCONFIG_SCHEMA': (tuf.formats.SCPCONFIG_SCHEMA, - {'general': {'transfer_module': 'scp', - 'metadata_path': '/path/meta.json', - 'targets_directory': '/targets'}, - 'scp': {'host': 'http://localhost:8001', - 'user': 'McFly', - 'identity_file': '/home/.ssh/file', - 'remote_directory': '/home/McFly'}}), - - 'RECEIVECONFIG_SCHEMA': (tuf.formats.RECEIVECONFIG_SCHEMA, - {'general': {'transfer_module': 'scp', - 'pushroots': ['/pushes'], - 'repository_directory': '/repo', - 'metadata_directory': '/repo/meta', - 'targets_directory': '/repo/targets', - 'backup_directory': '/repo/backup'}}), - - 'ROLE_SCHEMA': (tuf.formats.ROLE_SCHEMA, - {'keyids': ['123abc'], - 'threshold': 1, - 'paths': ['path1/', 'path2']}), - - 'ROLEDICT_SCHEMA': (tuf.formats.ROLEDICT_SCHEMA, - {'root': {'keyids': ['123abc'], - 'threshold': 1, - 'paths': ['path1/', 'path2']}}), - - 'ROOT_SCHEMA': (tuf.formats.ROOT_SCHEMA, - {'_type': 'root', - 'spec_version': '1.0.0', - 'version': 8, - 'consistent_snapshot': False, - 'expires': '1985-10-21T13:20:00Z', - 'keys': {'123abc': {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}}, - 'roles': {'root': {'keyids': ['123abc'], - 'threshold': 1, - 'paths': ['path1/', 'path2']}}}), - - 'TARGETS_SCHEMA': (tuf.formats.TARGETS_SCHEMA, - {'_type': 'targets', - 'spec_version': '1.0.0', - 'version': 8, - 'expires': '1985-10-21T13:20:00Z', - 'targets': {'metadata/targets.json': {'length': 1024, - 'hashes': {'sha256': 'ABCD123'}, - 'custom': {'type': 'metadata'}}}, - 'delegations': {'keys': {'123abc': {'keytype':'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}}, - 'roles': [{'name': 'root', 'keyids': ['123abc'], - 'threshold': 1, - 'paths': ['path1/', 'path2']}]}}), - - 'SNAPSHOT_SCHEMA': (tuf.formats.SNAPSHOT_SCHEMA, - {'_type': 'snapshot', - 'spec_version': '1.0.0', - 'version': 8, - 'expires': '1985-10-21T13:20:00Z', - 'meta': {'snapshot.json': {'version': 1024}}}), - - 'TIMESTAMP_SCHEMA': (tuf.formats.TIMESTAMP_SCHEMA, - {'_type': 'timestamp', - 'spec_version': '1.0.0', - 'version': 8, - 'expires': '1985-10-21T13:20:00Z', - 'meta': {'metadattimestamp.json': {'length': 1024, - 'hashes': {'sha256': 'AB1245'}, - 'version': 1}}}), - - 'MIRROR_SCHEMA': (tuf.formats.MIRROR_SCHEMA, - {'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata/', - 'targets_path': 'targets/', - 'confined_target_dirs': ['path1/', 'path2/'], - 'custom': {'type': 'mirror'}}), - - 'MIRROR_SCHEMA_NO_CONFINED_TARGETS': (tuf.formats.MIRROR_SCHEMA, - {'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata/', - 'targets_path': 'targets/', - 'custom': {'type': 'mirror'}}), - - 'MIRRORDICT_SCHEMA': (tuf.formats.MIRRORDICT_SCHEMA, - {'mirror1': {'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata/', - 'targets_path': 'targets/', - 'confined_target_dirs': ['path1/', 'path2/'], - 'custom': {'type': 'mirror'}}}), - - 'MIRRORLIST_SCHEMA': 
(tuf.formats.MIRRORLIST_SCHEMA, - {'_type': 'mirrors', - 'version': 8, - 'spec_version': '1.0.0', - 'expires': '1985-10-21T13:20:00Z', - 'mirrors': [{'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata/', - 'targets_path': 'targets/', - 'confined_target_dirs': ['path1/', 'path2/'], - 'custom': {'type': 'mirror'}}]})} - - # Iterate 'valid_schemas', ensuring each 'valid_schema' correctly matches - # its respective 'schema_type'. - for schema_name, (schema_type, valid_schema) in valid_schemas.items(): - if not schema_type.matches(valid_schema): - print('bad schema: ' + repr(valid_schema)) - self.assertEqual(True, schema_type.matches(valid_schema)) - - # Test conditions for invalid schemas. - # Set the 'valid_schema' of 'valid_schemas' to an invalid - # value and test that it does not match 'schema_type'. - for schema_name, (schema_type, valid_schema) in valid_schemas.items(): - invalid_schema = 0xBAD - if isinstance(schema_type, securesystemslib.schema.Integer): - invalid_schema = 'BAD' - self.assertEqual(False, schema_type.matches(invalid_schema)) - - - def test_specfication_version_schema(self): - """Test valid and invalid SPECIFICATION_VERSION_SCHEMAs, using examples - from 'regex101.com/r/Ly7O1x/3/', referenced by - 'semver.org/spec/v2.0.0.html'. """ - valid_schemas = [ - "0.0.4", - "1.2.3", - "10.20.30", - "1.1.2-prerelease+meta", - "1.1.2+meta", - "1.1.2+meta-valid", - "1.0.0-alpha", - "1.0.0-beta", - "1.0.0-alpha.beta", - "1.0.0-alpha.beta.1", - "1.0.0-alpha.1", - "1.0.0-alpha0.valid", - "1.0.0-alpha.0valid", - "1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay", - "1.0.0-rc.1+build.1", - "2.0.0-rc.1+build.123", - "1.2.3-beta", - "10.2.3-DEV-SNAPSHOT", - "1.2.3-SNAPSHOT-123", - "1.0.0", - "2.0.0", - "1.1.7", - "2.0.0+build.1848", - "2.0.1-alpha.1227", - "1.0.0-alpha+beta", - "1.2.3----RC-SNAPSHOT.12.9.1--.12+788", - "1.2.3----R-S.12.9.1--.12+meta", - "1.2.3----RC-SNAPSHOT.12.9.1--.12", - "1.0.0+0.build.1-rc.10000aaa-kk-0.1", - "99999999999999999999999.999999999999999999.99999999999999999", - "1.0.0-0A.is.legal"] - - for valid_schema in valid_schemas: - self.assertTrue( - tuf.formats.SPECIFICATION_VERSION_SCHEMA.matches(valid_schema), - "'{}' should match 'SPECIFICATION_VERSION_SCHEMA'.".format( - valid_schema)) - - invalid_schemas = [ - "1", - "1.2", - "1.2.3-0123", - "1.2.3-0123.0123", - "1.1.2+.123", - "+invalid", - "-invalid", - "-invalid+invalid", - "-invalid.01", - "alpha", - "alpha.beta", - "alpha.beta.1", - "alpha.1", - "alpha+beta", - "alpha_beta", - "alpha.", - "alpha..", - "beta", - "1.0.0-alpha_beta", - "-alpha.", - "1.0.0-alpha..", - "1.0.0-alpha..1", - "1.0.0-alpha...1", - "1.0.0-alpha....1", - "1.0.0-alpha.....1", - "1.0.0-alpha......1", - "1.0.0-alpha.......1", - "01.1.1", - "1.01.1", - "1.1.01", - "1.2", - "1.2.3.DEV", - "1.2-SNAPSHOT", - "1.2.31.2.3----RC-SNAPSHOT.12.09.1--..12+788", - "1.2-RC-SNAPSHOT", - "-1.0.3-gamma+b7718", - "+justmeta", - "9.8.7+meta+meta", - "9.8.7-whatever+meta+meta", - "99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12"] - - for invalid_schema in invalid_schemas: - self.assertFalse( - tuf.formats.SPECIFICATION_VERSION_SCHEMA.matches(invalid_schema), - "'{}' should not match 'SPECIFICATION_VERSION_SCHEMA'.".format( - invalid_schema)) - - - def test_build_dict_conforming_to_schema(self): - # Test construction of a few metadata formats using - # build_dict_conforming_to_schema(). - - # Try the wrong type of schema object. 
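The valid and invalid version strings in the preceding test are worked examples for the semver grammar; the regex suggested at semver.org (the same grammar behind the regex101 link cited in the docstring above) accepts and rejects them accordingly:

import re

SEMVER_RE = re.compile(
    r'^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)'
    r'(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)'
    r'(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?'
    r'(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$')

assert SEMVER_RE.match('1.0.0-alpha+beta')
assert not SEMVER_RE.match('01.1.1')  # leading zeros are rejected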
- STRING_SCHEMA = securesystemslib.schema.AnyString() - - with self.assertRaises(ValueError): - tuf.formats.build_dict_conforming_to_schema( - STRING_SCHEMA, string='some string') - - # Try building Timestamp metadata. - spec_version = tuf.SPECIFICATION_VERSION - version = 8 - length = 88 - hashes = {'sha256': '3c7fe3eeded4a34'} - expires = '1985-10-21T13:20:00Z' - filedict = {'snapshot.json': {'length': length, 'hashes': hashes, 'version': 1}} - - - # Try with and without _type and spec_version, both of which are - # automatically populated if they are not included. - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches( # both - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - spec_version=spec_version, - version=version, - expires=expires, - meta=filedict))) - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches( # neither - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - version=version, - expires=expires, - meta=filedict))) - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches( # one - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - spec_version=spec_version, - version=version, - expires=expires, - meta=filedict))) - self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches( # the other - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - version=version, - expires=expires, - meta=filedict))) - - - # Try test arguments for invalid Timestamp creation. - bad_spec_version = 123 - bad_version = 'eight' - bad_expires = '2000' - bad_filedict = 123 - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - spec_version=bad_spec_version, - version=version, - expires=expires, - meta=filedict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - spec_version=spec_version, - version=bad_version, - expires=expires, - meta=filedict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - spec_version=spec_version, - version=version, - expires=bad_expires, - meta=filedict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - _type='timestamp', - spec_version=spec_version, - version=version, - expires=expires, - meta=bad_filedict) - - with self.assertRaises(ValueError): - tuf.formats.build_dict_conforming_to_schema(123) - - - # Try building Root metadata. - consistent_snapshot = False - - keydict = {'123abc': {'keytype': 'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}} - - roledict = {'root': {'keyids': ['123abc'], - 'threshold': 1, - 'paths': ['path1/', 'path2']}} - - - self.assertTrue(tuf.formats.ROOT_SCHEMA.matches( - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=spec_version, - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot))) - - - # Additional test arguments for invalid Root creation. 
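Stated compactly, build_dict_conforming_to_schema merges defaults the schema implies (such as _type and spec_version, which the tests above show may be omitted) with the caller's keyword arguments, then validates the result. A conceptual sketch, not the tuf.formats implementation, which raises securesystemslib.exceptions.FormatError rather than ValueError:

def sketch_build_conforming_dict(schema, defaults, **kwargs):
    # Start from schema-implied defaults, let caller arguments override,
    # then refuse to return anything that does not match the schema.
    result = dict(defaults)
    result.update(kwargs)
    if not schema.matches(result):
        raise ValueError('resulting dict does not conform to the schema')
    return result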
- bad_keydict = 123 - bad_roledict = 123 - - # TODO: Later on, write a test looper that takes pairs of key-value args - # to substitute in on each run to shorten this.... There's a lot of - # test code that looks like this, and it'd be easier to use a looper. - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=bad_spec_version, - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=spec_version, - version=bad_version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=spec_version, - version=version, - expires=bad_expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=spec_version, - version=version, - expires=expires, - keys=bad_keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version=spec_version, - version=version, - expires=expires, - keys=keydict, - roles=bad_roledict, - consistent_snapshot=consistent_snapshot) - - with self.assertRaises(TypeError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, 'bad') - - with self.assertRaises(ValueError): - tuf.formats.build_dict_conforming_to_schema( - 'bad', - _type='root', - spec_version=spec_version, - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - - - # Try building Snapshot metadata. - versiondict = {'targets.json' : {'version': version}} - - self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches( - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, - _type='snapshot', - spec_version=spec_version, - version=version, - expires=expires, - meta=versiondict))) - - # Additional test arguments for invalid Snapshot creation. 
- bad_versiondict = 123 - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, - _type='snapshot', - spec_version=bad_spec_version, - version=version, - expires=expires, - meta=versiondict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, - _type='snapshot', - spec_version=spec_version, - version=bad_version, - expires=expires, - meta=versiondict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, - _type='snapshot', - spec_version=spec_version, - version=version, - expires=bad_expires, - meta=versiondict) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, - _type='snapshot', - spec_version=spec_version, - version=version, - expires=expires, - meta=bad_versiondict) - - - - # Try building Targets metadata. - filedict = {'metadata/targets.json': {'length': 1024, - 'hashes': {'sha256': 'ABCD123'}, - 'custom': {'type': 'metadata'}}} - - delegations = {'keys': {'123abc': {'keytype':'rsa', - 'scheme': 'rsassa-pss-sha256', - 'keyval': {'public': 'pubkey', - 'private': 'privkey'}}}, - 'roles': [{'name': 'root', 'keyids': ['123abc'], - 'threshold': 1, 'paths': ['path1/', 'path2']}]} - - - self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches( - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=version, - expires=expires, - targets=filedict, - delegations=delegations))) - - # Try with no delegations included (should work, since they're optional). - self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches( - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=version, - expires=expires, - targets=filedict))) - - - # Additional test arguments for invalid Targets creation. 
- bad_filedict = 123 - bad_delegations = 123 - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=bad_version, - expires=expires, - targets=filedict, - delegations=delegations) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=version, - expires=bad_expires, - targets=filedict, - delegations=delegations) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=version, - expires=expires, - targets=bad_filedict, - delegations=delegations) - - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - _type='targets', - spec_version=spec_version, - version=version, - expires=expires, - targets=filedict, - delegations=bad_delegations) - - - - def test_expiry_string_to_datetime(self): - dt = tuf.formats.expiry_string_to_datetime('1985-10-21T13:20:00Z') - self.assertEqual(dt, datetime.datetime(1985, 10, 21, 13, 20, 0)) - dt = tuf.formats.expiry_string_to_datetime('2038-01-19T03:14:08Z') - self.assertEqual(dt, datetime.datetime(2038, 1, 19, 3, 14, 8)) - - # First 3 fail via securesystemslib schema, last one because of strptime() - invalid_inputs = [ - '2038-1-19T03:14:08Z', # leading zeros not optional - '2038-01-19T031408Z', # strict time parsing - '2038-01-19T03:14:08Z-06:00', # timezone not allowed - '2038-13-19T03:14:08Z', # too many months - ] - for invalid_input in invalid_inputs: - with self.assertRaises(securesystemslib.exceptions.FormatError): - tuf.formats.expiry_string_to_datetime(invalid_input) - - - - def test_unix_timestamp_to_datetime(self): - # Test conditions for valid arguments. - UNIX_TIMESTAMP_SCHEMA = securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA - self.assertTrue(datetime.datetime, tuf.formats.unix_timestamp_to_datetime(499137720)) - datetime_object = datetime.datetime(1985, 10, 26, 1, 22) - self.assertEqual(datetime_object, tuf.formats.unix_timestamp_to_datetime(499137720)) - - # Test conditions for invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.unix_timestamp_to_datetime, 'bad') - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.unix_timestamp_to_datetime, 1000000000000000000000) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.unix_timestamp_to_datetime, -1) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.unix_timestamp_to_datetime, ['5']) - - - - def test_datetime_to_unix_timestamp(self): - # Test conditions for valid arguments. - datetime_object = datetime.datetime(2015, 10, 21, 19, 28) - self.assertEqual(1445455680, tuf.formats.datetime_to_unix_timestamp(datetime_object)) - - # Test conditions for invalid arguments. 
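The conversions tested above have near-direct stdlib equivalents (naive datetimes interpreted as UTC throughout; utcfromtimestamp is deprecated in newer Pythons in favor of timezone-aware variants):

import calendar
import datetime

# expiry_string_to_datetime: strict ISO 8601 with a literal 'Z' suffix.
dt = datetime.datetime.strptime('1985-10-21T13:20:00Z', '%Y-%m-%dT%H:%M:%SZ')
assert dt == datetime.datetime(1985, 10, 21, 13, 20, 0)

# unix_timestamp_to_datetime / datetime_to_unix_timestamp equivalents,
# using the same constants as the assertions above.
assert datetime.datetime.utcfromtimestamp(499137720) == \
    datetime.datetime(1985, 10, 26, 1, 22)
assert calendar.timegm(datetime.datetime(2015, 10, 21, 19, 28).timetuple()) == \
    1445455680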
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.datetime_to_unix_timestamp, 'bad') - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.datetime_to_unix_timestamp, 1000000000000000000000) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.datetime_to_unix_timestamp, ['1']) - - - - def test_format_base64(self): - # Test conditions for valid arguments. - data = 'updateframework'.encode('utf-8') - self.assertEqual('dXBkYXRlZnJhbWV3b3Jr', tuf.formats.format_base64(data)) - self.assertTrue(isinstance(tuf.formats.format_base64(data), str)) - - # Test conditions for invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.format_base64, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.format_base64, True) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.format_base64, ['123']) - - - def test_parse_base64(self): - # Test conditions for valid arguments. - base64 = 'dXBkYXRlZnJhbWV3b3Jr' - self.assertEqual(b'updateframework', tuf.formats.parse_base64(base64)) - self.assertTrue(isinstance(tuf.formats.parse_base64(base64), bytes)) - - # Test conditions for invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.parse_base64, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.parse_base64, True) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.parse_base64, ['123']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.formats.parse_base64, '/') - - - - def test_make_signable(self): - # Test conditions for expected make_signable() behavior. - SIGNABLE_SCHEMA = tuf.formats.SIGNABLE_SCHEMA - root_file = os.path.join('repository_data', 'repository', 'metadata', - 'root.json') - root = securesystemslib.util.load_json_file(root_file) - self.assertTrue(SIGNABLE_SCHEMA.matches(tuf.formats.make_signable(root))) - signable = tuf.formats.make_signable(root) - self.assertEqual('root', tuf.formats.check_signable_object_format(signable)) - - self.assertEqual(signable, tuf.formats.make_signable(signable)) - - # Test conditions for miscellaneous arguments. - self.assertTrue(SIGNABLE_SCHEMA.matches(tuf.formats.make_signable('123'))) - self.assertTrue(SIGNABLE_SCHEMA.matches(tuf.formats.make_signable(123))) - - - - - - def test_make_targets_fileinfo(self): - # Test conditions for valid arguments. - length = 1024 - hashes = {'sha256': 'A4582BCF323BCEF', 'sha512': 'A4582BCF323BFEF'} - custom = {'type': 'paintjob'} - - TARGETS_FILEINFO_SCHEMA = tuf.formats.TARGETS_FILEINFO_SCHEMA - make_targets_fileinfo = tuf.formats.make_targets_fileinfo - self.assertTrue(TARGETS_FILEINFO_SCHEMA.matches(make_targets_fileinfo(length, hashes, custom))) - self.assertTrue(TARGETS_FILEINFO_SCHEMA.matches(make_targets_fileinfo(length, hashes))) - - # Test conditions for invalid arguments. 
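The base64 round trip these helpers wrap is pure stdlib; the constants come straight from the assertions above:

import base64

encoded = base64.b64encode(b'updateframework').decode('utf-8')
assert encoded == 'dXBkYXRlZnJhbWV3b3Jr'
assert base64.b64decode(encoded) == b'updateframework'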
- bad_length = 'bad' - bad_hashes = 'bad' - bad_custom = 'bad' - - self.assertRaises(securesystemslib.exceptions.FormatError, make_targets_fileinfo, - bad_length, hashes, custom) - self.assertRaises(securesystemslib.exceptions.FormatError, make_targets_fileinfo, - length, bad_hashes, custom) - self.assertRaises(securesystemslib.exceptions.FormatError, make_targets_fileinfo, - length, hashes, bad_custom) - self.assertRaises(securesystemslib.exceptions.FormatError, make_targets_fileinfo, - bad_length, hashes) - self.assertRaises(securesystemslib.exceptions.FormatError, make_targets_fileinfo, - length, bad_hashes) - - - - def test_make_metadata_fileinfo(self): - # Test conditions for valid arguments. - length = 1024 - hashes = {'sha256': 'A4582BCF323BCEF', 'sha512': 'A4582BCF323BFEF'} - version = 8 - - METADATA_FILEINFO_SCHEMA = tuf.formats.METADATA_FILEINFO_SCHEMA - make_metadata_fileinfo = tuf.formats.make_metadata_fileinfo - self.assertTrue(METADATA_FILEINFO_SCHEMA.matches(make_metadata_fileinfo( - version, length, hashes))) - self.assertTrue(METADATA_FILEINFO_SCHEMA.matches(make_metadata_fileinfo(version))) - - # Test conditions for invalid arguments. - bad_version = 'bad' - bad_length = 'bad' - bad_hashes = 'bad' - - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata_fileinfo, - bad_version, length, hashes) - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata_fileinfo, - version, bad_length, hashes) - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata_fileinfo, - version, length, bad_hashes) - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata_fileinfo, - bad_version) - - - - def test_make_versioninfo(self): - # Test conditions for valid arguments. - version_number = 8 - versioninfo = {'version': version_number} - - VERSIONINFO_SCHEMA = tuf.formats.VERSIONINFO_SCHEMA - make_versioninfo = tuf.formats.make_versioninfo - self.assertTrue(VERSIONINFO_SCHEMA.matches(make_versioninfo(version_number))) - - # Test conditions for invalid arguments. - bad_version_number = '8' - - self.assertRaises(securesystemslib.exceptions.FormatError, make_versioninfo, bad_version_number) - - - - - - def test_expected_meta_rolename(self): - # Test conditions for valid arguments. - expected_rolename = tuf.formats.expected_meta_rolename - - self.assertEqual('root', expected_rolename('Root')) - self.assertEqual('targets', expected_rolename('Targets')) - self.assertEqual('snapshot', expected_rolename('Snapshot')) - self.assertEqual('timestamp', expected_rolename('Timestamp')) - self.assertEqual('mirrors', expected_rolename('Mirrors')) - self.assertEqual('targets role', expected_rolename('Targets Role')) - self.assertEqual('root', expected_rolename('Root')) - - # Test conditions for invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, expected_rolename, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, expected_rolename, tuf.formats.ROOT_SCHEMA) - self.assertRaises(securesystemslib.exceptions.FormatError, expected_rolename, True) - - - - def test_check_signable_object_format(self): - # Test condition for a valid argument. - root_file = os.path.join('repository_data', 'repository', 'metadata', - 'root.json') - root = securesystemslib.util.load_json_file(root_file) - root = tuf.formats.make_signable(root) - self.assertEqual('root', tuf.formats.check_signable_object_format(root)) - - # Test conditions for invalid arguments. 
- check_signable = tuf.formats.check_signable_object_format - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, 'root') - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, tuf.formats.ROOT_SCHEMA) - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, True) - - saved_type = root['signed']['_type'] - del root['signed']['_type'] - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, root) - root['signed']['_type'] = saved_type - - root['signed']['_type'] = 'Root' - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, root) - root['signed']['_type'] = 'root' - - del root['signed']['expires'] - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, root) - - - - def test_encode_canonical(self): - # Test conditions for valid arguments. - encode = securesystemslib.formats.encode_canonical - result = [] - output = result.append - bad_output = 123 - - self.assertEqual('""', encode("")) - self.assertEqual('[1,2,3]', encode([1, 2, 3])) - self.assertEqual('[1,2,3]', encode([1,2,3])) - self.assertEqual('[]', encode([])) - self.assertEqual('{"A":[99]}', encode({"A": [99]})) - self.assertEqual('{"x":3,"y":2}', encode({"x": 3, "y": 2})) - - self.assertEqual('{"x":3,"y":null}', encode({"x": 3, "y": None})) - - # Condition where 'encode()' sends the result to the callable - # 'output'. - self.assertEqual(None, encode([1, 2, 3], output)) - self.assertEqual('[1,2,3]', ''.join(result)) - - # Test conditions for invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, encode, tuf.formats.ROOT_SCHEMA) - self.assertRaises(securesystemslib.exceptions.FormatError, encode, 8.0) - self.assertRaises(securesystemslib.exceptions.FormatError, encode, {"x": 8.0}) - self.assertRaises(securesystemslib.exceptions.FormatError, encode, 8.0, output) - - self.assertRaises(securesystemslib.exceptions.FormatError, encode, {"x": securesystemslib.exceptions.FormatError}) - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_indefinite_freeze_attack_old.py b/tests/test_indefinite_freeze_attack_old.py deleted file mode 100755 index 69d063a60d..0000000000 --- a/tests/test_indefinite_freeze_attack_old.py +++ /dev/null @@ -1,461 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_indefinite_freeze_attack_old.py - - - Konstantin Andrianov. - - - March 10, 2012. - - April 1, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. -vladimir.v.diaz - - March 9, 2016. - Additional test added relating to issue: - https://github.com/theupdateframework/python-tuf/issues/322 - If a metadata file is not updated (no indication of a new version - available), the expiration of the pre-existing, locally trusted metadata - must still be detected. This additional test complains if such does not - occur, and accompanies code in tuf.client.updater:refresh() to detect it. - -sebastien.awwad - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate an indefinite freeze attack. 
In an indefinite freeze attack, - attacker is able to respond to client's requests with the same, outdated - metadata without the client being aware. -""" - -import os -import time -import tempfile -import shutil -import json -import logging -import unittest -import sys -from urllib import request -import unittest.mock as mock - -import tuf.formats -import tuf.log -import tuf.client.updater as updater -import tuf.repository_tool as repo_tool -import tuf.unittest_toolbox as unittest_toolbox -import tuf.roledb -import tuf.keydb -import tuf.exceptions - -from tests import utils - -import securesystemslib - -# The repository tool is imported and logs console messages by default. Disable -# console log messages generated by this unit test. -repo_tool.disable_console_log_messages() - -logger = logging.getLogger(__name__) - - -class TestIndefiniteFreezeAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client') - original_keystore = os.path.join(original_repository_files, 'keystore') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.client_directory = os.path.join(temporary_repository_root, 'client') - self.keystore_directory = os.path.join(temporary_repository_root, 'keystore') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. 
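-    # Each test case therefore operates on its own scratch copy: changes made
-    # by one test cannot leak into another, and tearDownClass() only has to
-    # delete the single temporary tree.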
-    shutil.copytree(original_repository, self.repository_directory)
-    shutil.copytree(original_client, self.client_directory)
-    shutil.copytree(original_keystore, self.keystore_directory)
-
-    # Set the url prefix required by the 'tuf/client/updater.py' updater.
-    # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
-    repository_basepath = self.repository_directory[len(os.getcwd()):]
-    url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
-        + str(self.server_process_handler.port) + repository_basepath
-
-    # Set 'tuf.settings.repositories_directory' to the temporary client
-    # directory copied from the original repository files.
-    tuf.settings.repositories_directory = self.client_directory
-    self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
-                                           'metadata_path': 'metadata',
-                                           'targets_path': 'targets'}}
-
-    # Create the repository instance.  The test cases will use this client
-    # updater to refresh metadata, fetch target files, etc.
-    self.repository_updater = updater.Updater(self.repository_name,
-                                              self.repository_mirrors)
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-
-    # Logs stdout and stderr from the server subprocess.
-    self.server_process_handler.flush_log()
-
-    # Remove temporary directory
-    unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
-  def test_without_tuf(self):
-    # Without TUF, Test 1 and Test 2 are functionally equivalent, so we skip
-    # Test 1 and only perform Test 2.
-    #
-    # Test 1: If we find that the timestamp acquired from a mirror indicates
-    #         that there is no new snapshot file, and our current snapshot
-    #         file is expired, is it recognized as such?
-    # Test 2: If an expired timestamp is downloaded, is it recognized as such?
-
-
-    # Test 2 Begin:
-    #
-    # 'timestamp.json' specifies the latest version of the repository files.  A
-    # client should only accept the same version of this file up to a certain
-    # point, or else it cannot detect that new files are available for
-    # download.  Modify the repository's 'timestamp.json' so that it expires
-    # soon, copy it over to the client, and attempt to re-fetch the same
-    # expired version.
-    #
-    # A non-TUF client (without a way to detect when metadata has expired) is
-    # expected to download the same version, and thus the same outdated files.
-    # Verify that the downloaded 'timestamp.json' contains the same file size
-    # and hash as the one available locally.
-
-    timestamp_path = os.path.join(self.repository_directory, 'metadata',
-                                  'timestamp.json')
-
-    timestamp_metadata = securesystemslib.util.load_json_file(timestamp_path)
-    expiry_time = time.time() - 10
-    expires = tuf.formats.unix_timestamp_to_datetime(int(expiry_time))
-    expires = expires.isoformat() + 'Z'
-    timestamp_metadata['signed']['expires'] = expires
-    tuf.formats.check_signable_object_format(timestamp_metadata)
-
-    with open(timestamp_path, 'wb') as file_object:
-      # Explicitly specify the JSON separators for Python 2 + 3 consistency.
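-      # Sorted keys and fixed separators make the bytes written here
-      # reproducible, so the length/hash comparison against the downloaded
-      # copy at the end of this test is meaningful.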
- timestamp_content = \ - json.dumps(timestamp_metadata, indent=1, separators=(',', ': '), - sort_keys=True).encode('utf-8') - file_object.write(timestamp_content) - - client_timestamp_path = os.path.join(self.client_directory, 'timestamp.json') - shutil.copy(timestamp_path, client_timestamp_path) - - length, hashes = securesystemslib.util.get_file_details(timestamp_path) - fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json') - - request.urlretrieve(url_file.replace('\\', '/'), client_timestamp_path) - - length, hashes = securesystemslib.util.get_file_details(client_timestamp_path) - download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes) - - # Verify 'download_fileinfo' is equal to the current local file. - self.assertEqual(download_fileinfo, fileinfo) - - - def test_with_tuf(self): - # Three tests are conducted here. - # - # Test 1: If we find that the timestamp acquired from a mirror indicates - # that there is no new snapshot file, and our current snapshot - # file is expired, is it recognized as such? - # Test 2: If an expired timestamp is downloaded, is it recognized as such? - # Test 3: If an expired Snapshot is downloaded, is it (1) rejected? (2) the - # local Snapshot file deleted? (3) and is the client able to recover when - # given a new, valid Snapshot? - - - # Test 1 Begin: - # - # Addresses this issue: https://github.com/theupdateframework/python-tuf/issues/322 - # - # If time has passed and our snapshot or targets role is expired, and - # the mirror whose timestamp we fetched doesn't indicate the existence of a - # new snapshot version, we still need to check that it's expired and notify - # the software update system / application / user. This test creates that - # scenario. The correct behavior is to raise an exception. - # - # Background: Expiration checks (updater._ensure_not_expired) were - # previously conducted when the metadata file was downloaded. If no new - # metadata file was downloaded, no expiry check would occur. In particular, - # while root was checked for expiration at the beginning of each - # updater.refresh() cycle, and timestamp was always checked because it was - # always fetched, snapshot and targets were never checked if the user did - # not receive evidence that they had changed. This bug allowed a class of - # freeze attacks. - # That bug was fixed and this test tests that fix going forward. - - # Modify the timestamp file on the remote repository. 'timestamp.json' - # must be properly updated and signed with 'repository_tool.py', otherwise - # the client will reject it as invalid metadata. - - # Load the repository - repository = repo_tool.load_repository(self.repository_directory) - - # Load the snapshot and timestamp keys - key_file = os.path.join(self.keystore_directory, 'timestamp_key') - timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file, - 'password') - repository.timestamp.load_signing_key(timestamp_private) - key_file = os.path.join(self.keystore_directory, 'snapshot_key') - snapshot_private = repo_tool.import_ed25519_privatekey_from_file(key_file, - 'password') - repository.snapshot.load_signing_key(snapshot_private) - - # sign snapshot with expiry in near future (earlier than e.g. 
timestamp)
-    expiry = int(time.time() + 60*60)
-    repository.snapshot.expiration = tuf.formats.unix_timestamp_to_datetime(
-        expiry)
-    repository.mark_dirty(['snapshot', 'timestamp'])
-    repository.writeall()
-
-    # And move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # Refresh metadata on the client.  At this first refresh, none of the
-    # metadata has expired yet.
-    logger.info('Test: Refreshing #1 - Initial metadata refresh occurring.')
-    self.repository_updater.refresh()
-
-    logger.info('Test: Refreshing #2 - refresh after local snapshot expiry.')
-
-    # mock current time to one second after snapshot expiry
-    mock_time = mock.Mock()
-    mock_time.return_value = expiry + 1
-    with mock.patch('time.time', mock_time):
-      try:
-        self.repository_updater.refresh() # We expect this to fail!
-
-      except tuf.exceptions.ExpiredMetadataError:
-        logger.info('Test: Refresh #2 - failed as expected.  Expired local'
-            ' snapshot case generated a tuf.exceptions.ExpiredMetadataError'
-            ' exception as expected.  Test pass.')
-
-      else:
-        self.fail('TUF failed to detect expired, stale snapshot metadata.'
-            ' Freeze attack successful.')
-
-
-
-
-    # Test 2 Begin:
-    #
-    # 'timestamp.json' specifies the latest version of the repository files.
-    # A client should only accept the same version of this file up to a certain
-    # point, or else it cannot detect that new files are available for download.
-    # Modify the repository's 'timestamp.json' so that it is about to expire,
-    # copy it over to the client, wait a moment until it expires, and attempt to
-    # re-fetch the same expired version.
-
-    # The same scenario as in test_without_tuf() is followed here, except with
-    # a TUF client.  The TUF client performs a refresh of top-level metadata,
-    # which includes 'timestamp.json', and should detect a freeze attack if
-    # the repository serves an outdated 'timestamp.json'.
-
-    # Modify the timestamp file on the remote repository.  'timestamp.json'
-    # must be properly updated and signed with 'repository_tool.py', otherwise
-    # the client will reject it as invalid metadata.  The resulting
-    # 'timestamp.json' should be valid metadata, but expired (as intended).
-    repository = repo_tool.load_repository(self.repository_directory)
-
-    key_file = os.path.join(self.keystore_directory, 'timestamp_key')
-    timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
-        'password')
-
-    repository.timestamp.load_signing_key(timestamp_private)
-
-    # Set timestamp metadata to expire soon.
-    # We cannot set the timestamp expiration with
-    # 'repository.timestamp.expiration = ...' with already-expired timestamp
-    # metadata because of consistency checks that occur during that assignment.
-    expiry_time = time.time() + 60*60
-    datetime_object = tuf.formats.unix_timestamp_to_datetime(int(expiry_time))
-    repository.timestamp.expiration = datetime_object
-    repository.writeall()
-
-    # Move the staged metadata to the "live" metadata.
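-    # repository_tool writes signed metadata to 'metadata.staged/'; clients
-    # only see it once it is promoted to the served 'metadata/' directory,
-    # which the rmtree/copytree below simulates.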
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # mock current time to one second after timestamp expiry - mock_time = mock.Mock() - mock_time.return_value = expiry_time + 1 - with mock.patch('time.time', mock_time): - try: - self.repository_updater.refresh() # We expect NoWorkingMirrorError. - - except tuf.exceptions.NoWorkingMirrorError as e: - # Make sure the contained error is ExpiredMetadataError - for mirror_url, mirror_error in e.mirror_errors.items(): - self.assertTrue(isinstance(mirror_error, tuf.exceptions.ExpiredMetadataError)) - - else: - self.fail('TUF failed to detect expired, stale timestamp metadata.' - ' Freeze attack successful.') - - - - - # Test 3 Begin: - # - # Serve the client expired Snapshot. The client should reject the given, - # expired Snapshot and the locally trusted one, which should now be out of - # date. - # After the attack, attempt to re-issue a valid Snapshot to verify that - # the client is still able to update. A bug previously caused snapshot - # expiration or replay to result in an indefinite freeze; see - # github.com/theupdateframework/python-tuf/issues/736 - repository = repo_tool.load_repository(self.repository_directory) - - ts_key_file = os.path.join(self.keystore_directory, 'timestamp_key') - snapshot_key_file = os.path.join(self.keystore_directory, 'snapshot_key') - timestamp_private = repo_tool.import_ed25519_privatekey_from_file( - ts_key_file, 'password') - snapshot_private = repo_tool.import_ed25519_privatekey_from_file( - snapshot_key_file, 'password') - - repository.timestamp.load_signing_key(timestamp_private) - repository.snapshot.load_signing_key(snapshot_private) - - # Set ts to expire in 1 month. - ts_expiry_time = time.time() + 2630000 - - # Set snapshot to expire in 1 hour. - snapshot_expiry_time = time.time() + 60*60 - - ts_datetime_object = tuf.formats.unix_timestamp_to_datetime( - int(ts_expiry_time)) - snapshot_datetime_object = tuf.formats.unix_timestamp_to_datetime( - int(snapshot_expiry_time)) - repository.timestamp.expiration = ts_datetime_object - repository.snapshot.expiration = snapshot_datetime_object - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # mock current time to one second after snapshot expiry - mock_time = mock.Mock() - mock_time.return_value = snapshot_expiry_time + 1 - with mock.patch('time.time', mock_time): - try: - # We expect the following refresh() to raise a NoWorkingMirrorError. - self.repository_updater.refresh() - - except tuf.exceptions.NoWorkingMirrorError as e: - # Make sure the contained error is ExpiredMetadataError - for mirror_url, mirror_error in e.mirror_errors.items(): - self.assertTrue(isinstance(mirror_error, tuf.exceptions.ExpiredMetadataError)) - self.assertTrue(mirror_url.endswith('snapshot.json')) - - else: - self.fail('TUF failed to detect expired, stale Snapshot metadata.' - ' Freeze attack successful.') - - # The client should have rejected the malicious Snapshot metadata, and - # distrusted the local snapshot file that is no longer valid. 
- self.assertTrue('snapshot' not in self.repository_updater.metadata['current']) - self.assertEqual(sorted(['root', 'targets', 'timestamp']), - sorted(self.repository_updater.metadata['current'])) - - # Verify that the client is able to recover from the malicious Snapshot. - # Re-sign a valid Snapshot file that the client should accept. - repository = repo_tool.load_repository(self.repository_directory) - - repository.timestamp.load_signing_key(timestamp_private) - repository.snapshot.load_signing_key(snapshot_private) - - # Set snapshot to expire in 1 month. - snapshot_expiry_time = time.time() + 2630000 - - snapshot_datetime_object = tuf.formats.unix_timestamp_to_datetime( - int(snapshot_expiry_time)) - repository.snapshot.expiration = snapshot_datetime_object - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Verify that the client accepts the valid metadata file. - self.repository_updater.refresh() - self.assertTrue('snapshot' in self.repository_updater.metadata['current']) - self.assertEqual(sorted(['root', 'targets', 'timestamp', 'snapshot']), - sorted(self.repository_updater.metadata['current'])) - - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_key_revocation_integration_old.py b/tests/test_key_revocation_integration_old.py deleted file mode 100755 index 8cb77f127f..0000000000 --- a/tests/test_key_revocation_integration_old.py +++ /dev/null @@ -1,495 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_key_revocation_integration_old.py - - - Vladimir Diaz. - - - April 28, 2016. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Integration test that verifies top-level roles are updated after all of their - keys have been revoked. There are unit tests in 'test_repository_tool_old.py' - that verify key and role revocation of specific roles, but these should be - expanded to verify key revocations over the span of multiple snapshots of the - repository. - - The 'unittest_toolbox.py' module was created to provide additional testing - tools, such as automatically deleting temporary files created in test cases. - For more information on the additional testing tools, see - 'tests/unittest_toolbox.py'. -""" - -import os -import shutil -import tempfile -import logging -import unittest -import sys - -import tuf -import tuf.log -import tuf.roledb -import tuf.keydb -import tuf.repository_tool as repo_tool -import tuf.unittest_toolbox as unittest_toolbox -import tuf.client.updater as updater - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) -repo_tool.disable_console_log_messages() - - -class TestKeyRevocation(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). 
Test - # cases will request metadata and target files that have been pre-generated - # in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of - # 'test_key_revocation.py' assume the pre-generated metadata files have a - # specific structure, such as a delegated role, three target files, five - # key files, etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf.tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_keystore = os.path.join(original_repository_files, 'keystore') - original_client = os.path.join(original_repository_files, 'client') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.keystore_directory = \ - os.path.join(temporary_repository_root, 'keystore') - self.client_directory = os.path.join(temporary_repository_root, 'client') - self.client_metadata = os.path.join(self.client_directory, - self.repository_name, 'metadata') - self.client_metadata_current = os.path.join(self.client_metadata, 'current') - self.client_metadata_previous = os.path.join(self.client_metadata, 'previous') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_keystore, self.keystore_directory) - - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + repository_basepath - - # Setting 'tuf.settings.repository_directory' with the temporary client - # directory copied from the original repository files. - tuf.settings.repositories_directory = self.client_directory - - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Creating repository instance. The test cases will use this client - # updater to refresh metadata, fetch target files, etc. 
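-    # The updater loads its initial trusted metadata from
-    # '{tuf.settings.repositories_directory}/{repository_name}/metadata/current',
-    # i.e., the client directory copied above.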
-    self.repository_updater = updater.Updater(self.repository_name,
-                                              self.repository_mirrors)
-
-    # Metadata role keys are needed by the test cases to make changes to the
-    # repository (e.g., adding a new target file to 'targets.json' and then
-    # requesting a refresh()).
-    self.role_keys = _load_role_keys(self.keystore_directory)
-
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-
-    # Logs stdout and stderr from the server subprocess.
-    self.server_process_handler.flush_log()
-
-    # Remove temporary directory
-    unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
-  # UNIT TESTS.
-  def test_timestamp_key_revocation(self):
-    # First verify that the Timestamp role is properly signed.  Calling
-    # refresh() should not raise an exception.
-    self.repository_updater.refresh()
-
-    # There should only be one key for Timestamp.  Store the keyid to later
-    # verify that it has been revoked.
-    timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', self.repository_name)
-    timestamp_keyid = timestamp_roleinfo['keyids']
-    self.assertEqual(len(timestamp_keyid), 1)
-
-    # Remove 'timestamp_keyid' and add a new key.  Verify that the client
-    # detects the removal and addition of keys to the Timestamp role.
-    repository = repo_tool.load_repository(self.repository_directory)
-    repository.timestamp.remove_verification_key(self.role_keys['timestamp']['public'])
-    repository.timestamp.add_verification_key(self.role_keys['snapshot']['public'])
-
-    # Root, Snapshot, and Timestamp must be rewritten.  Root must be written
-    # because the timestamp key has changed; Snapshot, because Root has
-    # changed; and Timestamp, because it must sign its metadata with a new key.
-    repository.root.load_signing_key(self.role_keys['root']['private'])
-    repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
-    repository.timestamp.load_signing_key(self.role_keys['snapshot']['private'])
-    repository.writeall()
-
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # The client performs a refresh of top-level metadata to get the latest
-    # changes.
-    self.repository_updater.refresh()
-
-    # Verify that the client is able to recognize that a new set of keys has
-    # been added to the Timestamp role.
-    # First, has 'timestamp_keyid' been removed?
-    timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', self.repository_name)
-    self.assertTrue(timestamp_keyid not in timestamp_roleinfo['keyids'])
-
-    # Second, is Timestamp's new key correct?  The new key should be Snapshot's.
-
-    self.assertEqual(len(timestamp_roleinfo['keyids']), 1)
-    snapshot_roleinfo = tuf.roledb.get_roleinfo('snapshot', self.repository_name)
-    self.assertEqual(timestamp_roleinfo['keyids'], snapshot_roleinfo['keyids'])
-
-
-
-  def test_snapshot_key_revocation(self):
-    # First verify that the Snapshot role is properly signed.  Calling
-    # refresh() should not raise an exception.
-    self.repository_updater.refresh()
-
-    # There should only be one key for Snapshot.  Store the keyid to later
-    # verify that it has been revoked.
-    snapshot_roleinfo = tuf.roledb.get_roleinfo('snapshot', self.repository_name)
-    snapshot_keyid = snapshot_roleinfo['keyids']
-    self.assertEqual(len(snapshot_keyid), 1)
-
-
-    # Remove 'snapshot_keyid' and add a new key.  Verify that the client
-    # detects the removal and addition of keys to the Snapshot role.
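-    # The same rotation recipe is used in each of the revocation tests below:
-    # remove the old verification key from the role, register a replacement,
-    # re-sign every role affected by the change, and publish with writeall().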
-    repository = repo_tool.load_repository(self.repository_directory)
-    repository.snapshot.remove_verification_key(self.role_keys['snapshot']['public'])
-    repository.snapshot.add_verification_key(self.role_keys['timestamp']['public'])
-
-    # Root, Snapshot, and Timestamp must be rewritten.  Root must be written
-    # because the snapshot key has changed; Snapshot, because Root has
-    # changed; and Timestamp, because it must sign its metadata with a new key.
-    repository.root.load_signing_key(self.role_keys['root']['private'])
-    # Note: we added Timestamp's key to the Snapshot role.
-    repository.snapshot.load_signing_key(self.role_keys['timestamp']['private'])
-    repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
-    repository.writeall()
-
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # The client performs a refresh of top-level metadata to get the latest
-    # changes.
-    self.repository_updater.refresh()
-
-    # Verify that the client is able to recognize that a new set of keys has
-    # been added to the Snapshot role.
-    # First, has 'snapshot_keyid' been removed?
-    snapshot_roleinfo = tuf.roledb.get_roleinfo('snapshot', self.repository_name)
-    self.assertTrue(snapshot_keyid not in snapshot_roleinfo['keyids'])
-
-    # Second, is Snapshot's new key correct?  The new key should be
-    # Timestamp's.
-    self.assertEqual(len(snapshot_roleinfo['keyids']), 1)
-    timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', self.repository_name)
-    self.assertEqual(snapshot_roleinfo['keyids'], timestamp_roleinfo['keyids'])
-
-
-
-
-
-  def test_targets_key_revocation(self):
-    # First verify that the Targets role is properly signed.  Calling
-    # refresh() should not raise an exception.
-    self.repository_updater.refresh()
-
-    # There should only be one key for Targets.  Store the keyid to later
-    # verify that it has been revoked.
-    targets_roleinfo = tuf.roledb.get_roleinfo('targets', self.repository_name)
-    targets_keyid = targets_roleinfo['keyids']
-    self.assertEqual(len(targets_keyid), 1)
-
-    # Remove 'targets_keyid' and add a new key.  Verify that the client
-    # detects the removal and addition of keys to the Targets role.
-    repository = repo_tool.load_repository(self.repository_directory)
-    repository.targets.remove_verification_key(self.role_keys['targets']['public'])
-    repository.targets.add_verification_key(self.role_keys['timestamp']['public'])
-
-    # Root, Snapshot, and Timestamp must be rewritten.  Root must be written
-    # because the targets key has changed; Snapshot, because Root has
-    # changed; and Timestamp, because it must sign its metadata with a new key.
-    repository.root.load_signing_key(self.role_keys['root']['private'])
-    # Note: we added Timestamp's key to the Targets role.
-    repository.targets.load_signing_key(self.role_keys['timestamp']['private'])
-    repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
-    repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
-    repository.writeall()
-
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # The client performs a refresh of top-level metadata to get the latest
-    # changes.
-    self.repository_updater.refresh()
-
-    # Verify that the client is able to recognize that a new set of keys has
-    # been added to the Targets role.
-    # First, has 'targets_keyid' been removed?
-    targets_roleinfo = tuf.roledb.get_roleinfo('targets', self.repository_name)
-    self.assertTrue(targets_keyid not in targets_roleinfo['keyids'])
-
-    # Second, is the Targets role's new key correct?  The new key should be
-    # Timestamp's.
-    self.assertEqual(len(targets_roleinfo['keyids']), 1)
-    timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', self.repository_name)
-    self.assertEqual(targets_roleinfo['keyids'], timestamp_roleinfo['keyids'])
-
-
-
-  def test_root_key_revocation(self):
-    # First verify that the Root role is properly signed.  Calling
-    # refresh() should not raise an exception.
-    self.repository_updater.refresh()
-
-    # There should only be one key for Root.  Store the keyid to later verify
-    # that it has been revoked.
-    root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name)
-    root_keyid = root_roleinfo['keyids']
-    self.assertEqual(len(root_keyid), 1)
-
-    # Remove 'root_keyid' and add a new key.  Verify that the client detects
-    # the removal and addition of keys to the Root file.
-    repository = repo_tool.load_repository(self.repository_directory)
-
-    repository.root.add_verification_key(self.role_keys['snapshot']['public'])
-    repository.root.add_verification_key(self.role_keys['targets']['public'])
-    repository.root.add_verification_key(self.role_keys['timestamp']['public'])
-
-    # Root, Snapshot, and Timestamp must be rewritten.  Root must be written
-    # because the root keys have changed; Snapshot, because Root has
-    # changed; and Timestamp, because it must sign its metadata with a new key.
-    repository.root.load_signing_key(self.role_keys['snapshot']['private'])
-    repository.root.load_signing_key(self.role_keys['targets']['private'])
-    repository.root.load_signing_key(self.role_keys['timestamp']['private'])
-
-    # Note: We added the Snapshot, Targets, and Timestamp keys to the Root role.
-    # The Root's expected private key has not been loaded yet, so that we can
-    # verify that refresh() correctly raises a
-    # securesystemslib.exceptions.BadSignatureError exception.
-    repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
-    repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
-
-    # Root's version number = 2 after the following writeall().
-    repository.writeall()
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # Note well: The client should reject the new Root file because the
-    # repository has revoked the only Root key that the client trusts.
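-    # A new root.json must be verifiable with a threshold of keys from both
-    # the previous and the new Root; here the only Root key the client trusts
-    # produced no signature, so verification fails with BadSignatureError.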
-    try:
-      self.repository_updater.refresh()
-
-    except tuf.exceptions.NoWorkingMirrorError as exception:
-      for mirror_exception in exception.mirror_errors.values():
-        self.assertTrue(isinstance(mirror_exception,
-            securesystemslib.exceptions.BadSignatureError))
-
-    repository.root.add_verification_key(self.role_keys['root']['public'])
-    repository.root.load_signing_key(self.role_keys['root']['private'])
-
-    # root, snapshot, and timestamp should be dirty
-    repository.dirty_roles()
-    repository.write('root', increment_version_number=False)
-    repository.write('snapshot')
-    repository.write('timestamp')
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # Root's version number = 2...
-    # The client successfully performs a refresh of top-level metadata to get
-    # the latest changes.
-    self.repository_updater.refresh()
-    self.assertEqual(self.repository_updater.metadata['current']['root']['version'], 2)
-
-    # Revoke the snapshot and targets keys (added to root) so that multiple
-    # snapshots are created.  Discontinue signing with the old root key now
-    # that the client has successfully updated (note: the old Root key
-    # was revoked, but the repository continued signing with it to allow
-    # the client to update).
-    repository.root.remove_verification_key(self.role_keys['root']['public'])
-    repository.root.unload_signing_key(self.role_keys['root']['private'])
-    repository.root.remove_verification_key(self.role_keys['snapshot']['public'])
-    repository.root.unload_signing_key(self.role_keys['snapshot']['private'])
-    repository.writeall()
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # Root's version number = 3...
-    self.repository_updater.refresh()
-
-    repository.root.remove_verification_key(self.role_keys['targets']['public'])
-    repository.root.unload_signing_key(self.role_keys['targets']['private'])
-
-    # The following should fail because root rotation requires the new Root
-    # to be signed with the previous self.role_keys['targets'] key.
-    self.assertRaises(tuf.exceptions.UnsignedMetadataError,
-        repository.writeall)
-
-    repository.root.load_signing_key(self.role_keys['targets']['private'])
-    repository.writeall()
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # Root's version number = 4...
-    self.repository_updater.refresh()
-    self.assertEqual(self.repository_updater.metadata['current']['root']['version'], 4)
-
-    # Verify that the client is able to recognize that a new set of keys has
-    # been added to the Root role.
-    # First, has 'root_keyid' been removed?
-    root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name)
-    self.assertTrue(root_keyid not in root_roleinfo['keyids'])
-
-    # Second, is Root's new key correct?  The new key should be
-    # Timestamp's.
- self.assertEqual(len(root_roleinfo['keyids']), 1) - timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', self.repository_name) - self.assertEqual(root_roleinfo['keyids'], timestamp_roleinfo['keyids']) - - - -def _load_role_keys(keystore_directory): - - # Populating 'self.role_keys' by importing the required public and private - # keys of 'tuf/tests/repository_data/'. The role keys are needed when - # modifying the remote repository used by the test cases in this unit test. - # The pre-generated key files in 'repository_data/keystore' are all encrypted with - # a 'password' passphrase. - EXPECTED_KEYFILE_PASSWORD = 'password' - - # Store and return the cryptography keys of the top-level roles, including 1 - # delegated role. - role_keys = {} - - root_key_file = os.path.join(keystore_directory, 'root_key') - targets_key_file = os.path.join(keystore_directory, 'targets_key') - snapshot_key_file = os.path.join(keystore_directory, 'snapshot_key') - timestamp_key_file = os.path.join(keystore_directory, 'timestamp_key') - delegation_key_file = os.path.join(keystore_directory, 'delegation_key') - - role_keys = {'root': {}, 'targets': {}, 'snapshot': {}, 'timestamp': {}, - 'role1': {}} - - # Import the top-level and delegated role public keys. - role_keys['root']['public'] = \ - repo_tool.import_rsa_publickey_from_file(root_key_file+'.pub') - role_keys['targets']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(targets_key_file + '.pub') - role_keys['snapshot']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(snapshot_key_file + '.pub') - role_keys['timestamp']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(timestamp_key_file + '.pub') - role_keys['role1']['public'] = \ - repo_tool.import_ed25519_publickey_from_file(delegation_key_file + '.pub') - - # Import the private keys of the top-level and delegated roles. - role_keys['root']['private'] = \ - repo_tool.import_rsa_privatekey_from_file(root_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['targets']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(targets_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['snapshot']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['timestamp']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_key_file, - EXPECTED_KEYFILE_PASSWORD) - role_keys['role1']['private'] = \ - repo_tool.import_ed25519_privatekey_from_file(delegation_key_file, - EXPECTED_KEYFILE_PASSWORD) - - return role_keys - - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_keydb_old.py b/tests/test_keydb_old.py deleted file mode 100755 index b76b5c0f39..0000000000 --- a/tests/test_keydb_old.py +++ /dev/null @@ -1,407 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_keydb_old.py - - - Vladimir Diaz - - - October 2012. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'keydb.py'. -""" - -import unittest -import logging -import sys - -import tuf -import tuf.formats -import securesystemslib.keys -import securesystemslib.settings -import tuf.keydb -import tuf.log - -from tests import utils - -logger = logging.getLogger(__name__) - - -# Generate the three keys to use in our test cases. 
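-# RSA key generation is comparatively slow, so the keys are created once at
-# import time and reused across the test cases below.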
-KEYS = [] -for junk in range(3): - rsa_key = securesystemslib.keys.generate_rsa_key(2048) - rsa_key['keyid_hash_algorithms'] = securesystemslib.settings.HASH_ALGORITHMS - KEYS.append(rsa_key) - - - -class TestKeydb(unittest.TestCase): - def setUp(self): - tuf.keydb.clear_keydb(clear_all=True) - - - - def tearDown(self): - tuf.keydb.clear_keydb(clear_all=True) - - - - def test_create_keydb(self): - # Test condition for normal behaviour. - repository_name = 'example_repository' - - # The keydb dictionary should contain only the 'default' repository entry. - self.assertTrue('default' in tuf.keydb._keydb_dict) - self.assertEqual(1, len(tuf.keydb._keydb_dict)) - - - tuf.keydb.create_keydb(repository_name) - self.assertEqual(2, len(tuf.keydb._keydb_dict)) - - # Verify that a keydb cannot be created for a name that already exists. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.create_keydb, repository_name) - - # Ensure that the key database for 'example_repository' is deleted so that - # the key database is returned to its original, default state. - tuf.keydb.remove_keydb(repository_name) - - - - def test_remove_keydb(self): - # Test condition for expected behaviour. - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.remove_keydb, 'default') - - tuf.keydb.create_keydb(repository_name) - tuf.keydb.remove_keydb(repository_name) - - # tuf.keydb.remove_keydb() logs a warning if a keydb for a non-existent - # repository is specified. - tuf.keydb.remove_keydb(repository_name) - - # Test condition for improperly formatted argument, and unexpected argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_keydb, 123) - self.assertRaises(TypeError, tuf.keydb.remove_keydb, rsakey, 123) - - - - def test_clear_keydb(self): - # Test condition ensuring 'clear_keydb()' clears the keydb database. - # Test the length of the keydb before and after adding a key. - self.assertEqual(0, len(tuf.keydb._keydb_dict['default'])) - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - tuf.keydb._keydb_dict['default'][keyid] = rsakey - self.assertEqual(1, len(tuf.keydb._keydb_dict['default'])) - tuf.keydb.clear_keydb() - self.assertEqual(0, len(tuf.keydb._keydb_dict['default'])) - - # Test condition for unexpected argument. - self.assertRaises(TypeError, tuf.keydb.clear_keydb, 'default', False, 'unexpected_argument') - - # Test condition for improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.clear_keydb, 0) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.clear_keydb, 'default', 0) - - # Test condition for non-existent repository name. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.clear_keydb, 'non-existent') - - # Test condition for keys added to a non-default key database. Unlike the - # test conditions above, this test makes use of the public functions - # add_key(), create_keydb(), and get_key() to more easily verify - # clear_keydb()'s behaviour. 
- rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - repository_name = 'example_repository' - tuf.keydb.create_keydb(repository_name) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid, repository_name) - tuf.keydb.add_key(rsakey, keyid, repository_name) - self.assertEqual(rsakey, tuf.keydb.get_key(keyid, repository_name)) - - tuf.keydb.clear_keydb(repository_name) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid, repository_name) - - # Remove 'repository_name' from the key database to revert it back to its - # original, default state (i.e., only the 'default' repository exists). - tuf.keydb.remove_keydb(repository_name) - - - - def test_get_key(self): - # Test conditions using valid 'keyid' arguments. - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - tuf.keydb._keydb_dict['default'][keyid] = rsakey - rsakey2 = KEYS[1] - keyid2 = KEYS[1]['keyid'] - tuf.keydb._keydb_dict['default'][keyid2] = rsakey2 - - self.assertEqual(rsakey, tuf.keydb.get_key(keyid)) - self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2)) - self.assertNotEqual(rsakey2, tuf.keydb.get_key(keyid)) - self.assertNotEqual(rsakey, tuf.keydb.get_key(keyid2)) - - # Test conditions using invalid arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, None) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, ['123']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, {'keyid': '123'}) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, '') - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.get_key, keyid, 123) - - # Test condition using a 'keyid' that has not been added yet. - keyid3 = KEYS[2]['keyid'] - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid3) - - # Test condition for a key added to a non-default repository. - repository_name = 'example_repository' - rsakey3 = KEYS[2] - tuf.keydb.create_keydb(repository_name) - tuf.keydb.add_key(rsakey3, keyid3, repository_name) - - # Test condition for a key added to a non-existent repository. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.get_key, - keyid, 'non-existent') - - # Verify that 'rsakey3' is added to the expected repository name. - # If not supplied, the 'default' repository name is searched. - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid3) - self.assertEqual(rsakey3, tuf.keydb.get_key(keyid3, repository_name)) - - # Remove the 'example_repository' so that other test functions have access - # to a default state of the keydb. - tuf.keydb.remove_keydb(repository_name) - - - - def test_add_key(self): - # Test conditions using valid 'keyid' arguments. - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - rsakey2 = KEYS[1] - keyid2 = KEYS[1]['keyid'] - rsakey3 = KEYS[2] - keyid3 = KEYS[2]['keyid'] - self.assertEqual(None, tuf.keydb.add_key(rsakey, keyid)) - self.assertEqual(None, tuf.keydb.add_key(rsakey2, keyid2)) - self.assertEqual(None, tuf.keydb.add_key(rsakey3)) - - self.assertEqual(rsakey, tuf.keydb.get_key(keyid)) - self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2)) - self.assertEqual(rsakey3, tuf.keydb.get_key(keyid3)) - - # Test conditions using arguments with invalid formats. 
- tuf.keydb.clear_keydb() - rsakey3['keytype'] = 'bad_keytype' - - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, None, keyid) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, '', keyid) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, ['123'], keyid) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, {'a': 'b'}, keyid) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey, {'keyid': ''}) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey, False) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey, ['keyid']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey3, keyid3) - rsakey3['keytype'] = 'rsa' - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.add_key, rsakey3, keyid3, 123) - - # Test conditions where keyid does not match the rsakey. - self.assertRaises(securesystemslib.exceptions.Error, tuf.keydb.add_key, rsakey, keyid2) - self.assertRaises(securesystemslib.exceptions.Error, tuf.keydb.add_key, rsakey2, keyid) - - # Test conditions using keyids that have already been added. - tuf.keydb.add_key(rsakey, keyid) - tuf.keydb.add_key(rsakey2, keyid2) - self.assertRaises(tuf.exceptions.KeyAlreadyExistsError, tuf.keydb.add_key, rsakey) - self.assertRaises(tuf.exceptions.KeyAlreadyExistsError, tuf.keydb.add_key, rsakey2) - - # Test condition for key added to the keydb of a non-default repository. - repository_name = 'example_repository' - tuf.keydb.create_keydb(repository_name) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid3, repository_name) - tuf.keydb.add_key(rsakey3, keyid3, repository_name) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid3) - self.assertEqual(rsakey3, tuf.keydb.get_key(keyid3, repository_name)) - - # Test condition for key added to the keydb of a non-existent repository. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.add_key, - rsakey3, keyid3, 'non-existent') - - # Reset the keydb to its original, default state. Other test functions - # expect only the 'default' repository to exist. - tuf.keydb.remove_keydb(repository_name) - - - - def test_remove_key(self): - # Test conditions using valid keyids. - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - rsakey2 = KEYS[1] - keyid2 = KEYS[1]['keyid'] - rsakey3 = KEYS[2] - keyid3 = KEYS[2]['keyid'] - tuf.keydb.add_key(rsakey, keyid) - tuf.keydb.add_key(rsakey2, keyid2) - tuf.keydb.add_key(rsakey3, keyid3) - - self.assertEqual(None, tuf.keydb.remove_key(keyid)) - self.assertEqual(None, tuf.keydb.remove_key(keyid2)) - - # Ensure the keys were actually removed. - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid2) - - # Test for 'keyid' not in keydb. - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.remove_key, keyid) - - # Test condition for unknown key argument. - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.remove_key, '1') - - # Test condition for removal of keys from a non-default repository. 
- repository_name = 'example_repository' - tuf.keydb.create_keydb(repository_name) - tuf.keydb.add_key(rsakey, keyid, repository_name) - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.keydb.remove_key, keyid, 'non-existent') - tuf.keydb.remove_key(keyid, repository_name) - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.remove_key, keyid, repository_name) - - # Reset the keydb so that subsequent tests have access to the original, - # default keydb. - tuf.keydb.remove_keydb(repository_name) - - # Test conditions for arguments with invalid formats. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, None) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, '') - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, ['123']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, keyid, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.keydb.remove_key, {'bad': '123'}) - self.assertRaises(securesystemslib.exceptions.Error, tuf.keydb.remove_key, rsakey3) - - - - def test_create_keydb_from_root_metadata(self): - # Test condition using a valid 'root_metadata' argument. - rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - rsakey2 = KEYS[1] - keyid2 = KEYS[1]['keyid'] - - keydict = {keyid: rsakey, keyid2: rsakey2} - - roledict = {'Root': {'keyids': [keyid], 'threshold': 1}, - 'Targets': {'keyids': [keyid2, keyid], 'threshold': 1}} - version = 8 - consistent_snapshot = False - expires = '1985-10-21T01:21:00Z' - - root_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version='1.0.0', - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - self.assertEqual(None, tuf.keydb.create_keydb_from_root_metadata(root_metadata)) - tuf.keydb.create_keydb_from_root_metadata(root_metadata) - - # Ensure 'keyid' and 'keyid2' were added to the keydb database. - self.assertEqual(rsakey, tuf.keydb.get_key(keyid)) - self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2)) - - # Verify that the keydb is populated for a non-default repository. - repository_name = 'example_repository' - tuf.keydb.create_keydb_from_root_metadata(root_metadata, repository_name) - - # Test conditions for arguments with invalid formats. - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, None) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, '') - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, ['123']) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, {'bad': '123'}) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, root_metadata, 123) - - # Verify that a keydb cannot be created for a non-existent repository name. - tuf.keydb.create_keydb_from_root_metadata(root_metadata, 'non-existent') - - # Remove the 'non-existent' and 'example_repository' key database so that - # subsequent test functions have access to a default keydb. 
- tuf.keydb.remove_keydb(repository_name) - tuf.keydb.remove_keydb('non-existent') - - - # Test conditions for correctly formatted 'root_metadata' arguments but - # containing incorrect keyids or key types. In these conditions, the keys - # should not be added to the keydb database and a warning should be logged. - tuf.keydb.clear_keydb() - - # 'keyid' does not match 'rsakey2'. - # In this case, the key will be added to the keydb - keydict[keyid] = rsakey2 - - # Key with invalid keytype. - rsakey3 = KEYS[2] - keyid3 = KEYS[2]['keyid'] - rsakey3['keytype'] = 'bad_keytype' - keydict[keyid3] = rsakey3 - - version = 8 - expires = '1985-10-21T01:21:00Z' - - root_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version='1.0.0', - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - self.assertEqual(None, tuf.keydb.create_keydb_from_root_metadata(root_metadata)) - - # Ensure only 'keyid2' and 'keyid' were added to the keydb database. - # 'keyid3' should not be stored. - self.maxDiff = None - self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2)) - - test_key = rsakey2 - test_key['keyid'] = keyid - self.assertEqual(test_key, tuf.keydb.get_key(keyid)) - - self.assertRaises(tuf.exceptions.UnknownKeyError, tuf.keydb.get_key, keyid3) - - # reset values - rsakey3['keytype'] = 'rsa' - rsakey2['keyid'] = keyid2 - - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_log_old.py b/tests/test_log_old.py deleted file mode 100755 index a92661b305..0000000000 --- a/tests/test_log_old.py +++ /dev/null @@ -1,210 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_log_old.py - - - Vladimir Diaz - - - May 1, 2014. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'log.py'. -""" - -import logging -import unittest -import os -import shutil -import sys -import importlib - -import tuf -import tuf.log -import tuf.settings - -import securesystemslib -import securesystemslib.util - -from tests import utils - - -# We explicitly create a logger which is a child of the tuf hierarchy, -# instead of using the standard getLogger(__name__) pattern, because the -# tests are not part of the tuf hierarchy and we are testing functionality -# of the tuf package explicitly enabled on the tuf hierarchy -logger = logging.getLogger('tuf.test_log') - -log_levels = [logging.CRITICAL, logging.ERROR, logging.WARNING, - logging.INFO, logging.DEBUG] - - -class TestLog(unittest.TestCase): - - def setUp(self): - # store the current log level so it can be restored after the test - self._initial_level = logging.getLogger('tuf').level - - def tearDown(self): - tuf.log.remove_console_handler() - tuf.log.disable_file_logging() - logging.getLogger('tuf').level = self._initial_level - - - - - def test_set_log_level(self): - # Test normal case. - global log_levels - global logger - - tuf.log.set_log_level() - self.assertTrue(logger.isEnabledFor(logging.DEBUG)) - - for level in log_levels: - tuf.log.set_log_level(level) - self.assertTrue(logger.isEnabledFor(level)) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_log_level, '123') - - # Test for invalid argument. 
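-    # 51 is not one of the standard logging levels (CRITICAL, the highest,
-    # is 50), so it is expected to be rejected with a FormatError.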
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_log_level, 51) - - - - def test_set_filehandler_log_level(self): - # Normal case. Default log level. - # A file handler is not set by default. Add one now before attempting to - # set the log level. - self.assertRaises(tuf.exceptions.Error, tuf.log.set_filehandler_log_level) - tuf.log.enable_file_logging() - tuf.log.set_filehandler_log_level() - - # Expected log levels. - for level in log_levels: - tuf.log.set_log_level(level) - - # Test that the log level of the file handler cannot be set because - # file logging is disabled (via tuf.settings.ENABLE_FILE_LOGGING). - tuf.settings.ENABLE_FILE_LOGGING = False - importlib.reload(tuf.log) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_filehandler_log_level, '123') - - # Test for invalid argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_filehandler_log_level, 51) - - - def test_set_console_log_level(self): - # Test setting a console log level without first adding one. - self.assertRaises(securesystemslib.exceptions.Error, tuf.log.set_console_log_level) - - # Normal case. Default log level. Setting the console log level first - # requires adding a console logger. - tuf.log.add_console_handler() - tuf.log.set_console_log_level() - - # Expected log levels. - for level in log_levels: - tuf.log.set_console_log_level(level) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_console_log_level, '123') - - # Test for invalid argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.set_console_log_level, 51) - - - - - - def test_add_console_handler(self): - # Normal case. Default log level. - tuf.log.add_console_handler() - - # Adding a console handler when one has already been added. - tuf.log.add_console_handler() - - # Expected log levels. - for level in log_levels: - tuf.log.set_console_log_level(level) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.add_console_handler, '123') - - # Test for invalid argument. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.log.add_console_handler, 51) - - # Test that an exception is printed to the console. Note: A stack trace - # is not included in the exception output because 'log.py' applies a filter - # to minimize the amount of output to the console. - try: - raise TypeError('Test exception output in the console.') - - except TypeError as e: - logger.exception(e) - - - def test_remove_console_handler(self): - # Normal case. - tuf.log.remove_console_handler() - - # Removing a console handler that has not been added. Logs a warning. - tuf.log.remove_console_handler() - - - def test_enable_file_logging(self): - # Normal case. - if os.path.exists(tuf.settings.LOG_FILENAME): - shutil.move( - tuf.settings.LOG_FILENAME, tuf.settings.LOG_FILENAME + '.backup') - - tuf.log.enable_file_logging() - self.assertTrue(os.path.exists(tuf.settings.LOG_FILENAME)) - if os.path.exists(tuf.settings.LOG_FILENAME + '.backup'): - shutil.move( - tuf.settings.LOG_FILENAME + '.backup', tuf.settings.LOG_FILENAME) - - # The file logger must first be unset before attempting to re-add it. 
- self.assertRaises(tuf.exceptions.Error, tuf.log.enable_file_logging) - - tuf.log.disable_file_logging() - tuf.log.enable_file_logging('my_log_file.log') - logger.debug('testing file logging') - self.assertTrue(os.path.exists('my_log_file.log')) - - # Test for an improperly formatted argument. - tuf.log.disable_file_logging() - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.log.enable_file_logging, 1) - - - def test_disable_file_logging(self): - # Normal case. - tuf.log.enable_file_logging('my.log') - logger.debug('debug message') - junk, hashes = securesystemslib.util.get_file_details('my.log') - tuf.log.disable_file_logging() - logger.debug('new debug message') - junk, hashes2 = securesystemslib.util.get_file_details('my.log') - self.assertEqual(hashes, hashes2) - - # An exception should not be raised if an attempt is made to disable - # the file logger if it has already been disabled. - tuf.log.disable_file_logging() - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_mirrors_old.py b/tests/test_mirrors_old.py deleted file mode 100755 index 0d530154c6..0000000000 --- a/tests/test_mirrors_old.py +++ /dev/null @@ -1,138 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_mirrors_old.py - - - Konstantin Andrianov. - - - March 26, 2012. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'mirrors.py'. -""" - -import unittest -import sys - -import tuf.mirrors as mirrors -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -import securesystemslib -import securesystemslib.util - - -class TestMirrors(unittest_toolbox.Modified_TestCase): - - def setUp(self): - - unittest_toolbox.Modified_TestCase.setUp(self) - - self.mirrors = \ - {'mirror1': {'url_prefix' : 'http://mirror1.com', - 'metadata_path' : 'metadata', - 'targets_path' : 'targets'}, - 'mirror2': {'url_prefix' : 'http://mirror2.com', - 'metadata_path' : 'metadata', - 'targets_path' : 'targets', - 'confined_target_dirs' : ['targets/release/', - 'targets/release/']}, - 'mirror3': {'url_prefix' : 'http://mirror3.com', - 'targets_path' : 'targets', - 'confined_target_dirs' : ['targets/release/v2/']}, - # confined_target_dirs = [] means that none of the targets on - # that mirror is available. - 'mirror4': {'url_prefix' : 'http://mirror4.com', - 'metadata_path' : 'metadata', - 'confined_target_dirs' : []}, - # Make sure we are testing when confined_target_dirs is [''] which means - # that all targets are available on that mirror. - 'mirror5': {'url_prefix' : 'http://mirror5.com', - 'targets_path' : 'targets', - 'confined_target_dirs' : ['']} - } - - - - def test_get_list_of_mirrors(self): - # Test: Normal case. 
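- # get_list_of_mirrors() matches the request against each mirror entry:
- # metadata is available from any mirror that defines 'metadata_path', while
- # a target is available only if the mirror defines 'targets_path' and
- # either omits 'confined_target_dirs' or confines a directory containing
- # the file. An empty confinement list matches nothing; a list holding ''
- # matches every target path.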
-
- # 2 matches: a mirror without target directory confinement (mirror1) and a
- # mirror whose confinement of [''] admits every target path (mirror5).
- mirror_list = mirrors.get_list_of_mirrors('target', 'a.txt', self.mirrors)
- self.assertEqual(len(mirror_list), 2)
- self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/targets/a.txt' in \
- mirror_list)
- self.assertTrue(self.mirrors['mirror5']['url_prefix']+'/targets/a.txt' in \
- mirror_list)
-
- mirror_list = mirrors.get_list_of_mirrors('target', 'a/b', self.mirrors)
- self.assertEqual(len(mirror_list), 2)
- self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/targets/a/b' in \
- mirror_list)
- self.assertTrue(self.mirrors['mirror5']['url_prefix']+'/targets/a/b' in \
- mirror_list)
-
- # 3 matches: one mirror with non-confined targets, one with a matching
- # confined target directory, and one confined to [''].
- mirror_list = mirrors.get_list_of_mirrors('target', 'release/v2/c', self.mirrors)
- self.assertEqual(len(mirror_list), 3)
- self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/targets/release/v2/c' in \
- mirror_list)
- self.assertTrue(self.mirrors['mirror3']['url_prefix']+'/targets/release/v2/c' in \
- mirror_list)
- self.assertTrue(self.mirrors['mirror5']['url_prefix']+'/targets/release/v2/c' in \
- mirror_list)
-
- # 3 matches: Metadata found on 3 mirrors
- mirror_list = mirrors.get_list_of_mirrors('meta', 'release.txt', self.mirrors)
- self.assertEqual(len(mirror_list), 3)
- self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/metadata/release.txt' in \
- mirror_list)
- self.assertTrue(self.mirrors['mirror2']['url_prefix']+'/metadata/release.txt' in \
- mirror_list)
- self.assertTrue(self.mirrors['mirror4']['url_prefix']+'/metadata/release.txt' in \
- mirror_list)
-
- # No matches
- del self.mirrors['mirror1']
- del self.mirrors['mirror5']
- mirror_list = mirrors.get_list_of_mirrors('target', 'a/b', self.mirrors)
- self.assertFalse(mirror_list)
-
-
- # Test: Invalid 'file_type'.
- self.assertRaises(securesystemslib.exceptions.Error, mirrors.get_list_of_mirrors,
- self.random_string(), 'a', self.mirrors)
-
- self.assertRaises(securesystemslib.exceptions.Error, mirrors.get_list_of_mirrors,
- 12345, 'a', self.mirrors)
-
- # Test: Improperly formatted 'file_path'.
- self.assertRaises(securesystemslib.exceptions.FormatError, mirrors.get_list_of_mirrors,
- 'meta', 12345, self.mirrors)
-
- # Test: Improperly formatted 'mirrors_dict' object.
- self.assertRaises(securesystemslib.exceptions.FormatError, mirrors.get_list_of_mirrors,
- 'meta', 'a', 12345)
-
- self.assertRaises(securesystemslib.exceptions.FormatError, mirrors.get_list_of_mirrors,
- 'meta', 'a', ['a'])
-
- self.assertRaises(securesystemslib.exceptions.FormatError, mirrors.get_list_of_mirrors,
- 'meta', 'a', {'a':'b'})
-
-
-
-# Run the unittests
-if __name__ == '__main__':
- utils.configure_test_logging(sys.argv)
- unittest.main()
diff --git a/tests/test_mix_and_match_attack_old.py b/tests/test_mix_and_match_attack_old.py
deleted file mode 100755
index cc033c291e..0000000000
--- a/tests/test_mix_and_match_attack_old.py
+++ /dev/null
@@ -1,236 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-
- test_mix_and_match_attack_old.py
-
-
- Konstantin Andrianov.
-
-
- March 27, 2012.
-
- April 6, 2014.
- Refactored to use the 'unittest' module (test conditions in code, rather
- than verifying text output), use pre-generated repository files, and
- discontinue use of the old repository tools. Modify the previous scenario
- simulated for the mix-and-match attack.
-vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate a mix-and-match attack. In a mix-and-match attack, an attacker is - able to trick clients into using a combination of metadata that never existed - together on the repository at the same time. - - Note: There is no difference between 'updates' and 'target' files. -""" - -import os -import tempfile -import shutil -import logging -import unittest -import sys - -import tuf.exceptions -import tuf.log -import tuf.client.updater as updater -import tuf.repository_tool as repo_tool -import tuf.unittest_toolbox as unittest_toolbox -import tuf.roledb -import tuf.keydb - -from tests import utils - - -# The repository tool is imported and logs console messages by default. -# Disable console log messages generated by this unit test. -repo_tool.disable_console_log_messages() - -logger = logging.getLogger(__name__) - - - -class TestMixAndMatchAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and - # target files. 'temporary_directory' must be deleted in TearDownModule() - # so that temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client') - original_keystore = os.path.join(original_repository_files, 'keystore') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. 
- self.repository_directory = \
- os.path.join(temporary_repository_root, 'repository')
- self.client_directory = os.path.join(temporary_repository_root, 'client')
- self.keystore_directory = os.path.join(temporary_repository_root, 'keystore')
-
- # Copy the original 'repository', 'client', and 'keystore' directories
- # to the temporary repository the test cases can use.
- shutil.copytree(original_repository, self.repository_directory)
- shutil.copytree(original_client, self.client_directory)
- shutil.copytree(original_keystore, self.keystore_directory)
-
- # Set the url prefix required by the 'tuf/client/updater.py' updater.
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(self.server_process_handler.port) + repository_basepath
-
- # Set 'tuf.settings.repositories_directory' to the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.client_directory
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata',
- 'targets_path': 'targets'}}
-
- # Create the repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
-
- def tearDown(self):
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
-
- # Logs stdout and stderr from the server subprocess.
- self.server_process_handler.flush_log()
-
- # Remove temporary directory
- unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
- def test_with_tuf(self):
- # Scenario:
- # An attacker tries to trick the client into installing files indicated by
- # a previous release of its corresponding metadata. The outdated metadata
- # is properly named and was previously valid, but is no longer current
- # according to the latest 'snapshot.json' role. Generate a new snapshot of
- # the repository after modifying a target file of 'role1.json'.
- # Backup 'role1.json' (the delegated role to be updated, and then inserted
- # again for the mix-and-match attack).
- role1_path = os.path.join(self.repository_directory, 'metadata', 'role1.json')
- backup_role1 = os.path.join(self.repository_directory, 'role1.json.backup')
- shutil.copy(role1_path, backup_role1)
-
- # Backup 'file3.txt', specified by 'role1.json'.
- file3_path = os.path.join(self.repository_directory, 'targets', 'file3.txt')
- shutil.copy(file3_path, file3_path + '.backup')
-
- # Re-generate the required metadata on the remote repository. The affected
- # metadata must be properly updated and signed with 'repository_tool.py',
- # otherwise the client will reject them as invalid metadata.
- repository = repo_tool.load_repository(self.repository_directory)
-
- # Load the signing keys so that newly generated metadata is properly signed.
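- # Changing 'role1' means 'role1.json' must be re-signed, 'snapshot.json'
- # must record role1's new version, and 'timestamp.json' must record the new
- # snapshot, so the keys for all three roles are loaded below.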
- timestamp_keyfile = os.path.join(self.keystore_directory, 'timestamp_key') - role1_keyfile = os.path.join(self.keystore_directory, 'delegation_key') - snapshot_keyfile = os.path.join(self.keystore_directory, 'snapshot_key') - timestamp_private = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_keyfile, 'password') - role1_private = \ - repo_tool.import_ed25519_privatekey_from_file(role1_keyfile, 'password') - snapshot_private = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_keyfile, 'password') - - repository.targets('role1').load_signing_key(role1_private) - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - # Modify a 'role1.json' target file, and add it to its metadata so that a - # new version is generated. - with open(file3_path, 'wt') as file_object: - file_object.write('This is role2\'s target file.') - repository.targets('role1').add_target(os.path.basename(file3_path)) - - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Insert the previously valid 'role1.json'. The TUF client should reject it. - shutil.move(backup_role1, role1_path) - - # Verify that the TUF client detects unexpected metadata (previously valid, - # but not up-to-date with the latest snapshot of the repository) and - # refuses to continue the update process. Refresh top-level metadata so - # that the client is aware of the latest snapshot of the repository. - self.repository_updater.refresh() - - try: - with utils.ignore_deprecation_warnings('tuf.client.updater'): - self.repository_updater.targets_of_role('role1') - - # Verify that the specific - # 'tuf.exceptions.BadVersionNumberError' exception is raised by - # each mirror. - except tuf.exceptions.NoWorkingMirrorError as exception: - for mirror_url, mirror_error in exception.mirror_errors.items(): - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'metadata', 'role1.json') - - # Verify that 'role1.json' is the culprit. - self.assertEqual(url_file.replace('\\', '/'), mirror_url) - self.assertTrue(isinstance( - mirror_error, tuf.exceptions.BadVersionNumberError)) - - else: - self.fail('TUF did not prevent a mix-and-match attack.') - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_multiple_repositories_integration_old.py b/tests/test_multiple_repositories_integration_old.py deleted file mode 100755 index 6387764894..0000000000 --- a/tests/test_multiple_repositories_integration_old.py +++ /dev/null @@ -1,268 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_multiple_repositories_integration_old.py - - - Vladimir Diaz - - - February 2, 2017 - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Verify that clients and the repository tools are able to keep track of - multiple repositories and separate sets of metadata for each. 
-""" - -import os -import tempfile -import logging -import shutil -import unittest -import json -import sys - -import tuf -import tuf.log -import tuf.roledb -import tuf.client.updater as updater -import tuf.settings -import tuf.unittest_toolbox as unittest_toolbox -import tuf.repository_tool as repo_tool - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) - -repo_tool.disable_console_log_messages() - - -class TestMultipleRepositoriesIntegration(unittest_toolbox.Modified_TestCase): - - def setUp(self): - # Modified_Testcase can handle temp dir removal - unittest_toolbox.Modified_TestCase.setUp(self) - self.temporary_directory = self.make_temp_directory(directory=os.getcwd()) - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf/tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - - self.temporary_repository_root = tempfile.mkdtemp(dir=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client', 'test_repository1') - original_keystore = os.path.join(original_repository_files, 'keystore') - original_map_file = os.path.join(original_repository_files, 'map.json') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = os.path.join(self.temporary_repository_root, - 'repository_server1') - self.repository_directory2 = os.path.join(self.temporary_repository_root, - 'repository_server2') - - # Setting 'tuf.settings.repositories_directory' with the temporary client - # directory copied from the original repository files. - tuf.settings.repositories_directory = self.temporary_repository_root - - self.repository_name = 'test_repository1' - self.repository_name2 = 'test_repository2' - - self.client_directory = os.path.join(self.temporary_repository_root, - self.repository_name) - self.client_directory2 = os.path.join(self.temporary_repository_root, - self.repository_name2) - - self.keystore_directory = os.path.join(self.temporary_repository_root, 'keystore') - self.map_file = os.path.join(self.client_directory, 'map.json') - self.map_file2 = os.path.join(self.client_directory2, 'map.json') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. - shutil.copytree(original_repository, self.repository_directory) - shutil.copytree(original_repository, self.repository_directory2) - shutil.copytree(original_client, self.client_directory) - shutil.copytree(original_client, self.client_directory2) - shutil.copyfile(original_map_file, self.map_file) - shutil.copyfile(original_map_file, self.map_file2) - shutil.copytree(original_keystore, self.keystore_directory) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. 
The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - - # Needed because in some tests simple_server.py cannot be found. - # The reason is that the current working directory - # has been changed when executing a subprocess. - SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), 'simple_server.py') - - # Creates a subprocess running a server. - self.server_process_handler = utils.TestServerProcess(log=logger, - server=SIMPLE_SERVER_PATH, popen_cwd=self.repository_directory) - - logger.debug('Server process started.') - - # Creates a subprocess running a server. - self.server_process_handler2 = utils.TestServerProcess(log=logger, - server=SIMPLE_SERVER_PATH, popen_cwd=self.repository_directory2) - - logger.debug('Server process 2 started.') - - url_prefix = \ - 'http://' + utils.TEST_HOST_ADDRESS + ':' + \ - str(self.server_process_handler.port) - url_prefix2 = \ - 'http://' + utils.TEST_HOST_ADDRESS + ':' + \ - str(self.server_process_handler2.port) - - self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - self.repository_mirrors2 = {'mirror1': {'url_prefix': url_prefix2, - 'metadata_path': 'metadata', - 'targets_path': 'targets'}} - - # Create the repository instances. The test cases will use these client - # updaters to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(self.repository_name, - self.repository_mirrors) - self.repository_updater2 = updater.Updater(self.repository_name2, - self.repository_mirrors2) - - - def tearDown(self): - # Cleans the resources and flush the logged lines (if any). - self.server_process_handler.clean() - self.server_process_handler2.clean() - - # updater.Updater() populates the roledb with the name "test_repository1" - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Remove top-level temporary directory - unittest_toolbox.Modified_TestCase.tearDown(self) - - - def test_update(self): - self.assertEqual('test_repository1', str(self.repository_updater)) - self.assertEqual('test_repository2', str(self.repository_updater2)) - - self.assertEqual(sorted(['role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('test_repository1'))) - - self.assertEqual(sorted(['role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('test_repository2'))) - - # Note: refresh() resets the known metadata and updates the latest - # top-level metadata. - self.repository_updater.refresh() - - self.assertEqual(sorted(['root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('test_repository1'))) - - # test_repository2 wasn't refreshed and should still know about delegated - # roles. - self.assertEqual(sorted(['root', 'role1', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('test_repository2'))) - - # 'role1.json' should be downloaded, because it provides info for the - # requested 'file3.txt'. 
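- # get_one_valid_targetinfo() traverses the delegation graph starting from
- # 'targets', so 'role1' and its nested delegation 'role2' are loaded into
- # the roledb, as the following assertion verifies.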
- valid_targetinfo = self.repository_updater.get_one_valid_targetinfo('file3.txt') - - self.assertEqual(sorted(['role2', 'role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('test_repository1'))) - - - - - def test_repository_tool(self): - - self.assertEqual(self.repository_name, str(self.repository_updater)) - self.assertEqual(self.repository_name2, str(self.repository_updater2)) - - repository = repo_tool.load_repository(self.repository_directory, - self.repository_name) - repository2 = repo_tool.load_repository(self.repository_directory2, - self.repository_name2) - - repository.timestamp.version = 88 - self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles( - self.repository_name)) - self.assertEqual([], tuf.roledb.get_dirty_roles(self.repository_name2)) - - repository2.timestamp.version = 100 - self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles( - self.repository_name2)) - - key_file = os.path.join(self.keystore_directory, 'timestamp_key') - timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file, "password") - - repository.timestamp.load_signing_key(timestamp_private) - repository2.timestamp.load_signing_key(timestamp_private) - - repository.write('timestamp', increment_version_number=False) - repository2.write('timestamp', increment_version_number=False) - - # And move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.rmtree(os.path.join(self.repository_directory2, 'metadata')) - - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory2, 'metadata.staged'), - os.path.join(self.repository_directory2, 'metadata')) - - # Verify that the client retrieves the expected updates. - logger.info('Downloading timestamp from server 1.') - self.repository_updater.refresh() - - self.assertEqual( - 88, self.repository_updater.metadata['current']['timestamp']['version']) - logger.info('Downloading timestamp from server 2.') - self.repository_updater2.refresh() - - self.assertEqual( - 100, self.repository_updater2.metadata['current']['timestamp']['version']) - - # Test the behavior of the multi-repository updater. - map_file = securesystemslib.util.load_json_file(self.map_file) - map_file['repositories'][self.repository_name] = ['http://localhost:' \ - + str(self.server_process_handler.port)] - map_file['repositories'][self.repository_name2] = ['http://localhost:' \ - + str(self.server_process_handler2.port)] - with open(self.map_file, 'w') as file_object: - file_object.write(json.dumps(map_file)) - - # Try to load a non-existent map file. 
- self.assertRaises(tuf.exceptions.Error, updater.MultiRepoUpdater, 'bad_path') - - multi_repo_updater = updater.MultiRepoUpdater(self.map_file) - valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file3.txt') - - for my_updater, my_targetinfo in valid_targetinfo.items(): - my_updater.download_target(my_targetinfo, self.temporary_directory) - self.assertTrue(os.path.exists(os.path.join(self.temporary_directory, 'file3.txt'))) - - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_replay_attack_old.py b/tests/test_replay_attack_old.py deleted file mode 100755 index 92dc3ba466..0000000000 --- a/tests/test_replay_attack_old.py +++ /dev/null @@ -1,321 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_replay_attack_old.py - - - Konstantin Andrianov. - - - February 22, 2012. - - April 5, 2014. - Refactored to use the 'unittest' module (test conditions in code, rather - than verifying text output), use pre-generated repository files, and - discontinue use of the old repository tools. Expanded comments. - -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Simulate a replay, or rollback, attack. In a replay attack, a client is - tricked into installing software that is older than that which the client - previously knew to be available. - - Note: There is no difference between 'updates' and 'target' files. -""" - -import os -import tempfile -import datetime -import shutil -import logging -import unittest -import sys -from urllib import request - -import tuf.formats -import tuf.log -import tuf.client.updater as updater -import tuf.repository_tool as repo_tool -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -import securesystemslib - - -# The repository tool is imported and logs console messages by default. -# Disable console log messages generated by this unit test. -repo_tool.disable_console_log_messages() - -logger = logging.getLogger(__name__) - - - -class TestReplayAttack(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. 
- unittest_toolbox.Modified_TestCase.setUp(self)
-
- self.repository_name = 'test_repository1'
-
- # Copy the original repository files provided in the test folder so that
- # any modifications made to repository files are restricted to the copies.
- # The 'repository_data' directory is expected to exist in 'tuf/tests/'.
- original_repository_files = os.path.join(os.getcwd(), 'repository_data')
- temporary_repository_root = \
- self.make_temp_directory(directory=self.temporary_directory)
-
- # The original repository, keystore, and client directories will be copied
- # for each test case.
- original_repository = os.path.join(original_repository_files, 'repository')
- original_client = os.path.join(original_repository_files, 'client')
- original_keystore = os.path.join(original_repository_files, 'keystore')
-
- # Save references to the often-needed client repository directories.
- # Test cases need these references to access metadata and target files.
- self.repository_directory = \
- os.path.join(temporary_repository_root, 'repository')
- self.client_directory = os.path.join(temporary_repository_root, 'client')
- self.keystore_directory = os.path.join(temporary_repository_root, 'keystore')
-
- # Copy the original 'repository', 'client', and 'keystore' directories
- # to the temporary repository the test cases can use.
- shutil.copytree(original_repository, self.repository_directory)
- shutil.copytree(original_client, self.client_directory)
- shutil.copytree(original_keystore, self.keystore_directory)
-
- # Set the url prefix required by the 'tuf/client/updater.py' updater.
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(self.server_process_handler.port) + repository_basepath
-
- # Set 'tuf.settings.repositories_directory' to the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.client_directory
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata',
- 'targets_path': 'targets'}}
-
- # Create the repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
-
- def tearDown(self):
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
-
- # Logs stdout and stderr from the server subprocess.
- self.server_process_handler.flush_log()
-
- # Remove temporary directory
- unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
- def test_without_tuf(self):
- # Scenario:
- # 'timestamp.json' specifies the latest version of the repository files.
- # A client should only accept the same version number (specified in the
- # file) of the metadata, or greater. A version number less than the one
- # currently trusted should be rejected. A non-TUF client may use a
- # different mechanism for determining versions of metadata, but this
- # integration test uses version numbers because that is what TUF uses.
- #
- # Modify the repository's 'timestamp.json' so that a new version is
- # generated and accepted by the client, and backup the previous version.
- # The previous version is then returned the next time the client requests
- # an update.
- # A non-TUF client (without a way to detect older versions of metadata,
- # and thus updates) is expected to download older metadata and outdated
- # files. Verify that the older version of 'timestamp.json' is downloaded
- # by the non-TUF client.
-
- # Backup the current version of 'timestamp'. It will be used as the
- # outdated version returned to the client. The repository tool removes
- # obsolete metadata, so do *not* save the backup version in the
- # repository's metadata directory.
- timestamp_path = os.path.join(self.repository_directory, 'metadata',
- 'timestamp.json')
- backup_timestamp = os.path.join(self.repository_directory,
- 'timestamp.json.backup')
- shutil.copy(timestamp_path, backup_timestamp)
-
- # The fileinfo of the previous version is saved to verify that it is indeed
- # accepted by the non-TUF client.
- length, hashes = securesystemslib.util.get_file_details(backup_timestamp)
- previous_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Modify the timestamp file on the remote repository.
- repository = repo_tool.load_repository(self.repository_directory)
- key_file = os.path.join(self.keystore_directory, 'timestamp_key')
- timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
- 'password')
- repository.timestamp.load_signing_key(timestamp_private)
-
- # Set an arbitrary expiration so that the repository tool generates a new
- # version.
- repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Save the fileinfo of the new version generated to verify that it is
- # saved by the client.
- length, hashes = securesystemslib.util.get_file_details(timestamp_path)
- new_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- url_prefix = self.repository_mirrors['mirror1']['url_prefix']
- url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json')
- client_timestamp_path = os.path.join(self.client_directory,
- self.repository_name, 'metadata', 'current', 'timestamp.json')
-
- # On Windows, the URL portion should not contain back slashes.
- request.urlretrieve(url_file.replace('\\', '/'), client_timestamp_path)
-
- length, hashes = securesystemslib.util.get_file_details(client_timestamp_path)
- download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Verify 'download_fileinfo' is equal to the new version.
- self.assertEqual(download_fileinfo, new_fileinfo)
-
- # Restore the previous version of 'timestamp.json' on the remote repository
- # and verify that the non-TUF client downloads it (expected, but not ideal).
- shutil.move(backup_timestamp, timestamp_path)
-
- # On Windows, the URL portion should not contain back slashes.
- request.urlretrieve(url_file.replace('\\', '/'), client_timestamp_path)
-
- length, hashes = securesystemslib.util.get_file_details(client_timestamp_path)
- download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Verify 'download_fileinfo' is equal to the previous version.
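- # The rollback goes unnoticed: without TUF, the client keeps no record of
- # a previously trusted version number to compare the download against.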
- self.assertEqual(download_fileinfo, previous_fileinfo)
- self.assertNotEqual(download_fileinfo, new_fileinfo)
-
-
-
- def test_with_tuf(self):
- # The same scenario outlined in test_without_tuf() is followed here, except
- # with a TUF client (scenario description provided in the opening comment
- # block of that test case). The TUF client performs a refresh of top-level
- # metadata, which also includes 'timestamp.json'.
-
- # Backup the current version of 'timestamp'. It will be used as the
- # outdated version returned to the client. The repository tool removes
- # obsolete metadata, so do *not* save the backup version in the
- # repository's metadata directory.
- timestamp_path = os.path.join(self.repository_directory, 'metadata',
- 'timestamp.json')
- backup_timestamp = os.path.join(self.repository_directory,
- 'timestamp.json.backup')
- shutil.copy(timestamp_path, backup_timestamp)
-
- # The fileinfo of the previous version is saved; this time the client is a
- # TUF client, which must not end up accepting that version.
- length, hashes = securesystemslib.util.get_file_details(backup_timestamp)
- previous_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Modify the timestamp file on the remote repository.
- repository = repo_tool.load_repository(self.repository_directory)
- key_file = os.path.join(self.keystore_directory, 'timestamp_key')
- timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
- 'password')
- repository.timestamp.load_signing_key(timestamp_private)
-
- # Set an arbitrary expiration so that the repository tool generates a new
- # version.
- repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Save the fileinfo of the new version generated to verify that it is
- # saved by the client.
- length, hashes = securesystemslib.util.get_file_details(timestamp_path)
- new_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Refresh top-level metadata, including 'timestamp.json'. Installation of
- # the new version of 'timestamp.json' is expected.
- self.repository_updater.refresh()
-
- client_timestamp_path = os.path.join(self.client_directory,
- self.repository_name, 'metadata', 'current', 'timestamp.json')
- length, hashes = securesystemslib.util.get_file_details(client_timestamp_path)
- download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Verify 'download_fileinfo' is equal to the new version.
- self.assertEqual(download_fileinfo, new_fileinfo)
-
- # Restore the previous version of 'timestamp.json' on the remote
- # repository. The TUF client should detect the rollback and reject it.
- shutil.move(backup_timestamp, timestamp_path)
- logger.info('Moving the timestamp.json backup to the current version.')
-
- # Verify that the TUF client detects replayed metadata and refuses to
- # continue the update process.
- try:
- self.repository_updater.refresh()
-
- # Verify that the specific 'tuf.exceptions.ReplayedMetadataError' is raised
- # by each mirror.
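- # (NoWorkingMirrorError aggregates the per-mirror failures in a dict keyed
- # by the file's URL on each mirror; it is inspected in the except clause
- # below.)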
- except tuf.exceptions.NoWorkingMirrorError as exception: - for mirror_url, mirror_error in exception.mirror_errors.items(): - url_prefix = self.repository_mirrors['mirror1']['url_prefix'] - url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json') - - # Verify that 'timestamp.json' is the culprit. - self.assertEqual(url_file.replace('\\', '/'), mirror_url) - self.assertTrue(isinstance(mirror_error, tuf.exceptions.ReplayedMetadataError)) - - else: - self.fail('TUF did not prevent a replay attack.') - - -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_repository_lib_old.py b/tests/test_repository_lib_old.py deleted file mode 100755 index aa784a2e37..0000000000 --- a/tests/test_repository_lib_old.py +++ /dev/null @@ -1,1102 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_repository_lib_old.py - - - Vladimir Diaz - - - June 1, 2014. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'repository_lib.py'. -""" - -import os -import time -import datetime -import logging -import tempfile -import json -import shutil -import unittest -import copy -import sys - -import tuf -import tuf.formats -import tuf.log -import tuf.formats -import tuf.roledb -import tuf.keydb -import tuf.settings - -import tuf.repository_lib as repo_lib -import tuf.repository_tool as repo_tool - -from tests import utils - -import securesystemslib -import securesystemslib.exceptions -import securesystemslib.rsa_keys -import securesystemslib.interface -import securesystemslib.storage - -logger = logging.getLogger(__name__) - -repo_lib.disable_console_log_messages() - -TOP_LEVEL_METADATA_FILES = ['root.json', 'targets.json', 'timestamp.json', - 'snapshot.json'] - - -class TestRepositoryToolFunctions(unittest.TestCase): - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownClass() so that - # temporary files are always removed, even when exceptions occur. - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - - - @classmethod - def tearDownClass(cls): - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - shutil.rmtree(cls.temporary_directory) - - - def setUp(self): - tuf.roledb.create_roledb('test_repository') - tuf.keydb.create_keydb('test_repository') - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - - - def test_import_rsa_privatekey_from_file(self): - # Test normal case. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - - # Load one of the pre-generated key files from 'tuf/tests/repository_data'. - # 'password' unlocks the pre-generated key files. - key_filepath = os.path.join('repository_data', 'keystore', - 'root_key') - self.assertTrue(os.path.exists(key_filepath)) - - imported_rsa_key = repo_lib.import_rsa_privatekey_from_file(key_filepath, - 'password') - self.assertTrue(securesystemslib.formats.RSAKEY_SCHEMA.matches(imported_rsa_key)) - - - # Test improperly formatted argument. 
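- # (A non-string filepath, here the integer 3, fails the format check before
- # any file access is attempted.)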
- self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.import_rsa_privatekey_from_file, 3, 'pw') - - - # Test invalid argument. - # Non-existent key file. - nonexistent_keypath = os.path.join(temporary_directory, - 'nonexistent_keypath') - self.assertRaises(securesystemslib.exceptions.StorageError, - repo_lib.import_rsa_privatekey_from_file, - nonexistent_keypath, 'pw') - - # Invalid key file argument. - invalid_keyfile = os.path.join(temporary_directory, 'invalid_keyfile') - with open(invalid_keyfile, 'wb') as file_object: - file_object.write(b'bad keyfile') - self.assertRaises(securesystemslib.exceptions.CryptoError, repo_lib.import_rsa_privatekey_from_file, - invalid_keyfile, 'pw') - - - - def test_import_ed25519_privatekey_from_file(self): - # Test normal case. - # Generate ed25519 keys that can be imported. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - ed25519_keypath = os.path.join(temporary_directory, 'ed25519_key') - securesystemslib.interface.generate_and_write_ed25519_keypair( - password='pw', filepath=ed25519_keypath) - - imported_ed25519_key = \ - repo_lib.import_ed25519_privatekey_from_file(ed25519_keypath, 'pw') - self.assertTrue(securesystemslib.formats.ED25519KEY_SCHEMA.matches(imported_ed25519_key)) - - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.import_ed25519_privatekey_from_file, 3, 'pw') - - - # Test invalid argument. - # Non-existent key file. - nonexistent_keypath = os.path.join(temporary_directory, - 'nonexistent_keypath') - self.assertRaises(securesystemslib.exceptions.StorageError, - repo_lib.import_ed25519_privatekey_from_file, - nonexistent_keypath, 'pw') - - # Invalid key file argument. - invalid_keyfile = os.path.join(temporary_directory, 'invalid_keyfile') - with open(invalid_keyfile, 'wb') as file_object: - file_object.write(b'bad keyfile') - - self.assertRaises(securesystemslib.exceptions.Error, - repo_lib.import_ed25519_privatekey_from_file, invalid_keyfile, 'pw') - - # Invalid private key imported (contains unexpected keytype.) - imported_ed25519_key['keytype'] = 'invalid_keytype' - - # Use 'rsa_keys.py' to bypass the key format validation performed by - # 'keys.py'. - salt, iterations, derived_key = \ - securesystemslib.rsa_keys._generate_derived_key('pw') - - # Store the derived key info in a dictionary, the object expected - # by the non-public _encrypt() routine. - derived_key_information = {'salt': salt, 'iterations': iterations, - 'derived_key': derived_key} - - # Convert the key object to json string format and encrypt it with the - # derived key. - encrypted_key = securesystemslib.rsa_keys._encrypt( - json.dumps(imported_ed25519_key), derived_key_information) - - with open(ed25519_keypath, 'wb') as file_object: - file_object.write(encrypted_key.encode('utf-8')) - - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.import_ed25519_privatekey_from_file, ed25519_keypath, 'pw') - - - - def test_get_top_level_metadata_filenames(self): - - # Test normal case. - metadata_directory = os.path.join('metadata/') - filenames = {'root.json': metadata_directory + 'root.json', - 'targets.json': metadata_directory + 'targets.json', - 'snapshot.json': metadata_directory + 'snapshot.json', - 'timestamp.json': metadata_directory + 'timestamp.json'} - - self.assertEqual(filenames, - repo_lib.get_top_level_metadata_filenames('metadata/')) - - # If a directory argument is not specified, the current working directory - # is used. 
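- # (Here the current working directory is passed explicitly, and the
- # expected filenames are built from the same path.)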
- metadata_directory = os.getcwd() - filenames = {'root.json': os.path.join(metadata_directory, 'root.json'), - 'targets.json': os.path.join(metadata_directory, 'targets.json'), - 'snapshot.json': os.path.join(metadata_directory, 'snapshot.json'), - 'timestamp.json': os.path.join(metadata_directory, 'timestamp.json')} - self.assertEqual(filenames, - repo_lib.get_top_level_metadata_filenames(metadata_directory)) - - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.get_top_level_metadata_filenames, 3) - - - - def test_get_targets_metadata_fileinfo(self): - # Test normal case. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - test_filepath = os.path.join(temporary_directory, 'file.txt') - - with open(test_filepath, 'wt') as file_object: - file_object.write('test file') - - # Generate test fileinfo object. It is assumed SHA256 and SHA512 hashes - # are computed by get_targets_metadata_fileinfo(). - file_length = os.path.getsize(test_filepath) - sha256_digest_object = securesystemslib.hash.digest_filename(test_filepath) - sha512_digest_object = securesystemslib.hash.digest_filename(test_filepath, algorithm='sha512') - file_hashes = {'sha256': sha256_digest_object.hexdigest(), - 'sha512': sha512_digest_object.hexdigest()} - fileinfo = {'length': file_length, 'hashes': file_hashes} - self.assertTrue(tuf.formats.TARGETS_FILEINFO_SCHEMA.matches(fileinfo)) - - storage_backend = securesystemslib.storage.FilesystemBackend() - - self.assertEqual(fileinfo, repo_lib.get_targets_metadata_fileinfo(test_filepath, - storage_backend)) - - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - repo_lib.get_targets_metadata_fileinfo, 3, - storage_backend) - - - # Test non-existent file. - nonexistent_filepath = os.path.join(temporary_directory, 'oops.txt') - self.assertRaises(securesystemslib.exceptions.Error, - repo_lib.get_targets_metadata_fileinfo, - nonexistent_filepath, storage_backend) - - - - def test_get_target_hash(self): - # Test normal case. - expected_target_hashes = { - '/file1.txt': 'e3a3d89eb3b70ce3fbce6017d7b8c12d4abd5635427a0e8a238f53157df85b3d', - '/README.txt': '8faee106f1bb69f34aaf1df1e3c2e87d763c4d878cb96b91db13495e32ceb0b0', - '/packages/file2.txt': 'c9c4a5cdd84858dd6a23d98d7e6e6b2aec45034946c16b2200bc317c75415e92' - } - for filepath, target_hash in expected_target_hashes.items(): - self.assertTrue(tuf.formats.RELPATH_SCHEMA.matches(filepath)) - self.assertTrue(securesystemslib.formats.HASH_SCHEMA.matches(target_hash)) - self.assertEqual(repo_lib.get_target_hash(filepath), target_hash) - - # Test for improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.get_target_hash, 8) - - - - def test_generate_root_metadata(self): - # Test normal case. - # Load the root metadata provided in 'tuf/tests/repository_data/'. - root_filepath = os.path.join('repository_data', 'repository', - 'metadata', 'root.json') - root_signable = securesystemslib.util.load_json_file(root_filepath) - - # generate_root_metadata() expects the top-level roles and keys to be - # available in 'tuf.keydb' and 'tuf.roledb'. 
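- # Populate both databases from the trusted root metadata before calling it.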
- tuf.roledb.create_roledb_from_root_metadata(root_signable['signed']) - tuf.keydb.create_keydb_from_root_metadata(root_signable['signed']) - expires = '1985-10-21T01:22:00Z' - - root_metadata = repo_lib.generate_root_metadata(1, expires, - consistent_snapshot=False) - self.assertTrue(tuf.formats.ROOT_SCHEMA.matches(root_metadata)) - - root_keyids = tuf.roledb.get_role_keyids('root') - tuf.keydb._keydb_dict['default'][root_keyids[0]]['keytype'] = 'bad_keytype' - self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_root_metadata, 1, - expires, consistent_snapshot=False) - - # Reset the root key's keytype, so that we can next verify that a different - # securesystemslib.exceptions.Error exception is raised for duplicate keyids. - tuf.keydb._keydb_dict['default'][root_keyids[0]]['keytype'] = 'rsa' - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_root_metadata, - '3', expires, False) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_root_metadata, - 1, '3', False) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_root_metadata, - 1, expires, 3) - - # Test for missing required roles and keys. - tuf.roledb.clear_roledb() - tuf.keydb.clear_keydb() - self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_root_metadata, - 1, expires, False) - - - - def test_generate_targets_metadata(self): - # Test normal case. - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - targets_directory = os.path.join(temporary_directory, 'targets') - file1_path = os.path.join(targets_directory, 'file.txt') - securesystemslib.util.ensure_parent_dir(file1_path) - - with open(file1_path, 'wt') as file_object: - file_object.write('test file.') - - # Set valid generate_targets_metadata() arguments. Add a custom field for - # the 'target_files' target set below. - version = 1 - datetime_object = datetime.datetime(2030, 1, 1, 12, 0) - expiration_date = datetime_object.isoformat() + 'Z' - file_permissions = oct(os.stat(file1_path).st_mode)[4:] - target_files = {'file.txt': {'custom': {'file_permission': file_permissions}}} - - # Delegations data must be loaded into roledb since - # generate_targets_metadata tries to update delegations keyids - # and threshold - repository_path = os.path.join('repository_data', 'repository') - repository = repo_tool.load_repository(repository_path) - roleinfo = tuf.roledb.get_roleinfo('targets') - delegations = roleinfo['delegations'] - - targets_metadata = repo_lib.generate_targets_metadata(targets_directory, - target_files, version, expiration_date, delegations, False) - self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches(targets_metadata)) - - # Valid arguments with 'delegations' set to None. 
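- # (With 'delegations' set to None, the optional 'delegations' field should
- # simply be omitted from the generated targets metadata.)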
- targets_metadata = repo_lib.generate_targets_metadata(targets_directory,
- target_files, version, expiration_date, None, False)
- self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches(targets_metadata))
-
- # Test update in targets' delegations
- keystore_path = os.path.join('repository_data', 'keystore')
- targets_public_keypath = os.path.join(keystore_path, 'targets_key.pub')
- targets_public_key = securesystemslib.interface.\
- import_ed25519_publickey_from_file(targets_public_keypath)
-
- # Add new key and threshold to delegated role
- repository.targets('role1').add_verification_key(targets_public_key)
- repository.targets('role1').threshold = 2
- role1_keyids = tuf.roledb.get_role_keyids('role1')
- role1_threshold = tuf.roledb.get_role_threshold('role1')
- roleinfo = tuf.roledb.get_roleinfo('targets')
- delegations = roleinfo['delegations']
- old_delegations = copy.deepcopy(delegations)
-
- targets_metadata = repo_lib.generate_targets_metadata(targets_directory,
- target_files, version, expiration_date, delegations, False)
- self.assertNotEqual(old_delegations, delegations)
- self.assertEqual(role1_keyids,
- targets_metadata['delegations']['roles'][0]['keyids'])
- self.assertEqual(role1_threshold,
- targets_metadata['delegations']['roles'][0]['threshold'])
- for keyid in role1_keyids:
- self.assertIn(keyid, targets_metadata['delegations']['keys'])
-
-
- # Verify that 'digest.filename' file is saved to 'targets_directory' if
- # the 'write_consistent_targets' argument is True.
- list_targets_directory = os.listdir(targets_directory)
- targets_metadata = repo_lib.generate_targets_metadata(targets_directory,
- target_files, version, expiration_date, delegations,
- write_consistent_targets=True)
- new_list_targets_directory = os.listdir(targets_directory)
-
- # Verify that 'targets_directory' contains only one extra item.
- self.assertEqual(len(list_targets_directory) + 1,
- len(new_list_targets_directory))
-
- # Verify that an exception is not raised if the target files already exist.
- repo_lib.generate_targets_metadata(targets_directory, target_files,
- version, expiration_date, delegations,
- write_consistent_targets=True)
-
-
- # Verify that 'targets_metadata' contains a 'custom' entry (optional)
- # for 'file.txt'.
- self.assertTrue('custom' in targets_metadata['targets']['file.txt'])
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
- 3, target_files, version, expiration_date)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
- targets_directory, 3, version, expiration_date)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
- targets_directory, target_files, '3', expiration_date)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
- targets_directory, target_files, version, '3')
-
- # Improperly formatted 'delegations' and 'write_consistent_targets'
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
- targets_directory, target_files, version, expiration_date,
- 3, False)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_targets_metadata,
- targets_directory, target_files, version, expiration_date,
- delegations, 3)
-
- # Test non-existent target file.
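- # Unless 'use_existing_fileinfo' is set, generate_targets_metadata() reads
- # and hashes each listed target from disk, so a missing file is an error.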
- bad_target_file = \
- {'non-existent.txt': {'file_permission': file_permissions}}
-
- self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_targets_metadata,
- targets_directory, bad_target_file, version,
- expiration_date)
-
-
- # Test use of existing fileinfo structures.
- target1_hashes = {'sha256': 'c2986576f5fdfd43944e2b19e775453b96748ec4fe2638a6d2f32f1310967095'}
- target2_hashes = {'sha256': '517c0ce943e7274a2431fa5751e17cfd5225accd23e479bfaad13007751e87ef'}
-
- # Test missing expected field, 'hashes', when use_existing_fileinfo is True.
- target_files = {'file.txt': {'length': 555}}
- self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_targets_metadata,
- targets_directory, target_files, version, expiration_date,
- use_existing_fileinfo=True)
-
- # Test missing expected field, 'length', when use_existing_fileinfo is True.
- target_files = {'file.txt': {'hashes': target1_hashes}}
- self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_targets_metadata,
- targets_directory, target_files, version, expiration_date,
- use_existing_fileinfo=True)
-
- # Test missing both expected fields when use_existing_fileinfo is True.
- target_files = {'file.txt': {}}
- self.assertRaises(securesystemslib.exceptions.Error, repo_lib.generate_targets_metadata,
- targets_directory, target_files, version, expiration_date,
- use_existing_fileinfo=True)
-
- target_files = {'file1.txt': {'custom': {'meta': 'foo'},
- 'hashes': target1_hashes,
- 'length': 555},
- 'file2.txt': {'custom': {'meta': 'bar'},
- 'hashes': target2_hashes,
- 'length': 42}}
- targets_metadata = \
- repo_lib.generate_targets_metadata(targets_directory, target_files,
- version, expiration_date, delegations,
- False, use_existing_fileinfo=True)
-
-
- def _setup_generate_snapshot_metadata_test(self):
- # Test normal case.
- temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
- original_repository_path = os.path.join('repository_data',
- 'repository')
- repository_directory = os.path.join(temporary_directory, 'repository')
- shutil.copytree(original_repository_path, repository_directory)
- metadata_directory = os.path.join(repository_directory,
- repo_lib.METADATA_STAGED_DIRECTORY_NAME)
-
- targets_directory = os.path.join(repository_directory, repo_lib.TARGETS_DIRECTORY_NAME)
-
- version = 1
- expiration_date = '1985-10-21T13:20:00Z'
-
- # Load a valid repository so that top-level roles exist in roledb and
- # generate_snapshot_metadata() has roles to specify in snapshot metadata.
- storage_backend = securesystemslib.storage.FilesystemBackend()
- repository = repo_tool.Repository(repository_directory, metadata_directory,
- targets_directory, storage_backend)
- repository_junk = repo_tool.load_repository(repository_directory)
-
- # For testing purposes, store an invalid metadata file in the metadata directory
- # to verify that it isn't loaded by generate_snapshot_metadata(). Unknown
- # metadata file extensions should be ignored.
-    # For testing purposes, store an invalid metadata file in the metadata
-    # directory to verify that it isn't loaded by generate_snapshot_metadata().
-    # Unknown metadata file extensions should be ignored.
-    invalid_metadata_file = os.path.join(metadata_directory, 'role_file.xml')
-    with open(invalid_metadata_file, 'w') as file_object:
-      file_object.write('bad extension on metadata file')
-
-    return metadata_directory, version, expiration_date, \
-        storage_backend
-
-
-  def test_generate_snapshot_metadata(self):
-    metadata_directory, version, expiration_date, storage_backend = \
-        self._setup_generate_snapshot_metadata_test()
-
-    snapshot_metadata = \
-        repo_lib.generate_snapshot_metadata(metadata_directory, version,
-            expiration_date, storage_backend,
-            consistent_snapshot=False)
-    self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches(snapshot_metadata))
-
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
-        3, version, expiration_date, consistent_snapshot=False,
-        storage_backend=storage_backend)
-    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
-        metadata_directory, '3', expiration_date, storage_backend,
-        consistent_snapshot=False)
-    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
-        metadata_directory, version, '3', storage_backend,
-        consistent_snapshot=False)
-    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.generate_snapshot_metadata,
-        metadata_directory, version, expiration_date, 3,
-        storage_backend)
-
-
-
-  def test_generate_snapshot_metadata_with_length(self):
-    metadata_directory, version, expiration_date, storage_backend = \
-        self._setup_generate_snapshot_metadata_test()
-
-    snapshot_metadata = \
-        repo_lib.generate_snapshot_metadata(metadata_directory, version,
-            expiration_date, storage_backend,
-            consistent_snapshot=False,
-            use_length=True)
-    self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches(snapshot_metadata))
-
-    metadata_files_info_dict = snapshot_metadata['meta']
-    for metadata_filename in sorted(os.listdir(metadata_directory), reverse=True):
-
-      # In the metadata_directory, there are files with format:
-      # 1.root.json. The prefix number should be removed.
-      stripped_filename, version = \
-          repo_lib._strip_version_number(metadata_filename,
-              consistent_snapshot=True)
-
-      # In the repository, the file "role_file.xml" has been added to make
-      # sure that non-json files aren't loaded. This file should be filtered.
-      if stripped_filename.endswith('.json'):
-        if stripped_filename not in TOP_LEVEL_METADATA_FILES:
-          # Check that 'length' is included but 'hashes' is not.
-          self.assertIn('length', metadata_files_info_dict[stripped_filename])
-          self.assertNotIn('hashes', metadata_files_info_dict[stripped_filename])
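Before the two remaining variants, a reference for what the flag combinations assert about a delegated role's entry in snapshot_metadata['meta'] (length and digest values are illustrative; only the presence or absence of each field is checked):

    # use_length=True: 'length' present, 'hashes' absent.
    entry_with_length = {'version': 1, 'length': 123}

    # use_hashes=True: 'hashes' present, 'length' absent.
    entry_with_hashes = {'version': 1, 'hashes': {'sha256': '00' * 32}}

    # Both flags: both fields present. The base test with neither flag only
    # validates the overall SNAPSHOT_SCHEMA.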
-
-
-  def test_generate_snapshot_metadata_with_hashes(self):
-    metadata_directory, version, expiration_date, storage_backend = \
-        self._setup_generate_snapshot_metadata_test()
-
-    snapshot_metadata = \
-        repo_lib.generate_snapshot_metadata(metadata_directory, version,
-            expiration_date, storage_backend,
-            consistent_snapshot=False,
-            use_hashes=True)
-    self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches(snapshot_metadata))
-
-    metadata_files_info_dict = snapshot_metadata['meta']
-    for metadata_filename in sorted(os.listdir(metadata_directory), reverse=True):
-
-      # In the metadata_directory, there are files with format:
-      # 1.root.json. The prefix number should be removed.
-      stripped_filename, version = \
-          repo_lib._strip_version_number(metadata_filename,
-              consistent_snapshot=True)
-
-      # In the repository, the file "role_file.xml" has been added to make
-      # sure that non-json files aren't loaded. This file should be filtered.
-      if stripped_filename.endswith('.json'):
-        if stripped_filename not in TOP_LEVEL_METADATA_FILES:
-          # Check that 'hashes' is included but 'length' is not.
-          self.assertNotIn('length', metadata_files_info_dict[stripped_filename])
-          self.assertIn('hashes', metadata_files_info_dict[stripped_filename])
-
-
-
-  def test_generate_snapshot_metadata_with_hashes_and_length(self):
-    metadata_directory, version, expiration_date, storage_backend = \
-        self._setup_generate_snapshot_metadata_test()
-
-    snapshot_metadata = \
-        repo_lib.generate_snapshot_metadata(metadata_directory, version,
-            expiration_date, storage_backend,
-            consistent_snapshot=False,
-            use_length=True,
-            use_hashes=True)
-    self.assertTrue(tuf.formats.SNAPSHOT_SCHEMA.matches(snapshot_metadata))
-
-    metadata_files_info_dict = snapshot_metadata['meta']
-    for metadata_filename in sorted(os.listdir(metadata_directory), reverse=True):
-
-      # In the metadata_directory, there are files with format:
-      # 1.root.json. The prefix number should be removed.
-      stripped_filename, version = \
-          repo_lib._strip_version_number(metadata_filename,
-              consistent_snapshot=True)
-
-      # In the repository, the file "role_file.xml" has been added to make
-      # sure that non-json files aren't loaded. This file should be filtered.
-      if stripped_filename.endswith('.json'):
-        if stripped_filename not in TOP_LEVEL_METADATA_FILES:
-          # Check that both 'length' and 'hashes' are included.
-          self.assertIn('length', metadata_files_info_dict[stripped_filename])
-          self.assertIn('hashes', metadata_files_info_dict[stripped_filename])
-
-
-
-  def _setup_generate_timestamp_metadata_test(self):
-    # Test normal case.
-    repository_name = 'test_repository'
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    original_repository_path = os.path.join('repository_data',
-        'repository')
-    repository_directory = os.path.join(temporary_directory, 'repository')
-    shutil.copytree(original_repository_path, repository_directory)
-    metadata_directory = os.path.join(repository_directory,
-        repo_lib.METADATA_STAGED_DIRECTORY_NAME)
-    targets_directory = os.path.join(repository_directory, repo_lib.TARGETS_DIRECTORY_NAME)
-
-    snapshot_file_path = os.path.join(metadata_directory,
-        repo_lib.SNAPSHOT_FILENAME)
-
-    # Set valid generate_timestamp_metadata() arguments.
-    version = 1
-    expiration_date = '1985-10-21T13:20:00Z'
-
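One more note on the snapshot tests above: they all lean on the private helper repo_lib._strip_version_number(). A sketch of the behavior they rely on, with the return layout inferred from how the tests unpack it:

    # '1.root.json' -> ('root.json', <version prefix>); filenames without a
    # version prefix are expected to pass through unchanged.
    stripped_filename, version = repo_lib._strip_version_number(
        '1.root.json', consistent_snapshot=True)
    assert stripped_filename == 'root.json'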
-    storage_backend = securesystemslib.storage.FilesystemBackend()
-    # Load a valid repository so that top-level roles exist in roledb and
-    # generate_timestamp_metadata() has roles and a snapshot file to reference.
-    repository = repo_tool.Repository(repository_directory, metadata_directory,
-        targets_directory, storage_backend, repository_name)
-
-    repository_junk = repo_tool.load_repository(repository_directory,
-        repository_name)
-
-    return snapshot_file_path, version, expiration_date, storage_backend, \
-        repository_name
-
-
-  def test_generate_timestamp_metadata(self):
-    snapshot_file_path, version, expiration_date, storage_backend, \
-        repository_name = self._setup_generate_timestamp_metadata_test()
-
-    timestamp_metadata = repo_lib.generate_timestamp_metadata(snapshot_file_path,
-        version, expiration_date, storage_backend, repository_name)
-    self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches(timestamp_metadata))
-
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_lib.generate_timestamp_metadata, 3, version, expiration_date,
-        storage_backend, repository_name)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_lib.generate_timestamp_metadata, snapshot_file_path, '3',
-        expiration_date, storage_backend, repository_name)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_lib.generate_timestamp_metadata, snapshot_file_path, version, '3',
-        storage_backend, repository_name)
-
-
-
-  def test_generate_timestamp_metadata_without_length(self):
-    snapshot_file_path, version, expiration_date, storage_backend, \
-        repository_name = self._setup_generate_timestamp_metadata_test()
-
-    timestamp_metadata = repo_lib.generate_timestamp_metadata(snapshot_file_path,
-        version, expiration_date, storage_backend, repository_name,
-        use_length=False)
-    self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches(timestamp_metadata))
-
-    # Check that 'length' is not included but 'hashes' is.
-    timestamp_file_info = timestamp_metadata['meta']
-
-    self.assertNotIn('length', timestamp_file_info['snapshot.json'])
-    self.assertIn('hashes', timestamp_file_info['snapshot.json'])
-
-
-
-  def test_generate_timestamp_metadata_without_hashes(self):
-    snapshot_file_path, version, expiration_date, storage_backend, \
-        repository_name = self._setup_generate_timestamp_metadata_test()
-
-    timestamp_metadata = repo_lib.generate_timestamp_metadata(snapshot_file_path,
-        version, expiration_date, storage_backend, repository_name,
-        use_hashes=False)
-    self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches(timestamp_metadata))
-
-    # Check that 'hashes' is not included but 'length' is.
-    timestamp_file_info = timestamp_metadata['meta']
-
-    self.assertIn('length', timestamp_file_info['snapshot.json'])
-    self.assertNotIn('hashes', timestamp_file_info['snapshot.json'])
-
-
-
-  def test_generate_timestamp_metadata_without_length_and_hashes(self):
-    snapshot_file_path, version, expiration_date, storage_backend, \
-        repository_name = self._setup_generate_timestamp_metadata_test()
-
-    timestamp_metadata = repo_lib.generate_timestamp_metadata(snapshot_file_path,
-        version, expiration_date, storage_backend, repository_name,
-        use_hashes=False, use_length=False)
-    self.assertTrue(tuf.formats.TIMESTAMP_SCHEMA.matches(timestamp_metadata))
-
-    # Check that neither the 'length' nor the 'hashes' attribute is added.
-    timestamp_file_info = timestamp_metadata['meta']
-    self.assertNotIn('length', timestamp_file_info['snapshot.json'])
-    self.assertNotIn('hashes', timestamp_file_info['snapshot.json'])
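Taken together, the four timestamp tests pin down the following shape for timestamp_metadata['meta']: both fields are present by default, and each use_* flag set to False removes the corresponding field (length and digest values illustrative):

    timestamp_meta = {
        'snapshot.json': {
            'version': 1,
            'length': 682,                    # dropped when use_length=False
            'hashes': {'sha256': '00' * 32},  # dropped when use_hashes=False
        },
    }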
-
-
-  def test_sign_metadata(self):
-    # Test normal case.
-    repository_name = 'test_repository'
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    metadata_path = os.path.join('repository_data', 'repository', 'metadata')
-    keystore_path = os.path.join('repository_data', 'keystore')
-    root_filename = os.path.join(metadata_path, 'root.json')
-    root_metadata = securesystemslib.util.load_json_file(root_filename)['signed']
-    targets_filename = os.path.join(metadata_path, 'targets.json')
-    targets_metadata = securesystemslib.util.load_json_file(targets_filename)['signed']
-
-    tuf.keydb.create_keydb_from_root_metadata(root_metadata, repository_name)
-    tuf.roledb.create_roledb_from_root_metadata(root_metadata, repository_name)
-    root_keyids = tuf.roledb.get_role_keyids('root', repository_name)
-    targets_keyids = tuf.roledb.get_role_keyids('targets', repository_name)
-
-    root_private_keypath = os.path.join(keystore_path, 'root_key')
-    root_private_key = repo_lib.import_rsa_privatekey_from_file(root_private_keypath,
-        'password')
-
-    # Sign with a valid, but not a threshold, key.
-    targets_public_keypath = os.path.join(keystore_path, 'targets_key.pub')
-    targets_public_key = securesystemslib.interface.\
-        import_ed25519_publickey_from_file(targets_public_keypath)
-
-    # sign_metadata() expects the private key for 'root_metadata' to be in
-    # 'tuf.keydb'. Remove any public keys that may already be loaded before
-    # adding the private key, otherwise a 'tuf.KeyAlreadyExists' exception is
-    # raised.
-    tuf.keydb.remove_key(root_private_key['keyid'],
-        repository_name=repository_name)
-    tuf.keydb.add_key(root_private_key, repository_name=repository_name)
-    tuf.keydb.remove_key(targets_public_key['keyid'], repository_name=repository_name)
-    tuf.keydb.add_key(targets_public_key, repository_name=repository_name)
-
-    # Verify that a valid root signable is generated.
-    root_signable = repo_lib.sign_metadata(root_metadata, root_keyids,
-        root_filename, repository_name)
-    self.assertTrue(tuf.formats.SIGNABLE_SCHEMA.matches(root_signable))
-
-    # Test for an unset private key (in this case, targets').
-    repo_lib.sign_metadata(targets_metadata, targets_keyids, targets_filename,
-        repository_name)
-
-    # Add an invalid keytype to one of the root keys.
-    root_keyid = root_keyids[0]
-    tuf.keydb._keydb_dict[repository_name][root_keyid]['keytype'] = 'bad_keytype'
-    self.assertRaises(securesystemslib.exceptions.Error, repo_lib.sign_metadata,
-        root_metadata, root_keyids, root_filename, repository_name)
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_lib.sign_metadata, 3, root_keyids, 'root.json', repository_name)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_lib.sign_metadata, root_metadata, 3, 'root.json', repository_name)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_lib.sign_metadata, root_metadata, root_keyids, 3, repository_name)
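In outline, the signing flow exercised above is the following; the variable names reuse those from the test, and the keydb registration is the prerequisite the test's own comments call out:

    # sign_metadata() resolves each keyid through tuf.keydb, so the private
    # key must be registered there before signing.
    tuf.keydb.add_key(root_private_key, repository_name=repository_name)

    root_signable = repo_lib.sign_metadata(root_metadata, root_keyids,
        root_filename, repository_name)
    assert tuf.formats.SIGNABLE_SCHEMA.matches(root_signable)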
-
-
-  def test_write_metadata_file(self):
-    # Test normal case.
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    metadata_directory = os.path.join('repository_data', 'repository', 'metadata')
-    root_filename = os.path.join(metadata_directory, 'root.json')
-    root_signable = securesystemslib.util.load_json_file(root_filename)
-
-    output_filename = os.path.join(temporary_directory, 'root.json')
-    version_number = root_signable['signed']['version'] + 1
-
-    self.assertFalse(os.path.exists(output_filename))
-    storage_backend = securesystemslib.storage.FilesystemBackend()
-    repo_lib.write_metadata_file(root_signable, output_filename, version_number,
-        consistent_snapshot=False, storage_backend=storage_backend)
-    self.assertTrue(os.path.exists(output_filename))
-
-    # Attempt to over-write the previously written metadata file. An exception
-    # is not raised in this case, only a debug message is logged.
-    repo_lib.write_metadata_file(root_signable, output_filename, version_number,
-        consistent_snapshot=False, storage_backend=storage_backend)
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
-        3, output_filename, version_number, False, storage_backend)
-    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
-        root_signable, 3, version_number, False, storage_backend)
-    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
-        root_signable, output_filename, '3', False, storage_backend)
-    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
-        root_signable, output_filename, version_number, 3, storage_backend)
-
-
-
-  def test_create_tuf_client_directory(self):
-    # Test normal case.
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    repository_directory = os.path.join('repository_data', 'repository')
-    client_directory = os.path.join(temporary_directory, 'client')
-
-    repo_lib.create_tuf_client_directory(repository_directory, client_directory)
-
-    metadata_directory = os.path.join(client_directory, 'metadata')
-    current_directory = os.path.join(metadata_directory, 'current')
-    previous_directory = os.path.join(metadata_directory, 'previous')
-    self.assertTrue(os.path.exists(client_directory))
-    self.assertTrue(os.path.exists(metadata_directory))
-    self.assertTrue(os.path.exists(current_directory))
-    self.assertTrue(os.path.exists(previous_directory))
-
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_lib.create_tuf_client_directory, 3, client_directory)
-    self.assertRaises(securesystemslib.exceptions.FormatError,
-        repo_lib.create_tuf_client_directory, repository_directory, 3)
-
-
-    # Test invalid argument (i.e., the client directory already exists).
-    self.assertRaises(tuf.exceptions.RepositoryError,
-        repo_lib.create_tuf_client_directory, repository_directory,
-        client_directory)
-
-    # Test an invalid client metadata directory (i.e., non-errno.EEXIST
-    # exceptions should be re-raised).
-    shutil.rmtree(metadata_directory)
-
-    # Save the original metadata directory name so that it can be restored
-    # after testing.
-    metadata_directory_name = repo_lib.METADATA_DIRECTORY_NAME
-    repo_lib.METADATA_DIRECTORY_NAME = '/'
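The monkey-patching step above deserves a gloss: the test swaps a module-level constant so that the next call tries to create an impossible directory, then restores it afterwards. The same pattern, written defensively with try/finally (a sketch, not how the test itself is structured):

    saved = repo_lib.METADATA_DIRECTORY_NAME
    try:
        repo_lib.METADATA_DIRECTORY_NAME = '/'
        # ... exercise create_tuf_client_directory() here ...
    finally:
        repo_lib.METADATA_DIRECTORY_NAME = saved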
-    # Creation of the '/' directory is forbidden on all supported OSs. The '/'
-    # argument to create_tuf_client_directory should cause it to re-raise a
-    # non-errno.EEXIST exception.
-    self.assertRaises((OSError, tuf.exceptions.RepositoryError),
-        repo_lib.create_tuf_client_directory, repository_directory, '/')
-
-    # Restore the metadata directory name in repo_lib.
-    repo_lib.METADATA_DIRECTORY_NAME = metadata_directory_name
-
-
-
-  def test__generate_and_write_metadata(self):
-    # Test for an invalid, or unsupported, rolename.
-    # Load the root metadata provided in 'tuf/tests/repository_data/'.
-    repository_name = 'repository_name'
-    root_filepath = os.path.join('repository_data', 'repository',
-        'metadata', 'root.json')
-    root_signable = securesystemslib.util.load_json_file(root_filepath)
-
-    # _generate_and_write_metadata() expects the top-level roles
-    # (specifically 'snapshot') and keys to be available in 'tuf.roledb'.
-    tuf.roledb.create_roledb_from_root_metadata(root_signable['signed'],
-        repository_name)
-    tuf.keydb.create_keydb_from_root_metadata(root_signable['signed'],
-        repository_name)
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    targets_directory = os.path.join(temporary_directory, 'targets')
-    os.mkdir(targets_directory)
-    repository_directory = os.path.join(temporary_directory, 'repository')
-    metadata_directory = os.path.join(repository_directory,
-        repo_lib.METADATA_STAGED_DIRECTORY_NAME)
-    targets_metadata = os.path.join('repository_data', 'repository', 'metadata',
-        'targets.json')
-    obsolete_metadata = os.path.join(metadata_directory, 'obsolete_role.json')
-    securesystemslib.util.ensure_parent_dir(obsolete_metadata)
-    shutil.copyfile(targets_metadata, obsolete_metadata)
-
-    keystore_path = os.path.join('repository_data', 'keystore')
-    targets_private_keypath = os.path.join(keystore_path, 'targets_key')
-    targets_private_key = repo_lib.import_ed25519_privatekey_from_file(targets_private_keypath,
-        'password')
-    tuf.keydb.remove_key(targets_private_key['keyid'],
-        repository_name=repository_name)
-    tuf.keydb.add_key(targets_private_key, repository_name=repository_name)
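The roleinfo built next relies on a date idiom that recurs throughout these tests; spelled out on its own:

    import time
    import tuf.formats

    # One day from now, rendered as an ISO 8601 string with the trailing 'Z'
    # that TUF metadata expects for 'expires' values.
    expiration = tuf.formats.unix_timestamp_to_datetime(
        int(time.time() + 86400))
    expiration = expiration.isoformat() + 'Z'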
-    # Verify that obsolete metadata is deleted (i.e., a metadata file exists
-    # on disk, but the role is unavailable in 'tuf.roledb'). First add the
-    # obsolete role to 'tuf.roledb' so that its metadata file can be written
-    # to disk.
-    targets_roleinfo = tuf.roledb.get_roleinfo('targets', repository_name)
-    targets_roleinfo['version'] = 1
-    expiration = \
-        tuf.formats.unix_timestamp_to_datetime(int(time.time() + 86400))
-    expiration = expiration.isoformat() + 'Z'
-    targets_roleinfo['expires'] = expiration
-    targets_roleinfo['signing_keyids'] = targets_roleinfo['keyids']
-    tuf.roledb.add_role('obsolete_role', targets_roleinfo,
-        repository_name=repository_name)
-
-    storage_backend = securesystemslib.storage.FilesystemBackend()
-    repo_lib._generate_and_write_metadata('obsolete_role', obsolete_metadata,
-        targets_directory, metadata_directory, storage_backend,
-        consistent_snapshot=False, filenames=None,
-        repository_name=repository_name)
-
-    snapshot_filepath = os.path.join('repository_data', 'repository',
-        'metadata', 'snapshot.json')
-    snapshot_signable = securesystemslib.util.load_json_file(snapshot_filepath)
-    tuf.roledb.remove_role('obsolete_role', repository_name)
-    self.assertTrue(os.path.exists(os.path.join(metadata_directory,
-        'obsolete_role.json')))
-    tuf.repository_lib._delete_obsolete_metadata(metadata_directory,
-        snapshot_signable['signed'], False, repository_name,
-        storage_backend)
-    self.assertFalse(os.path.exists(os.path.join(metadata_directory,
-        'obsolete_role.json')))
-    shutil.copyfile(targets_metadata, obsolete_metadata)
-
-
-
-  def test__delete_obsolete_metadata(self):
-    repository_name = 'test_repository'
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    repository_directory = os.path.join(temporary_directory, 'repository')
-    metadata_directory = os.path.join(repository_directory,
-        repo_lib.METADATA_STAGED_DIRECTORY_NAME)
-    os.makedirs(metadata_directory)
-    snapshot_filepath = os.path.join('repository_data', 'repository',
-        'metadata', 'snapshot.json')
-    snapshot_signable = securesystemslib.util.load_json_file(snapshot_filepath)
-    storage_backend = securesystemslib.storage.FilesystemBackend()
-
-    # Create role metadata that should not exist in snapshot.json.
-    role1_filepath = os.path.join('repository_data', 'repository', 'metadata',
-        'role1.json')
-    shutil.copyfile(role1_filepath, os.path.join(metadata_directory, 'role2.json'))
-
-    repo_lib._delete_obsolete_metadata(metadata_directory,
-        snapshot_signable['signed'], True, repository_name, storage_backend)
-
-    # _delete_obsolete_metadata() should never delete root.json.
-    root_filepath = os.path.join('repository_data', 'repository', 'metadata',
-        'root.json')
-    shutil.copyfile(root_filepath, os.path.join(metadata_directory, 'root.json'))
-    repo_lib._delete_obsolete_metadata(metadata_directory,
-        snapshot_signable['signed'], True, repository_name, storage_backend)
-    self.assertTrue(os.path.exists(os.path.join(metadata_directory, 'root.json')))
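To summarize the semantics this test checks, with the argument roles inferred from the calls above:

    # Deletes any metadata file in 'metadata_directory' that snapshot does not
    # list, but never root.json. Positional arguments: the directory,
    # snapshot's 'signed' dict, the consistent_snapshot flag, the repository
    # name, and the storage backend.
    repo_lib._delete_obsolete_metadata(metadata_directory,
        snapshot_signable['signed'], True, repository_name, storage_backend)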
-    # Verify that a non-existent metadata directory raises a StorageError
-    # rather than silently passing.
-    self.assertRaises(securesystemslib.exceptions.StorageError,
-        repo_lib._delete_obsolete_metadata, 'non-existent',
-        snapshot_signable['signed'], True, repository_name, storage_backend)
-
-
-  def test__load_top_level_metadata(self):
-    repository_name = 'test_repository'
-
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    repository_directory = os.path.join(temporary_directory, 'repository')
-    metadata_directory = os.path.join(repository_directory,
-        repo_lib.METADATA_STAGED_DIRECTORY_NAME)
-    targets_directory = os.path.join(repository_directory,
-        repo_lib.TARGETS_DIRECTORY_NAME)
-    shutil.copytree(os.path.join('repository_data', 'repository', 'metadata'),
-        metadata_directory)
-    shutil.copytree(os.path.join('repository_data', 'repository', 'targets'),
-        targets_directory)
-
-    # Add a duplicate signature to the Root file for testing purposes.
-    root_file = os.path.join(metadata_directory, 'root.json')
-    signable = securesystemslib.util.load_json_file(root_file)
-    signable['signatures'].append(signable['signatures'][0])
-
-    storage_backend = securesystemslib.storage.FilesystemBackend()
-    repo_lib.write_metadata_file(signable, root_file, 8, False, storage_backend)
-
-    filenames = repo_lib.get_top_level_metadata_filenames(metadata_directory)
-    repository = repo_tool.create_new_repository(repository_directory, repository_name)
-    repo_lib._load_top_level_metadata(repository, filenames, repository_name)
-
-    # Manually add targets delegations to roledb, since
-    # repository.write('targets') will try to update its delegations.
-    targets_filepath = os.path.join('repository_data', 'repository',
-        'metadata', 'targets.json')
-    targets_signable = securesystemslib.util.load_json_file(targets_filepath)
-    delegations = targets_signable['signed']['delegations']
-
-    roleinfo = {}
-    roleinfo['name'] = delegations['roles'][0]['name']
-    roleinfo['keyids'] = delegations['roles'][0]['keyids']
-    roleinfo['threshold'] = delegations['roles'][0]['threshold']
-    roleinfo['version'] = 1
-    tuf.roledb.add_role('role1', roleinfo, repository_name)
-
-    keystore_path = os.path.join('repository_data', 'keystore')
-    root_privkey_path = os.path.join(keystore_path, 'root_key')
-    targets_privkey_path = os.path.join(keystore_path, 'targets_key')
-    snapshot_privkey_path = os.path.join(keystore_path, 'snapshot_key')
-    timestamp_privkey_path = os.path.join(keystore_path, 'timestamp_key')
-
-    repository.root.load_signing_key(repo_lib.import_rsa_privatekey_from_file(root_privkey_path, 'password'))
-    repository.targets.load_signing_key(repo_lib.import_ed25519_privatekey_from_file(targets_privkey_path, 'password'))
-    repository.snapshot.load_signing_key(repo_lib.import_ed25519_privatekey_from_file(snapshot_privkey_path, 'password'))
-    repository.timestamp.load_signing_key(repo_lib.import_ed25519_privatekey_from_file(timestamp_privkey_path, 'password'))
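For reference, the minimal delegated-role entry registered in roledb above contains only the fields copied out of targets.json's 'delegations'; the keyid below is a hypothetical placeholder:

    roleinfo = {
        'name': 'role1',
        'keyids': ['<keyid of the delegation key>'],  # hypothetical value
        'threshold': 1,
        'version': 1,
    }
    tuf.roledb.add_role('role1', roleinfo, repository_name)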
-    # Partially write all top-level roles (we increase the threshold of each
-    # top-level role so that they are flagged as partially written).
-    repository.root.threshold = repository.root.threshold + 1
-    repository.snapshot.threshold = repository.snapshot.threshold + 1
-    repository.targets.threshold = repository.targets.threshold + 1
-    repository.timestamp.threshold = repository.timestamp.threshold + 1
-    repository.write('root')
-    repository.write('snapshot')
-    repository.write('targets')
-    repository.write('timestamp')
-
-    repo_lib._load_top_level_metadata(repository, filenames, repository_name)
-
-    # Attempt to load a repository with missing top-level metadata.
-    for role_file in os.listdir(metadata_directory):
-      if role_file.endswith('.json') and not role_file.startswith('root'):
-        role_filename = os.path.join(metadata_directory, role_file)
-        os.remove(role_filename)
-    self.assertRaises(tuf.exceptions.RepositoryError,
-        repo_lib._load_top_level_metadata, repository, filenames,
-        repository_name)
-
-    # Remove the required Root file and verify that an exception is raised.
-    os.remove(os.path.join(metadata_directory, 'root.json'))
-    self.assertRaises(tuf.exceptions.RepositoryError,
-        repo_lib._load_top_level_metadata, repository, filenames,
-        repository_name)
-
-
-
-  def test__remove_invalid_and_duplicate_signatures(self):
-    # Remove duplicate PSS signatures (the same key generates valid, but
-    # different, signatures). First load a valid signable (in this case, the
-    # root role).
-    repository_name = 'test_repository'
-    root_filepath = os.path.join('repository_data', 'repository',
-        'metadata', 'root.json')
-    root_signable = securesystemslib.util.load_json_file(root_filepath)
-    key_filepath = os.path.join('repository_data', 'keystore', 'root_key')
-    root_rsa_key = repo_lib.import_rsa_privatekey_from_file(key_filepath,
-        'password')
-
-    # Add 'root_rsa_key' to tuf.keydb, since
-    # _remove_invalid_and_duplicate_signatures() checks for unknown keys in
-    # tuf.keydb.
-    tuf.keydb.add_key(root_rsa_key, repository_name=repository_name)
-
-    # Append a new valid, but duplicate, PSS signature and test that
-    # duplicates are removed. create_signature() generates a signature that
-    # matches the key type of its first argument (i.e., root_rsa_key).
-    data = securesystemslib.formats.encode_canonical(root_signable['signed']).encode('utf-8')
-    new_pss_signature = securesystemslib.keys.create_signature(root_rsa_key,
-        data)
-    root_signable['signatures'].append(new_pss_signature)
-
-    # One of the two signatures sharing a keyid should be removed.
-    expected_number_of_signatures = len(root_signable['signatures']) - 1
-    tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable,
-        repository_name)
-    self.assertEqual(len(root_signable['signatures']),
-        expected_number_of_signatures)
-
-    # Test for an invalid keyid.
-    root_signable['signatures'][0]['keyid'] = '404'
-    tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable,
-        repository_name)
-
-    # Re-add a valid signature for the following test condition.
-    root_signable['signatures'].append(new_pss_signature)
-
-    # Test that an exception is not raised if an invalid sig is present, and
-    # that the invalid signature is removed from 'root_signable'.
-    root_signable['signatures'][0]['sig'] = '4040'
-    invalid_keyid = root_signable['signatures'][0]['keyid']
-    tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable,
-        repository_name)
-
-    for signature in root_signable['signatures']:
-      self.assertFalse(invalid_keyid == signature['keyid'])
-
-
-
-# Run the test cases.
-if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_repository_tool_old.py b/tests/test_repository_tool_old.py deleted file mode 100755 index 8b04a8814c..0000000000 --- a/tests/test_repository_tool_old.py +++ /dev/null @@ -1,2199 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_repository_tool_old.py - - - Vladimir Diaz - - - April 7, 2014. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Unit test for 'repository_tool.py'. -""" - -import os -import time -import datetime -import unittest -import logging -import tempfile -import shutil -import sys - -import tuf -import tuf.log -import tuf.formats -import tuf.roledb -import tuf.keydb -import tuf.repository_tool as repo_tool - -from tests import utils - -import securesystemslib -import securesystemslib.exceptions -import securesystemslib.storage - -logger = logging.getLogger(__name__) - -repo_tool.disable_console_log_messages() - - -class TestRepository(unittest.TestCase): - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownClass() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - - @classmethod - def tearDownClass(cls): - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - shutil.rmtree(cls.temporary_directory) - - - - def setUp(self): - tuf.roledb.create_roledb('test_repository') - tuf.keydb.create_keydb('test_repository') - - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - - def test_init(self): - # Test normal case. - repository_name = 'test_repository' - storage_backend = securesystemslib.storage.FilesystemBackend() - repository = repo_tool.Repository('repository_directory/', - 'metadata_directory/', 'targets_directory/', storage_backend, - repository_name) - self.assertTrue(isinstance(repository.root, repo_tool.Root)) - self.assertTrue(isinstance(repository.snapshot, repo_tool.Snapshot)) - self.assertTrue(isinstance(repository.timestamp, repo_tool.Timestamp)) - self.assertTrue(isinstance(repository.targets, repo_tool.Targets)) - - # Test improperly formatted arguments. 
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - storage_backend, 3, 'metadata_directory/', 'targets_directory') - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - 'repository_directory', storage_backend, 3, 'targets_directory') - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - 'repository_directory', 'metadata_directory', 3, storage_backend) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - 'repository_directory/', 'metadata_directory/', 'targets_directory/', - storage_backend, repository_name, use_timestamp_length=3) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, - 'repository_directory/', 'metadata_directory/', 'targets_directory/', - storage_backend, repository_name, use_timestamp_length=False, - use_timestamp_hashes=3) - - - - def create_repository_directory(self): - # Create a repository directory and copy in test targets data - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - targets_directory = os.path.join(temporary_directory, 'repository', - repo_tool.TARGETS_DIRECTORY_NAME) - original_targets_directory = os.path.join('repository_data', - 'repository', 'targets') - shutil.copytree(original_targets_directory, targets_directory) - - # In this case, create_new_repository() creates the 'repository/' - # sub-directory in 'temporary_directory' if it does not exist. - return os.path.join(temporary_directory, 'repository') - - - - - def test_writeall(self): - # Test creation of a TUF repository. - # - # 1. Import public and private keys. - # 2. Add verification keys. - # 3. Load signing keys. - # 4. Add target files. - # 5. Perform delegation. - # 6. writeall() - # - # Copy the target files from 'tuf/tests/repository_data' so that writeall() - # has target fileinfo to include in metadata. - repository_name = 'test_repository' - repository_directory = self.create_repository_directory() - metadata_directory = os.path.join(repository_directory, - repo_tool.METADATA_STAGED_DIRECTORY_NAME) - - repository = repo_tool.create_new_repository(repository_directory, repository_name) - - # (1) Load the public and private keys of the top-level roles, and one - # delegated role. - keystore_directory = os.path.join('repository_data', 'keystore') - - # Load the public keys. - root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub') - targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub') - snapshot_pubkey_path = os.path.join(keystore_directory, 'snapshot_key.pub') - timestamp_pubkey_path = os.path.join(keystore_directory, 'timestamp_key.pub') - role1_pubkey_path = os.path.join(keystore_directory, 'delegation_key.pub') - - root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path) - targets_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path) - snapshot_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path) - timestamp_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path) - role1_pubkey = repo_tool.import_ed25519_publickey_from_file(role1_pubkey_path) - - # Load the private keys. 
- root_privkey_path = os.path.join(keystore_directory, 'root_key') - targets_privkey_path = os.path.join(keystore_directory, 'targets_key') - snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key') - timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key') - role1_privkey_path = os.path.join(keystore_directory, 'delegation_key') - - root_privkey = \ - repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password') - targets_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, - 'password') - snapshot_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path, - 'password') - timestamp_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path, - 'password') - role1_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(role1_privkey_path, - 'password') - - - # (2) Add top-level verification keys. - repository.root.add_verification_key(root_pubkey) - repository.targets.add_verification_key(targets_pubkey) - repository.snapshot.add_verification_key(snapshot_pubkey) - - # Verify that repository.writeall() fails for insufficient threshold - # of signatures (default threshold = 1). - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - - repository.timestamp.add_verification_key(timestamp_pubkey) - - - # (3) Load top-level signing keys. - repository.status() - repository.root.load_signing_key(root_privkey) - repository.status() - repository.targets.load_signing_key(targets_privkey) - repository.status() - repository.snapshot.load_signing_key(snapshot_privkey) - repository.status() - - # Verify that repository.writeall() fails for insufficient threshold - # of signatures (default threshold = 1). - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - - repository.timestamp.load_signing_key(timestamp_privkey) - - - # (4) Add target files. - target1 = 'file1.txt' - target2 = 'file2.txt' - target3 = 'file3.txt' - repository.targets.add_target(target1) - repository.targets.add_target(target2) - - # (5) Perform delegation. - repository.targets.delegate('role1', [role1_pubkey], [target3]) - repository.targets('role1').load_signing_key(role1_privkey) - - # (6) Write repository. - repository.writeall() - - # Verify that the expected metadata is written. - for role in ['root.json', 'targets.json', 'snapshot.json', 'timestamp.json']: - role_filepath = os.path.join(metadata_directory, role) - role_signable = securesystemslib.util.load_json_file(role_filepath) - - # Raise 'securesystemslib.exceptions.FormatError' if 'role_signable' is - # an invalid signable. - tuf.formats.check_signable_object_format(role_signable) - - self.assertTrue(os.path.exists(role_filepath)) - - # Verify the 'role1.json' delegation is also written. - role1_filepath = os.path.join(metadata_directory, 'role1.json') - role1_signable = securesystemslib.util.load_json_file(role1_filepath) - tuf.formats.check_signable_object_format(role1_signable) - - # Verify that an exception is *not* raised for multiple - # repository.writeall(). - repository.writeall() - - # Verify that status() does not raise an exception. - repository.status() - - # Verify that status() does not raise - # 'tuf.exceptions.InsufficientKeysError' if a top-level role - # does not contain a threshold of keys. 
-    targets_roleinfo = tuf.roledb.get_roleinfo('targets', repository_name)
-    old_threshold = targets_roleinfo['threshold']
-    targets_roleinfo['threshold'] = 10
-    tuf.roledb.update_roleinfo('targets', targets_roleinfo,
-        repository_name=repository_name)
-    repository.status()
-
-    # Restore the original threshold values.
-    targets_roleinfo = tuf.roledb.get_roleinfo('targets', repository_name)
-    targets_roleinfo['threshold'] = old_threshold
-    tuf.roledb.update_roleinfo('targets', targets_roleinfo,
-        repository_name=repository_name)
-
-    # Verify that status() does not raise
-    # 'tuf.exceptions.InsufficientKeysError' if a delegated role
-    # does not contain a threshold of keys.
-    role1_roleinfo = tuf.roledb.get_roleinfo('role1', repository_name)
-    old_role1_threshold = role1_roleinfo['threshold']
-    role1_roleinfo['threshold'] = 10
-    tuf.roledb.update_roleinfo('role1', role1_roleinfo,
-        repository_name=repository_name)
-    repository.status()
-
-    # Restore role1's threshold.
-    role1_roleinfo = tuf.roledb.get_roleinfo('role1', repository_name)
-    role1_roleinfo['threshold'] = old_role1_threshold
-    tuf.roledb.update_roleinfo('role1', role1_roleinfo,
-        repository_name=repository_name)
-
-    # Verify that status() does not raise 'tuf.exceptions.UnsignedMetadataError'
-    # if any of the top-level roles are improperly signed. Test that 'root' is
-    # improperly signed.
-    repository.root.unload_signing_key(root_privkey)
-    repository.root.load_signing_key(targets_privkey)
-    repository.status()
-
-    repository.targets('role1').unload_signing_key(role1_privkey)
-    repository.targets('role1').load_signing_key(targets_privkey)
-    repository.status()
-
-    # Reset Root and 'role1', and verify Targets.
-    repository.root.unload_signing_key(targets_privkey)
-    repository.root.load_signing_key(root_privkey)
-    repository.targets('role1').unload_signing_key(targets_privkey)
-    repository.targets('role1').load_signing_key(role1_privkey)
-    repository.targets.unload_signing_key(targets_privkey)
-    repository.targets.load_signing_key(snapshot_privkey)
-    repository.status()
-
-    # Reset Targets and verify Snapshot.
-    repository.targets.unload_signing_key(snapshot_privkey)
-    repository.targets.load_signing_key(targets_privkey)
-    repository.snapshot.unload_signing_key(snapshot_privkey)
-    repository.snapshot.load_signing_key(timestamp_privkey)
-    repository.status()
-
-    # Reset Snapshot and verify Timestamp.
-    repository.snapshot.unload_signing_key(timestamp_privkey)
-    repository.snapshot.load_signing_key(snapshot_privkey)
-    repository.timestamp.unload_signing_key(timestamp_privkey)
-    repository.timestamp.load_signing_key(root_privkey)
-    repository.status()
-
-    # Reset Timestamp.
-    repository.timestamp.unload_signing_key(root_privkey)
-    repository.timestamp.load_signing_key(timestamp_privkey)
-
-    # Verify that writeall() fails if a repository is loaded and a change is
-    # made to a role.
-    repo_tool.load_repository(repository_directory, repository_name)
-
-    repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 0)
-    self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall)
-
-    # Load the required Timestamp key so that a valid repository can be
-    # written.
-    repository.timestamp.load_signing_key(timestamp_privkey)
-    repository.writeall()
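A pattern worth naming from the status() checks above: each wrong-key scenario is built by swapping signing keys, confirming that status() merely reports the problem rather than raising, and then swapping back. In miniature:

    # Simulate 'root' signed by the wrong key; status() logs the problem
    # instead of raising, and the original key is then restored.
    repository.root.unload_signing_key(root_privkey)
    repository.root.load_signing_key(targets_privkey)
    repository.status()
    repository.root.unload_signing_key(targets_privkey)
    repository.root.load_signing_key(root_privkey)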
-    # Test creation of a consistent snapshot repository. Writing a consistent
-    # snapshot modifies the Root metadata, which specifies whether a repository
-    # supports consistent snapshots. Verify that an exception is raised due to
-    # the missing signature of Root.
-    self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall, True)
-
-    # Make sure the private keys of Root (a new version is required since Root
-    # will change to enable consistent snapshots), Snapshot, role1, and
-    # Timestamp are loaded before writing the consistent snapshot.
-    repository.root.load_signing_key(root_privkey)
-    repository.snapshot.load_signing_key(snapshot_privkey)
-    # The targets signing key must also be loaded, because targets is
-    # re-signed when updating 'role1'.
-    repository.targets.load_signing_key(targets_privkey)
-    repository.targets('role1').load_signing_key(role1_privkey)
-
-    # Verify that a consistent snapshot can be written and loaded. The roles
-    # above must be marked as dirty, otherwise writeall() will not create a
-    # consistent snapshot for them.
-    repository.mark_dirty(['role1', 'targets', 'root', 'snapshot', 'timestamp'])
-    repository.writeall(consistent_snapshot=True)
-
-    # Verify that the newly written consistent snapshot can be loaded
-    # successfully.
-    repo_tool.load_repository(repository_directory, repository_name)
-
-    # Verify the behavior of marking and unmarking roles as dirty.
-    # We begin by ensuring that writeall() cleared the list of dirty roles.
-    self.assertEqual([], tuf.roledb.get_dirty_roles(repository_name))
-
-    repository.mark_dirty(['root', 'timestamp'])
-    self.assertEqual(['root', 'timestamp'], tuf.roledb.get_dirty_roles(repository_name))
-    repository.unmark_dirty(['root'])
-    self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles(repository_name))
-
-    # Ensure status() does not leave behind any dirty roles.
-    repository.status()
-    self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles(repository_name))
-
-    # Test improperly formatted arguments.
-    self.assertRaises(securesystemslib.exceptions.FormatError, repository.writeall, 3)
-
-
-  def test_writeall_no_files(self):
-    # Test writeall() when using pre-supplied fileinfo.
-
-    repository_name = 'test_repository'
-    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
-    repository_directory = os.path.join(temporary_directory, 'repository')
-    targets_directory = os.path.join(repository_directory,
-        repo_tool.TARGETS_DIRECTORY_NAME)
-
-    repository = repo_tool.create_new_repository(repository_directory, repository_name)
-
-    # (1) Load the public and private keys of the top-level roles, and one
-    # delegated role.
-    keystore_directory = os.path.join('repository_data', 'keystore')
-
-    # Load the public keys.
-    root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub')
-    targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub')
-    snapshot_pubkey_path = os.path.join(keystore_directory, 'snapshot_key.pub')
-    timestamp_pubkey_path = os.path.join(keystore_directory, 'timestamp_key.pub')
-
-    root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path)
-    targets_pubkey = \
-        repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path)
-    snapshot_pubkey = \
-        repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path)
-    timestamp_pubkey = \
-        repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path)
-
-    # Load the private keys.
- root_privkey_path = os.path.join(keystore_directory, 'root_key') - targets_privkey_path = os.path.join(keystore_directory, 'targets_key') - snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key') - timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key') - - root_privkey = \ - repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password') - targets_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, - 'password') - snapshot_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path, - 'password') - timestamp_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path, - 'password') - - - # (2) Add top-level verification keys. - repository.root.add_verification_key(root_pubkey) - repository.targets.add_verification_key(targets_pubkey) - repository.snapshot.add_verification_key(snapshot_pubkey) - - # Verify that repository.writeall() fails for insufficient threshold - # of signatures (default threshold = 1). - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - - repository.timestamp.add_verification_key(timestamp_pubkey) - - - # (3) Load top-level signing keys. - repository.status() - repository.root.load_signing_key(root_privkey) - repository.status() - repository.targets.load_signing_key(targets_privkey) - repository.status() - repository.snapshot.load_signing_key(snapshot_privkey) - repository.status() - - # Verify that repository.writeall() fails for insufficient threshold - # of signatures (default threshold = 1). - self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall) - - repository.timestamp.load_signing_key(timestamp_privkey) - - # Add target fileinfo - target1_hashes = {'sha256': 'c2986576f5fdfd43944e2b19e775453b96748ec4fe2638a6d2f32f1310967095'} - target2_hashes = {'sha256': '517c0ce943e7274a2431fa5751e17cfd5225accd23e479bfaad13007751e87ef'} - target1_fileinfo = tuf.formats.make_targets_fileinfo(555, target1_hashes) - target2_fileinfo = tuf.formats.make_targets_fileinfo(37, target2_hashes) - target1 = 'file1.txt' - target2 = 'file2.txt' - repository.targets.add_target(target1, fileinfo=target1_fileinfo) - repository.targets.add_target(target2, fileinfo=target2_fileinfo) - - repository.writeall(use_existing_fileinfo=True) - - # Verify that the expected metadata is written. - metadata_directory = os.path.join(repository_directory, - repo_tool.METADATA_STAGED_DIRECTORY_NAME) - - for role in ['root.json', 'targets.json', 'snapshot.json', 'timestamp.json']: - role_filepath = os.path.join(metadata_directory, role) - role_signable = securesystemslib.util.load_json_file(role_filepath) - - # Raise 'securesystemslib.exceptions.FormatError' if 'role_signable' is - # an invalid signable. - tuf.formats.check_signable_object_format(role_signable) - - self.assertTrue(os.path.exists(role_filepath)) - - - - def test_get_filepaths_in_directory(self): - # Test normal case. - # Use the pre-generated metadata directory for testing. - # Set 'repo' reference to improve readability. - repo = repo_tool.Repository - metadata_directory = os.path.join('repository_data', - 'repository', 'metadata') - - # Verify the expected filenames. get_filepaths_in_directory() returns - # a list of absolute paths. - metadata_files = repo.get_filepaths_in_directory(metadata_directory) - - # Construct list of file paths expected, determining absolute paths. 
- expected_files = [] - for filepath in ['1.root.json', 'root.json', 'targets.json', - 'snapshot.json', 'timestamp.json', 'role1.json', 'role2.json']: - expected_files.append(os.path.abspath(os.path.join( - 'repository_data', 'repository', 'metadata', filepath))) - - self.assertEqual(sorted(expected_files), sorted(metadata_files)) - - - # Test when the 'recursive_walk' argument is True. - # In this case, recursive walk should yield the same results as the - # previous, non-recursive call. - metadata_files = repo.get_filepaths_in_directory(metadata_directory, - recursive_walk=True) - self.assertEqual(sorted(expected_files), sorted(metadata_files)) - - # And this recursive call from the directory above should yield the same - # results as well, plus extra files. - metadata_files = repo.get_filepaths_in_directory( - os.path.join('repository_data', 'repository'), recursive_walk=True) - for expected_file in expected_files: - self.assertIn(expected_file, metadata_files) - # self.assertEqual(sorted(expected_files), sorted(metadata_files)) - - # Now let's check it against the full list of expected files for the parent - # directory.... We'll add to the existing list. Expect the same files in - # metadata.staged/ as in metadata/, and a few target files in targets/ - # This is somewhat redundant with the previous test, but together they're - # probably more future-proof. - for filepath in ['file1.txt', 'file2.txt', 'file3.txt']: - expected_files.append(os.path.abspath(os.path.join( - 'repository_data', 'repository', 'targets', filepath))) - for filepath in [ '1.root.json', 'root.json', 'targets.json', - 'snapshot.json', 'timestamp.json', 'role1.json', 'role2.json']: - expected_files.append(os.path.abspath(os.path.join( - 'repository_data', 'repository', 'metadata.staged', filepath))) - - self.assertEqual(sorted(expected_files), sorted(metadata_files)) - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, repo.get_filepaths_in_directory, - 3, recursive_walk=False, followlinks=False) - self.assertRaises(securesystemslib.exceptions.FormatError, repo.get_filepaths_in_directory, - metadata_directory, 3, followlinks=False) - self.assertRaises(securesystemslib.exceptions.FormatError, repo.get_filepaths_in_directory, - metadata_directory, recursive_walk=False, followlinks=3) - - # Test invalid directory argument. - # A non-directory. - self.assertRaises(securesystemslib.exceptions.Error, repo.get_filepaths_in_directory, - os.path.join(metadata_directory, 'root.json')) - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - nonexistent_directory = os.path.join(temporary_directory, 'nonexistent/') - self.assertRaises(securesystemslib.exceptions.Error, repo.get_filepaths_in_directory, - nonexistent_directory, recursive_walk=False, - followlinks=False) - - - - def test_writeall_abstract_storage(self): - # Test creation of a TUF repository with a custom storage backend to ensure - # that functions relying on a storage backend being supplied operate - # correctly - - - class TestStorageBackend(securesystemslib.storage.StorageBackendInterface): - """ - An implementation of securesystemslib.storage.StorageBackendInterface - which mutates filenames on put()/get(), translating filename in memory - to filename + '.tst' on-disk, such that trying to read the - expected/canonical file paths from local storage doesn't find the TUF - metadata files. 
- """ - - from contextlib import contextmanager - - - @contextmanager - def get(self, filepath): - file_object = open(filepath + '.tst', 'rb') - yield file_object - file_object.close() - - - def put(self, fileobj, filepath): - if not fileobj.closed: - fileobj.seek(0) - - with open(filepath + '.tst', 'wb') as destination_file: - shutil.copyfileobj(fileobj, destination_file) - destination_file.flush() - os.fsync(destination_file.fileno()) - - - def remove(self, filepath): - os.remove(filepath + '.tst') - - - def getsize(self, filepath): - return os.path.getsize(filepath + '.tst') - - - def create_folder(self, filepath): - if not filepath: - return - try: - os.makedirs(filepath) - except OSError as err: - pass - - - def list_folder(self, filepath): - contents = [] - files = os.listdir(filepath) - - for fi in files: - if fi.endswith('.tst'): - contents.append(fi.split('.tst')[0]) - else: - contents.append(fi) - - return contents - - - - # Set up the repository directory - repository_name = 'test_repository' - repository_directory = self.create_repository_directory() - metadata_directory = os.path.join(repository_directory, - repo_tool.METADATA_STAGED_DIRECTORY_NAME) - targets_directory = os.path.join(repository_directory, - repo_tool.TARGETS_DIRECTORY_NAME) - - # TestStorageBackend expects all files on disk to have an additional '.tst' - # file extension - for target in os.listdir(targets_directory): - src = os.path.join(targets_directory, target) - dst = os.path.join(targets_directory, target + '.tst') - os.rename(src, dst) - - # (0) Create a repository with TestStorageBackend() - storage_backend = TestStorageBackend() - repository = repo_tool.create_new_repository(repository_directory, - repository_name, - storage_backend) - - # (1) Load the public and private keys of the top-level roles, and one - # delegated role. - keystore_directory = os.path.join('repository_data', 'keystore') - - # Load the public keys. - root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub') - targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub') - snapshot_pubkey_path = os.path.join(keystore_directory, 'snapshot_key.pub') - timestamp_pubkey_path = os.path.join(keystore_directory, 'timestamp_key.pub') - - root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path) - targets_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path) - snapshot_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path) - timestamp_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path) - - # Load the private keys. - root_privkey_path = os.path.join(keystore_directory, 'root_key') - targets_privkey_path = os.path.join(keystore_directory, 'targets_key') - snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key') - timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key') - - root_privkey = \ - repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password') - targets_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, - 'password') - snapshot_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path, - 'password') - timestamp_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path, - 'password') - - - # (2) Add top-level verification keys. 
- repository.root.add_verification_key(root_pubkey) - repository.targets.add_verification_key(targets_pubkey) - repository.snapshot.add_verification_key(snapshot_pubkey) - repository.timestamp.add_verification_key(timestamp_pubkey) - - - # (3) Load top-level signing keys. - repository.root.load_signing_key(root_privkey) - repository.targets.load_signing_key(targets_privkey) - repository.snapshot.load_signing_key(snapshot_privkey) - repository.timestamp.load_signing_key(timestamp_privkey) - - - # (4) Add target files. - target1 = 'file1.txt' - target2 = 'file2.txt' - target3 = 'file3.txt' - repository.targets.add_target(target1) - repository.targets.add_target(target2) - repository.targets.add_target(target3) - - # (6) Write repository. - repository.writeall() - - - # Ensure all of the metadata files exist at the mutated file location and - # that those files are valid metadata - for role in ['root.json.tst', 'targets.json.tst', 'snapshot.json.tst', - 'timestamp.json.tst']: - role_filepath = os.path.join(metadata_directory, role) - self.assertTrue(os.path.exists(role_filepath)) - - role_signable = securesystemslib.util.load_json_file(role_filepath) - # Raise 'securesystemslib.exceptions.FormatError' if 'role_signable' is - # an invalid signable. - tuf.formats.check_signable_object_format(role_signable) - - - def test_signature_order(self): - """Test signatures are added to metadata in alphabetical order. """ - # Create empty repo dir and init default repo in memory - repo_dir = tempfile.mkdtemp(dir=self.temporary_directory) - repo = repo_tool.create_new_repository(repo_dir) - - # Dedicate any two existing test keys as root signing keys - for key_name in ["targets_key", "snapshot_key"]: - repo.root.load_signing_key( - repo_tool.import_ed25519_privatekey_from_file( - os.path.join("repository_data", "keystore", key_name), - "password")) - - # Write root metadata with two signatures - repo.write("root") - - # Load signed and written json metadata back into memory - root_metadata_path = os.path.join( - repo_dir, repo_tool.METADATA_STAGED_DIRECTORY_NAME, "root.json") - root_metadata = securesystemslib.util.load_json_file(root_metadata_path) - - # Assert signatures are ordered alphabetically (by signing key keyid) - self.assertListEqual( - [sig["keyid"] for sig in root_metadata["signatures"]], - [ - "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", - "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" - ]) - - - -class TestMetadata(unittest.TestCase): - def setUp(self): - # Inherit from the repo_tool.Metadata() base class. All of the methods - # to be tested in TestMetadata require at least 1 role, so create it here - # and set its roleinfo. - - tuf.roledb.create_roledb('test_repository') - tuf.keydb.create_keydb('test_repository') - - class MetadataRole(repo_tool.Metadata): - def __init__(self): - super(MetadataRole, self).__init__() - - self._rolename = 'metadata_role' - self._repository_name = 'test_repository' - - # Expire in 86400 seconds (1 day). 
-        expiration = \
-            tuf.formats.unix_timestamp_to_datetime(int(time.time() + 86400))
-        expiration = expiration.isoformat() + 'Z'
-        roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1,
-                    'signatures': [], 'version': 0,
-                    'consistent_snapshot': False,
-                    'expires': expiration,
-                    'partial_loaded': False}
-
-        tuf.roledb.add_role(self._rolename, roleinfo,
-            repository_name='test_repository')
-
-    self.metadata = MetadataRole()
-
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-    self.metadata = None
-
-
-
-  def test_rolename(self):
-    base_metadata = repo_tool.Metadata()
-
-    self.assertEqual(base_metadata.rolename, None)
-
-    # Test the sub-classed MetadataRole().
-    self.assertEqual(self.metadata.rolename, 'metadata_role')
-
-
-
-  def test_version(self):
-    # Test version getter, and the default version number.
-    self.assertEqual(self.metadata.version, 0)
-
-    # Test version setter, and verify updated version number.
-    self.metadata.version = 8
-    self.assertEqual(self.metadata.version, 8)
-
-
-
-  def test_threshold(self):
-    # Test threshold getter, and the default threshold number.
-    self.assertEqual(self.metadata.threshold, 1)
-
-    # Test threshold setter, and verify updated threshold number.
-    self.metadata.threshold = 3
-    self.assertEqual(self.metadata.threshold, 3)
-
-
-
-  def test_expiration(self):
-    # Test expiration getter.
-    expiration = self.metadata.expiration
-    self.assertTrue(isinstance(expiration, datetime.datetime))
-
-    # Test expiration setter.
-    self.metadata.expiration = datetime.datetime(2030, 1, 1, 12, 0)
-    expiration = self.metadata.expiration
-    self.assertTrue(isinstance(expiration, datetime.datetime))
-
-    # Test the setter with microseconds; we force a nonzero microseconds value
-    # so that truncation can be observed.
-    expiration = datetime.datetime.today() + datetime.timedelta(weeks = 1)
-    # Force the microseconds value if we are unlucky enough to get a 0.
-    if expiration.microsecond == 0:
-      expiration = expiration.replace(microsecond = 1)
-
-    self.metadata.expiration = expiration
-    new_expiration = self.metadata.expiration
-    self.assertTrue(isinstance(new_expiration, datetime.datetime))
-
-    # Check that the expiration value is truncated.
-    self.assertTrue(new_expiration.microsecond == 0)
-
-    # Test improperly formatted datetime.
-    try:
-      self.metadata.expiration = '3'
-
-    except securesystemslib.exceptions.FormatError:
-      pass
-
-    else:
-      self.fail('Setter failed to detect improperly formatted datetime.')
-
-
-    # Test invalid argument (i.e., the expiration datetime has already passed).
-    expired_datetime = tuf.formats.unix_timestamp_to_datetime(int(time.time() - 1))
-    try:
-      self.metadata.expiration = expired_datetime
-
-    except securesystemslib.exceptions.Error:
-      pass
-
-    else:
-      self.fail('Setter failed to detect an expired datetime.')
-
-
-
-  def test_keys(self):
-    # Test default case, where a verification key has not been added.
-    self.assertEqual(self.metadata.keys, [])
-
-
-    # Test keys() getter after a verification key has been loaded.
-    key_path = os.path.join('repository_data',
-        'keystore', 'snapshot_key.pub')
-    key_object = repo_tool.import_ed25519_publickey_from_file(key_path)
-    self.metadata.add_verification_key(key_object)
-
-    keyid = key_object['keyid']
-    self.assertEqual([keyid], self.metadata.keys)
-
-
-
-  def test_signing_keys(self):
-    # Test default case, where a signing key has not been added.
-    self.assertEqual(self.metadata.signing_keys, [])
-
-
-    # Test signing_keys() getter after a signing key has been loaded.
- key_path = os.path.join('repository_data',
- 'keystore', 'root_key')
- key_object = repo_tool.import_rsa_privatekey_from_file(key_path, 'password')
- self.metadata.load_signing_key(key_object)
-
- keyid = key_object['keyid']
- self.assertEqual([keyid], self.metadata.signing_keys)
-
-
-
-
-
- def test_add_verification_key(self):
- # Add verification key and verify that it was added via (role).keys.
- key_path = os.path.join('repository_data', 'keystore', 'snapshot_key.pub')
- key_object = repo_tool.import_ed25519_publickey_from_file(key_path)
- self.metadata.add_verification_key(key_object)
-
- keyid = key_object['keyid']
- self.assertEqual([keyid], self.metadata.keys)
-
- expiration = \
- tuf.formats.unix_timestamp_to_datetime(int(time.time() + 86400))
- expiration = expiration.isoformat() + 'Z'
- roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1,
- 'signatures': [], 'version': 0,
- 'consistent_snapshot': False, 'expires': expiration,
- 'partial_loaded': False}
-
- tuf.roledb.add_role('Root', roleinfo, 'test_repository')
- tuf.roledb.add_role('Targets', roleinfo, 'test_repository')
- tuf.roledb.add_role('Snapshot', roleinfo, 'test_repository')
- tuf.roledb.add_role('Timestamp', roleinfo, 'test_repository')
-
- # Test for different top-level role names.
- self.metadata._rolename = 'Targets'
- self.metadata.add_verification_key(key_object)
- self.metadata._rolename = 'Snapshot'
- self.metadata.add_verification_key(key_object)
- self.metadata._rolename = 'Timestamp'
- self.metadata.add_verification_key(key_object)
-
- # Test for a given 'expires' argument.
- expires = datetime.datetime(2030, 1, 1, 12, 0)
- self.metadata.add_verification_key(key_object, expires)
-
-
- # Test for an expired 'expires'.
- expired = datetime.datetime(1984, 1, 1, 12, 0)
- self.assertRaises(securesystemslib.exceptions.Error,
- self.metadata.add_verification_key, key_object, expired)
-
- # Test improperly formatted key argument.
- self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.add_verification_key, 3)
- self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.add_verification_key, key_object, 3)
-
-
-
- def test_remove_verification_key(self):
- # Add verification key so that remove_verification_key() can be tested.
- key_path = os.path.join('repository_data',
- 'keystore', 'snapshot_key.pub')
- key_object = repo_tool.import_ed25519_publickey_from_file(key_path)
- self.metadata.add_verification_key(key_object)
-
- keyid = key_object['keyid']
- self.assertEqual([keyid], self.metadata.keys)
-
-
- # Test successful removal of verification key added above.
- self.metadata.remove_verification_key(key_object)
- self.assertEqual(self.metadata.keys, [])
-
-
- # Test improperly formatted argument.
- self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.remove_verification_key, 3)
-
-
- # Test non-existent public key argument.
- key_path = os.path.join('repository_data',
- 'keystore', 'targets_key.pub')
- unused_key_object = repo_tool.import_ed25519_publickey_from_file(key_path)
-
- self.assertRaises(securesystemslib.exceptions.Error, self.metadata.remove_verification_key,
- unused_key_object)
-
-
-
- def test_load_signing_key(self):
- # Test normal case.
- key_path = os.path.join('repository_data',
- 'keystore', 'snapshot_key')
- key_object = repo_tool.import_ed25519_privatekey_from_file(key_path, 'password')
- self.metadata.load_signing_key(key_object)
-
- keyid = key_object['keyid']
- self.assertEqual([keyid], self.metadata.signing_keys)
-
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.load_signing_key, 3)
-
-
- # Test non-private key.
- key_path = os.path.join('repository_data',
- 'keystore', 'snapshot_key.pub')
- key_object = repo_tool.import_ed25519_publickey_from_file(key_path)
- self.assertRaises(securesystemslib.exceptions.Error, self.metadata.load_signing_key, key_object)
-
-
-
- def test_unload_signing_key(self):
- # Load a signing key so that unload_signing_key() can have a key to unload.
- key_path = os.path.join('repository_data',
- 'keystore', 'snapshot_key')
- key_object = repo_tool.import_ed25519_privatekey_from_file(key_path, 'password')
- self.metadata.load_signing_key(key_object)
-
- keyid = key_object['keyid']
- self.assertEqual([keyid], self.metadata.signing_keys)
-
- self.metadata.unload_signing_key(key_object)
-
- self.assertEqual(self.metadata.signing_keys, [])
-
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.unload_signing_key, 3)
-
-
- # Test non-existent key argument.
- key_path = os.path.join('repository_data',
- 'keystore', 'targets_key')
- unused_key_object = repo_tool.import_ed25519_privatekey_from_file(key_path,
- 'password')
-
- self.assertRaises(securesystemslib.exceptions.Error, self.metadata.unload_signing_key,
- unused_key_object)
-
-
-
- def test_add_signature(self):
- # Test normal case.
- # Load the signature list from any of the pre-generated metadata; needed
- # for testing.
- metadata_directory = os.path.join('repository_data',
- 'repository', 'metadata')
- root_filepath = os.path.join(metadata_directory, 'root.json')
- root_signable = securesystemslib.util.load_json_file(root_filepath)
- signatures = root_signable['signatures']
-
- # Add the first signature from the list, as only one is needed.
- self.metadata.add_signature(signatures[0])
- self.assertEqual(signatures, self.metadata.signatures)
-
- # Verify that a signature is added if a 'signatures' entry is not present.
- tuf.roledb.create_roledb_from_root_metadata(root_signable['signed'], repository_name='test_repository')
- del tuf.roledb._roledb_dict['test_repository']['root']['signatures']
- self.metadata._rolename = 'root'
- self.metadata.add_signature(signatures[0])
-
- # Add a duplicate signature.
- self.metadata.add_signature(signatures[0])
-
- # Test improperly formatted signature argument.
- self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.add_signature, 3)
- self.assertRaises(securesystemslib.exceptions.FormatError, self.metadata.add_signature, signatures[0], 3)
-
-
-
- def test_remove_signature(self):
- # Test normal case.
- # Add a signature so remove_signature() has a signature to remove.
- metadata_directory = os.path.join('repository_data',
- 'repository', 'metadata')
- root_filepath = os.path.join(metadata_directory, 'root.json')
- root_signable = securesystemslib.util.load_json_file(root_filepath)
- signatures = root_signable['signatures']
- self.metadata.add_signature(signatures[0])
-
- self.metadata.remove_signature(signatures[0])
- self.assertEqual(self.metadata.signatures, [])
-
-
- # Test improperly formatted signature argument.
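The add_signature() and remove_signature() tests above manipulate 'signable' dictionaries; the following standalone sketch, with placeholder values rather than real TUF signatures, shows the layout being exercised.

# A signable pairs the signed portion of the metadata with a list of
# signature dictionaries, each carrying a keyid and a hex signature.
signable = {
    'signed': {'_type': 'root', 'version': 1},
    'signatures': [],
}
signature = {'keyid': 'abc123', 'sig': 'deadbeef'}  # placeholder values
signable['signatures'].append(signature)   # what add_signature() records
signable['signatures'].remove(signature)   # what remove_signature() undoes
assert signable['signatures'] == []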
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.metadata.remove_signature, 3)
-
- # Test invalid signature argument (i.e., a signature that has not been added).
- # Load an unused signature to be tested.
- targets_filepath = os.path.join(metadata_directory, 'targets.json')
- targets_signable = securesystemslib.util.load_json_file(targets_filepath)
- signatures = targets_signable['signatures']
-
- self.assertRaises(securesystemslib.exceptions.Error,
- self.metadata.remove_signature, signatures[0])
-
-
-
- def test_signatures(self):
- # Test default case, where no signatures have been added yet.
- self.assertEqual(self.metadata.signatures, [])
-
-
- # Test getter after adding an example signature.
- metadata_directory = os.path.join('repository_data',
- 'repository', 'metadata')
- root_filepath = os.path.join(metadata_directory, 'root.json')
- root_signable = securesystemslib.util.load_json_file(root_filepath)
- signatures = root_signable['signatures']
-
- # Add the first signature from the list, as only one is needed.
- self.metadata.add_signature(signatures[0])
- self.assertEqual(signatures, self.metadata.signatures)
-
-
-
-class TestRoot(unittest.TestCase):
- def setUp(self):
- tuf.roledb.create_roledb('test_repository')
- tuf.keydb.create_keydb('test_repository')
-
-
-
- def tearDown(self):
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
-
-
-
- def test_init(self):
-
- # Test normal case.
- # Root() subclasses Metadata(), and creates a 'root' role in 'tuf.roledb'.
- repository_name = 'test_repository'
- root_object = repo_tool.Root(repository_name)
- self.assertTrue(isinstance(root_object, repo_tool.Metadata))
- self.assertTrue(tuf.roledb.role_exists('root', repository_name))
-
-
-
-class TestTimestamp(unittest.TestCase):
- def setUp(self):
- tuf.roledb.create_roledb('test_repository')
- tuf.keydb.create_keydb('test_repository')
-
-
-
- def tearDown(self):
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
-
-
-
- def test_init(self):
-
- # Test normal case.
- # Timestamp() subclasses Metadata(), and creates a 'timestamp' role in
- # 'tuf.roledb'.
- timestamp_object = repo_tool.Timestamp('test_repository')
- self.assertTrue(isinstance(timestamp_object, repo_tool.Metadata))
- self.assertTrue(tuf.roledb.role_exists('timestamp', 'test_repository'))
-
-
-
-
-
-class TestSnapshot(unittest.TestCase):
- def setUp(self):
- tuf.roledb.create_roledb('test_repository')
- tuf.keydb.create_keydb('test_repository')
-
-
-
- def tearDown(self):
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
-
-
-
- def test_init(self):
-
- # Test normal case.
- # Snapshot() subclasses Metadata(), and creates a 'snapshot' role in
- # 'tuf.roledb'.
- snapshot_object = repo_tool.Snapshot('test_repository')
- self.assertTrue(isinstance(snapshot_object, repo_tool.Metadata))
- self.assertTrue(tuf.roledb.role_exists('snapshot', 'test_repository'))
-
-
-
-
-
-class TestTargets(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
- # Create a temporary directory to store the repository, metadata, and target
- # files. 'temporary_directory' must be deleted in TearDownClass() so that
- # temporary files are always removed, even when exceptions occur.
- cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd())
-
-
-
- @classmethod
- def tearDownClass(cls):
- # Remove the temporary repository directory, which should contain all the
- # metadata, targets, and key files generated for the test cases.
- shutil.rmtree(cls.temporary_directory)
-
-
-
- def setUp(self):
- tuf.roledb.create_roledb('test_repository')
- tuf.keydb.create_keydb('test_repository')
- temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
- self.targets_directory = os.path.join(temporary_directory, 'repository',
- 'targets')
- original_targets_directory = os.path.join('repository_data',
- 'repository', 'targets')
- shutil.copytree(original_targets_directory, self.targets_directory)
- self.targets_object = repo_tool.Targets(self.targets_directory,
- repository_name='test_repository')
-
-
-
- def tearDown(self):
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
- self.targets_object = None
-
-
-
- def test_init(self):
-
- # Test normal case.
- # Targets() subclasses Metadata(), and creates a 'targets' role in
- # 'tuf.roledb'.
- targets_object = repo_tool.Targets('targets_directory/')
- self.assertTrue(isinstance(targets_object, repo_tool.Metadata))
- self.assertTrue(tuf.roledb.role_exists('targets'))
-
- # Custom Targets object rolename.
- targets_object = repo_tool.Targets('targets_directory/', 'project')
- self.assertTrue(isinstance(targets_object, repo_tool.Metadata))
- self.assertTrue(tuf.roledb.role_exists('project'))
-
- # Custom roleinfo object (i.e., tuf.formats.ROLEDB_SCHEMA). 'keyids' and
- # 'threshold' are required, the rest are optional.
- roleinfo = {'keyids':
- ['66c4cb5fef5e4d62b7013ef1cab4b8a827a36c14056d5603c3a970e21eb30e6f'],
- 'threshold': 8}
- self.assertTrue(tuf.formats.ROLEDB_SCHEMA.matches(roleinfo))
-
- targets_object = repo_tool.Targets('targets_directory/', 'package', roleinfo)
- self.assertTrue(isinstance(targets_object, repo_tool.Metadata))
- self.assertTrue(tuf.roledb.role_exists('package'))
-
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Targets, 3)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Targets, 'targets_directory/', 3)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Targets, 'targets_directory/',
- 'targets', 3)
-
-
-
- def test_call(self):
- # Test normal case.
- # Perform a delegation so that a delegated role can be accessed and tested
- # through __call__(). Example: {targets_object}('role1').
- keystore_directory = os.path.join('repository_data', 'keystore')
- public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub')
- public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath)
-
- # Create Targets() object to be tested.
- targets_object = repo_tool.Targets(self.targets_directory)
- targets_object.delegate('role1', [public_key], ['file1.txt'])
-
- self.assertTrue(isinstance(targets_object('role1'), repo_tool.Targets))
-
- # Test invalid (i.e., non-delegated) rolename argument.
- self.assertRaises(tuf.exceptions.UnknownRoleError, targets_object, 'unknown_role')
-
- # Test improperly formatted argument.
- self.assertRaises(securesystemslib.exceptions.FormatError, targets_object, 1)
-
-
-
- def test_get_delegated_rolenames(self):
- # Test normal case.
- # Perform two delegations so that get_delegated_rolenames() has roles to
- # return.
- keystore_directory = os.path.join('repository_data', 'keystore')
- public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub')
- public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath)
-
- # Set needed arguments by delegate().
- public_keys = [public_key]
- threshold = 1
-
- self.targets_object.delegate('tuf', public_keys, [], threshold, False,
- ['file1.txt'], path_hash_prefixes=None)
-
- self.targets_object.delegate('warehouse', public_keys, [], threshold, False,
- ['file2.txt'], path_hash_prefixes=None)
-
- # Test that get_delegated_rolenames returns the expected delegations.
- expected_delegated_rolenames = ['tuf', 'warehouse']
- for delegated_rolename in self.targets_object.get_delegated_rolenames():
- self.assertIn(delegated_rolename, expected_delegated_rolenames)
-
-
-
- def test_target_files(self):
- # Test normal case.
- # Verify the targets object initially contains zero target files.
- self.assertEqual(self.targets_object.target_files, {})
-
- target_filepath = 'file1.txt'
- self.targets_object.add_target(target_filepath)
-
- self.assertEqual(len(self.targets_object.target_files), 1)
- self.assertTrue(target_filepath in self.targets_object.target_files)
-
-
-
- def test_delegations(self):
- # Test normal case.
- # Perform a delegation so that delegations() has a Targets() object to
- # return.
- keystore_directory = os.path.join('repository_data', 'keystore')
- public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub')
- public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath)
-
- # Set needed arguments by delegate().
- public_keys = [public_key]
- rolename = 'tuf'
- paths = ['file1.txt']
- threshold = 1
-
- self.targets_object.delegate(rolename, public_keys, paths, threshold,
- terminating=False, list_of_targets=None, path_hash_prefixes=None)
-
- # Test that a valid Targets() object is returned by delegations().
- for delegated_object in self.targets_object.delegations:
- self.assertTrue(isinstance(delegated_object, repo_tool.Targets))
-
- # For testing / coverage purposes, try to remove a delegated role with the
- # remove_delegated_role() method.
- self.targets_object.remove_delegated_role(rolename)
-
-
-
- def test_add_delegated_role(self):
- # Test for invalid targets object.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.targets_object.add_delegated_role, 'targets', 'bad_object')
-
-
-
- def test_add_target(self):
- # Test normal case.
- # Verify the targets object initially contains zero target files.
- self.assertEqual(self.targets_object.target_files, {})
-
- target_filepath = 'file1.txt'
- self.targets_object.add_target(target_filepath)
-
- self.assertEqual(len(self.targets_object.target_files), 1)
- self.assertTrue(target_filepath in self.targets_object.target_files)
-
- # Test the 'custom' parameter of add_target(), where additional information
- # may be specified for the target.
- target2_filepath = 'file2.txt'
- target2_fullpath = os.path.join(self.targets_directory, target2_filepath)
-
- # The file permission of the target (an octal number specifying file
- # access for owner, group, and others, e.g., 0755).
- octal_file_permissions = oct(os.stat(target2_fullpath).st_mode)[4:]
- custom_file_permissions = {'file_permissions': octal_file_permissions}
- self.targets_object.add_target(target2_filepath, custom_file_permissions)
-
- self.assertEqual(len(self.targets_object.target_files), 2)
- self.assertTrue(target2_filepath in self.targets_object.target_files)
- self.assertEqual(self.targets_object.target_files['file2.txt']['custom'],
- custom_file_permissions)
-
- # Attempt to replace a target that has already been added.
- octal_file_permissions2 = oct(os.stat(target2_fullpath).st_mode)[4:]
- custom_file_permissions2 = {'file_permissions': octal_file_permissions2}
- self.targets_object.add_target(target2_filepath, custom_file_permissions2)
- self.assertEqual(self.targets_object.target_files[target2_filepath]['custom'],
- custom_file_permissions2)
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.targets_object.add_target, 3)
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.targets_object.add_target, 3, custom_file_permissions)
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.targets_object.add_target, target_filepath, 3)
-
- # A target path starting with a directory separator
- self.assertRaises(tuf.exceptions.InvalidNameError,
- self.targets_object.add_target, '/file1.txt')
-
- # A target path using a backward slash as a separator
- self.assertRaises(tuf.exceptions.InvalidNameError,
- self.targets_object.add_target, 'subdir\\file1.txt')
-
- # Should not access the file system to check for non-existent files
- self.targets_object.add_target('non-existent')
-
-
-
- def test_add_targets(self):
- # Test normal case.
- # Verify the targets object initially contains zero target files.
- self.assertEqual(self.targets_object.target_files, {})
-
- target1_filepath = 'file1.txt'
- target2_filepath = 'file2.txt'
- target3_filepath = 'file3.txt'
-
- # Add a 'target1_filepath' duplicate for testing purposes
- # ('target1_filepath' should not be added twice.)
- target_files = \
- [target1_filepath, target2_filepath, 'file3.txt', target1_filepath]
- self.targets_object.add_targets(target_files)
-
- self.assertEqual(len(self.targets_object.target_files), 3)
- self.assertEqual(self.targets_object.target_files,
- {target1_filepath: {}, target2_filepath: {}, target3_filepath: {}})
-
- # Attempt to replace targets that have already been added.
- self.targets_object.add_targets(target_files)
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.targets_object.add_targets, 3)
-
- # A target path starting with a directory separator
- self.assertRaises(tuf.exceptions.InvalidNameError,
- self.targets_object.add_targets, ['/file1.txt'])
-
- # A target path using a backward slash as a separator
- self.assertRaises(tuf.exceptions.InvalidNameError,
- self.targets_object.add_targets, ['subdir\\file1.txt'])
-
- # Check that the addition of the whole list is rolled back in case of a
- # wrong target path.
- target_files = self.targets_object.target_files
- self.assertRaises(tuf.exceptions.InvalidNameError,
- self.targets_object.add_targets, ['file4.txt', '/file5.txt'])
- self.assertEqual(self.targets_object.target_files, target_files)
-
- # Should not access the file system to check for non-existent files
- self.targets_object.add_targets(['non-existent'])
-
-
- def test_remove_target(self):
- # Test normal case.
- # Verify the targets object initially contains zero target files.
- self.assertEqual(self.targets_object.target_files, {})
-
- # Add a target so that remove_target() has something to remove.
- target_filepath = 'file1.txt'
- self.targets_object.add_target(target_filepath)
-
- # Test remove_target()'s behavior.
- self.targets_object.remove_target(target_filepath)
- self.assertEqual(self.targets_object.target_files, {})
-
- # Test improperly formatted arguments.
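The custom fileinfo used in test_add_target can be reproduced on its own; this sketch assumes a regular file, for which oct(st_mode) renders as an '0o100600'-style string whose last four characters are the permission digits.

import os
import tempfile

# Sketch of how the custom 'file_permissions' value above is derived:
# oct(st_mode) gives e.g. '0o100600' for a regular file, and slicing off
# the first four characters leaves the octal permissions, e.g. '0600'.
fd, path = tempfile.mkstemp()
os.close(fd)
octal_file_permissions = oct(os.stat(path).st_mode)[4:]
custom = {'file_permissions': octal_file_permissions}
print(custom)   # e.g. {'file_permissions': '0600'}
os.remove(path)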
- self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.remove_target, 3) - - # Test for filepath that hasn't been added yet. - target5_filepath = 'file5.txt' - self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.remove_target, - target5_filepath) - - - - def test_clear_targets(self): - # Test normal case. - # Verify the targets object initially contains zero target files. - self.assertEqual(self.targets_object.target_files, {}) - - # Add targets, to be tested by clear_targets(). - target1_filepath = 'file1.txt' - target2_filepath = 'file2.txt' - self.targets_object.add_targets([target1_filepath, target2_filepath]) - - self.targets_object.clear_targets() - self.assertEqual(self.targets_object.target_files, {}) - - - - def test_delegate(self): - # Test normal case. - # Need at least one public key and valid target paths required by - # delegate(). - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - - # Set needed arguments by delegate(). - public_keys = [public_key] - rolename = 'tuf' - list_of_targets = ['file1.txt', 'file2.txt'] - threshold = 1 - paths = ['*'] - path_hash_prefixes = ['e3a3', '8fae', 'd543'] - - self.targets_object.delegate(rolename, public_keys, paths, - threshold, terminating=False, list_of_targets=list_of_targets, - path_hash_prefixes=path_hash_prefixes) - - self.assertEqual(self.targets_object.get_delegated_rolenames(), - ['tuf']) - - # Test for delegated paths that do not exist. - # An exception should not be raised for non-existent delegated paths, since - # these paths may not necessarily exist when the delegation is done, - # and also because the delegated paths can be glob patterns. - self.targets_object.delegate(rolename, public_keys, ['non-existent'], - threshold, terminating=False, list_of_targets=list_of_targets, - path_hash_prefixes=path_hash_prefixes) - - # Test for delegated targets that do not exist. - # An exception should not be raised for non-existent delegated targets, - # since at this point the file system should not be accessed yet - self.targets_object.delegate(rolename, public_keys, [], threshold, - terminating=False, list_of_targets=['non-existent.txt'], - path_hash_prefixes=path_hash_prefixes) - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, 3, public_keys, paths, threshold, - list_of_targets, path_hash_prefixes) - - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, rolename, 3, paths, threshold, - list_of_targets, path_hash_prefixes) - - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, rolename, public_keys, 3, threshold, - list_of_targets, path_hash_prefixes) - - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, rolename, public_keys, paths, '3', - list_of_targets, path_hash_prefixes) - - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, rolename, public_keys, paths, threshold, - 3, path_hash_prefixes) - - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate, rolename, public_keys, paths, threshold, - list_of_targets, 3) - - # Test invalid arguments (e.g., already delegated 'rolename', non-existent - # files, etc.). 
- # Test duplicate 'rolename' delegation, which should have been delegated - # in the normal case above. - self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.delegate, rolename, public_keys, paths, threshold, - list_of_targets, path_hash_prefixes) - - # A path or target starting with a directory separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.delegate, rolename, public_keys, ['/*']) - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.delegate, rolename, public_keys, [], - list_of_targets=['/file1.txt']) - - # A path or target using '\' as a directory separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.delegate, rolename, public_keys, ['subpath\\*']) - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.delegate, rolename, public_keys, [], - list_of_targets=['subpath\\file1.txt']) - - - - - def test_delegate_hashed_bins(self): - # Test normal case. - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - - # Set needed arguments by delegate_hashed_bins(). - public_keys = [public_key] - list_of_targets = ['file1.txt'] - - - # A helper function to check that the range of prefixes the role is - # delegated for, specified in path_hash_prefixes, matches the range - # implied by the bin, or delegation role, name. - def check_prefixes_match_range(): - roleinfo = tuf.roledb.get_roleinfo(self.targets_object.rolename, - 'test_repository') - have_prefixes = False - - for delegated_role in roleinfo['delegations']['roles']: - if len(delegated_role['path_hash_prefixes']) > 0: - rolename = delegated_role['name'] - prefixes = delegated_role['path_hash_prefixes'] - have_prefixes = True - - if len(prefixes) > 1: - prefix_range = "{}-{}".format(prefixes[0], prefixes[-1]) - else: - prefix_range = prefixes[0] - - self.assertEqual(rolename, prefix_range) - - # We expect at least one delegation with some path_hash_prefixes - self.assertTrue(have_prefixes) - - - # Test delegate_hashed_bins() and verify that 16 hashed bins have - # been delegated in the parent's roleinfo. - self.targets_object.delegate_hashed_bins(list_of_targets, public_keys, - number_of_bins=16) - - # The expected child rolenames, since 'number_of_bins' = 16 - delegated_rolenames = ['0', '1', '2', '3', '4', '5', '6', '7', - '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'] - - self.assertEqual(sorted(self.targets_object.get_delegated_rolenames()), - sorted(delegated_rolenames)) - check_prefixes_match_range() - - # For testing / coverage purposes, try to create delegated bins that - # hold a range of hash prefixes (e.g., bin name: 000-003). - self.targets_object.delegate_hashed_bins(list_of_targets, public_keys, - number_of_bins=512) - check_prefixes_match_range() - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate_hashed_bins, 3, public_keys, - number_of_bins=1) - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate_hashed_bins, - list_of_targets, 3, number_of_bins=1) - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.delegate_hashed_bins, - list_of_targets, public_keys, '1') - - # Test invalid arguments. - # Invalid number of bins, which must be a power of 2. 
- self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.delegate_hashed_bins, - list_of_targets, public_keys, number_of_bins=3) - - # Invalid 'list_of_targets'. - # A path or target starting with a directory separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.delegate_hashed_bins, - ['/file1.txt'], public_keys, - number_of_bins=2) - - # A path or target using '\' as a directory separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.delegate_hashed_bins, - ['subpath\\file1.txt'], public_keys, - number_of_bins=2) - - - def test_add_target_to_bin(self): - # Test normal case. - # Delegate the hashed bins so that add_target_to_bin() can be tested. - repository_name = 'test_repository' - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'targets_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - target1_filepath = 'file1.txt' - - # Set needed arguments by delegate_hashed_bins(). - public_keys = [public_key] - - # Delegate to hashed bins. The target filepath to be tested is expected - # to contain a hash prefix of 'e', and should be available at: - # repository.targets('e'). - self.targets_object.delegate_hashed_bins([], public_keys, - number_of_bins=16) - - # Ensure each hashed bin initially contains zero targets. - for delegation in self.targets_object.delegations: - self.assertEqual(delegation.target_files, {}) - - # Add 'target1_filepath' and verify that the relative path of - # 'target1_filepath' is added to the correct bin. - rolename = self.targets_object.add_target_to_bin(target1_filepath, 16) - - for delegation in self.targets_object.delegations: - if delegation.rolename == rolename: - self.assertTrue('file1.txt' in delegation.target_files) - - else: - self.assertFalse('file1.txt' in delegation.target_files) - - # Test for non-existent delegations and hashed bins. - empty_targets_role = repo_tool.Targets(self.targets_directory, 'empty', - repository_name=repository_name) - - self.assertRaises(securesystemslib.exceptions.Error, - empty_targets_role.add_target_to_bin, - target1_filepath, 16) - - # Test for a required hashed bin that does not exist. - self.targets_object.revoke(rolename) - self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.add_target_to_bin, - target1_filepath, 16) - - # Test adding a target with fileinfo - target2_hashes = {'sha256': '517c0ce943e7274a2431fa5751e17cfd5225accd23e479bfaad13007751e87ef'} - target2_fileinfo = tuf.formats.make_targets_fileinfo(37, target2_hashes) - target2_filepath = 'file2.txt' - - rolename = self.targets_object.add_target_to_bin(target2_filepath, 16, - fileinfo=target2_fileinfo) - - for delegation in self.targets_object.delegations: - if delegation.rolename == rolename: - self.assertTrue(target2_filepath in delegation.target_files) - - else: - self.assertFalse(target2_filepath in delegation.target_files) - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.add_target_to_bin, 3, 'foo') - - - - def test_remove_target_from_bin(self): - # Test normal case. - # Delegate the hashed bins so that add_target_to_bin() can be tested. 
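A hypothetical helper, not the tuf implementation, can approximate the bin arithmetic behind delegate_hashed_bins(), add_target_to_bin() and remove_target_from_bin(); it assumes, consistent with the comments in these tests, that a target is assigned to a bin by the leading hex digits of the SHA-256 hash of its relative path.

import hashlib

def bin_prefix_for_target(target_relpath, number_of_bins=16):
    # One hex digit distinguishes 16 bins, two digits 256 bins, and so on;
    # with 16 bins the comments above expect 'file1.txt' to land in bin 'e'.
    prefix_length = len('{:x}'.format(number_of_bins - 1))
    digest = hashlib.sha256(target_relpath.encode('utf-8')).hexdigest()
    return digest[:prefix_length]

print(bin_prefix_for_target('file1.txt'))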
- keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'targets_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - target1_filepath = 'file1.txt' - - # Set needed arguments by delegate_hashed_bins(). - public_keys = [public_key] - - # Delegate to hashed bins. The target filepath to be tested is expected - # to contain a hash prefix of 'e', and can be accessed as: - # repository.targets('e'). - self.targets_object.delegate_hashed_bins([], public_keys, - number_of_bins=16) - - # Ensure each hashed bin initially contains zero targets. - for delegation in self.targets_object.delegations: - self.assertEqual(delegation.target_files, {}) - - # Add 'target1_filepath' and verify that the relative path of - # 'target1_filepath' is added to the correct bin. - added_rolename = self.targets_object.add_target_to_bin(target1_filepath, 16) - - for delegation in self.targets_object.delegations: - if delegation.rolename == added_rolename: - self.assertTrue('file1.txt' in delegation.target_files) - self.assertTrue(len(delegation.target_files) == 1) - else: - self.assertTrue('file1.txt' not in delegation.target_files) - - # Test the remove_target_from_bin() method. Verify that 'target1_filepath' - # has been removed. - removed_rolename = self.targets_object.remove_target_from_bin(target1_filepath, 16) - self.assertEqual(added_rolename, removed_rolename) - - for delegation in self.targets_object.delegations: - self.assertTrue(target1_filepath not in delegation.target_files) - - - # Test improperly formatted argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.remove_target_from_bin, 3, 'foo') - - # Invalid target file path argument. - self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.remove_target_from_bin, 'non-existent', 16) - - - - def test_default_bin_num(self): - # Test creating, adding to and removing from hashed bins with the default - # number of bins - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - target1_filepath = os.path.join(self.targets_directory, 'file1.txt') - - # Set needed arguments by delegate_hashed_bins(). - public_keys = [public_key] - - # Test default parameters for number_of_bins - self.targets_object.delegate_hashed_bins([], public_keys) - - # Ensure each hashed bin initially contains zero targets. - for delegation in self.targets_object.delegations: - self.assertEqual(delegation.target_files, {}) - - # Add 'target1_filepath' and verify that the relative path of - # 'target1_filepath' is added to the correct bin. - added_rolename = self.targets_object.add_target_to_bin(os.path.basename(target1_filepath)) - - for delegation in self.targets_object.delegations: - if delegation.rolename == added_rolename: - self.assertTrue('file1.txt' in delegation.target_files) - - else: - self.assertFalse('file1.txt' in delegation.target_files) - - # Remove target1_filepath and verify that all bins are now empty - removed_rolename = self.targets_object.remove_target_from_bin( - os.path.basename(target1_filepath)) - self.assertEqual(added_rolename, removed_rolename) - - for delegation in self.targets_object.delegations: - self.assertEqual(delegation.target_files, {}) - - - def test_add_paths(self): - # Test normal case. 
- # Perform a delegation so that add_paths() has a child role to delegate a - # path to. - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - - # Set needed arguments by delegate(). - public_keys = [public_key] - rolename = 'tuf' - threshold = 1 - - self.targets_object.delegate(rolename, public_keys, [], threshold, - list_of_targets=None, path_hash_prefixes=None) - - # Delegate an extra role for test coverage (i.e., to later verify that - # delegated paths are not added to a child role that was not requested). - self.targets_object.delegate('junk_role', public_keys, []) - - paths = ['tuf_files/*'] - self.targets_object.add_paths(paths, 'tuf') - - # Retrieve 'targets_object' roleinfo, and verify the roleinfo contains the - # expected delegated paths of the delegated role. - targets_object_roleinfo = tuf.roledb.get_roleinfo(self.targets_object.rolename, - 'test_repository') - - delegated_role = targets_object_roleinfo['delegations']['roles'][0] - self.assertEqual(['tuf_files/*'], delegated_role['paths']) - - # Try to add a delegated path that has already been set. - # add_paths() should simply log a message in this case. - self.targets_object.add_paths(paths, 'tuf') - - # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.add_paths, 3, 'tuf') - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object.add_paths, paths, 3) - - - # Test invalid arguments. - # A non-delegated child role. - self.assertRaises(securesystemslib.exceptions.Error, - self.targets_object.add_paths, paths, 'non_delegated_rolename') - - # A path starting with a directory separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.add_paths, ['/tuf_files/*'], 'tuf') - - # A path using a backward slash as a separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object.add_paths, ['tuf_files\\*'], 'tuf') - - # add_paths() should not raise an exception for non-existent - # paths, which it previously did. - self.targets_object.add_paths(['non-existent'], 'tuf') - - - - - def test_revoke(self): - # Test normal case. - # Perform a delegation so that revoke() has a delegation to revoke. - keystore_directory = os.path.join('repository_data', 'keystore') - public_keypath = os.path.join(keystore_directory, 'snapshot_key.pub') - public_key = repo_tool.import_ed25519_publickey_from_file(public_keypath) - - # Set needed arguments by delegate(). - public_keys = [public_key] - rolename = 'tuf' - paths = ['file1.txt'] - threshold = 1 - - self.targets_object.delegate(rolename, public_keys, [], threshold, False, - paths, path_hash_prefixes=None) - - # Test revoke() - self.targets_object.revoke('tuf') - self.assertEqual(self.targets_object.get_delegated_rolenames(), []) - - - # Test improperly formatted rolename argument. 
- self.assertRaises(securesystemslib.exceptions.FormatError, self.targets_object.revoke, 3) - - - - def test_check_path(self): - # Test that correct path does not raise exception: using '/' as a separator - # and does not start with a directory separator - self.targets_object._check_path('file1.txt') - - # Test that non-existent path does not raise exception (_check_path - # checks only the path string for compliance) - self.targets_object._check_path('non-existent.txt') - self.targets_object._check_path('subdir/non-existent') - - # Test improperly formatted pathname argument. - self.assertRaises(securesystemslib.exceptions.FormatError, - self.targets_object._check_path, 3) - - # Test invalid pathname - # Starting with os separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object._check_path, '/file1.txt') - - # Starting with Windows-style separator - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object._check_path, '\\file1.txt') - - # Using Windows-style separator ('\') - self.assertRaises(tuf.exceptions.InvalidNameError, - self.targets_object._check_path, 'subdir\\non-existent') - - - -class TestRepositoryToolFunctions(unittest.TestCase): - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownClass() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - - - @classmethod - def tearDownClass(cls): - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - shutil.rmtree(cls.temporary_directory) - - - - def setUp(self): - tuf.roledb.create_roledb('test_repository') - tuf.keydb.create_keydb('test_repository') - - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - - - def test_create_new_repository(self): - # Test normal case. - # Setup the temporary repository directories needed by - # create_new_repository(). - repository_name = 'test_repository' - temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - repository_directory = os.path.join(temporary_directory, 'repository') - metadata_directory = os.path.join(repository_directory, - repo_tool.METADATA_STAGED_DIRECTORY_NAME) - targets_directory = os.path.join(repository_directory, - repo_tool.TARGETS_DIRECTORY_NAME) - - repository = repo_tool.create_new_repository(repository_directory, - repository_name) - self.assertTrue(isinstance(repository, repo_tool.Repository)) - - # Verify that the 'repository/', 'repository/metadata', and - # 'repository/targets' directories were created. - self.assertTrue(os.path.exists(repository_directory)) - self.assertTrue(os.path.exists(metadata_directory)) - self.assertTrue(os.path.exists(targets_directory)) - - # Test that the 'repository' directory is created (along with the other - # sub-directories) when it does not exist yet. The repository tool creates - # the non-existent directory. - shutil.rmtree(repository_directory) - - repository = repo_tool.create_new_repository(repository_directory, - repository_name) - self.assertTrue(isinstance(repository, repo_tool.Repository)) - - # Verify that the 'repository/', 'repository/metadata', and - # 'repository/targets' directories were created. 
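The rules that test_check_path above verifies amount to simple string validation; the checker below is a hypothetical re-statement of those rules, not the private _check_path() implementation.

import tuf.exceptions

def check_path(pathname):
    # Target paths are treated as relative, '/'-separated strings; the
    # file system is never consulted, matching the tests above.
    if pathname.startswith('/'):
        raise tuf.exceptions.InvalidNameError(
            'path must not start with a directory separator: ' + pathname)
    if '\\' in pathname:
        raise tuf.exceptions.InvalidNameError(
            "path must use '/' as its separator: " + pathname)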
- self.assertTrue(os.path.exists(repository_directory))
- self.assertTrue(os.path.exists(metadata_directory))
- self.assertTrue(os.path.exists(targets_directory))
-
- # Test passing custom arguments to control the computation
- # of length and hashes for timestamp and snapshot roles.
- repository = repo_tool.create_new_repository(repository_directory,
- repository_name, use_timestamp_length=True, use_timestamp_hashes=True,
- use_snapshot_length=True, use_snapshot_hashes=True)
-
- # Verify that the arguments for optional hashes and length for
- # snapshot and timestamp are properly set.
- self.assertTrue(repository._use_timestamp_length)
- self.assertTrue(repository._use_timestamp_hashes)
- self.assertTrue(repository._use_snapshot_length)
- self.assertTrue(repository._use_snapshot_hashes)
-
- # Test for a repository name that doesn't exist yet. Note:
- # The 'test_repository' repository name is created in setUp() before this
- # test case is run.
- repository = repo_tool.create_new_repository(repository_directory, 'my-repo')
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- repo_tool.create_new_repository, 3, repository_name)
-
- # For testing purposes, try to create a repository directory that
- # fails due to an exception other than errno.EEXIST being raised.
- self.assertRaises(securesystemslib.exceptions.StorageError,
- repo_tool.create_new_repository, 'bad' * 2000, repository_name)
-
- # Reset the 'repository_directory' so that the metadata and targets
- # directories can be tested likewise.
- repository_directory = os.path.join(temporary_directory, 'repository')
-
- # The same test as before, but for the metadata and targets directories.
- original_metadata_staged_directory = \
- tuf.repository_tool.METADATA_STAGED_DIRECTORY_NAME
- tuf.repository_tool.METADATA_STAGED_DIRECTORY_NAME = 'bad' * 2000
-
- self.assertRaises(securesystemslib.exceptions.StorageError,
- repo_tool.create_new_repository, repository_directory, repository_name)
-
- # Reset metadata staged directory so that the targets directory can be
- # tested...
- tuf.repository_tool.METADATA_STAGED_DIRECTORY_NAME = \
- original_metadata_staged_directory
-
- original_targets_directory = tuf.repository_tool.TARGETS_DIRECTORY_NAME
- tuf.repository_tool.TARGETS_DIRECTORY_NAME = 'bad' * 2000
-
- self.assertRaises(securesystemslib.exceptions.StorageError,
- repo_tool.create_new_repository, repository_directory, repository_name)
-
- tuf.repository_tool.TARGETS_DIRECTORY_NAME = \
- original_targets_directory
-
-
-
- def test_load_repository(self):
- # Test normal case.
- temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
- original_repository_directory = os.path.join('repository_data',
- 'repository')
-
- repository_directory = os.path.join(temporary_directory, 'repository')
- metadata_directory = os.path.join(repository_directory, 'metadata.staged')
- shutil.copytree(original_repository_directory, repository_directory)
-
- # For testing purposes, add a metadata file with an extension that is
- # not supported, and another with invalid JSON content.
- invalid_metadata_file = os.path.join(metadata_directory, 'root.xml')
- root_file = os.path.join(metadata_directory, 'root.json')
- shutil.copyfile(root_file, invalid_metadata_file)
- bad_root_content = os.path.join(metadata_directory, 'root_bad.json')
-
- with open(bad_root_content, 'wb') as file_object:
- file_object.write(b'bad')
-
- repository = repo_tool.load_repository(repository_directory)
- self.assertTrue(isinstance(repository, repo_tool.Repository))
- self.assertTrue(isinstance(repository.targets('role1'),
- repo_tool.Targets))
- self.assertTrue(isinstance(repository.targets('role1')('role2'),
- repo_tool.Targets))
-
- # Verify the expected roles have been loaded. See
- # 'tuf/tests/repository_data/repository/'.
- expected_roles = \
- ['root', 'targets', 'snapshot', 'timestamp', 'role1', 'role2']
- for role in tuf.roledb.get_rolenames():
- self.assertTrue(role in expected_roles)
-
- self.assertTrue(len(repository.root.keys))
- self.assertTrue(len(repository.targets.keys))
- self.assertTrue(len(repository.snapshot.keys))
- self.assertTrue(len(repository.timestamp.keys))
- self.assertEqual(1, repository.targets('role1').version)
-
- # It is assumed that the targets (tuf/tests/repository_data/) role contains
- # 'file1.txt' and 'file2.txt'.
- self.assertTrue('file1.txt' in repository.targets.target_files)
- self.assertTrue('file2.txt' in repository.targets.target_files)
- self.assertTrue('file3.txt' in repository.targets('role1').target_files)
-
- # Test if targets file info is loaded correctly: read the JSON metadata
- # files separately and then compare with the loaded repository data.
- targets_path = os.path.join(metadata_directory, 'targets.json')
- role1_path = os.path.join(metadata_directory, 'role1.json')
-
- targets_object = securesystemslib.util.load_json_file(targets_path)
- role1_object = securesystemslib.util.load_json_file(role1_path)
-
- targets_fileinfo = targets_object['signed']['targets']
- role1_fileinfo = role1_object['signed']['targets']
-
- repository = repo_tool.load_repository(repository_directory)
-
- self.assertEqual(targets_fileinfo, repository.targets.target_files)
- self.assertEqual(role1_fileinfo, repository.targets('role1').target_files)
-
- # Test for a non-default repository name.
- repository = repo_tool.load_repository(repository_directory, 'my-repo')
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- repo_tool.load_repository, 3)
-
-
- # Test passing custom arguments to control the computation
- # of length and hashes for timestamp and snapshot roles.
- repository = repo_tool.load_repository(repository_directory,
- 'my-repo', use_timestamp_length=True, use_timestamp_hashes=True,
- use_snapshot_length=True, use_snapshot_hashes=True)
-
- # Verify that the arguments for optional hashes and length for
- # snapshot and timestamp are properly set.
- self.assertTrue(repository._use_timestamp_length)
- self.assertTrue(repository._use_timestamp_hashes)
- self.assertTrue(repository._use_snapshot_length)
- self.assertTrue(repository._use_snapshot_hashes)
-
- # Test for an invalid 'repository_directory' (i.e., one that does not
- # contain the minimum required metadata).
- root_filepath = os.path.join(repository_directory,
- repo_tool.METADATA_STAGED_DIRECTORY_NAME, 'root.json')
- os.remove(root_filepath)
- self.assertRaises(tuf.exceptions.RepositoryError,
- repo_tool.load_repository, repository_directory)
-
-
-
- def test_dirty_roles(self):
- repository_name = 'test_repository'
- original_repository_directory = os.path.join('repository_data',
- 'repository')
- repository = repo_tool.load_repository(original_repository_directory,
- repository_name)
-
- # dirty_roles() only logs the list of dirty roles.
- repository.dirty_roles()
-
-
-
- def test_dump_signable_metadata(self):
- metadata_directory = os.path.join('repository_data',
- 'repository', 'metadata')
- targets_metadata_file = os.path.join(metadata_directory, 'targets.json')
-
- metadata_content = repo_tool.dump_signable_metadata(targets_metadata_file)
-
- # Test for an invalid targets metadata file.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- repo_tool.dump_signable_metadata, 1)
- self.assertRaises(securesystemslib.exceptions.StorageError,
- repo_tool.dump_signable_metadata, 'bad file path')
-
-
-
- def test_append_signature(self):
- metadata_directory = os.path.join('repository_data',
- 'repository', 'metadata')
- targets_metadata_path = os.path.join(metadata_directory, 'targets.json')
-
- temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
- tmp_targets_metadata_path = os.path.join(temporary_directory, 'targets.json')
- shutil.copyfile(targets_metadata_path, tmp_targets_metadata_path)
-
- # Test for normal case.
- targets_metadata = securesystemslib.util.load_json_file(tmp_targets_metadata_path)
- num_signatures = len(targets_metadata['signatures'])
- signature = targets_metadata['signatures'][0]
-
- repo_tool.append_signature(signature, tmp_targets_metadata_path)
-
- targets_metadata = securesystemslib.util.load_json_file(tmp_targets_metadata_path)
- self.assertEqual(num_signatures + 1, len(targets_metadata['signatures']))
-
- # Test for invalid arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- repo_tool.append_signature, 1, tmp_targets_metadata_path)
-
- self.assertRaises(securesystemslib.exceptions.FormatError,
- repo_tool.append_signature, signature, 1)
-
-
-# Run the test cases.
-if __name__ == '__main__':
- utils.configure_test_logging(sys.argv)
- unittest.main()
diff --git a/tests/test_roledb_old.py b/tests/test_roledb_old.py
deleted file mode 100755
index 04b76e9545..0000000000
--- a/tests/test_roledb_old.py
+++ /dev/null
@@ -1,787 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
- test_roledb_old.py
-
-<Author>
- Vladimir Diaz
-
-<Started>
- October 2012.
-
-<Copyright>
- See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
- Unit test for 'roledb.py'.
-"""
-
-import unittest
-import logging
-import sys
-
-import tuf
-import tuf.formats
-import tuf.roledb
-import tuf.exceptions
-import tuf.log
-
-from tests import utils
-
-import securesystemslib
-import securesystemslib.keys
-
-logger = logging.getLogger(__name__)
-
-
-# Generate the three keys to use in our test cases.
-KEYS = []
-for junk in range(3):
- KEYS.append(securesystemslib.keys.generate_rsa_key(2048))
-
-
-
-class TestRoledb(unittest.TestCase):
- def setUp(self):
- tuf.roledb.clear_roledb(clear_all=True)
-
-
-
- def tearDown(self):
- tuf.roledb.clear_roledb(clear_all=True)
-
-
-
- def test_create_roledb(self):
- # Verify that a roledb is created for a named repository.
- self.assertTrue('default' in tuf.roledb._roledb_dict)
- self.assertEqual(1, len(tuf.roledb._roledb_dict))
-
- repository_name = 'example_repository'
- tuf.roledb.create_roledb(repository_name)
- self.assertEqual(2, len(tuf.roledb._roledb_dict))
- self.assertTrue(repository_name in tuf.roledb._roledb_dict)
-
- # Test for invalid and improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.create_roledb, 123)
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.create_roledb, 'default')
-
- # Reset the roledb so that subsequent test functions have access to the
- # original, default roledb.
- tuf.roledb.remove_roledb(repository_name)
-
-
-
- def test_remove_roledb(self):
- # Verify that the named repository is removed from the roledb.
- repository_name = 'example_repository'
-
- rolename = 'targets'
- roleinfo = {'keyids': ['123'], 'threshold': 1}
-
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.remove_roledb, 'default')
- tuf.roledb.create_roledb(repository_name)
-
- tuf.roledb.remove_roledb(repository_name)
-
- # remove_roledb() should not raise an exception if a non-existent
- # 'repository_name' is specified.
- tuf.roledb.remove_roledb(repository_name)
-
- # Ensure the roledb is reset to its original, default state. Subsequent
- # test functions expect only the 'default' repository to exist in the roledb.
- tuf.roledb.remove_roledb(repository_name)
-
-
-
- def test_clear_roledb(self):
- # Test for an empty roledb, a length of 1 after adding a role, and finally
- # an empty roledb after calling 'clear_roledb()'.
- self.assertEqual(0, len(tuf.roledb._roledb_dict['default']))
- tuf.roledb._roledb_dict['default']['Root'] = {'keyids': ['123'], 'threshold': 1}
- self.assertEqual(1, len(tuf.roledb._roledb_dict['default']))
- tuf.roledb.clear_roledb()
- self.assertEqual(0, len(tuf.roledb._roledb_dict['default']))
-
- # Verify that the roledb can be cleared for a non-default repository.
- rolename = 'targets'
- roleinfo = {'keyids': ['123'], 'threshold': 1}
-
- repository_name = 'example_repository'
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.clear_roledb, repository_name)
- tuf.roledb.create_roledb(repository_name)
- tuf.roledb.add_role(rolename, roleinfo, repository_name)
- self.assertEqual(roleinfo['keyids'], tuf.roledb.get_role_keyids(rolename, repository_name))
- tuf.roledb.clear_roledb(repository_name)
- self.assertFalse(tuf.roledb.role_exists(rolename, repository_name))
-
- # Reset the roledb so that subsequent tests have access to the original,
- # default roledb.
- tuf.roledb.remove_roledb(repository_name)
-
- # Test condition for invalid and unexpected arguments.
- self.assertRaises(TypeError, tuf.roledb.clear_roledb, 'default', False, 'unexpected_argument')
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.clear_roledb, 123)
-
-
-
- def test_add_role(self):
- # Test conditions where the arguments are valid.
- self.assertEqual(0, len(tuf.roledb._roledb_dict['default']))
- rolename = 'targets'
- roleinfo = {'keyids': ['123'], 'threshold': 1}
- rolename2 = 'role1'
- self.assertEqual(None, tuf.roledb.add_role(rolename, roleinfo))
- self.assertEqual(1, len(tuf.roledb._roledb_dict['default']))
- tuf.roledb.clear_roledb()
- self.assertEqual(None, tuf.roledb.add_role(rolename, roleinfo))
- self.assertEqual(1, len(tuf.roledb._roledb_dict['default']))
-
- # Verify that a role can be added to a non-default repository.
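The assertions against tuf.roledb._roledb_dict in these tests read more easily with the internal layout in mind; the sketch below models it as a plain nested dict, an assumption based only on the accesses made here.

# Modeled after the accesses in the tests above: one dict per repository
# name, each mapping a rolename to its roleinfo dictionary.
_roledb_dict = {
    'default': {},
    'example_repository': {},
}
_roledb_dict['default']['root'] = {'keyids': ['123'], 'threshold': 1}
assert len(_roledb_dict['default']) == 1
_roledb_dict['default'].clear()    # what clear_roledb() does per repository
assert len(_roledb_dict['default']) == 0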
- repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.clear_roledb, - repository_name) - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertEqual(roleinfo['keyids'], tuf.roledb.get_role_keyids(rolename, - repository_name)) - - # Reset the roledb so that subsequent tests have access to a default - # roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where the arguments are improperly formatted. - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, None, roleinfo) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, 123, roleinfo) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, [''], roleinfo) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, rolename, None) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, rolename, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, rolename, ['']) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.add_role, rolename, roleinfo, 123) - - - # Test condition where the rolename already exists in the role database. - self.assertRaises(tuf.exceptions.RoleAlreadyExistsError, tuf.roledb.add_role, - rolename, roleinfo) - - # Test where the repository name does not exist in the role database. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.add_role, - 'new_role', roleinfo, 'non-existent') - - # Test conditions for invalid rolenames. - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.add_role, ' badrole ', - roleinfo) - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.add_role, '/badrole/', - roleinfo) - - - - - - def test_role_exists(self): - # Test conditions where the arguments are valid. - rolename = 'targets' - roleinfo = {'keyids': ['123'], 'threshold': 1} - rolename2 = 'role1' - - self.assertEqual(False, tuf.roledb.role_exists(rolename)) - tuf.roledb.add_role(rolename, roleinfo) - tuf.roledb.add_role(rolename2, roleinfo) - self.assertEqual(True, tuf.roledb.role_exists(rolename)) - self.assertEqual(True, tuf.roledb.role_exists(rolename2)) - - # Verify that a role can be queried for a non-default repository. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.clear_roledb, repository_name) - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.role_exists, rolename, repository_name) - - tuf.roledb.create_roledb(repository_name) - self.assertEqual(False, tuf.roledb.role_exists(rolename, repository_name)) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertTrue(tuf.roledb.role_exists(rolename, repository_name)) - - # Reset the roledb so that subsequent tests have access to the original, - # default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where the arguments are improperly formatted. 
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.role_exists, None)
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.role_exists, 123)
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.role_exists, ['rolename'])
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.role_exists, rolename, 123)
-
- # Test conditions for invalid rolenames.
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.role_exists, '')
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.role_exists, ' badrole ')
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.role_exists, '/badrole/')
-
-
-
-
-
- def test_remove_role(self):
- # Test conditions where the arguments are valid.
- rolename = 'targets'
- rolename2 = 'release'
- rolename3 = 'django'
- roleinfo = {'keyids': ['123'], 'threshold': 1}
- roleinfo2 = {'keyids': ['123'], 'threshold': 1, 'delegations':
- {'roles': [{'name': 'django', 'keyids': ['456'], 'threshold': 1}],
- 'keys': {'456': {'keytype': 'rsa', 'keyval': {'public': '456'}},
- }}}
-
- tuf.roledb.add_role(rolename, roleinfo)
- tuf.roledb.add_role(rolename2, roleinfo2)
- tuf.roledb.add_role(rolename3, roleinfo)
-
- self.assertEqual(None, tuf.roledb.remove_role(rolename))
- self.assertEqual(True, rolename not in tuf.roledb._roledb_dict['default'])
-
- # Verify that a role can be removed from a non-default repository.
- repository_name = 'example_repository'
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.remove_role, rolename, repository_name)
- tuf.roledb.create_roledb(repository_name)
-
- tuf.roledb.add_role(rolename, roleinfo, repository_name)
- self.assertEqual(roleinfo['keyids'], tuf.roledb.get_role_keyids(rolename, repository_name))
- self.assertEqual(None, tuf.roledb.remove_role(rolename, repository_name))
-
- # Verify that a role cannot be removed from a non-existent repository name.
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.remove_role, rolename, 'non-existent')
-
- # Reset the roledb so that subsequent tests have access to the original,
- # default roledb.
- tuf.roledb.remove_roledb(repository_name)
-
- # Test conditions where removing a role does not cause the removal of its
- # delegated roles. The 'django' role should now be the only remaining role,
- # after the removal of 'targets' in the previous test condition and the
- # removal of 'release' in the remove_role() call below.
- self.assertEqual(None, tuf.roledb.remove_role(rolename2))
- self.assertEqual(1, len(tuf.roledb._roledb_dict['default']))
-
- # Test conditions where the arguments are improperly formatted,
- # contain invalid names, or haven't been added to the role database.
- self._test_rolename(tuf.roledb.remove_role)
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.remove_role, rolename, 123)
-
-
-
-
- def test_get_rolenames(self):
- # Test conditions where the arguments are valid.
- rolename = 'targets'
- rolename2 = 'role1'
- roleinfo = {'keyids': ['123'], 'threshold': 1}
- self.assertEqual([], tuf.roledb.get_rolenames())
- tuf.roledb.add_role(rolename, roleinfo)
- tuf.roledb.add_role(rolename2, roleinfo)
- self.assertEqual(set(['targets', 'role1']),
- set(tuf.roledb.get_rolenames()))
-
- # Verify that rolenames can be retrieved for a role in a non-default
- # repository.
- repository_name = 'example_repository'
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_rolenames, repository_name)
- tuf.roledb.create_roledb(repository_name)
- tuf.roledb.add_role(rolename, roleinfo, repository_name)
- tuf.roledb.add_role(rolename2, roleinfo, repository_name)
-
- self.assertEqual(set(['targets', 'role1']),
- set(tuf.roledb.get_rolenames(repository_name)))
-
- # Reset the roledb so that subsequent tests have access to the original,
- # default repository.
- tuf.roledb.remove_roledb(repository_name)
-
- # Test for invalid or improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_rolenames, 123)
-
-
-
- def test_get_role_info(self):
- # Test conditions where the arguments are valid.
- rolename = 'targets'
- rolename2 = 'role1'
- roleinfo = {'keyids': ['123'], 'threshold': 1}
- roleinfo2 = {'keyids': ['456', '789'], 'threshold': 2}
- self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.get_roleinfo, rolename)
- tuf.roledb.add_role(rolename, roleinfo)
- tuf.roledb.add_role(rolename2, roleinfo2)
-
- self.assertEqual(roleinfo, tuf.roledb.get_roleinfo(rolename))
- self.assertEqual(roleinfo2, tuf.roledb.get_roleinfo(rolename2))
-
- # Verify that a roleinfo can be retrieved for a role in a non-default
- # repository.
- repository_name = 'example_repository'
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_roleinfo,
- rolename, repository_name)
-
- tuf.roledb.create_roledb(repository_name)
- tuf.roledb.add_role(rolename, roleinfo, repository_name)
- self.assertEqual(roleinfo, tuf.roledb.get_roleinfo(rolename, repository_name))
-
- # Verify that a roleinfo cannot be retrieved for a non-existent repository
- # name.
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_roleinfo, rolename,
- 'non-existent')
-
- # Reset the roledb so that subsequent tests have access to the original,
- # default roledb.
- tuf.roledb.remove_roledb(repository_name)
-
- # Test conditions where the arguments are improperly formatted, contain
- # invalid names, or haven't been added to the role database.
- self._test_rolename(tuf.roledb.get_roleinfo)
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_roleinfo, rolename, 123)
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_roleinfo, 123)
-
-
-
- def test_get_role_keyids(self):
- # Test conditions where the arguments are valid.
- rolename = 'targets'
- rolename2 = 'role1'
- roleinfo = {'keyids': ['123'], 'threshold': 1}
- roleinfo2 = {'keyids': ['456', '789'], 'threshold': 2}
- self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.get_role_keyids, rolename)
- tuf.roledb.add_role(rolename, roleinfo)
- tuf.roledb.add_role(rolename2, roleinfo2)
-
- self.assertEqual(['123'], tuf.roledb.get_role_keyids(rolename))
- self.assertEqual(set(['456', '789']),
- set(tuf.roledb.get_role_keyids(rolename2)))
-
- # Verify that the role keyids can be retrieved for a role in a non-default
- # repository.
- repository_name = 'example_repository'
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_role_keyids,
- rolename, repository_name)
- tuf.roledb.create_roledb(repository_name)
- tuf.roledb.add_role(rolename, roleinfo, repository_name)
- self.assertEqual(['123'], tuf.roledb.get_role_keyids(rolename, repository_name))
-
- # Verify that role keyids cannot be retrieved from a non-existent
- # repository name.
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_role_keyids, rolename,
- 'non-existent')
-
- # Reset the roledb so that subsequent tests have access to the original,
- # default roledb.
- tuf.roledb.remove_roledb(repository_name)
-
- # Test conditions where the arguments are improperly formatted, contain
- # invalid names, or haven't been added to the role database.
- self._test_rolename(tuf.roledb.get_role_keyids)
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_role_keyids, rolename, 123)
-
-
-
- def test_get_role_threshold(self):
- # Test conditions where the arguments are valid.
- rolename = 'targets'
- rolename2 = 'role1'
- roleinfo = {'keyids': ['123'], 'threshold': 1}
- roleinfo2 = {'keyids': ['456', '789'], 'threshold': 2}
- self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.get_role_threshold, rolename)
- tuf.roledb.add_role(rolename, roleinfo)
- tuf.roledb.add_role(rolename2, roleinfo2)
-
- self.assertEqual(1, tuf.roledb.get_role_threshold(rolename))
- self.assertEqual(2, tuf.roledb.get_role_threshold(rolename2))
-
- # Verify that the threshold can be retrieved for a role in a non-default
- # repository.
- repository_name = 'example_repository'
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_role_threshold,
- rolename, repository_name)
- tuf.roledb.create_roledb(repository_name)
- tuf.roledb.add_role(rolename, roleinfo, repository_name)
- self.assertEqual(roleinfo['threshold'], tuf.roledb.get_role_threshold(rolename, repository_name))
-
- # Verify that a role's threshold cannot be retrieved from a non-existent
- # repository name.
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_role_threshold,
- rolename, 'non-existent')
-
- # Reset the roledb so that subsequent tests have access to the original,
- # default roledb.
- tuf.roledb.remove_roledb(repository_name)
-
- # Test conditions where the arguments are improperly formatted,
- # contain invalid names, or haven't been added to the role database.
- self._test_rolename(tuf.roledb.get_role_threshold)
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_role_threshold, rolename, 123)
-
-
- def test_get_role_paths(self):
- # Test conditions where the arguments are valid.
- rolename = 'targets'
- rolename2 = 'role1'
- roleinfo = {'keyids': ['123'], 'threshold': 1}
- paths = ['a/b', 'c/d']
- roleinfo2 = {'keyids': ['456', '789'], 'threshold': 2, 'paths': paths}
- self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.get_role_paths, rolename)
- tuf.roledb.add_role(rolename, roleinfo)
- tuf.roledb.add_role(rolename2, roleinfo2)
-
- self.assertEqual({}, tuf.roledb.get_role_paths(rolename))
- self.assertEqual(paths, tuf.roledb.get_role_paths(rolename2))
-
- # Verify that role paths can be queried for roles in non-default
- # repositories.
- repository_name = 'example_repository'
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_role_paths,
- rolename, repository_name)
-
- tuf.roledb.create_roledb(repository_name)
- tuf.roledb.add_role(rolename2, roleinfo2, repository_name)
- self.assertEqual(roleinfo2['paths'], tuf.roledb.get_role_paths(rolename2,
- repository_name))
-
- # Reset the roledb so that subsequent tests have access to the original,
- # default roledb.
- tuf.roledb.remove_roledb(repository_name)
-
- # Test conditions where the arguments are improperly formatted,
- # contain invalid names, or haven't been added to the role database.
- self._test_rolename(tuf.roledb.get_role_paths) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_role_paths, rolename, 123) - - - - def test_get_delegated_rolenames(self): - # Test conditions where the arguments are valid. - rolename = 'unclaimed' - rolename2 = 'django' - rolename3 = 'release' - rolename4 = 'tuf' - - # unclaimed's roleinfo. - roleinfo = {'keyids': ['123'], 'threshold': 1, 'delegations': - {'roles': [{'name': 'django', 'keyids': ['456'], 'threshold': 1}, - {'name': 'tuf', 'keyids': ['888'], 'threshold': 1}], - 'keys': {'456': {'keytype': 'rsa', 'keyval': {'public': '456'}}, - }}} - - # django's roleinfo. - roleinfo2 = {'keyids': ['456'], 'threshold': 1, 'delegations': - {'roles': [{'name': 'release', 'keyids': ['789'], 'threshold': 1}], - 'keys': {'789': {'keytype': 'rsa', 'keyval': {'public': '789'}}, - }}} - - # release's roleinfo. - roleinfo3 = {'keyids': ['789'], 'threshold': 1, 'delegations': - {'roles': [], - 'keys': {}}} - - # tuf's roleinfo. - roleinfo4 = {'keyids': ['888'], 'threshold': 1, 'delegations': - {'roles': [], - 'keys': {}}} - - self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.get_delegated_rolenames, - rolename) - - tuf.roledb.add_role(rolename, roleinfo) - tuf.roledb.add_role(rolename2, roleinfo2) - tuf.roledb.add_role(rolename3, roleinfo3) - tuf.roledb.add_role(rolename4, roleinfo4) - - self.assertEqual(set(['django', 'tuf']), - set(tuf.roledb.get_delegated_rolenames(rolename))) - - self.assertEqual(set(['release']), - set(tuf.roledb.get_delegated_rolenames(rolename2))) - - self.assertEqual(set([]), - set(tuf.roledb.get_delegated_rolenames(rolename3))) - - self.assertEqual(set([]), - set(tuf.roledb.get_delegated_rolenames(rolename4))) - - # Verify that the delegated rolenames of a role in a non-default - # repository can be accessed. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_delegated_rolenames, - rolename, repository_name) - tuf.roledb.create_roledb(repository_name) - tuf.roledb.add_role(rolename, roleinfo, repository_name) - self.assertEqual(set(['django', 'tuf']), - set(tuf.roledb.get_delegated_rolenames(rolename, repository_name))) - - # Reset the roledb so that subsequent tests have access to the original, - # default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions where the arguments are improperly formatted, - # contain invalid names, or haven't been added to the role database. - self._test_rolename(tuf.roledb.get_delegated_rolenames) - self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_delegated_rolenames, rolename, 123) - - - - def test_create_roledb_from_root_metadata(self): - # Test condition using a valid 'root_metadata' argument. 
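- # The helper called below assembles (roughly) the following 'signed'
- # portion of root metadata; a sketch for orientation only, with
- # placeholder keyids:
- #
- #   {'_type': 'root', 'spec_version': '1.0.0', 'version': 8,
- #    'expires': '1985-10-21T01:21:00Z',
- #    'keys': {'<keyid>': '<rsakey>', ...},
- #    'roles': {'root': {'keyids': ['<keyid>'], 'threshold': 1}, ...},
- #    'consistent_snapshot': False}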
- rsakey = KEYS[0] - keyid = KEYS[0]['keyid'] - rsakey2 = KEYS[1] - keyid2 = KEYS[1]['keyid'] - rsakey3 = KEYS[2] - keyid3 = KEYS[2]['keyid'] - keydict = {keyid: rsakey, keyid2: rsakey2} - roledict = {'root': {'keyids': [keyid], 'threshold': 1}, - 'targets': {'keyids': [keyid2], 'threshold': 1}} - version = 8 - consistent_snapshot = False - expires = '1985-10-21T01:21:00Z' - - root_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version='1.0.0', - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - self.assertEqual(None, - tuf.roledb.create_roledb_from_root_metadata(root_metadata)) - - # Ensure 'Root' and 'Targets' were added to the role database. - self.assertEqual([keyid], tuf.roledb.get_role_keyids('root')) - self.assertEqual([keyid2], tuf.roledb.get_role_keyids('targets')) - - # Test that a roledb is created for a non-default repository. - repository_name = 'example_repository' - self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.clear_roledb, - repository_name) - tuf.roledb.create_roledb_from_root_metadata(root_metadata, repository_name) - self.assertEqual([keyid], tuf.roledb.get_role_keyids('root', repository_name)) - self.assertEqual([keyid2], tuf.roledb.get_role_keyids('targets', repository_name)) - - # Remove the example repository added to the roledb so that subsequent - # tests have access to an original, default roledb. - tuf.roledb.remove_roledb(repository_name) - - # Test conditions for arguments with invalid formats. - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, None) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, '') - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, 123) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, ['123']) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, {'bad': '123'}) - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.roledb.create_roledb_from_root_metadata, root_metadata, 123) - - # Verify that the expected roles of a Root file are properly loaded. - tuf.roledb.clear_roledb() - roledict = {'root': {'keyids': [keyid], 'threshold': 1}, - 'release': {'keyids': [keyid3], 'threshold': 1}} - version = 8 - - # Add a third key for 'release'. - keydict[keyid3] = rsakey3 - - # Generate 'root_metadata' to verify that 'release' and 'root' are added - # to the role database. - - root_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - _type='root', - spec_version='1.0.0', - version=version, - expires=expires, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - self.assertEqual(None, - tuf.roledb.create_roledb_from_root_metadata(root_metadata)) - - # Ensure only 'root' and 'release' were added to the role database. - self.assertEqual(2, len(tuf.roledb._roledb_dict['default'])) - self.assertEqual(True, tuf.roledb.role_exists('root')) - self.assertEqual(True, tuf.roledb.role_exists('release')) - - - - def test_update_roleinfo(self): - rolename = 'targets' - roleinfo = {'keyids': ['123'], 'threshold': 1} - tuf.roledb.add_role(rolename, roleinfo) - - # Test normal case. 
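- # update_roleinfo() replaces the stored roleinfo for 'rolename' in place.
- # Its 'mark_role_as_dirty' argument is believed to default to True in this
- # API (the calls further below pass it explicitly), so even this no-op
- # update may flag 'targets' as needing to be re-signed.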
- tuf.roledb.update_roleinfo(rolename, roleinfo)
-
- # Verify that a roleinfo can be updated for a role in a non-default
- # repository.
- repository_name = 'example_repository'
- mark_role_as_dirty = True
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.clear_roledb, repository_name)
- tuf.roledb.create_roledb(repository_name)
- tuf.roledb.add_role(rolename, roleinfo, repository_name)
- tuf.roledb.update_roleinfo(rolename, roleinfo, mark_role_as_dirty, repository_name)
- self.assertEqual(roleinfo['keyids'], tuf.roledb.get_role_keyids(rolename, repository_name))
-
- # Reset the roledb so that subsequent tests can access the default roledb.
- tuf.roledb.remove_roledb(repository_name)
-
- # Test for an unknown role.
- self.assertRaises(tuf.exceptions.UnknownRoleError, tuf.roledb.update_roleinfo,
- 'unknown_rolename', roleinfo)
-
- # Verify that a roleinfo cannot be updated in a non-existent repository.
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.update_roleinfo,
- 'new_rolename', roleinfo, False, 'non-existent')
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.update_roleinfo, 1, roleinfo)
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.update_roleinfo, rolename, 1)
-
- repository_name = 'example_repository'
- mark_role_as_dirty = True
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.update_roleinfo, rolename,
- roleinfo, 1, repository_name)
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.update_roleinfo,
- rolename, roleinfo, mark_role_as_dirty, 123)
-
-
-
- def test_get_dirty_roles(self):
- # Verify that the dirty roles of a repository are returned.
- rolename = 'targets'
- roleinfo1 = {'keyids': ['123'], 'threshold': 1}
- tuf.roledb.add_role(rolename, roleinfo1)
- roleinfo2 = {'keyids': ['123'], 'threshold': 2}
- mark_role_as_dirty = True
- tuf.roledb.update_roleinfo(rolename, roleinfo2, mark_role_as_dirty)
- # Note: The 'default' repository is searched if the repository name is
- # not given to get_dirty_roles().
- self.assertEqual([rolename], tuf.roledb.get_dirty_roles())
-
- # Verify that a list of dirty roles is returned for a non-default
- # repository.
- repository_name = 'example_repository'
- tuf.roledb.create_roledb(repository_name)
- tuf.roledb.add_role(rolename, roleinfo1, repository_name)
- tuf.roledb.update_roleinfo(rolename, roleinfo2, mark_role_as_dirty, repository_name)
- self.assertEqual([rolename], tuf.roledb.get_dirty_roles(repository_name))
-
- # Verify that dirty roles are not returned for a non-existent repository.
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.get_dirty_roles, 'non-existent')
-
- # Reset the roledb so that subsequent tests have access to a default
- # roledb.
- tuf.roledb.remove_roledb(repository_name)
-
- # Test for improperly formatted argument.
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.roledb.get_dirty_roles, 123)
-
-
-
- def test_mark_dirty(self):
- # Add a dirty role to roledb.
- rolename = 'targets'
- roleinfo1 = {'keyids': ['123'], 'threshold': 1}
- tuf.roledb.add_role(rolename, roleinfo1)
- rolename2 = 'dirty_role'
- roleinfo2 = {'keyids': ['123'], 'threshold': 2}
- mark_role_as_dirty = True
- tuf.roledb.update_roleinfo(rolename, roleinfo1, mark_role_as_dirty)
- # Note: The 'default' repository is searched if the repository name is
- # not given to get_dirty_roles().
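- # The dirty-role lifecycle exercised in these tests, in brief:
- #   update_roleinfo(role, info, True)  ->  role is marked dirty
- #   mark_dirty([role])                 ->  rolenames are marked dirty directly
- #   get_dirty_roles()                  ->  returns the dirty rolenames
- #   unmark_dirty([role])               ->  clears the flag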
- self.assertEqual([rolename], tuf.roledb.get_dirty_roles())
-
- tuf.roledb.mark_dirty(['dirty_role'])
- self.assertEqual([rolename2, rolename], tuf.roledb.get_dirty_roles())
-
- # Verify that a role cannot be marked as dirty for a non-existent
- # repository.
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.mark_dirty,
- ['dirty_role'], 'non-existent')
-
-
-
- def test_unmark_dirty(self):
- # Add a dirty role to roledb.
- rolename = 'targets'
- roleinfo1 = {'keyids': ['123'], 'threshold': 1}
- tuf.roledb.add_role(rolename, roleinfo1)
- rolename2 = 'dirty_role'
- roleinfo2 = {'keyids': ['123'], 'threshold': 2}
- tuf.roledb.add_role(rolename2, roleinfo2)
- mark_role_as_dirty = True
- tuf.roledb.update_roleinfo(rolename, roleinfo1, mark_role_as_dirty)
- # Note: The 'default' repository is searched if the repository name is
- # not given to get_dirty_roles().
- self.assertEqual([rolename], tuf.roledb.get_dirty_roles())
- tuf.roledb.update_roleinfo(rolename2, roleinfo2, mark_role_as_dirty)
-
- tuf.roledb.unmark_dirty(['dirty_role'])
- self.assertEqual([rolename], tuf.roledb.get_dirty_roles())
- tuf.roledb.unmark_dirty(['targets'])
- self.assertEqual([], tuf.roledb.get_dirty_roles())
-
- # What happens for a role that isn't dirty? unmark_dirty() should just
- # log a message.
- tuf.roledb.unmark_dirty(['unknown_role'])
-
- # Verify that a role cannot be unmarked as dirty for a non-existent
- # repository.
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, tuf.roledb.unmark_dirty,
- ['dirty_role'], 'non-existent')
-
-
- def _test_rolename(self, test_function):
- # Private function that tests the 'rolename' argument of 'test_function'
- # for format, invalid name, and unknown role exceptions.
-
- # Test conditions where the arguments are improperly formatted.
- self.assertRaises(securesystemslib.exceptions.FormatError, test_function, None)
- self.assertRaises(securesystemslib.exceptions.FormatError, test_function, 123)
- self.assertRaises(securesystemslib.exceptions.FormatError, test_function, ['rolename'])
- self.assertRaises(securesystemslib.exceptions.FormatError, test_function, {'a': 'b'})
- self.assertRaises(securesystemslib.exceptions.FormatError, test_function, ('a', 'b'))
- self.assertRaises(securesystemslib.exceptions.FormatError, test_function, True)
-
- # Test condition where the 'rolename' has not been added to the role database.
- self.assertRaises(tuf.exceptions.UnknownRoleError, test_function, 'badrole')
-
- # Test conditions for invalid rolenames.
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, test_function, '')
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, test_function, ' badrole ')
- self.assertRaises(securesystemslib.exceptions.InvalidNameError, test_function, '/badrole/')
-
-
-
-def setUpModule():
- # setUpModule() is called before any test cases run.
- # Ensure the roledb has not been modified by a previous test, which may
- # affect assumptions (i.e., empty roledb) made by the test cases in this
- # unit test.
- tuf.roledb.clear_roledb()
-
-def tearDownModule():
- # tearDownModule() is called after all the tests have run.
- # Ensure we clean up roledb. Courtesy is contagious, and it begins with
- # test_roledb_old.py.
- tuf.roledb.clear_roledb()
-
-
-
-# Run the unit tests.
-if __name__ == '__main__':
- utils.configure_test_logging(sys.argv)
- unittest.main()
diff --git a/tests/test_root_versioning_integration_old.py b/tests/test_root_versioning_integration_old.py
deleted file mode 100755
index 251bdfe6c4..0000000000
--- a/tests/test_root_versioning_integration_old.py
+++ /dev/null
@@ -1,230 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2016 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-
- test_root_versioning_integration_old.py
-
-
- Evan Cordell.
-
-
- July 21, 2016.
-
-
- See LICENSE-MIT OR LICENSE for licensing information.
-
-
- Test root versioning for efficient root key rotation.
-"""
-
-
-import os
-import logging
-import tempfile
-import shutil
-import unittest
-import sys
-
-import tuf
-import tuf.log
-import tuf.formats
-import tuf.exceptions
-import tuf.roledb
-import tuf.keydb
-import tuf.repository_tool as repo_tool
-
-from tests import utils
-
-import securesystemslib
-import securesystemslib.storage
-
-logger = logging.getLogger(__name__)
-
-repo_tool.disable_console_log_messages()
-
-
-class TestRepository(unittest.TestCase):
-
- @classmethod
- def setUpClass(cls):
- cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd())
-
- @classmethod
- def tearDownClass(cls):
- shutil.rmtree(cls.temporary_directory)
-
- def tearDown(self):
- tuf.roledb.clear_roledb()
- tuf.keydb.clear_keydb()
-
- def test_init(self):
- # Test normal case.
- storage_backend = securesystemslib.storage.FilesystemBackend()
- repository = repo_tool.Repository('repository_directory/',
- 'metadata_directory/',
- 'targets_directory/',
- storage_backend)
- self.assertTrue(isinstance(repository.root, repo_tool.Root))
- self.assertTrue(isinstance(repository.snapshot, repo_tool.Snapshot))
- self.assertTrue(isinstance(repository.timestamp, repo_tool.Timestamp))
- self.assertTrue(isinstance(repository.targets, repo_tool.Targets))
-
- # Test improperly formatted arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository, 3,
- 'metadata_directory/', 'targets_directory', storage_backend)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository,
- 'repository_directory', 3, 'targets_directory', storage_backend)
- self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.Repository,
- 'repository_directory', 'metadata_directory', 3, storage_backend)
-
-
-
- def test_root_role_versioning(self):
- # Test root role versioning
- #
- # 1. Import public and private keys.
- # 2. Add verification keys.
- # 3. Load signing keys.
- # 4. Add target files.
- # 5. Perform delegation.
- # 6. writeall()
- #
- # Copy the target files from 'tuf/tests/repository_data' so that writeall()
- # has target fileinfo to include in metadata.
- temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
- targets_directory = os.path.join(temporary_directory, 'repository',
- repo_tool.TARGETS_DIRECTORY_NAME)
- original_targets_directory = os.path.join('repository_data',
- 'repository', 'targets')
- shutil.copytree(original_targets_directory, targets_directory)
-
- # In this case, create_new_repository() creates the 'repository/'
- # sub-directory in 'temporary_directory' if it does not exist.
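- # Expected on-disk layout after this setup (a sketch; metadata is staged
- # before it is made "live"):
- #
- #   <temporary_directory>/repository/
- #       metadata.staged/   <- repo_tool.METADATA_STAGED_DIRECTORY_NAME
- #       targets/           <- target files copied in above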
- repository_directory = os.path.join(temporary_directory, 'repository') - metadata_directory = os.path.join(repository_directory, - repo_tool.METADATA_STAGED_DIRECTORY_NAME) - repository = repo_tool.create_new_repository(repository_directory) - - - - - # (1) Load the public and private keys of the top-level roles, and one - # delegated role. - keystore_directory = os.path.join('repository_data', 'keystore') - - # Load the public keys. - root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub') - targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub') - snapshot_pubkey_path = os.path.join(keystore_directory, 'snapshot_key.pub') - timestamp_pubkey_path = os.path.join(keystore_directory, 'timestamp_key.pub') - role1_pubkey_path = os.path.join(keystore_directory, 'delegation_key.pub') - - root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path) - targets_pubkey = repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path) - snapshot_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path) - timestamp_pubkey = \ - repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path) - role1_pubkey = repo_tool.import_ed25519_publickey_from_file(role1_pubkey_path) - - # Load the private keys. - root_privkey_path = os.path.join(keystore_directory, 'root_key') - targets_privkey_path = os.path.join(keystore_directory, 'targets_key') - snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key') - timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key') - role1_privkey_path = os.path.join(keystore_directory, 'delegation_key') - - root_privkey = \ - repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password') - targets_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, 'password') - snapshot_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path, - 'password') - timestamp_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path, - 'password') - role1_privkey = \ - repo_tool.import_ed25519_privatekey_from_file(role1_privkey_path, - 'password') - - - # (2) Add top-level verification keys. - repository.root.add_verification_key(root_pubkey) - repository.targets.add_verification_key(targets_pubkey) - repository.snapshot.add_verification_key(snapshot_pubkey) - repository.timestamp.add_verification_key(timestamp_pubkey) - - - # (3) Load top-level signing keys. - repository.root.load_signing_key(root_privkey) - repository.targets.load_signing_key(targets_privkey) - repository.snapshot.load_signing_key(snapshot_privkey) - repository.timestamp.load_signing_key(timestamp_privkey) - - # (4) Add target files. - target1 = 'file1.txt' - target2 = 'file2.txt' - target3 = 'file3.txt' - repository.targets.add_target(target1) - repository.targets.add_target(target2) - - - # (5) Perform delegation. - repository.targets.delegate('role1', [role1_pubkey], [target3]) - repository.targets('role1').load_signing_key(role1_privkey) - - # (6) Write repository. - repository.writeall() - - self.assertTrue(os.path.exists(os.path.join(metadata_directory, 'root.json'))) - self.assertTrue(os.path.exists(os.path.join(metadata_directory, '1.root.json'))) - - - # Verify that the expected metadata is written. 
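- # Note on root versioning: besides 'root.json' (the latest version),
- # writeall() keeps a version-prefixed copy of each root file
- # ('1.root.json', '2.root.json', ...), so the checks below can compare a
- # version-prefixed root against the unprefixed, latest one.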
- root_filepath = os.path.join(metadata_directory, 'root.json')
- root_1_filepath = os.path.join(metadata_directory, '1.root.json')
- root_2_filepath = os.path.join(metadata_directory, '2.root.json')
- old_root_signable = securesystemslib.util.load_json_file(root_filepath)
- root_1_signable = securesystemslib.util.load_json_file(root_1_filepath)
-
- # Make a change to the root keys
- repository.root.add_verification_key(targets_pubkey)
- repository.root.load_signing_key(targets_privkey)
- repository.root.threshold = 2
- repository.writeall()
-
- new_root_signable = securesystemslib.util.load_json_file(root_filepath)
- root_2_signable = securesystemslib.util.load_json_file(root_2_filepath)
-
- for role_signable in [old_root_signable, new_root_signable, root_1_signable, root_2_signable]:
- # Raise 'securesystemslib.exceptions.FormatError' if 'role_signable' is an
- # invalid signable.
- tuf.formats.check_signable_object_format(role_signable)
-
- # Verify contents of versioned roots
- self.assertEqual(old_root_signable, root_1_signable)
- self.assertEqual(new_root_signable, root_2_signable)
-
- self.assertEqual(root_1_signable['signed']['version'], 1)
- self.assertEqual(root_2_signable['signed']['version'], 2)
-
- repository.root.remove_verification_key(root_pubkey)
- repository.root.unload_signing_key(root_privkey)
- repository.root.threshold = 2
-
- # Error: not enough signing keys to satisfy root's threshold.
- self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall)
-
- # No error: write() ignores root's threshold and allows root to be
- # written to disk partially signed.
- repository.write('root')
-
-
-
-if __name__ == '__main__':
- utils.configure_test_logging(sys.argv)
- unittest.main()
diff --git a/tests/test_sig_old.py b/tests/test_sig_old.py
deleted file mode 100755
index d93659dad0..0000000000
--- a/tests/test_sig_old.py
+++ /dev/null
@@ -1,546 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-
- test_sig_old.py
-
-
- Geremy Condra
- Vladimir Diaz
-
-
- February 28, 2012. Based on a previous version of this module.
-
-
- See LICENSE-MIT OR LICENSE for licensing information.
-
-
- Test cases for sig.py.
-"""
-
-import unittest
-import logging
-import copy
-import sys
-
-import tuf
-import tuf.log
-import tuf.formats
-import tuf.keydb
-import tuf.roledb
-import tuf.sig
-import tuf.exceptions
-
-from tests import utils
-
-import securesystemslib
-import securesystemslib.keys
-
-logger = logging.getLogger(__name__)
-
-# Setup the keys to use in our test cases.
-KEYS = []
-for _ in range(3):
- KEYS.append(securesystemslib.keys.generate_rsa_key(2048))
-
-
-
-class TestSig(unittest.TestCase):
- def setUp(self):
- pass
-
- def tearDown(self):
- tuf.roledb.clear_roledb()
- tuf.keydb.clear_keydb()
-
-
- def test_get_signature_status_no_role(self):
- signable = {'signed': 'test', 'signatures': []}
-
- # A valid, but empty signature status.
- sig_status = tuf.sig.get_signature_status(signable)
- self.assertTrue(tuf.formats.SIGNATURESTATUS_SCHEMA.matches(sig_status))
-
- self.assertEqual(0, sig_status['threshold'])
- self.assertEqual([], sig_status['good_sigs'])
- self.assertEqual([], sig_status['bad_sigs'])
- self.assertEqual([], sig_status['unknown_sigs'])
- self.assertEqual([], sig_status['untrusted_sigs'])
- self.assertEqual([], sig_status['unknown_signing_schemes'])
-
- # A valid signable, but non-existent role argument.
- self.assertRaises(tuf.exceptions.UnknownRoleError, - tuf.sig.get_signature_status, signable, 'unknown_role') - - # Should verify we are not adding a duplicate signature - # when doing the following action. Here we know 'signable' - # has only one signature so it's okay. - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - tuf.keydb.add_key(KEYS[0]) - - # Improperly formatted role. - self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.sig.get_signature_status, signable, 1) - - # Not allowed to call verify() without having specified a role. - args = (signable, None) - self.assertRaises(securesystemslib.exceptions.Error, tuf.sig.verify, *args) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - - - def test_get_signature_status_bad_sig(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - signable['signed'] += 'signature no longer matches signed data' - - tuf.keydb.add_key(KEYS[0]) - threshold = 1 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, keyids=[KEYS[0]['keyid']], threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertEqual(1, sig_status['threshold']) - self.assertEqual([], sig_status['good_sigs']) - self.assertEqual([KEYS[0]['keyid']], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - self.assertFalse(tuf.sig.verify(signable, 'Root')) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - # Remove the role. - tuf.roledb.remove_role('Root') - - - def test_get_signature_status_unknown_signing_scheme(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - valid_scheme = KEYS[0]['scheme'] - KEYS[0]['scheme'] = 'unknown_signing_scheme' - tuf.keydb.add_key(KEYS[0]) - threshold = 1 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, keyids=[KEYS[0]['keyid']], threshold=threshold) - - tuf.roledb.add_role('root', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'root') - - self.assertEqual(1, sig_status['threshold']) - self.assertEqual([], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([KEYS[0]['keyid']], - sig_status['unknown_signing_schemes']) - - self.assertFalse(tuf.sig.verify(signable, 'root')) - - # Done. Let's remove the added key(s) from the key database. - KEYS[0]['scheme'] = valid_scheme - tuf.keydb.remove_key(KEYS[0]['keyid']) - # Remove the role. 
- tuf.roledb.remove_role('root') - - - def test_get_signature_status_single_key(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - threshold = 1 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, keyids=[KEYS[0]['keyid']], threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - tuf.keydb.add_key(KEYS[0]) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertEqual(1, sig_status['threshold']) - self.assertEqual([KEYS[0]['keyid']], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - self.assertTrue(tuf.sig.verify(signable, 'Root')) - - # Test for an unknown signature when 'role' is left unspecified. - sig_status = tuf.sig.get_signature_status(signable) - - self.assertEqual(0, sig_status['threshold']) - self.assertEqual([], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([KEYS[0]['keyid']], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - # Remove the role. - tuf.roledb.remove_role('Root') - - - def test_get_signature_status_below_threshold(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - tuf.keydb.add_key(KEYS[0]) - threshold = 2 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, - keyids=[KEYS[0]['keyid'], KEYS[2]['keyid']], - threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertEqual(2, sig_status['threshold']) - self.assertEqual([KEYS[0]['keyid']], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - self.assertFalse(tuf.sig.verify(signable, 'Root')) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - - # Remove the role. - tuf.roledb.remove_role('Root') - - - def test_get_signature_status_below_threshold_unrecognized_sigs(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - # Two keys sign it, but only one of them will be trusted. 
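- # (KEYS[2] signs below, but it is never added to the keydb or to the
- # role's keyids, so its signature is expected to surface in
- # 'unknown_sigs' rather than count toward the threshold.)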
- signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[2], signed)) - - tuf.keydb.add_key(KEYS[0]) - tuf.keydb.add_key(KEYS[1]) - threshold = 2 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, - keyids=[KEYS[0]['keyid'], KEYS[1]['keyid']], - threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertEqual(2, sig_status['threshold']) - self.assertEqual([KEYS[0]['keyid']], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([KEYS[2]['keyid']], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - self.assertFalse(tuf.sig.verify(signable, 'Root')) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - tuf.keydb.remove_key(KEYS[1]['keyid']) - - # Remove the role. - tuf.roledb.remove_role('Root') - - - def test_get_signature_status_below_threshold_unauthorized_sigs(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - # Two keys sign it, but one of them is only trusted for a different - # role. - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[1], signed)) - - tuf.keydb.add_key(KEYS[0]) - tuf.keydb.add_key(KEYS[1]) - threshold = 2 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, - keyids=[KEYS[0]['keyid'], KEYS[2]['keyid']], - threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, - keyids=[KEYS[1]['keyid'], KEYS[2]['keyid']], - threshold=threshold) - - tuf.roledb.add_role('Release', roleinfo) - - sig_status = tuf.sig.get_signature_status(signable, 'Root') - - self.assertEqual(2, sig_status['threshold']) - self.assertEqual([KEYS[0]['keyid']], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([KEYS[1]['keyid']], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_signing_schemes']) - - self.assertFalse(tuf.sig.verify(signable, 'Root')) - - self.assertRaises(tuf.exceptions.UnknownRoleError, - tuf.sig.get_signature_status, signable, 'unknown_role') - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - tuf.keydb.remove_key(KEYS[1]['keyid']) - - # Remove the roles. - tuf.roledb.remove_role('Root') - tuf.roledb.remove_role('Release') - - - - def test_check_signatures_no_role(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - tuf.keydb.add_key(KEYS[0]) - - # No specific role we're considering. It's invalid to use the - # function tuf.sig.verify() without a role specified because - # tuf.sig.verify() is checking trust, as well. - args = (signable, None) - self.assertRaises(securesystemslib.exceptions.Error, tuf.sig.verify, *args) - - # Done. 
Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - - - - def test_verify_single_key(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - signable['signatures'].append(securesystemslib.keys.create_signature( - KEYS[0], signed)) - - tuf.keydb.add_key(KEYS[0]) - threshold = 1 - - roleinfo = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, keyids=[KEYS[0]['keyid']], threshold=threshold) - - tuf.roledb.add_role('Root', roleinfo) - - # This will call verify() and return True if 'signable' is valid, - # False otherwise. - self.assertTrue(tuf.sig.verify(signable, 'Root')) - - # Done. Let's remove the added key(s) from the key database. - tuf.keydb.remove_key(KEYS[0]['keyid']) - - # Remove the roles. - tuf.roledb.remove_role('Root') - - - - def test_verify_must_not_count_duplicate_keyids_towards_threshold(self): - # Create and sign dummy metadata twice with same key - # Note that we use the non-deterministic rsassa-pss signing scheme, so - # creating the signature twice shows that we don't only detect duplicate - # signatures but also different signatures from the same key. - signable = {"signed" : "test", "signatures" : []} - signed = securesystemslib.formats.encode_canonical( - signable["signed"]).encode("utf-8") - signable["signatures"].append( - securesystemslib.keys.create_signature(KEYS[0], signed)) - signable["signatures"].append( - securesystemslib.keys.create_signature(KEYS[0], signed)) - - # 'get_signature_status' uses keys from keydb for verification - tuf.keydb.add_key(KEYS[0]) - - # Assert that 'get_signature_status' returns two good signatures ... - status = tuf.sig.get_signature_status( - signable, "root", keyids=[KEYS[0]["keyid"]], threshold=2) - self.assertTrue(len(status["good_sigs"]) == 2) - - # ... but only one counts towards the threshold - self.assertFalse( - tuf.sig.verify(signable, "root", keyids=[KEYS[0]["keyid"]], threshold=2)) - - # Clean-up keydb - tuf.keydb.remove_key(KEYS[0]["keyid"]) - - - - def test_verify_count_different_keyids_for_same_key_towards_threshold(self): - # Create and sign dummy metadata twice with same key but different keyids - signable = {"signed" : "test", "signatures" : []} - key_sha256 = copy.deepcopy(KEYS[0]) - key_sha256["keyid"] = "deadbeef256" - - key_sha512 = copy.deepcopy(KEYS[0]) - key_sha512["keyid"] = "deadbeef512" - - signed = securesystemslib.formats.encode_canonical( - signable["signed"]).encode("utf-8") - signable["signatures"].append( - securesystemslib.keys.create_signature(key_sha256, signed)) - signable["signatures"].append( - securesystemslib.keys.create_signature(key_sha512, signed)) - - # 'get_signature_status' uses keys from keydb for verification - tuf.keydb.add_key(key_sha256) - tuf.keydb.add_key(key_sha512) - - # Assert that the key only counts toward the threshold once - keyids = [key_sha256["keyid"], key_sha512["keyid"]] - self.assertFalse( - tuf.sig.verify(signable, "root", keyids=keyids, threshold=2)) - - # Clean-up keydb - tuf.keydb.remove_key(key_sha256["keyid"]) - tuf.keydb.remove_key(key_sha512["keyid"]) - - - - def test_verify_unrecognized_sig(self): - signable = {'signed' : 'test', 'signatures' : []} - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - - # Two keys sign it, but only one of them will be trusted. 
- signable['signatures'].append(securesystemslib.keys.create_signature(
- KEYS[0], signed))
- signable['signatures'].append(securesystemslib.keys.create_signature(
- KEYS[2], signed))
-
- tuf.keydb.add_key(KEYS[0])
- tuf.keydb.add_key(KEYS[1])
- threshold = 2
-
- roleinfo = tuf.formats.build_dict_conforming_to_schema(
- tuf.formats.ROLE_SCHEMA,
- keyids=[KEYS[0]['keyid'], KEYS[1]['keyid']],
- threshold=threshold)
-
- tuf.roledb.add_role('Root', roleinfo)
-
- self.assertFalse(tuf.sig.verify(signable, 'Root'))
-
- # Done. Let's remove the added key(s) from the key database.
- tuf.keydb.remove_key(KEYS[0]['keyid'])
- tuf.keydb.remove_key(KEYS[1]['keyid'])
-
- # Remove the roles.
- tuf.roledb.remove_role('Root')
-
-
-
- def test_generate_rsa_signature(self):
- signable = {'signed' : 'test', 'signatures' : []}
- signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8')
-
- signable['signatures'].append(securesystemslib.keys.create_signature(
- KEYS[0], signed))
-
- self.assertEqual(1, len(signable['signatures']))
- signature = signable['signatures'][0]
- self.assertEqual(KEYS[0]['keyid'], signature['keyid'])
-
- returned_signature = tuf.sig.generate_rsa_signature(signable['signed'], KEYS[0])
- self.assertTrue(securesystemslib.formats.SIGNATURE_SCHEMA.matches(returned_signature))
-
- signable['signatures'].append(securesystemslib.keys.create_signature(
- KEYS[1], signed))
-
- self.assertEqual(2, len(signable['signatures']))
- signature = signable['signatures'][1]
- self.assertEqual(KEYS[1]['keyid'], signature['keyid'])
-
-
-
- def test_may_need_new_keys(self):
- # One untrusted key in 'signable'.
- signable = {'signed' : 'test', 'signatures' : []}
- signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8')
-
- signable['signatures'].append(securesystemslib.keys.create_signature(
- KEYS[0], signed))
-
- tuf.keydb.add_key(KEYS[1])
- threshold = 1
-
- roleinfo = tuf.formats.build_dict_conforming_to_schema(
- tuf.formats.ROLE_SCHEMA, keyids=[KEYS[1]['keyid']], threshold=threshold)
-
- tuf.roledb.add_role('Root', roleinfo)
-
- sig_status = tuf.sig.get_signature_status(signable, 'Root')
-
- self.assertTrue(tuf.sig.may_need_new_keys(sig_status))
-
-
- # Done. Let's remove the added key(s) from the key database.
- tuf.keydb.remove_key(KEYS[1]['keyid'])
-
- # Remove the roles.
- tuf.roledb.remove_role('Root')
-
-
- def test_signable_has_invalid_format(self):
- # get_signature_status() and verify() validate 'signable' before continuing.
- # 'signable' must be of the form: {'signed': , 'signatures': [{}]}.
- # Object types are checked as well.
- signable = {'not_signed' : 'test', 'signatures' : []}
- args = (signable, KEYS[0])
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.sig.get_signature_status, *args)
-
- # 'signatures' value must be a list. Let's try a dict.
- signable = {'signed' : 'test', 'signatures' : {}}
- args = (signable, KEYS[0])
- self.assertRaises(securesystemslib.exceptions.FormatError, tuf.sig.get_signature_status, *args)
-
-
-
-# Run unit test.
-if __name__ == '__main__':
- utils.configure_test_logging(sys.argv)
- unittest.main()
diff --git a/tests/test_slow_retrieval_attack_old.py b/tests/test_slow_retrieval_attack_old.py
deleted file mode 100755
index 9f22c88f36..0000000000
--- a/tests/test_slow_retrieval_attack_old.py
+++ /dev/null
@@ -1,216 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-
- test_slow_retrieval_attack_old.py
-
-
- Konstantin Andrianov.
-
-
- March 13, 2012.
-
- April 5, 2014.
- Refactored to use the 'unittest' module (test conditions in code, rather
- than verifying text output), use pre-generated repository files, and
- discontinue use of the old repository tools. Expanded comments and modified
- previous setup. -vladimir.v.diaz
-
-
- See LICENSE-MIT OR LICENSE for licensing information.
-
-
- Simulate a slow retrieval attack, where an attacker is able to prevent clients
- from receiving updates by responding to client requests so slowly that updates
- never complete. Test cases included for two types of slow retrievals: data
- that slowly trickles in, and data that is only returned after a long time
- delay. TUF prevents slow retrieval attacks by ensuring the download rate
- does not fall below a required rate (tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED).
-
- Note: There is no difference between 'updates' and 'target' files.
-
- # TODO: Consider additional tests for slow metadata download. Tests here only
- use slow target download.
-"""
-
-import os
-import tempfile
-import shutil
-import logging
-import unittest
-import sys
-
-import tuf.log
-import tuf.client.updater as updater
-import tuf.unittest_toolbox as unittest_toolbox
-import tuf.repository_tool as repo_tool
-import tuf.roledb
-import tuf.keydb
-
-from tests import utils
-
-logger = logging.getLogger(__name__)
-repo_tool.disable_console_log_messages()
-
-
-
-class TestSlowRetrieval(unittest_toolbox.Modified_TestCase):
-
- def setUp(self):
- # Modified_TestCase can handle temp dir removal
- unittest_toolbox.Modified_TestCase.setUp(self)
- self.temporary_directory = self.make_temp_directory(directory=os.getcwd())
-
- self.repository_name = 'test_repository1'
-
- # Copy the original repository files provided in the test folder so that
- # any modifications made to repository files are restricted to the copies.
- # The 'repository_data' directory is expected to exist in 'tuf/tests/'.
- original_repository_files = os.path.join(os.getcwd(), 'repository_data')
- temporary_repository_root = tempfile.mkdtemp(dir=self.temporary_directory)
-
- # The original repository, keystore, and client directories will be copied
- # for each test case.
- original_repository = os.path.join(original_repository_files, 'repository')
- original_client = os.path.join(original_repository_files, 'client')
- original_keystore = os.path.join(original_repository_files, 'keystore')
-
- # Save references to the often-needed client repository directories.
- # Test cases need these references to access metadata and target files.
- self.repository_directory = \
- os.path.join(temporary_repository_root, 'repository')
- self.client_directory = os.path.join(temporary_repository_root, 'client')
- self.keystore_directory = os.path.join(temporary_repository_root, 'keystore')
-
- # Copy the original 'repository', 'client', and 'keystore' directories
- # to the temporary repository the test cases can use.
- shutil.copytree(original_repository, self.repository_directory)
- shutil.copytree(original_client, self.client_directory)
- shutil.copytree(original_keystore, self.keystore_directory)
-
-
- # Produce a longer target file than exists in the other test repository
- # data, to provide for a long-duration slow attack. Then we'll write new
- # top-level metadata that includes a hash over that file, and provide that
- # metadata to the client as well.
-
- # The slow retrieval server, in mode 2 (1 byte per second), will only
- # sleep for a total of (target file size) seconds. Add a target file
- # that contains a sufficient number of bytes to trigger a slow retrieval
- # error. A transfer should not be permitted to take 1 second per byte
- # transferred. Because this test is currently expected to fail, I'm
- # limiting the size to 10 bytes (10 seconds) to avoid expected testing
- # delays. Consider increasing it again after the fix, to, e.g., 400.
- total_bytes = 10
-
- repository = repo_tool.load_repository(self.repository_directory)
- file1_filepath = os.path.join(self.repository_directory, 'targets',
- 'file1.txt')
- with open(file1_filepath, 'wb') as file_object:
- data = 'a' * int(round(total_bytes))
- file_object.write(data.encode('utf-8'))
-
- key_file = os.path.join(self.keystore_directory, 'timestamp_key')
- timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
- 'password')
- key_file = os.path.join(self.keystore_directory, 'snapshot_key')
- snapshot_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
- 'password')
- key_file = os.path.join(self.keystore_directory, 'targets_key')
- targets_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
- 'password')
-
- repository.targets.load_signing_key(targets_private)
- repository.snapshot.load_signing_key(snapshot_private)
- repository.timestamp.load_signing_key(timestamp_private)
-
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Since we've changed the repository metadata in this setup (by lengthening
- # a target file and then writing new metadata), we also have to update the
- # client metadata to get to the expected initial state, where the client
- # knows the right target info (and so expects the right, longer target
- # length).
- # We'll skip using updater.refresh since we don't have a server running,
- # and we'll update the metadata locally, manually.
- shutil.rmtree(os.path.join(
- self.client_directory, self.repository_name, 'metadata', 'current'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata'),
- os.path.join(self.client_directory, self.repository_name, 'metadata',
- 'current'))
-
- # Set the url prefix required by the 'tuf/client/updater.py' updater.
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
-
- self.server_process_handler = utils.TestServerProcess(log=logger,
- server='slow_retrieval_server_old.py')
-
- logger.info('Slow Retrieval Server process started.')
-
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(self.server_process_handler.port) + repository_basepath
-
- # Setting 'tuf.settings.repositories_directory' with the temporary client
- # directory copied from the original repository files.
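- # Each mirror entry below gives the updater a 'url_prefix' plus the
- # paths where metadata and targets live under it; the client composes
- # request URLs from these fields.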
-    tuf.settings.repositories_directory = self.client_directory
-    self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
-                                           'metadata_path': 'metadata',
-                                           'targets_path': 'targets'}}
-
-    # Create the repository instance.  The test cases will use this client
-    # updater to refresh metadata, fetch target files, etc.
-    self.repository_updater = updater.Updater(self.repository_name,
-                                              self.repository_mirrors)
-
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-
-    # Clean up the resources and flush the logged lines (if any).
-    self.server_process_handler.clean()
-
-    # Remove the temporary directory.
-    unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
-  def test_delay_before_send(self):
-    # Simulate a slow retrieval attack.
-    # When the download begins, the server blocks the download for a long
-    # time by doing nothing before it sends the first byte of data.
-
-    # Verify that the TUF client detects the excessive delay and refuses to
-    # continue the update process.
-    try:
-      file1_target = self.repository_updater.get_one_valid_targetinfo('file1.txt')
-      self.repository_updater.download_target(file1_target, self.client_directory)
-
-    # Verify that the specific 'tuf.exceptions.SlowRetrievalError' exception
-    # is raised by each mirror.
-    except tuf.exceptions.NoWorkingMirrorError as exception:
-      for mirror_url, mirror_error in exception.mirror_errors.items():
-        url_prefix = self.repository_mirrors['mirror1']['url_prefix']
-        url_file = os.path.join(url_prefix, 'targets', 'file1.txt')
-
-        # Verify that 'file1.txt' is the culprit.
-        self.assertEqual(url_file.replace('\\', '/'), mirror_url)
-        self.assertTrue(isinstance(mirror_error, tuf.exceptions.SlowRetrievalError))
-
-    else:
-      self.fail('TUF did not prevent a slow retrieval attack.')
-
-
-
-if __name__ == '__main__':
-  utils.configure_test_logging(sys.argv)
-  unittest.main()
diff --git a/tests/test_tutorial_old.py b/tests/test_tutorial_old.py
deleted file mode 100755
index ac33dec86a..0000000000
--- a/tests/test_tutorial_old.py
+++ /dev/null
@@ -1,407 +0,0 @@
-#!/usr/bin/env python
-
-"""
-
-  test_tutorial_old.py
-
-
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-
-  Regression test for the TUF tutorial as laid out in TUTORIAL.md.
-  This essentially runs the tutorial and checks some results.
-
-  There are a few deviations from the TUTORIAL.md instructions:
-   - steps that involve user input (like passphrases) are modified slightly
-     to not require user input
-   - use of path separators '/' is replaced by join() calls.  (We assume that
-     when following the tutorial, users will correctly deal with path
-     separators for their system if they happen to be using non-Linux
-     systems.)
- - shell instructions are mimicked using Python commands - -""" - - -import unittest -import datetime # part of TUTORIAL.md -import os # part of TUTORIAL.md, but also needed separately -import shutil -import tempfile -import sys -import unittest.mock as mock - -from tuf.repository_tool import * # part of TUTORIAL.md - -from tests import utils - -import securesystemslib.exceptions - -from securesystemslib.formats import encode_canonical # part of TUTORIAL.md -from securesystemslib.keys import create_signature # part of TUTORIAL.md - - -class TestTutorial(unittest.TestCase): - def setUp(self): - self.working_dir = os.getcwd() - self.test_dir = os.path.realpath(tempfile.mkdtemp()) - os.chdir(self.test_dir) - - def tearDown(self): - os.chdir(self.working_dir) - shutil.rmtree(self.test_dir) - - def test_tutorial(self): - """ - Run the TUTORIAL.md tutorial. - Note that anywhere the tutorial provides a command that prompts for the - user to enter a passphrase/password, this test is changed to simply provide - that as an argument. It's not worth trying to arrange automated testing of - the interactive password entry process here. Anywhere user entry has been - skipped from the tutorial instructions, "# Skipping user entry of password" - is written, with the original line below it, starting with ##. - """ - - # ----- Tutorial Section: Keys - - generate_and_write_rsa_keypair(password='password', filepath='root_key', bits=2048) - - # Skipping user entry of password - ## generate_and_write_rsa_keypair_with_prompt('root_key2') - generate_and_write_rsa_keypair(password='password', filepath='root_key2') - - # Tutorial tells users to expect these files to exist: - # ['root_key', 'root_key.pub', 'root_key2', 'root_key2.pub'] - for fname in ['root_key', 'root_key.pub', 'root_key2', 'root_key2.pub']: - self.assertTrue(os.path.exists(fname)) - - # Generate key pair at /path/to/KEYID - fname = generate_and_write_rsa_keypair(password="password") - self.assertTrue(os.path.exists(fname)) - - - # ----- Tutorial Section: Import RSA Keys - - public_root_key = import_rsa_publickey_from_file('root_key.pub') - - # Skipping user entry of password - ## private_root_key = import_rsa_privatekey_from_file('root_key') - private_root_key = import_rsa_privatekey_from_file('root_key', 'password') - - # Skipping user entry of password - ## import_rsa_privatekey_from_file('root_key') - with self.assertRaises(securesystemslib.exceptions.CryptoError): - import_rsa_privatekey_from_file('root_key', 'not_the_real_pw') - - - - # ----- Tutorial Section: Create and Import Ed25519 Keys - - # Skipping user entry of password - ## generate_and_write_ed25519_keypair_with_prompt('ed25519_key') - generate_and_write_ed25519_keypair(password='password', filepath='ed25519_key') - - public_ed25519_key = import_ed25519_publickey_from_file('ed25519_key.pub') - - # Skipping user entry of password - ## private_ed25519_key = import_ed25519_privatekey_from_file('ed25519_key') - private_ed25519_key = import_ed25519_privatekey_from_file( - 'ed25519_key', 'password') - - - - # ----- Tutorial Section: Create Top-level Metadata - repository = create_new_repository('repository') - repository.root.add_verification_key(public_root_key) - self.assertTrue(repository.root.keys) - - public_root_key2 = import_rsa_publickey_from_file('root_key2.pub') - repository.root.add_verification_key(public_root_key2) - - repository.root.threshold = 2 - private_root_key2 = import_rsa_privatekey_from_file( - 'root_key2', password='password') - - 
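The key dictionaries imported above can be exercised directly with securesystemslib's signing helpers. A small round-trip sketch with an in-memory key, independent of the tutorial's key files and assuming a securesystemslib version contemporary with this code:

    from securesystemslib.keys import (create_signature, generate_ed25519_key,
                                       verify_signature)

    # Generate an in-memory key dict of the same shape the import helpers return.
    key = generate_ed25519_key()
    signature = create_signature(key, b'hello world')

    # verify_signature() returns True only for the original data.
    assert verify_signature(key, signature, b'hello world')
    assert not verify_signature(key, signature, b'tampered data')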
repository.root.load_signing_key(private_root_key) - repository.root.load_signing_key(private_root_key2) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - mock_logger.info.assert_called_with("Dirty roles: " + str(['root'])) - - # Patch logger to assert that it accurately logs the repo's status. Since - # the logger is called multiple times, we have to assert for the accurate - # sequence of calls or rather its call arguments. - with mock.patch("tuf.repository_lib.logger") as mock_logger: - repository.status() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - self.assertListEqual([ - repr('targets') + " role contains 0 / 1 public keys.", - repr('snapshot') + " role contains 0 / 1 public keys.", - repr('timestamp') + " role contains 0 / 1 public keys.", - repr('root') + " role contains 2 / 2 signatures.", - repr('targets') + " role contains 0 / 1 signatures." - ], [args[0] for args, _ in mock_logger.info.call_args_list]) - - generate_and_write_rsa_keypair(password='password', filepath='targets_key') - generate_and_write_rsa_keypair(password='password', filepath='snapshot_key') - generate_and_write_rsa_keypair(password='password', filepath='timestamp_key') - - repository.targets.add_verification_key(import_rsa_publickey_from_file( - 'targets_key.pub')) - repository.snapshot.add_verification_key(import_rsa_publickey_from_file( - 'snapshot_key.pub')) - repository.timestamp.add_verification_key(import_rsa_publickey_from_file( - 'timestamp_key.pub')) - - # Skipping user entry of password - ## private_targets_key = import_rsa_privatekey_from_file('targets_key') - private_targets_key = import_rsa_privatekey_from_file( - 'targets_key', 'password') - - # Skipping user entry of password - ## private_snapshot_key = import_rsa_privatekey_from_file('snapshot_key') - private_snapshot_key = import_rsa_privatekey_from_file( - 'snapshot_key', 'password') - - # Skipping user entry of password - ## private_timestamp_key = import_rsa_privatekey_from_file('timestamp_key') - private_timestamp_key = import_rsa_privatekey_from_file( - 'timestamp_key', 'password') - - repository.targets.load_signing_key(private_targets_key) - repository.snapshot.load_signing_key(private_snapshot_key) - repository.timestamp.load_signing_key(private_timestamp_key) - - repository.timestamp.expiration = datetime.datetime(2080, 10, 28, 12, 8) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - mock_logger.info.assert_called_with("Dirty roles: " + - str(['root', 'snapshot', 'targets', 'timestamp'])) - - repository.writeall() - - - - # ----- Tutorial Section: Targets - # These next commands in the tutorial are shown as bash commands, so I'll - # just simulate this with some Python commands. 
- ## $ cd repository/targets/ - ## $ echo 'file1' > file1.txt - ## $ echo 'file2' > file2.txt - ## $ echo 'file3' > file3.txt - ## $ mkdir myproject; echo 'file4' > myproject/file4.txt - ## $ cd ../../ - - with open(os.path.join('repository', 'targets', 'file1.txt'), 'w') as fobj: - fobj.write('file1') - with open(os.path.join('repository', 'targets', 'file2.txt'), 'w') as fobj: - fobj.write('file2') - with open(os.path.join('repository', 'targets', 'file3.txt'), 'w') as fobj: - fobj.write('file3') - - os.mkdir(os.path.join('repository', 'targets', 'myproject')) - with open(os.path.join('repository', 'targets', 'myproject', 'file4.txt'), - 'w') as fobj: - fobj.write('file4') - - - repository = load_repository('repository') - - # TODO: replace the hard-coded list of targets with a helper - # method that returns a list of normalized relative target paths - list_of_targets = ['file1.txt', 'file2.txt', 'file3.txt'] - - repository.targets.add_targets(list_of_targets) - - self.assertTrue('file1.txt' in repository.targets.target_files) - self.assertTrue('file2.txt' in repository.targets.target_files) - self.assertTrue('file3.txt' in repository.targets.target_files) - - target4_filepath = 'myproject/file4.txt' - target4_abspath = os.path.abspath(os.path.join( - 'repository', 'targets', target4_filepath)) - octal_file_permissions = oct(os.stat(target4_abspath).st_mode)[4:] - custom_file_permissions = {'file_permissions': octal_file_permissions} - repository.targets.add_target(target4_filepath, custom_file_permissions) - # Note that target filepaths specified in the repo use '/' even on Windows. - # (This is important to make metadata platform-independent.) - self.assertTrue( - os.path.join(target4_filepath) in repository.targets.target_files) - - - # Skipping user entry of password - ## private_targets_key = import_rsa_privatekey_from_file('targets_key') - private_targets_key = import_rsa_privatekey_from_file( - 'targets_key', 'password') - repository.targets.load_signing_key(private_targets_key) - - # Skipping user entry of password - ## private_snapshot_key = import_rsa_privatekey_from_file('snapshot_key') - private_snapshot_key = import_rsa_privatekey_from_file( - 'snapshot_key', 'password') - repository.snapshot.load_signing_key(private_snapshot_key) - - # Skipping user entry of password - ## private_timestamp_key = import_rsa_privatekey_from_file('timestamp_key') - private_timestamp_key = import_rsa_privatekey_from_file( - 'timestamp_key', 'password') - repository.timestamp.load_signing_key(private_timestamp_key) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - mock_logger.info.assert_called_with( - "Dirty roles: " + str(['snapshot', 'targets', 'timestamp'])) - - repository.writeall() - - repository.targets.remove_target('myproject/file4.txt') - self.assertTrue(os.path.exists(os.path.join( - 'repository','targets', 'myproject', 'file4.txt'))) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. 
u'') - mock_logger.info.assert_called_with( - "Dirty roles: " + str(['targets'])) - - repository.mark_dirty(['snapshot', 'timestamp']) - repository.writeall() - - - # ----- Tutorial Section: Excursion: Dump Metadata and Append Signature - signable_content = dump_signable_metadata( - os.path.join('repository', 'metadata.staged', 'timestamp.json')) - - # Skipping user entry of password - ## private_ed25519_key = import_ed25519_privatekey_from_file('ed25519_key') - private_ed25519_key = import_ed25519_privatekey_from_file('ed25519_key', 'password') - signature = create_signature( - private_ed25519_key, encode_canonical(signable_content).encode()) - append_signature( - signature, - os.path.join('repository', 'metadata.staged', 'timestamp.json')) - - - - # ----- Tutorial Section: Delegations - generate_and_write_rsa_keypair( - password='password', filepath='unclaimed_key', bits=2048) - public_unclaimed_key = import_rsa_publickey_from_file('unclaimed_key.pub') - repository.targets.delegate( - 'unclaimed', [public_unclaimed_key], ['myproject/*.txt']) - - repository.targets("unclaimed").add_target("myproject/file4.txt") - - # Skipping user entry of password - ## private_unclaimed_key = import_rsa_privatekey_from_file('unclaimed_key') - private_unclaimed_key = import_rsa_privatekey_from_file( - 'unclaimed_key', 'password') - repository.targets("unclaimed").load_signing_key(private_unclaimed_key) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. u'') - mock_logger.info.assert_called_with( - "Dirty roles: " + str(['targets', 'unclaimed'])) - - repository.mark_dirty(["snapshot", "timestamp"]) - repository.writeall() - - - # Simulate the following shell command: - ## $ cp -r "repository/metadata.staged/" "repository/metadata/" - shutil.copytree( - os.path.join('repository', 'metadata.staged'), - os.path.join('repository', 'metadata')) - - - # ----- Tutorial Section: Delegate to Hashed Bins - repository.targets('unclaimed').remove_target("myproject/file4.txt") - - targets = ['myproject/file4.txt'] - - # Patch logger to assert that it accurately logs the output of hashed bin - # delegation. The logger is called multiple times, first with info level - # then with warning level. So we have to assert for the accurate sequence - # of calls or rather its call arguments. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.targets('unclaimed').delegate_hashed_bins( - targets, [public_unclaimed_key], 32) - - self.assertListEqual([ - "Creating hashed bin delegations.\n" - "1 total targets.\n" - "32 hashed bins.\n" - "256 total hash prefixes.\n" - "Each bin ranges over 8 hash prefixes." - ] + ["Adding a verification key that has already been used."] * 32, - [ - args[0] for args, _ in - mock_logger.info.call_args_list + mock_logger.warning.call_args_list - ]) - - - for delegation in repository.targets('unclaimed').delegations: - delegation.load_signing_key(private_unclaimed_key) - - # NOTE: The tutorial does not call dirty_roles anymore due to #964 and - # #958. We still call it here to see if roles are dirty as expected. - with mock.patch("tuf.repository_tool.logger") as mock_logger: - repository.dirty_roles() - # Concat strings to avoid Python2/3 unicode prefix problems ('' vs. 
u'') - mock_logger.info.assert_called_with( - "Dirty roles: " + str(['00-07', '08-0f', '10-17', '18-1f', '20-27', - '28-2f', '30-37', '38-3f', '40-47', '48-4f', '50-57', '58-5f', - '60-67', '68-6f', '70-77', '78-7f', '80-87', '88-8f', '90-97', - '98-9f', 'a0-a7', 'a8-af', 'b0-b7', 'b8-bf', 'c0-c7', 'c8-cf', - 'd0-d7', 'd8-df', 'e0-e7', 'e8-ef', 'f0-f7', 'f8-ff', 'unclaimed'])) - - repository.mark_dirty(["snapshot", "timestamp"]) - repository.writeall() - - # ----- Tutorial Section: How to Perform an Update - - # A separate tutorial is linked to for client use. That is not tested here. - create_tuf_client_directory("repository/", "client/tufrepo/") - - - - # ----- Tutorial Section: Test TUF Locally - - # TODO: Run subprocess to simulate the following bash instructions: - - # $ cd "repository/"; python3 -m http.server 8001 - # We next retrieve targets from the TUF repository and save them to client/. The client.py script is available to download metadata and files from a specified repository. In a different command-line prompt . . . - - # $ cd "client/" - # $ ls - # metadata/ - - # $ client.py --repo http://localhost:8001 file1.txt - # $ ls . targets/ - # .: - # metadata targets - - # targets/: - # file1.txt - - - -# Run unit test. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_unittest_toolbox_old.py b/tests/test_unittest_toolbox_old.py deleted file mode 100755 index d26d079286..0000000000 --- a/tests/test_unittest_toolbox_old.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_unittest_toolbox_old.py - - - Vladimir Diaz - - - July 14, 2017. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Test cases for unittest_toolbox.py. -""" - -import unittest -import logging -import shutil -import sys - -import tuf.unittest_toolbox as unittest_toolbox - -from tests import utils - -logger = logging.getLogger(__name__) - - -class TestUnittestToolbox(unittest_toolbox.Modified_TestCase): - def setUp(self): - unittest_toolbox.Modified_TestCase.setUp(self) - - def tearDown(self): - unittest_toolbox.Modified_TestCase.tearDown(self) - - - def test_tear_down_already_deleted_dir(self): - temp_directory = self.make_temp_directory() - - # Delete the temp directory to make sure unittest_toolbox doesn't - # complain about the missing temp_directory. - shutil.rmtree(temp_directory) - - -# Run the unit tests. -if __name__ == '__main__': - utils.configure_test_logging(sys.argv) - unittest.main() diff --git a/tests/test_updater_old.py b/tests/test_updater_old.py deleted file mode 100755 index f2148855d7..0000000000 --- a/tests/test_updater_old.py +++ /dev/null @@ -1,2138 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2012 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - test_updater_old.py - - - Konstantin Andrianov. - - - October 15, 2012. - - March 11, 2014. - Refactored to remove mocked modules and old repository tool dependence, use - exact repositories, and add realistic retrieval of files. -vladimir.v.diaz - - - See LICENSE-MIT OR LICENSE for licensing information. - - - 'test_updater.py_old' provides a collection of methods that test the public / - non-public methods and functions of 'tuf.client.updater.py'. 
- - The 'unittest_toolbox.py' module was created to provide additional testing - tools, such as automatically deleting temporary files created in test cases. - For more information, see 'tests/unittest_toolbox.py'. - - - Test cases here should follow a specific order (i.e., independent methods are - tested before dependent methods). More accurately, least dependent methods - are tested before most dependent methods. There is no reason to rewrite or - construct other methods that replicate already-tested methods solely for - testing purposes. This is possible because the 'unittest.TestCase' class - guarantees the order of unit tests. The 'test_something_A' method would - be tested before 'test_something_B'. To ensure the expected order of tests, - a number is placed after 'test' and before methods name like so: - 'test_1_check_directory'. The number is a measure of dependence, where 1 is - less dependent than 2. -""" - -import os -import time -import shutil -import copy -import tempfile -import logging -import errno -import sys -import unittest -import json -import unittest.mock as mock - -import tuf -import tuf.exceptions -import tuf.log -import tuf.formats -import tuf.keydb -import tuf.roledb -import tuf.repository_tool as repo_tool -import tuf.repository_lib as repo_lib -import tuf.unittest_toolbox as unittest_toolbox -import tuf.client.updater as updater - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) -repo_tool.disable_console_log_messages() - - -class TestUpdater(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Needed because in some tests simple_server.py cannot be found. - # The reason is that the current working directory - # has been changed when executing a subprocess. - cls.SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), 'simple_server.py') - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served - # by the SimpleHTTPServer launched here. The test cases of 'test_updater_old.py' - # assume the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger, - server=cls.SIMPLE_SERVER_PATH) - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases - shutil.rmtree(cls.temporary_directory) - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf.tests/'. 
-    original_repository_files = os.path.join(os.getcwd(), 'repository_data')
-    temporary_repository_root = \
-      self.make_temp_directory(directory=self.temporary_directory)
-
-    # The original repository, keystore, and client directories will be copied
-    # for each test case.
-    original_repository = os.path.join(original_repository_files, 'repository')
-    original_keystore = os.path.join(original_repository_files, 'keystore')
-    original_client = os.path.join(original_repository_files, 'client')
-
-    # Save references to the often-needed client repository directories.
-    # Test cases need these references to access metadata and target files.
-    self.repository_directory = \
-      os.path.join(temporary_repository_root, 'repository')
-    self.keystore_directory = \
-      os.path.join(temporary_repository_root, 'keystore')
-
-    self.client_directory = os.path.join(temporary_repository_root,
-        'client')
-    self.client_metadata = os.path.join(self.client_directory,
-        self.repository_name, 'metadata')
-    self.client_metadata_current = os.path.join(self.client_metadata,
-        'current')
-    self.client_metadata_previous = os.path.join(self.client_metadata,
-        'previous')
-
-    # Copy the original 'repository', 'client', and 'keystore' directories
-    # to the temporary repository the test cases can use.
-    shutil.copytree(original_repository, self.repository_directory)
-    shutil.copytree(original_client, self.client_directory)
-    shutil.copytree(original_keystore, self.keystore_directory)
-
-    # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
-    repository_basepath = self.repository_directory[len(os.getcwd()):]
-    url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
-        + str(self.server_process_handler.port) + repository_basepath
-
-    # Set 'tuf.settings.repositories_directory' to the temporary client
-    # directory copied from the original repository files.
-    tuf.settings.repositories_directory = self.client_directory
-
-    self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
-                                           'metadata_path': 'metadata',
-                                           'targets_path': 'targets'}}
-
-    # Create a repository instance.  The test cases will use this client
-    # updater to refresh metadata, fetch target files, etc.
-    self.repository_updater = updater.Updater(self.repository_name,
-                                              self.repository_mirrors)
-
-    # Metadata role keys are needed by the test cases to make changes to the
-    # repository (e.g., adding a new target file to 'targets.json' and then
-    # requesting a refresh()).
-    self.role_keys = _load_role_keys(self.keystore_directory)
-
-
-
-  def tearDown(self):
-    tuf.roledb.clear_roledb(clear_all=True)
-    tuf.keydb.clear_keydb(clear_all=True)
-
-    # Log stdout and stderr from the server subprocess.
-    self.server_process_handler.flush_log()
-
-    # Remove the temporary directory.
-    unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
-  # UNIT TESTS.
-
-  def test_1__init__exceptions(self):
-    # The client's repository requires a metadata directory (and the 'current'
-    # and 'previous' sub-directories), and at least the 'root.json' file.
-    # setUp(), called before each test case, instantiates the required updater
-    # objects and keys.  The needed objects/data are available in
-    # 'self.repository_updater', 'self.client_directory', etc.
-
-
-    # Test: Invalid arguments.
-    # Invalid 'updater_name' argument.  String expected.
-    self.assertRaises(securesystemslib.exceptions.FormatError, updater.Updater, 8,
-                      self.repository_mirrors)
-
-    # Invalid 'repository_mirrors' argument.  'tuf.formats.MIRRORDICT_SCHEMA'
-    # expected.
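For reference, MIRRORDICT_SCHEMA accepts the mirror dictionary shape built in setUp() above; a minimal sketch, with host and port illustrative:

    # A mirrors argument that satisfies MIRRORDICT_SCHEMA (values illustrative).
    repository_mirrors = {
        'mirror1': {
            'url_prefix': 'http://localhost:8001',
            'metadata_path': 'metadata',
            'targets_path': 'targets',
        }
    }
    # Anything else (such as the bare integer passed below) fails schema
    # validation with securesystemslib.exceptions.FormatError.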
- self.assertRaises(securesystemslib.exceptions.FormatError, updater.Updater, updater.Updater, 8) - - - # 'tuf.client.updater.py' requires that the client's repositories directory - # be configured in 'tuf.settings.py'. - tuf.settings.repositories_directory = None - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - # Restore 'tuf.settings.repositories_directory' to the original client - # directory. - tuf.settings.repositories_directory = self.client_directory - - # Test: repository does not exist - self.assertRaises(tuf.exceptions.MissingLocalRepositoryError, updater.Updater, - 'test_non_existing_repository', self.repository_mirrors) - - # Test: empty client repository (i.e., no metadata directory). - metadata_backup = self.client_metadata + '.backup' - shutil.move(self.client_metadata, metadata_backup) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - # Restore the client's metadata directory. - shutil.move(metadata_backup, self.client_metadata) - - - # Test: repository with only a '{repository_directory}/metadata' directory. - # (i.e., missing the required 'current' and 'previous' sub-directories). - current_backup = self.client_metadata_current + '.backup' - previous_backup = self.client_metadata_previous + '.backup' - - shutil.move(self.client_metadata_current, current_backup) - shutil.move(self.client_metadata_previous, previous_backup) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - - # Restore the client's previous directory. The required 'current' directory - # is still missing. - shutil.move(previous_backup, self.client_metadata_previous) - - # Test: repository with only a '{repository_directory}/metadata/previous' - # directory. - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - # Restore the client's current directory. - shutil.move(current_backup, self.client_metadata_current) - - # Test: repository with a '{repository_directory}/metadata/current' - # directory, but the 'previous' directory is missing. - shutil.move(self.client_metadata_previous, previous_backup) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - shutil.move(previous_backup, self.client_metadata_previous) - - # Test: repository missing the required 'root.json' file. - client_root_file = os.path.join(self.client_metadata_current, 'root.json') - backup_root_file = client_root_file + '.backup' - shutil.move(client_root_file, backup_root_file) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', - self.repository_mirrors) - # Restore the client's 'root.json file. - shutil.move(backup_root_file, client_root_file) - - # Test: Normal 'tuf.client.updater.Updater' instantiation. - updater.Updater('test_repository1', self.repository_mirrors) - - - - - - def test_1__load_metadata_from_file(self): - - # Setup - # Get the 'role1.json' filepath. Manually load the role metadata, and - # compare it against the loaded metadata by '_load_metadata_from_file()'. 
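Files such as 'role1.json' are signable envelopes, and the updater's metadata store keeps only the 'signed' half; that is why the comparison below indexes role1_meta['signed']. A sketch of the envelope shape, with every value an illustrative placeholder:

    import json

    # Envelope shape of a signable metadata file such as 'role1.json'.
    role1_envelope = {
        'signatures': [
            {'keyid': 'deadbeef', 'sig': 'abcdef01'},
        ],
        'signed': {
            '_type': 'targets',
            'version': 1,
            'expires': '2030-01-01T00:00:00Z',
            'targets': {},
        },
    }
    # Only the 'signed' half is stored in updater.metadata['current'].
    print(json.dumps(role1_envelope['signed'], indent=2))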
- role1_filepath = \ - os.path.join(self.client_metadata_current, 'role1.json') - role1_meta = securesystemslib.util.load_json_file(role1_filepath) - - # Load the 'role1.json' file with _load_metadata_from_file, which should - # store the loaded metadata in the 'self.repository_updater.metadata' - # store. - self.assertEqual(len(self.repository_updater.metadata['current']), 4) - self.repository_updater._load_metadata_from_file('current', 'role1') - - # Verify that the correct number of metadata objects has been loaded - # (i.e., only the 'root.json' file should have been loaded. - self.assertEqual(len(self.repository_updater.metadata['current']), 5) - - # Verify that the content of root metadata is valid. - self.assertEqual(self.repository_updater.metadata['current']['role1'], - role1_meta['signed']) - - # Verify that _load_metadata_from_file() doesn't raise an exception for - # improperly formatted metadata, and doesn't load the bad file. - with open(role1_filepath, 'ab') as file_object: - file_object.write(b'bad JSON data') - - self.repository_updater._load_metadata_from_file('current', 'role1') - self.assertEqual(len(self.repository_updater.metadata['current']), 5) - - # Test if we fail gracefully if we can't deserialize a meta file - self.repository_updater._load_metadata_from_file('current', 'empty_file') - self.assertFalse('empty_file' in self.repository_updater.metadata['current']) - - # Test invalid metadata set argument (must be either - # 'current' or 'previous'.) - self.assertRaises(securesystemslib.exceptions.Error, - self.repository_updater._load_metadata_from_file, - 'bad_metadata_set', 'role1') - - - - - def test_1__rebuild_key_and_role_db(self): - # Setup - root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) - root_metadata = self.repository_updater.metadata['current']['root'] - root_threshold = root_metadata['roles']['root']['threshold'] - number_of_root_keys = len(root_metadata['keys']) - - self.assertEqual(root_roleinfo['threshold'], root_threshold) - - # Ensure we add 2 to the number of root keys (actually, the number of root - # keys multiplied by the number of keyid hash algorithms), to include the - # delegated targets key (+1 for its sha512 keyid). The delegated roles of - # 'targets.json' are also loaded when the repository object is - # instantiated. - - self.assertEqual(number_of_root_keys + 1, len(tuf.keydb._keydb_dict[self.repository_name])) - - # Test: normal case. - self.repository_updater._rebuild_key_and_role_db() - - root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) - self.assertEqual(root_roleinfo['threshold'], root_threshold) - - # _rebuild_key_and_role_db() will only rebuild the keys and roles specified - # in the 'root.json' file, unlike __init__(). Instantiating an updater - # object calls both _rebuild_key_and_role_db() and _import_delegations(). - self.assertEqual(number_of_root_keys, len(tuf.keydb._keydb_dict[self.repository_name])) - - # Test: properly updated roledb and keydb dicts if the Root role changes. 
- root_metadata = self.repository_updater.metadata['current']['root'] - root_metadata['roles']['root']['threshold'] = 8 - root_metadata['keys'].popitem() - - self.repository_updater._rebuild_key_and_role_db() - - root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) - self.assertEqual(root_roleinfo['threshold'], 8) - self.assertEqual(number_of_root_keys - 1, len(tuf.keydb._keydb_dict[self.repository_name])) - - - - - def test_1__update_versioninfo(self): - # Tests - # Verify that the 'self.versioninfo' dictionary is empty (it starts off - # empty and is only populated if _update_versioninfo() is called. - versioninfo_dict = self.repository_updater.versioninfo - self.assertEqual(len(versioninfo_dict), 0) - - # Load the versioninfo of the top-level Targets role. This action - # populates the 'self.versioninfo' dictionary. - self.repository_updater._update_versioninfo('targets.json') - self.assertEqual(len(versioninfo_dict), 1) - self.assertTrue(tuf.formats.FILEINFODICT_SCHEMA.matches(versioninfo_dict)) - - # The Snapshot role stores the version numbers of all the roles available - # on the repository. Load Snapshot to extract Root's version number - # and compare it against the one loaded by 'self.repository_updater'. - snapshot_filepath = os.path.join(self.client_metadata_current, 'snapshot.json') - snapshot_signable = securesystemslib.util.load_json_file(snapshot_filepath) - targets_versioninfo = snapshot_signable['signed']['meta']['targets.json'] - - # Verify that the manually loaded version number of root.json matches - # the one loaded by the updater object. - self.assertTrue('targets.json' in versioninfo_dict) - self.assertEqual(versioninfo_dict['targets.json'], targets_versioninfo) - - # Verify that 'self.versioninfo' is incremented if another role is updated. - self.repository_updater._update_versioninfo('role1.json') - self.assertEqual(len(versioninfo_dict), 2) - - # Verify that 'self.versioninfo' is incremented if a non-existent role is - # requested, and has its versioninfo entry set to 'None'. - self.repository_updater._update_versioninfo('bad_role.json') - self.assertEqual(len(versioninfo_dict), 3) - self.assertEqual(versioninfo_dict['bad_role.json'], None) - - # Verify that the versioninfo specified in Timestamp is used if the Snapshot - # role hasn't been downloaded yet. - del self.repository_updater.metadata['current']['snapshot'] - #self.assertRaises(self.repository_updater._update_versioninfo('snapshot.json')) - self.repository_updater._update_versioninfo('snapshot.json') - self.assertEqual(versioninfo_dict['snapshot.json']['version'], 1) - - - - def test_1__refresh_must_not_count_duplicate_keyids_towards_threshold(self): - # Update root threshold on the server repository and sign twice with 1 key - repository = repo_tool.load_repository(self.repository_directory) - repository.root.threshold = 2 - repository.root.load_signing_key(self.role_keys['root']['private']) - - storage_backend = securesystemslib.storage.FilesystemBackend() - # The client uses the threshold from the previous root file to verify the - # new root. Thus we need to make two updates so that the threshold used for - # verification becomes 2. I.e. we bump the version, sign twice with the - # same key and write to disk '2.root.json' and '3.root.json'. 
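The property under test here is that a repeated signature from one key must count only once toward the threshold. A deduplicating count can be sketched as follows (hypothetical helper, not the updater's actual verification routine):

    def unique_verified_keyids(signatures, trusted_keyids):
        # A set collapses duplicates, so a key that signs twice still
        # contributes a single keyid toward the threshold.
        return {sig['keyid'] for sig in signatures if sig['keyid'] in trusted_keyids}

    signatures = [{'keyid': 'abc'}, {'keyid': 'abc'}]  # the same key, twice
    assert len(unique_verified_keyids(signatures, {'abc'})) == 1  # threshold 2 unmet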
- for version in [2, 3]: - repository.root.version = version - info = tuf.roledb.get_roleinfo("root") - metadata = repo_lib.generate_root_metadata( - info["version"], info["expires"], False) - signed_metadata = repo_lib.sign_metadata( - metadata, info["keyids"], "root.json", "default") - signed_metadata["signatures"].append(signed_metadata["signatures"][0]) - live_root_path = os.path.join( - self.repository_directory, "metadata", "root.json") - - # Bypass server side verification in 'write' or 'writeall', which would - # catch the unmet threshold. - # We also skip writing to 'metadata.staged' and copying to 'metadata' and - # instead write directly to 'metadata' - repo_lib.write_metadata_file(signed_metadata, live_root_path, - info["version"], True, storage_backend) - - - # Update from current '1.root.json' to '3.root.json' on client and assert - # raise of 'BadSignatureError' (caused by unmet signature threshold). - try: - self.repository_updater.refresh() - - except tuf.exceptions.NoWorkingMirrorError as e: - mirror_errors = list(e.mirror_errors.values()) - self.assertTrue(len(mirror_errors) == 1) - self.assertTrue( - isinstance(mirror_errors[0], - securesystemslib.exceptions.BadSignatureError)) - self.assertEqual( - str(mirror_errors[0]), - repr("root") + " metadata has bad signature.") - - else: - self.fail( - "Expected a NoWorkingMirrorError composed of one BadSignatureError") - - - def test_2__import_delegations(self): - # Setup. - # In order to test '_import_delegations' the parent of the delegation - # has to be in Repository.metadata['current'], but it has to be inserted - # there without using '_load_metadata_from_file()' since it calls - # '_import_delegations()'. - repository_name = self.repository_updater.repository_name - tuf.keydb.clear_keydb(repository_name) - tuf.roledb.clear_roledb(repository_name) - - self.assertEqual(len(tuf.roledb._roledb_dict[repository_name]), 0) - self.assertEqual(len(tuf.keydb._keydb_dict[repository_name]), 0) - - self.repository_updater._rebuild_key_and_role_db() - - self.assertEqual(len(tuf.roledb._roledb_dict[repository_name]), 4) - - # Take into account the number of keyids algorithms supported by default, - # which this test condition expects to be two (sha256 and sha512). - self.assertEqual(4, len(tuf.keydb._keydb_dict[repository_name])) - - # Test: pass a role without delegations. - self.repository_updater._import_delegations('root') - - # Verify that there was no change to the roledb and keydb dictionaries by - # checking the number of elements in the dictionaries. - self.assertEqual(len(tuf.roledb._roledb_dict[repository_name]), 4) - # Take into account the number of keyid hash algorithms, which this - # test condition expects to be one - self.assertEqual(len(tuf.keydb._keydb_dict[repository_name]), 4) - - # Test: normal case, first level delegation. - self.repository_updater._import_delegations('targets') - - self.assertEqual(len(tuf.roledb._roledb_dict[repository_name]), 5) - # The number of root keys (times the number of key hash algorithms) + - # delegation's key (+1 for its sha512 keyid). - self.assertEqual(len(tuf.keydb._keydb_dict[repository_name]), 4 + 1) - - # Verify that roledb dictionary was added. - self.assertTrue('role1' in tuf.roledb._roledb_dict[repository_name]) - - # Verify that keydb dictionary was updated. 
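The remainder of this test mutates the parent role's 'delegations' field, whose rough shape inside targets metadata is as follows (keyids and key values illustrative):

    # Rough shape of the 'delegations' field in targets metadata.
    delegations = {
        'keys': {
            '59a4df8a': {
                'keytype': 'ed25519',
                'scheme': 'ed25519',
                'keyval': {'public': 'edcd0a32'},
            },
        },
        'roles': [
            {'name': 'role1',
             'keyids': ['59a4df8a'],
             'threshold': 1,
             'paths': ['file3.txt'],
             'terminating': False},
        ],
    }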
- role1_signable = \ - securesystemslib.util.load_json_file(os.path.join(self.client_metadata_current, - 'role1.json')) - keyids = [] - for signature in role1_signable['signatures']: - keyids.append(signature['keyid']) - - for keyid in keyids: - self.assertTrue(keyid in tuf.keydb._keydb_dict[repository_name]) - - # Verify that _import_delegations() ignores invalid keytypes in the 'keys' - # field of parent role's 'delegations'. - existing_keyid = keyids[0] - - self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keytype'] = 'bad_keytype' - self.repository_updater._import_delegations('targets') - - # Restore the keytype of 'existing_keyid'. - self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keytype'] = 'ed25519' - - # Verify that _import_delegations() raises an exception if one of the - # delegated keys is malformed. - valid_keyval = self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keyval'] - - self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keyval'] = 1 - self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._import_delegations, 'targets') - - self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keyval'] = valid_keyval - - # Verify that _import_delegations() raises an exception if one of the - # delegated roles is malformed. - self.repository_updater.metadata['current']['targets']\ - ['delegations']['roles'][0]['name'] = 1 - self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._import_delegations, 'targets') - - - - def test_2__versioninfo_has_been_updated(self): - # Verify that the method returns 'False' if a versioninfo was not changed. - snapshot_filepath = os.path.join(self.client_metadata_current, 'snapshot.json') - snapshot_signable = securesystemslib.util.load_json_file(snapshot_filepath) - targets_versioninfo = snapshot_signable['signed']['meta']['targets.json'] - - self.assertFalse(self.repository_updater._versioninfo_has_been_updated('targets.json', - targets_versioninfo)) - - # Verify that the method returns 'True' if Root's version number changes. - targets_versioninfo['version'] = 8 - self.assertTrue(self.repository_updater._versioninfo_has_been_updated('targets.json', - targets_versioninfo)) - - - - - - def test_2__move_current_to_previous(self): - # Test case will consist of removing a metadata file from client's - # '{client_repository}/metadata/previous' directory, executing the method - # and then verifying that the 'previous' directory contains the snapshot - # file. - previous_snapshot_filepath = os.path.join(self.client_metadata_previous, - 'snapshot.json') - os.remove(previous_snapshot_filepath) - self.assertFalse(os.path.exists(previous_snapshot_filepath)) - - # Verify that the current 'snapshot.json' is moved to the previous directory. 
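Later in this test, the non-ASCII role name '../ä' appears on disk as '..%2F%C3%A4.json' because role names are percent-encoded before being used as local filenames; plain urllib quoting reproduces the encoding (sketch):

    from urllib.parse import quote, unquote

    # '../ä' is unusable as a filename verbatim (path traversal, non-ASCII),
    # so it is stored percent-encoded with no characters treated as safe.
    encoded = quote('../ä', safe='')
    assert encoded == '..%2F%C3%A4'
    assert unquote(encoded) == '../ä'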
- self.repository_updater._move_current_to_previous('snapshot') - self.assertTrue(os.path.exists(previous_snapshot_filepath)) - - # assert that non-ascii alphanumeric role name "../ä" (that is url encoded - # in local filename) works - encoded_current = os.path.join( - self.client_metadata_current, '..%2F%C3%A4.json' - ) - encoded_previous = os.path.join( - self.client_metadata_previous, '..%2F%C3%A4.json' - ) - - with open(encoded_current, "w"): - pass - self.repository_updater._move_current_to_previous('../ä') - self.assertTrue(os.path.exists(encoded_previous)) - - - - - - def test_2__delete_metadata(self): - # This test will verify that 'root' metadata is never deleted. When a role - # is deleted verify that the file is not present in the - # 'self.repository_updater.metadata' dictionary. - self.repository_updater._delete_metadata('root') - self.assertTrue('root' in self.repository_updater.metadata['current']) - - self.repository_updater._delete_metadata('timestamp') - self.assertFalse('timestamp' in self.repository_updater.metadata['current']) - - - - - - def test_2__ensure_not_expired(self): - # This test condition will verify that nothing is raised when a metadata - # file has a future expiration date. - root_metadata = self.repository_updater.metadata['current']['root'] - self.repository_updater._ensure_not_expired(root_metadata, 'root') - - # Metadata with an expiration time in the future should, of course, not - # count as expired - expires = tuf.formats.unix_timestamp_to_datetime(int(time.time() + 10)) - expires = expires.isoformat() + 'Z' - root_metadata['expires'] = expires - self.assertTrue(tuf.formats.ROOT_SCHEMA.matches(root_metadata)) - self.repository_updater._ensure_not_expired(root_metadata, 'root') - - # Metadata that expires at the exact current time is considered expired - expire_time = int(time.time()) - expires = \ - tuf.formats.unix_timestamp_to_datetime(expire_time).isoformat()+'Z' - root_metadata['expires'] = expires - mock_time = mock.Mock() - mock_time.return_value = expire_time - self.assertTrue(tuf.formats.ROOT_SCHEMA.matches(root_metadata)) - with mock.patch('time.time', mock_time): - self.assertRaises(tuf.exceptions.ExpiredMetadataError, - self.repository_updater._ensure_not_expired, - root_metadata, 'root') - - # Metadata that expires in the past is considered expired - expires = tuf.formats.unix_timestamp_to_datetime(int(time.time() - 10)) - expires = expires.isoformat() + 'Z' - root_metadata['expires'] = expires - self.assertTrue(tuf.formats.ROOT_SCHEMA.matches(root_metadata)) - self.assertRaises(tuf.exceptions.ExpiredMetadataError, - self.repository_updater._ensure_not_expired, - root_metadata, 'root') - - - - - - def test_3__update_metadata(self): - # Setup - # _update_metadata() downloads, verifies, and installs the specified - # metadata role. Remove knowledge of currently installed metadata and - # verify that they are re-installed after calling _update_metadata(). - - # This is the default metadata that we would create for the timestamp role, - # because it has no signed metadata for itself. - DEFAULT_TIMESTAMP_FILELENGTH = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH - - # This is the upper bound length for Targets metadata. - DEFAULT_TARGETS_FILELENGTH = tuf.settings.DEFAULT_TARGETS_REQUIRED_LENGTH - - # Save the versioninfo of 'targets.json,' needed later when re-installing - # with _update_metadata(). 
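The expiry comparisons in '_ensure_not_expired' above hinge on the ISO-8601 'Z' timestamp format used throughout these tests; a self-contained sketch of the same check (hypothetical helper):

    import datetime
    import time

    def ensure_not_expired(metadata, rolename):
        # 'expires' uses the ISO-8601 Zulu format, e.g. '2030-01-01T00:00:00Z'.
        expires = datetime.datetime.strptime(
            metadata['expires'], '%Y-%m-%dT%H:%M:%SZ').replace(
                tzinfo=datetime.timezone.utc)
        # Expiring at exactly the current time counts as expired, matching
        # the mocked time.time() case above.
        if expires.timestamp() <= time.time():
            raise RuntimeError(rolename + ' metadata has expired')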
- targets_versioninfo = \ - self.repository_updater.metadata['current']['snapshot']['meta']\ - ['targets.json'] - - # Remove the currently installed metadata from the store and disk. Verify - # that the metadata dictionary is re-populated after calling - # _update_metadata(). - del self.repository_updater.metadata['current']['timestamp'] - del self.repository_updater.metadata['current']['targets'] - - timestamp_filepath = \ - os.path.join(self.client_metadata_current, 'timestamp.json') - targets_filepath = os.path.join(self.client_metadata_current, 'targets.json') - root_filepath = os.path.join(self.client_metadata_current, 'root.json') - os.remove(timestamp_filepath) - os.remove(targets_filepath) - - # Test: normal case. - # Verify 'timestamp.json' is properly installed. - self.assertFalse('timestamp' in self.repository_updater.metadata) - - logger.info('\nroleinfo: ' + repr(tuf.roledb.get_rolenames(self.repository_name))) - self.repository_updater._update_metadata('timestamp', - DEFAULT_TIMESTAMP_FILELENGTH) - self.assertTrue('timestamp' in self.repository_updater.metadata['current']) - os.path.exists(timestamp_filepath) - - # Verify 'targets.json' is properly installed. - self.assertFalse('targets' in self.repository_updater.metadata['current']) - self.repository_updater._update_metadata('targets', - DEFAULT_TARGETS_FILELENGTH, - targets_versioninfo['version']) - self.assertTrue('targets' in self.repository_updater.metadata['current']) - - targets_signable = securesystemslib.util.load_json_file(targets_filepath) - loaded_targets_version = targets_signable['signed']['version'] - self.assertEqual(targets_versioninfo['version'], loaded_targets_version) - - # Test: Invalid / untrusted version numbers. - # Invalid version number for 'targets.json'. - self.assertRaises(tuf.exceptions.NoWorkingMirrorError, - self.repository_updater._update_metadata, - 'targets', DEFAULT_TARGETS_FILELENGTH, 88) - - # Verify that the specific exception raised is correct for the previous - # case. - try: - self.repository_updater._update_metadata('targets', - DEFAULT_TARGETS_FILELENGTH, 88) - - except tuf.exceptions.NoWorkingMirrorError as e: - for mirror_error in e.mirror_errors.values(): - assert isinstance(mirror_error, tuf.exceptions.BadVersionNumberError) - - else: - self.fail( - 'Expected a NoWorkingMirrorError composed of BadVersionNumberErrors') - - # Verify that the specific exception raised is correct for the previous - # case. The version number is checked, so the specific error in - # this case should be 'tuf.exceptions.BadVersionNumberError'. - try: - self.repository_updater._update_metadata('targets', - DEFAULT_TARGETS_FILELENGTH, - 88) - - except tuf.exceptions.NoWorkingMirrorError as e: - for mirror_error in e.mirror_errors.values(): - assert isinstance(mirror_error, tuf.exceptions.BadVersionNumberError) - - else: - self.fail( - 'Expected a NoWorkingMirrorError composed of BadVersionNumberErrors') - - - - - - def test_3__get_metadata_file(self): - - ''' - This test focuses on making sure that the updater rejects unknown or - badly-formatted TUF specification version numbers.... - ''' - - # Make note of the correct supported TUF specification version. - correct_specification_version = tuf.SPECIFICATION_VERSION - - # Change it long enough to write new metadata. 
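Only the major component of the specification version has to agree between the metadata and the code; a condensed sketch of the comparison behind 'UnsupportedSpecificationError' (hypothetical helper, version strings illustrative):

    def spec_version_is_supported(metadata_spec_version, code_spec_version):
        # Only the major version component needs to match; a mismatch is what
        # surfaces as UnsupportedSpecificationError in the test below.
        return metadata_spec_version.split('.')[0] == code_spec_version.split('.')[0]

    assert not spec_version_is_supported('0.9.0', '1.0.0')  # the mismatch staged below
    assert spec_version_is_supported('1.5.0', '1.0.0')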
- tuf.SPECIFICATION_VERSION = '0.9.0' - - repository = repo_tool.load_repository(self.repository_directory) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - - # Change the supported TUF specification version back to what it should be - # so that we can parse the metadata and see that the spec version in the - # metadata does not match the code's expected spec version. - tuf.SPECIFICATION_VERSION = correct_specification_version - - upperbound_filelength = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH - try: - self.repository_updater._get_metadata_file('timestamp', 'timestamp.json', - upperbound_filelength, 1) - - except tuf.exceptions.NoWorkingMirrorError as e: - # Note that this test provides a piece of metadata which would fail to - # be accepted -- with a different error -- if the specification version - # number were not a problem. - for mirror_error in e.mirror_errors.values(): - assert isinstance( - mirror_error, tuf.exceptions.UnsupportedSpecificationError) - - else: - self.fail( - 'Expected a failure to verify metadata when the metadata had a ' - 'specification version number that was unexpected. ' - 'No error was raised.') - - - - - - def test_3__update_metadata_if_changed(self): - # Setup. - # The client repository is initially loaded with only four top-level roles. - # Verify that the metadata store contains the metadata of only these four - # roles before updating the metadata of 'targets.json'. - self.assertEqual(len(self.repository_updater.metadata['current']), 4) - self.assertTrue('targets' in self.repository_updater.metadata['current']) - targets_path = os.path.join(self.client_metadata_current, 'targets.json') - self.assertTrue(os.path.exists(targets_path)) - self.assertEqual(self.repository_updater.metadata['current']['targets']['version'], 1) - - # Test: normal case. Update 'targets.json'. The version number should not - # change. - self.repository_updater._update_metadata_if_changed('targets') - - # Verify the current version of 'targets.json' has not changed. - self.assertEqual(self.repository_updater.metadata['current']['targets']['version'], 1) - - # Modify one target file on the remote repository. - repository = repo_tool.load_repository(self.repository_directory) - target3 = 'file3.txt' - - repository.targets.add_target(target3) - repository.root.version = repository.root.version + 1 - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.targets.load_signing_key(self.role_keys['targets']['private']) - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Update 'targets.json' and verify that the client's current 'targets.json' - # has been updated. 'timestamp' and 'snapshot' must be manually updated - # so that new 'targets' can be recognized. 
- DEFAULT_TIMESTAMP_FILELENGTH = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH - - self.repository_updater._update_metadata('timestamp', DEFAULT_TIMESTAMP_FILELENGTH) - self.repository_updater._update_metadata_if_changed('snapshot', 'timestamp') - self.repository_updater._update_metadata_if_changed('targets') - targets_path = os.path.join(self.client_metadata_current, 'targets.json') - self.assertTrue(os.path.exists(targets_path)) - self.assertTrue(self.repository_updater.metadata['current']['targets']) - self.assertEqual(self.repository_updater.metadata['current']['targets']['version'], 2) - - # Test for an invalid 'referenced_metadata' argument. - self.assertRaises(tuf.exceptions.RepositoryError, - self.repository_updater._update_metadata_if_changed, 'snapshot', 'bad_role') - - - - def test_3__targets_of_role(self): - # Setup. - # Extract the list of targets from 'targets.json', to be compared to what - # is returned by _targets_of_role('targets'). - targets_in_metadata = \ - self.repository_updater.metadata['current']['targets']['targets'] - - # Test: normal case. - targetinfos_list = self.repository_updater._targets_of_role('targets') - - # Verify that the list of targets was returned, and that it contains valid - # target files. - self.assertTrue(tuf.formats.TARGETINFOS_SCHEMA.matches(targetinfos_list)) - for targetinfo in targetinfos_list: - self.assertTrue((targetinfo['filepath'], targetinfo['fileinfo']) in targets_in_metadata.items()) - - - - - - def test_4_refresh(self): - # This unit test is based on adding an extra target file to the - # server and rebuilding all server-side metadata. All top-level metadata - # should be updated when the client calls refresh(). - - # First verify that an expired root metadata is updated. - expired_date = '1960-01-01T12:00:00Z' - self.repository_updater.metadata['current']['root']['expires'] = expired_date - self.repository_updater.refresh() - - # Second, verify that expired root metadata is not updated if - # 'unsafely_update_root_if_necessary' is explicitly set to 'False'. - expired_date = '1960-01-01T12:00:00Z' - self.repository_updater.metadata['current']['root']['expires'] = expired_date - self.assertRaises(tuf.exceptions.ExpiredMetadataError, - self.repository_updater.refresh, - unsafely_update_root_if_necessary=False) - - repository = repo_tool.load_repository(self.repository_directory) - target3 = 'file3.txt' - - repository.targets.add_target(target3) - repository.targets.load_signing_key(self.role_keys['targets']['private']) - repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.writeall() - - # Move the staged metadata to the "live" metadata. - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Reference 'self.Repository.metadata['current']['targets']'. Ensure - # 'target3' is not already specified. - targets_metadata = self.repository_updater.metadata['current']['targets'] - self.assertFalse(target3 in targets_metadata['targets']) - - # Verify the expected version numbers of the roles to be modified. 
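The manual 'timestamp' -> 'snapshot' -> 'targets' sequence above mirrors the order refresh() follows for a client. For comparison, a compact usage sketch of the legacy client API exercised by these tests (paths, host, and port illustrative):

    import tuf.client.updater as updater
    import tuf.settings

    # Point the client at its local metadata and at a repository mirror.
    tuf.settings.repositories_directory = 'client'
    mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001',
                           'metadata_path': 'metadata',
                           'targets_path': 'targets'}}

    client = updater.Updater('test_repository1', mirrors)
    client.refresh()  # root -> timestamp -> snapshot -> targets, in that order
    targetinfo = client.get_one_valid_targetinfo('file1.txt')
    client.download_target(targetinfo, 'client')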
-    self.assertEqual(self.repository_updater.metadata['current']['targets']\
-                                                      ['version'], 1)
-    self.assertEqual(self.repository_updater.metadata['current']['snapshot']\
-                                                      ['version'], 1)
-    self.assertEqual(self.repository_updater.metadata['current']['timestamp']\
-                                                      ['version'], 1)
-
-    # Test: normal case.  'targets.json' should now specify 'target3', and the
-    # following top-level metadata should have also been updated:
-    # 'snapshot.json' and 'timestamp.json'.
-    self.repository_updater.refresh()
-
-    # Verify that the client's metadata was updated.
-    targets_metadata = self.repository_updater.metadata['current']['targets']
-    self.assertTrue(target3 in targets_metadata['targets'])
-
-    # Verify the expected version numbers of the updated roles.
-    self.assertEqual(self.repository_updater.metadata['current']['targets']\
-                                                      ['version'], 2)
-    self.assertEqual(self.repository_updater.metadata['current']['snapshot']\
-                                                      ['version'], 2)
-    self.assertEqual(self.repository_updater.metadata['current']['timestamp']\
-                                                      ['version'], 2)
-
-
-
-
-
-  def test_4__refresh_targets_metadata(self):
-    # Setup.
-    # It is assumed that the client repository has only loaded the top-level
-    # metadata.  Refresh the 'targets.json' metadata, including all delegated
-    # roles (i.e., the client should add the missing 'role1.json' metadata).
-    self.assertEqual(len(self.repository_updater.metadata['current']), 4)
-
-    # Test: normal case.
-    self.repository_updater._refresh_targets_metadata(refresh_all_delegated_roles=True)
-
-    # Verify that the client's metadata files were refreshed successfully.
-    self.assertEqual(len(self.repository_updater.metadata['current']), 6)
-
-    # Test for a non-existing rolename.
-    self.repository_updater._refresh_targets_metadata('bad_rolename',
-        refresh_all_delegated_roles=False)
-
-    # Test that non-json metadata in Snapshot is ignored.
-    self.repository_updater.metadata['current']['snapshot']['meta']['bad_role.xml'] = {}
-    self.repository_updater._refresh_targets_metadata(refresh_all_delegated_roles=True)
-
-
-
-  def test_5_all_targets(self):
-    # Setup
-    # As with '_refresh_targets_metadata()', update top-level metadata before
-    # calling one of the "targets" methods, as recommended by 'updater.py'.
-    self.repository_updater.refresh()
-
-    # Test: normal case.
-    with utils.ignore_deprecation_warnings('tuf.client.updater'):
-      all_targets = self.repository_updater.all_targets()
-
-    # Verify the format of 'all_targets'; it should correspond to
-    # 'TARGETINFOS_SCHEMA'.
-    self.assertTrue(tuf.formats.TARGETINFOS_SCHEMA.matches(all_targets))
-
-    # Verify that the correct number of records is in the 'all_targets' list,
-    # and that the expected filepaths are specified in the metadata.  In the
-    # targets directory of the repository, there should be 3 target files (2
-    # of which are specified by 'targets.json').  The delegated role 'role1'
-    # specifies 1 target file.  The expected total number of targets in
-    # 'all_targets' should be 3.
-    self.assertEqual(len(all_targets), 3)
-
-    target_filepaths = []
-    for target in all_targets:
-      target_filepaths.append(target['filepath'])
-
-    self.assertTrue('file1.txt' in target_filepaths)
-    self.assertTrue('file2.txt' in target_filepaths)
-    self.assertTrue('file3.txt' in target_filepaths)
-
-
-
-
-
-  def test_5_targets_of_role(self):
-    # Setup
-    # Remove knowledge of 'targets.json' from the metadata store.
-    self.repository_updater.metadata['current']['targets']
-
-    # Remove the metadata of the delegated roles.
- #shutil.rmtree(os.path.join(self.client_metadata, 'targets'))
- os.remove(os.path.join(self.client_metadata_current, 'targets.json'))
-
- # Extract the target files specified by the delegated role, 'role1.json',
- # as available on the server-side version of the role.
- role1_filepath = os.path.join(self.repository_directory, 'metadata',
- 'role1.json')
- role1_signable = securesystemslib.util.load_json_file(role1_filepath)
- expected_targets = role1_signable['signed']['targets']
-
-
- # Test: normal case.
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- targetinfos = self.repository_updater.targets_of_role('role1')
-
- # Verify that the expected role files were downloaded and installed.
- self.assertTrue(os.path.exists(os.path.join(self.client_metadata_current,
- 'targets.json')))
- self.assertTrue(os.path.exists(os.path.join(self.client_metadata_current,
- 'targets', 'role1.json')))
- self.assertTrue('targets' in self.repository_updater.metadata['current'])
- self.assertTrue('role1' in self.repository_updater.metadata['current'])
-
- # Verify that the list of targets was returned and that it contains valid
- # target files.
- self.assertTrue(tuf.formats.TARGETINFOS_SCHEMA.matches(targetinfos))
- for targetinfo in targetinfos:
- self.assertTrue((targetinfo['filepath'], targetinfo['fileinfo']) in expected_targets.items())
-
- # Test: Invalid arguments.
- # targets_of_role() expects a string rolename.
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater.targets_of_role,
- 8)
- self.assertRaises(tuf.exceptions.UnknownRoleError, self.repository_updater.targets_of_role,
- 'unknown_rolename')
-
-
-
-
-
- def test_6_get_one_valid_targetinfo(self):
- # Setup
- # Unlike some of the other tests, start up a fresh server here.
- # The SimpleHTTPServer started in setUpClass() has a tendency to
- # time out on Windows after a few tests.
-
- # Creates a subprocess running a server.
- server_process_handler = utils.TestServerProcess(log=logger,
- server=self.SIMPLE_SERVER_PATH)
-
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(server_process_handler.port) + repository_basepath
-
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata', 'targets_path': 'targets'}}
-
- # Creating a repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
- # Extract the file information of the targets specified in 'targets.json'.
- self.repository_updater.refresh()
- targets_metadata = self.repository_updater.metadata['current']['targets']
-
- target_files = targets_metadata['targets']
- # Extract a random target from 'target_files', which will be compared to
- # what is returned by get_one_valid_targetinfo(). Restore the popped target
- # (dict value stored in the metadata store) so that it can be found later.
- filepath, fileinfo = target_files.popitem()
- target_files[filepath] = fileinfo
-
- target_targetinfo = self.repository_updater.get_one_valid_targetinfo(filepath)
- self.assertTrue(tuf.formats.TARGETINFO_SCHEMA.matches(target_targetinfo))
- self.assertEqual(target_targetinfo['filepath'], filepath)
- self.assertEqual(target_targetinfo['fileinfo'], fileinfo)
-
- # Test: invalid target path.
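- # (Aside: 'self.random_path()' is a unittest_toolbox helper that returns
- # a random file path no role has signed for. A minimal illustration of
- # the expected behavior, with a made-up path:
- #
- #   updater.get_one_valid_targetinfo('surely/not/signed.txt')
- #   # --> raises tuf.exceptions.UnknownTargetError
- #
- # The leading separators are stripped below because target paths are
- # expected to be relative.)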
- self.assertRaises(tuf.exceptions.UnknownTargetError,
- self.repository_updater.get_one_valid_targetinfo,
- self.random_path().lstrip(os.sep).lstrip('/'))
-
- # Test updater.get_one_valid_targetinfo() backtracking behavior (enabled by
- # default).
- targets_directory = os.path.join(self.repository_directory, 'targets')
- os.makedirs(os.path.join(targets_directory, 'foo'))
-
- foo_package = 'foo/foo1.1.tar.gz'
- foo_pattern = 'foo/foo*.tar.gz'
-
- foo_fullpath = os.path.join(targets_directory, foo_package)
- with open(foo_fullpath, 'wb') as file_object:
- file_object.write(b'new release')
-
- # Modify delegations on the remote repository to test backtracking behavior.
- repository = repo_tool.load_repository(self.repository_directory)
-
-
- repository.targets.delegate('role3', [self.role_keys['targets']['public']],
- [foo_pattern])
-
- repository.targets.delegate('role4', [self.role_keys['targets']['public']],
- [foo_pattern], list_of_targets=[foo_package])
- repository.targets('role4').add_target(foo_package)
-
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.targets('role3').load_signing_key(self.role_keys['targets']['private'])
- repository.targets('role4').load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
-
- # updater.get_one_valid_targetinfo() should find 'foo1.1.tar.gz' by
- # backtracking to 'role4'. 'role3' allows backtracking.
- self.repository_updater.refresh()
- self.repository_updater.get_one_valid_targetinfo('foo/foo1.1.tar.gz')
-
- # A leading path separator is disallowed.
- self.assertRaises(tuf.exceptions.FormatError,
- self.repository_updater.get_one_valid_targetinfo, '/foo/foo1.1.tar.gz')
-
- # Test when 'role3' does *not* allow backtracking. If 'foo/foo1.1.tar.gz'
- # is not provided by the authoritative 'role3',
- # updater.get_one_valid_targetinfo() should raise a
- # 'tuf.exceptions.UnknownTargetError' exception.
- repository = repo_tool.load_repository(self.repository_directory)
-
- repository.targets.revoke('role3')
- repository.targets.revoke('role4')
-
- # Ensure we delegate in trusted order (i.e., 'role3' has higher priority.)
- repository.targets.delegate('role3', [self.role_keys['targets']['public']],
- [foo_pattern], terminating=True, list_of_targets=[])
-
- repository.targets.delegate('role4', [self.role_keys['targets']['public']],
- [foo_pattern], list_of_targets=[foo_package])
-
- repository.targets('role3').load_signing_key(self.role_keys['targets']['private'])
- repository.targets('role4').load_signing_key(self.role_keys['targets']['private'])
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Verify that 'tuf.exceptions.UnknownTargetError' is raised by
- # updater.get_one_valid_targetinfo().
- self.repository_updater.refresh()
- self.assertRaises(tuf.exceptions.UnknownTargetError,
- self.repository_updater.get_one_valid_targetinfo,
- 'foo/foo1.1.tar.gz')
-
- # Verify that a 'tuf.exceptions.FormatError' is raised for delegated paths
- # that contain a leading path separator.
- self.assertRaises(tuf.exceptions.FormatError,
- self.repository_updater.get_one_valid_targetinfo,
- '/foo/foo1.1.tar.gz')
-
- # Cleans the resources and flushes the logged lines (if any).
- server_process_handler.clean()
-
-
-
-
- def test_6_download_target(self):
- # Create a temporary directory (destination directory of downloaded targets)
- # that will be passed as an argument to 'download_target()'.
- destination_directory = self.make_temp_directory()
- target_filepaths = \
- list(self.repository_updater.metadata['current']['targets']['targets'].keys())
-
- # Test: normal case.
- # Get the target info, which is an argument to 'download_target()'.
-
- # 'target_filepaths' is expected to have at least two targets. The first
- # target will be used to test against download_target(). The second
- # will be used to test against download_target() and a repository with
- # 'consistent_snapshot' set to True.
- target_filepath1 = target_filepaths.pop()
- targetinfo = self.repository_updater.get_one_valid_targetinfo(target_filepath1)
- self.repository_updater.download_target(targetinfo,
- destination_directory)
-
- download_filepath = \
- os.path.join(destination_directory, target_filepath1.lstrip('/'))
- self.assertTrue(os.path.exists(download_filepath))
- length, hashes = \
- securesystemslib.util.get_file_details(download_filepath,
- securesystemslib.settings.HASH_ALGORITHMS)
- download_targetfileinfo = tuf.formats.make_targets_fileinfo(length, hashes)
-
- # Add any 'custom' data from the repository's target fileinfo to the
- # 'download_targetfileinfo' object being tested.
- if 'custom' in targetinfo['fileinfo']:
- download_targetfileinfo['custom'] = targetinfo['fileinfo']['custom']
-
- self.assertEqual(targetinfo['fileinfo'], download_targetfileinfo)
-
- # Test when consistent snapshot is set. First, create a valid
- # repository with consistent snapshot set (root.json contains a
- # "consistent_snapshot" entry that the updater uses to correctly fetch
- # snapshots). The updater expects the existence of
- # '<version_number>.filename' files if root.json sets 'consistent_snapshot
- # = True'.
-
- # The repository must be rewritten with 'consistent_snapshot' set.
- repository = repo_tool.load_repository(self.repository_directory)
-
- # Write metadata for all the top-level roles, since consistent snapshot
- # is now being set to true (i.e., the pre-generated repository isn't set
- # to support consistent snapshots). A new version of targets.json is needed
- # to ensure '<digest>.filename' target files are written to disk.
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.root.load_signing_key(self.role_keys['root']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
-
- repository.writeall(consistent_snapshot=True)
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # And ensure the client has the latest top-level metadata.
- self.repository_updater.refresh()
-
- target_filepath2 = target_filepaths.pop()
- targetinfo2 = self.repository_updater.get_one_valid_targetinfo(target_filepath2)
- self.repository_updater.download_target(targetinfo2,
- destination_directory)
-
- # Check that the file has been successfully downloaded.
- download_filepath = os.path.join(destination_directory, target_filepath2)
- self.assertTrue(os.path.exists(download_filepath))
-
- # Remove the file so that it can be downloaded again in the next test.
- os.remove(download_filepath)
-
- # Test downloading with consistent snapshot enabled, but without adding
- # the hash of the file as a prefix to its name.
-
- file1_path = targetinfo2['filepath']
- file1_hashes = securesystemslib.util.get_file_hashes(
- os.path.join(self.repository_directory, 'targets', file1_path),
- hash_algorithms=['sha256', 'sha512'])
-
- # Currently, these three files exist in the repository directory:
- # 'file1.txt', '<sha256_digest>.file1.txt' and '<sha512_digest>.file1.txt',
- # where the sha256 and sha512 digests are those of 'file1.txt'.
- # Remove the files with the hash digest prefix to ensure that
- # the served target file is not prefixed.
- os.remove(os.path.join(self.repository_directory, 'targets',
- file1_hashes['sha256'] + '.' + file1_path))
- os.remove(os.path.join(self.repository_directory, 'targets',
- file1_hashes['sha512'] + '.' + file1_path))
-
-
- self.repository_updater.download_target(targetinfo2,
- destination_directory,
- prefix_filename_with_hash=False)
-
- # Check that the file has been successfully downloaded.
- self.assertTrue(os.path.exists(download_filepath))
-
- # Test for a destination that cannot be written to (apart from a target
- # file that already exists at the destination), which should raise an
- # exception.
- bad_destination_directory = 'bad' * 2000
-
- try:
- self.repository_updater.download_target(targetinfo, bad_destination_directory)
-
- except OSError as e:
- self.assertTrue(
- e.errno in [errno.ENAMETOOLONG, errno.ENOENT, errno.EINVAL],
- "wrong errno: " + str(e.errno))
-
- else:
- self.fail('No OSError raised')
-
-
- # Test: Invalid arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.repository_updater.download_target,
- 8, destination_directory)
-
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.repository_updater.download_target,
- targetinfo, 8)
-
- # Test:
- # Attempt a file download of a valid target, where a download exception
- # occurs because the target is not within the mirror's confined target
- # directories. Adjust the mirrors dictionary so that the
- # 'confined_target_dirs' field contains at least one confined directory
- # and excludes the needed target file.
- mirrors = self.repository_updater.mirrors
- for mirror_name, mirror_info in mirrors.items():
- mirrors[mirror_name]['confined_target_dirs'] = [self.random_path()]
-
- try:
- self.repository_updater.download_target(targetinfo,
- destination_directory)
-
- except tuf.exceptions.NoWorkingMirrorError as exception:
- # Ensure that no mirrors were found due to a mismatch in confined target
- # directories. get_list_of_mirrors() returns an empty list in this case,
- # which does not generate specific exception errors.
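- # (Roughly, the confined-directory filtering can be pictured as the
- # following sketch; this is an illustration, not the actual
- # tuf.mirrors.get_list_of_mirrors() code:
- #
- #   valid_mirrors = [m for m in mirrors.values() if any(
- #       target_filepath.startswith(confined)
- #       for confined in m['confined_target_dirs'])]
- #
- # With a random confined directory, no mirror matches, so no per-mirror
- # errors are collected.)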
- self.assertEqual(len(exception.mirror_errors), 0)
-
- else:
- self.fail(
- 'Expected a NoWorkingMirrorError with zero mirror errors in it.')
-
-
-
-
-
- def test_7_updated_targets(self):
- # Verify that the list of targets returned by updated_targets() contains
- # all the files that need to be updated; these include modified and new
- # target files. Also, confirm that files that do not need to be updated
- # are absent from the list.
- # Setup
-
- # Unlike some of the other tests, start up a fresh server here.
- # The SimpleHTTPServer started in setUpClass() has a tendency to
- # time out on Windows after a few tests.
-
- # Creates a subprocess running a server.
- server_process_handler = utils.TestServerProcess(log=logger,
- server=self.SIMPLE_SERVER_PATH)
-
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(server_process_handler.port) + repository_basepath
-
- # Setting 'tuf.settings.repositories_directory' with the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.client_directory
-
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata', 'targets_path': 'targets'}}
-
- # Creating a repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
- # Create a temporary directory which will hold the client's target files.
- destination_directory = self.make_temp_directory()
-
- # Get the list of target files. It will be used as an argument to the
- # 'updated_targets()' function.
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- all_targets = self.repository_updater.all_targets()
-
- # Test for duplicates and targets in the root directory of the repository.
- additional_target = all_targets[0].copy()
- all_targets.append(additional_target)
- additional_target_in_root_directory = additional_target.copy()
- additional_target_in_root_directory['filepath'] = 'file1.txt'
- all_targets.append(additional_target_in_root_directory)
-
- # At this point the client needs to update and download all targets.
- # Test: normal cases.
- updated_targets = \
- self.repository_updater.updated_targets(all_targets, destination_directory)
-
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- all_targets = self.repository_updater.all_targets()
-
- # It is assumed that the pre-generated repository specifies two target
- # files in 'targets.json' and one delegated target file in 'role1.json'.
- self.assertEqual(len(updated_targets), 3)
-
- # Test: download one of the targets.
- download_target = copy.deepcopy(updated_targets).pop()
- self.repository_updater.download_target(download_target,
- destination_directory)
-
- updated_targets = \
- self.repository_updater.updated_targets(all_targets, destination_directory)
-
- self.assertEqual(len(updated_targets), 2)
-
- # Test: download all the targets.
- for download_target in all_targets:
- self.repository_updater.download_target(download_target,
- destination_directory)
- updated_targets = \
- self.repository_updater.updated_targets(all_targets, destination_directory)
-
- self.assertEqual(len(updated_targets), 0)
-
-
- # Test: Invalid arguments.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.repository_updater.updated_targets,
- 8, destination_directory)
-
- self.assertRaises(securesystemslib.exceptions.FormatError,
- self.repository_updater.updated_targets,
- all_targets, 8)
-
- # Modify one target file on the remote repository and re-add it to the
- # Targets role.
- repository = repo_tool.load_repository(self.repository_directory)
-
- target1 = os.path.join(self.repository_directory, 'targets', 'file1.txt')
- repository.targets.remove_target(os.path.basename(target1))
-
- with open(target1, 'ab') as file_object:
- file_object.write(b'append extra text')
-
- length, hashes = securesystemslib.util.get_file_details(target1)
-
- repository.targets.add_target(os.path.basename(target1))
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Ensure the client has up-to-date metadata.
- self.repository_updater.refresh()
-
- # Verify that the new target file is considered updated.
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- all_targets = self.repository_updater.all_targets()
- updated_targets = \
- self.repository_updater.updated_targets(all_targets, destination_directory)
- self.assertEqual(len(updated_targets), 1)
-
- # Cleans the resources and flushes the logged lines (if any).
- server_process_handler.clean()
-
-
-
-
- def test_8_remove_obsolete_targets(self):
- # Setup.
- # Unlike some of the other tests, start up a fresh server here.
- # The SimpleHTTPServer started in setUpClass() has a tendency to
- # time out on Windows after a few tests.
-
- # Creates a subprocess running a server.
- server_process_handler = utils.TestServerProcess(log=logger,
- server=self.SIMPLE_SERVER_PATH)
-
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(server_process_handler.port) + repository_basepath
-
- # Setting 'tuf.settings.repositories_directory' with the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.client_directory
-
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata', 'targets_path': 'targets'}}
-
- # Creating a repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
- # Create a temporary directory that will hold the client's target files.
- destination_directory = self.make_temp_directory()
-
- # Populate 'destination_directory' with all target files.
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- all_targets = self.repository_updater.all_targets()
-
- self.assertEqual(len(os.listdir(destination_directory)), 0)
-
- for target in all_targets:
- self.repository_updater.download_target(target, destination_directory)
-
- self.assertEqual(len(os.listdir(destination_directory)), 3)
-
- # Remove one of the target files from the server's repository.
- repository = repo_tool.load_repository(self.repository_directory)
- target1 = os.path.join(self.repository_directory, 'targets', 'file1.txt')
- repository.targets.remove_target(os.path.basename(target1))
-
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Update client's metadata.
- self.repository_updater.refresh()
-
- # Test: normal case.
- # Verify the number of target files in 'destination_directory' (it should
- # still be 3 after the update made to the remote repository, with
- # 'file1.txt' now obsolete), and call 'remove_obsolete_targets()'.
- with utils.ignore_deprecation_warnings('tuf.client.updater'):
- all_targets = self.repository_updater.all_targets()
-
- updated_targets = \
- self.repository_updater.updated_targets(all_targets,
- destination_directory)
-
- for updated_target in updated_targets:
- self.repository_updater.download_target(updated_target,
- destination_directory)
-
- self.assertEqual(len(os.listdir(destination_directory)), 3)
- self.repository_updater.remove_obsolete_targets(destination_directory)
- self.assertEqual(len(os.listdir(destination_directory)), 2)
-
- # Verify that, if there are no obsolete files, the number of files
- # in 'destination_directory' remains the same.
- self.repository_updater.remove_obsolete_targets(destination_directory)
- self.assertEqual(len(os.listdir(destination_directory)), 2)
-
- # Test coverage for a destination path that causes an exception not due
- # to an already removed target.
- bad_destination_directory = 'bad' * 2000
- self.repository_updater.remove_obsolete_targets(bad_destination_directory)
-
- # Test coverage for a target that is not specified in the current metadata.
- del self.repository_updater.metadata['current']['targets']['targets']['file2.txt']
- self.repository_updater.remove_obsolete_targets(destination_directory)
-
- # Test coverage for a role that doesn't exist in the previously trusted set
- # of metadata.
- del self.repository_updater.metadata['previous']['targets']
- self.repository_updater.remove_obsolete_targets(destination_directory)
-
- # Cleans the resources and flushes the logged lines (if any).
- server_process_handler.clean()
-
-
-
- def test_9__get_target_hash(self):
- # Test the normal case.
- # Test target filepaths with ASCII and non-ASCII characters.
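- # (The expected hashes below are, presumably, the SHA-256 digests of the
- # UTF-8 encoded target filepaths, roughly:
- #
- #   hashlib.sha256('/file1.txt'.encode('utf-8')).hexdigest()
- #
- # which is the scheme hashed bin delegations use to locate targets.)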
- expected_target_hashes = {
- '/file1.txt': 'e3a3d89eb3b70ce3fbce6017d7b8c12d4abd5635427a0e8a238f53157df85b3d',
- '/Jalape\xc3\xb1o': '78bfd5c314680545eb48ecad508aceb861f8d6e680f4fe1b791da45c298cda88'
- }
- for filepath, target_hash in expected_target_hashes.items():
- self.assertTrue(tuf.formats.RELPATH_SCHEMA.matches(filepath))
- self.assertTrue(securesystemslib.formats.HASH_SCHEMA.matches(target_hash))
- self.assertEqual(self.repository_updater._get_target_hash(filepath), target_hash)
-
- # Test for an improperly formatted argument.
- #self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._get_target_hash, 8)
-
-
-
-
- def test_10__check_file_length(self):
- # Test that an exception is raised if the file object's length does not
- # equal the trusted file length.
- with tempfile.TemporaryFile() as temp_file_object:
- temp_file_object.write(b'X')
- temp_file_object.seek(0)
- self.assertRaises(tuf.exceptions.DownloadLengthMismatchError,
- self.repository_updater._check_file_length,
- temp_file_object, 10)
-
-
-
-
-
- def test_10__targets_of_role(self):
- # Test for a non-existent role.
- self.assertRaises(tuf.exceptions.UnknownRoleError,
- self.repository_updater._targets_of_role,
- 'non-existent-role')
-
- # Test for a role that hasn't been loaded yet.
- del self.repository_updater.metadata['current']['targets']
- self.assertEqual(len(self.repository_updater._targets_of_role('targets',
- skip_refresh=True)), 0)
-
- # 'targets.json' tracks two targets.
- self.assertEqual(len(self.repository_updater._targets_of_role('targets')),
- 2)
-
-
-
- def test_10__preorder_depth_first_walk(self):
-
- # Test that an infinite loop is prevented if the target file is not found
- # and the max number of delegations is reached.
- valid_max_number_of_delegations = tuf.settings.MAX_NUMBER_OF_DELEGATIONS
- tuf.settings.MAX_NUMBER_OF_DELEGATIONS = 0
- self.assertEqual(None, self.repository_updater._preorder_depth_first_walk('unknown.txt'))
-
- # Reset the setting for the max number of delegations so that subsequent
- # unit tests reference the expected setting.
- tuf.settings.MAX_NUMBER_OF_DELEGATIONS = valid_max_number_of_delegations
-
- # Attempt to create a circular delegation, where role1 performs a
- # delegation to the top-level Targets role. The updater should ignore the
- # delegation and not raise an exception.
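- # (Sketch of the idea: the walk remembers the roles it has already
- # visited and skips a delegation it has seen before, so a cycle such as
- #
- #   targets -> role1 -> targets -> ...
- #
- # terminates instead of recursing forever. The exact bookkeeping lives
- # in _preorder_depth_first_walk() and is only paraphrased here.)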
- targets_path = os.path.join(self.client_metadata_current, 'targets.json')
- targets_metadata = securesystemslib.util.load_json_file(targets_path)
- targets_metadata['signed']['delegations']['roles'][0]['paths'] = ['/file8.txt']
- with open(targets_path, 'wb') as file_object:
- file_object.write(repo_lib._get_written_metadata(targets_metadata))
-
- role1_path = os.path.join(self.client_metadata_current, 'role1.json')
- role1_metadata = securesystemslib.util.load_json_file(role1_path)
- role1_metadata['signed']['delegations']['roles'][0]['name'] = 'targets'
- role1_metadata['signed']['delegations']['roles'][0]['paths'] = ['/file8.txt']
- with open(role1_path, 'wb') as file_object:
- file_object.write(repo_lib._get_written_metadata(role1_metadata))
-
- role2_path = os.path.join(self.client_metadata_current, 'role2.json')
- role2_metadata = securesystemslib.util.load_json_file(role2_path)
- role2_metadata['signed']['delegations']['roles'] = role1_metadata['signed']['delegations']['roles']
- role2_metadata['signed']['delegations']['roles'][0]['paths'] = ['/file8.txt']
- with open(role2_path, 'wb') as file_object:
- file_object.write(repo_lib._get_written_metadata(role2_metadata))
-
- logger.debug('attempting circular delegation')
- self.assertEqual(None, self.repository_updater._preorder_depth_first_walk('/file8.txt'))
-
-
-
-
-
-
- def test_10__visit_child_role(self):
- # Call _visit_child_role() and test the cases where the child role
- # specifies 'paths', where it specifies 'path_hash_prefixes', and where
- # both are missing.
-
- targets_role = self.repository_updater.metadata['current']['targets']
- targets_role['delegations']['roles'][0]['paths'] = ['/*.txt', '/target.exe']
- child_role = targets_role['delegations']['roles'][0]
-
- role1_path = os.path.join(self.client_metadata_current, 'role1.json')
- role1_metadata = securesystemslib.util.load_json_file(role1_path)
- role1_metadata['signed']['delegations']['roles'][0]['paths'] = ['/*.exe']
- with open(role1_path, 'wb') as file_object:
- file_object.write(repo_lib._get_written_metadata(role1_metadata))
-
- self.assertEqual(self.repository_updater._visit_child_role(child_role,
- '/target.exe'), child_role['name'])
-
- # Test for a valid path hash prefix...
- child_role['path_hash_prefixes'] = ['8baf']
- self.assertEqual(self.repository_updater._visit_child_role(child_role,
- '/file3.txt'), child_role['name'])
-
- # ... and an invalid one, as well.
- child_role['path_hash_prefixes'] = ['badd']
- self.assertEqual(self.repository_updater._visit_child_role(child_role,
- '/file3.txt'), None)
-
- # Test for a forbidden target.
- del child_role['path_hash_prefixes']
- self.repository_updater._visit_child_role(child_role, '/forbidden.tgz')
-
- # Verify that unequal path_hash_prefixes are skipped.
- child_role['path_hash_prefixes'] = ['bad', 'bad']
- self.assertEqual(None, self.repository_updater._visit_child_role(child_role,
- '/unknown.exe'))
-
- # Test when both 'paths' and 'path_hash_prefixes' are missing.
- del child_role['paths']
- del child_role['path_hash_prefixes']
- self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._visit_child_role,
- child_role, child_role['name'])
-
-
-
- def test_11__verify_metadata_file(self):
- # Test for invalid metadata content.
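- # (The file object written below contains b'X', which is not valid JSON.
- # A minimal illustration of why parsing fails:
- #
- #   json.loads('X')  # --> raises a JSON decoding error
- #
- # _verify_metadata_file() is expected to surface this as
- # tuf.exceptions.InvalidMetadataJSONError.)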
- with tempfile.TemporaryFile() as metadata_file_object:
- metadata_file_object.write(b'X')
- metadata_file_object.seek(0)
-
- self.assertRaises(tuf.exceptions.InvalidMetadataJSONError,
- self.repository_updater._verify_metadata_file,
- metadata_file_object, 'root')
-
-
- def test_13__targets_of_role(self):
- # Test the case where a list of targets is given. By default, the
- # 'targets' parameter is None.
- targets = [{'filepath': 'file1.txt', 'fileinfo': {'length': 1, 'hashes': {'sha256': 'abc'}}}]
- self.repository_updater._targets_of_role('targets',
- targets=targets, skip_refresh=False)
-
-
-
-
-class TestMultiRepoUpdater(unittest_toolbox.Modified_TestCase):
-
- def setUp(self):
- # Modified_TestCase handles temp dir removal.
- unittest_toolbox.Modified_TestCase.setUp(self)
- self.temporary_directory = self.make_temp_directory(directory=os.getcwd())
-
- # Copy the original repository files provided in the test folder so that
- # any modifications made to repository files are restricted to the copies.
- # The 'repository_data' directory is expected to exist in 'tuf/tests/'.
- original_repository_files = os.path.join(os.getcwd(), 'repository_data')
-
- self.temporary_repository_root = tempfile.mkdtemp(dir=self.temporary_directory)
-
- # Use an absolute path to simple_server.py because, in some tests, it
- # cannot otherwise be found: the current working directory is changed
- # when executing a subprocess.
- self.SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), 'simple_server.py')
-
- # The original repository, keystore, and client directories will be copied
- # for each test case.
- original_repository = os.path.join(original_repository_files, 'repository')
- original_client = os.path.join(original_repository_files, 'client', 'test_repository1')
- original_keystore = os.path.join(original_repository_files, 'keystore')
- original_map_file = os.path.join(original_repository_files, 'map.json')
-
- # Save references to the often-needed client repository directories.
- # Test cases need these references to access metadata and target files.
- self.repository_directory = os.path.join(self.temporary_repository_root,
- 'repository_server1')
- self.repository_directory2 = os.path.join(self.temporary_repository_root,
- 'repository_server2')
-
- # Setting 'tuf.settings.repositories_directory' with the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.temporary_repository_root
-
- repository_name = 'test_repository1'
- repository_name2 = 'test_repository2'
-
- self.client_directory = os.path.join(self.temporary_repository_root,
- repository_name)
- self.client_directory2 = os.path.join(self.temporary_repository_root,
- repository_name2)
-
- self.keystore_directory = os.path.join(self.temporary_repository_root,
- 'keystore')
- self.map_file = os.path.join(self.client_directory, 'map.json')
- self.map_file2 = os.path.join(self.client_directory2, 'map.json')
-
- # Copy the original 'repository', 'client', and 'keystore' directories
- # to the temporary repository the test cases can use.
- shutil.copytree(original_repository, self.repository_directory)
- shutil.copytree(original_repository, self.repository_directory2)
- shutil.copytree(original_client, self.client_directory)
- shutil.copytree(original_client, self.client_directory2)
- shutil.copyfile(original_map_file, self.map_file)
- shutil.copyfile(original_map_file, self.map_file2)
- shutil.copytree(original_keystore, self.keystore_directory)
-
- # Launch a SimpleHTTPServer (serves files in the current directory).
- # Test cases will request metadata and target files that have been
- # pre-generated in 'tuf/tests/repository_data', which will be served by the
- # SimpleHTTPServer launched here. The test cases of this unit test assume
- # the pre-generated metadata files have a specific structure, such
- # as a delegated role 'targets/role1', three target files, five key files,
- # etc.
-
- # Creates a subprocess running a server.
- self.server_process_handler = utils.TestServerProcess(log=logger,
- server=self.SIMPLE_SERVER_PATH, popen_cwd=self.repository_directory)
-
- logger.debug('Server process started.')
-
- # Creates a subprocess running a server.
- self.server_process_handler2 = utils.TestServerProcess(log=logger,
- server=self.SIMPLE_SERVER_PATH, popen_cwd=self.repository_directory2)
-
- logger.debug('Server process 2 started.')
-
- url_prefix = \
- 'http://' + utils.TEST_HOST_ADDRESS + ':' + \
- str(self.server_process_handler.port)
- url_prefix2 = \
- 'http://' + utils.TEST_HOST_ADDRESS + ':' + \
- str(self.server_process_handler2.port)
-
- # We have all of the necessary information for the two repository mirrors
- # in map.json, except for the url prefixes.
- # For the url prefixes, we create subprocesses that run a server script.
- # In the server scripts, we get a free port from the OS, which is sent
- # back to the parent process.
- # That's why we dynamically add the ports to the url prefixes
- # and update the content of map.json accordingly.
- self.map_file_path = os.path.join(self.client_directory, 'map.json')
- data = securesystemslib.util.load_json_file(self.map_file_path)
-
- data['repositories']['test_repository1'] = [url_prefix]
- data['repositories']['test_repository2'] = [url_prefix2]
- with open(self.map_file_path, 'w') as f:
- json.dump(data, f)
-
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata', 'targets_path': 'targets'}}
-
- self.repository_mirrors2 = {'mirror1': {'url_prefix': url_prefix2,
- 'metadata_path': 'metadata', 'targets_path': 'targets'}}
-
- # Create the repository instances. The test cases will use these client
- # updaters to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(repository_name,
- self.repository_mirrors)
- self.repository_updater2 = updater.Updater(repository_name2,
- self.repository_mirrors2)
-
- # Creating a multi-repo updater instance. The test cases will use it to
- # request and verify targetinfo across the two repositories.
- self.multi_repo_updater = updater.MultiRepoUpdater(self.map_file)
-
- # Metadata role keys are needed by the test cases to make changes to the
- # repository (e.g., adding a new target file to 'targets.json' and then
- # requesting a refresh()).
- self.role_keys = _load_role_keys(self.keystore_directory)
-
-
-
- def tearDown(self):
-
- # Cleans the resources and flushes the logged lines (if any).
- self.server_process_handler.clean()
- self.server_process_handler2.clean()
-
- # updater.Updater() populates the roledb with the name "test_repository1"
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
-
- # Remove the top-level temporary directory
- unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
-
- # UNIT TESTS.
- def test__init__(self):
- # The client's repository requires a metadata directory (and the 'current'
- # and 'previous' sub-directories), and at least the 'root.json' file.
- # setUp(), called before each test case, instantiates the required updater
- # objects and keys. The needed objects/data are available in
- # 'self.repository_updater', 'self.client_directory', etc.
-
- # Test: Invalid arguments.
- # Invalid 'updater_name' argument. String expected.
- self.assertRaises(securesystemslib.exceptions.FormatError,
- updater.MultiRepoUpdater, 8)
-
- # Restore 'tuf.settings.repositories_directory' to the original client
- # directory.
- tuf.settings.repositories_directory = self.client_directory
-
- # Test for a non-existent map file.
- self.assertRaises(tuf.exceptions.Error, updater.MultiRepoUpdater,
- 'non-existent.json')
-
- # Test for a map file that doesn't contain the required fields.
- root_filepath = os.path.join(
- self.repository_directory, 'metadata', 'root.json')
- self.assertRaises(securesystemslib.exceptions.FormatError,
- updater.MultiRepoUpdater, root_filepath)
-
- # Test for a valid instantiation.
- multi_repo_updater = updater.MultiRepoUpdater(self.map_file_path)
-
-
-
- def test__target_matches_path_pattern(self):
- multi_repo_updater = updater.MultiRepoUpdater(self.map_file_path)
- paths = ['foo*.tgz', 'bar*.tgz', 'file1.txt']
- self.assertTrue(
- multi_repo_updater._target_matches_path_pattern('bar-1.0.tgz', paths))
- self.assertTrue(
- multi_repo_updater._target_matches_path_pattern('file1.txt', paths))
- self.assertFalse(
- multi_repo_updater._target_matches_path_pattern('baz-1.0.tgz', paths))
-
-
-
- def test_get_valid_targetinfo(self):
- multi_repo_updater = updater.MultiRepoUpdater(self.map_file_path)
-
- # Verify that the multi-repo updater refuses to save targetinfo if
- # required local repositories are missing.
- repo_dir = os.path.join(tuf.settings.repositories_directory,
- 'test_repository1')
- backup_repo_dir = os.path.join(tuf.settings.repositories_directory,
- 'test_repository1.backup')
- shutil.move(repo_dir, backup_repo_dir)
- self.assertRaises(tuf.exceptions.Error,
- multi_repo_updater.get_valid_targetinfo, 'file3.txt')
-
- # Restore the client's repository directory.
- shutil.move(backup_repo_dir, repo_dir)
-
- # Verify that the Root file must exist.
- root_filepath = os.path.join(repo_dir, 'metadata', 'current', 'root.json')
- backup_root_filepath = root_filepath + '.backup'
- shutil.move(root_filepath, backup_root_filepath)
- self.assertRaises(tuf.exceptions.Error,
- multi_repo_updater.get_valid_targetinfo, 'file3.txt')
-
- # Restore the Root file.
- shutil.move(backup_root_filepath, root_filepath)
-
- # Test that the first mapping is skipped if it's irrelevant to the target
- # file.
- self.assertRaises(tuf.exceptions.UnknownTargetError,
- multi_repo_updater.get_valid_targetinfo, 'non-existent.txt')
-
- # Verify that a targetinfo is not returned for a non-existent target.
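- # (For reference, a 'mapping' entry in map.json looks roughly like the
- # following, per the multi-repository consensus design (TAP 4); the
- # field values here are illustrative:
- #
- #   {"paths": ["*"], "repositories": ["test_repository1"],
- #    "terminating": false, "threshold": 1}
- #
- # The assertions below toggle 'terminating' and 'threshold' in place.)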
- multi_repo_updater.map_file['mapping'][1]['terminating'] = False
- self.assertRaises(tuf.exceptions.UnknownTargetError,
- multi_repo_updater.get_valid_targetinfo, 'non-existent.txt')
- multi_repo_updater.map_file['mapping'][1]['terminating'] = True
-
- # Test for a mapping that sets terminating = True, and that appears before
- # the final mapping.
- multi_repo_updater.map_file['mapping'][0]['terminating'] = True
- self.assertRaises(tuf.exceptions.UnknownTargetError,
- multi_repo_updater.get_valid_targetinfo, 'bad3.txt')
- multi_repo_updater.map_file['mapping'][0]['terminating'] = False
-
- # Test for the case where multiple repos sign for the same target.
- valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file1.txt')
-
- multi_repo_updater.map_file['mapping'][0]['threshold'] = 2
- valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file1.txt')
-
- # Verify that valid targetinfo is matched for two repositories that provide
- # a different custom field. Make sure to set the 'match_custom_field'
- # argument to 'False' when calling get_valid_targetinfo().
- repository = repo_tool.load_repository(self.repository_directory2)
-
- target1 = os.path.join(self.repository_directory2, 'targets', 'file1.txt')
- repository.targets.remove_target(os.path.basename(target1))
-
- custom_field = {"custom": "my_custom_data"}
- repository.targets.add_target(os.path.basename(target1), custom_field)
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory2, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory2, 'metadata.staged'),
- os.path.join(self.repository_directory2, 'metadata'))
-
- # Do we get the expected match for the two targetinfos that only differ
- # by the custom field?
- valid_targetinfo = multi_repo_updater.get_valid_targetinfo(
- 'file1.txt', match_custom_field=False)
-
- # Verify the case where two repositories provide different targetinfo.
- # Modify file1.txt so that different length and hashes are reported by the
- # two repositories.
- repository = repo_tool.load_repository(self.repository_directory2)
- target1 = os.path.join(self.repository_directory2, 'targets', 'file1.txt')
- with open(target1, 'ab') as file_object:
- file_object.write(b'append extra text')
-
- repository.targets.remove_target(os.path.basename(target1))
-
- repository.targets.add_target(os.path.basename(target1))
- repository.targets.load_signing_key(self.role_keys['targets']['private'])
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory2, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory2, 'metadata.staged'),
- os.path.join(self.repository_directory2, 'metadata'))
-
- # Ensure the threshold is modified to 2 (assumed to be 1, by default) and
- # verify that get_valid_targetinfo() raises an UnknownTargetError
- # despite both repos signing for file1.txt.
- multi_repo_updater.map_file['mapping'][0]['threshold'] = 2 - self.assertRaises(tuf.exceptions.UnknownTargetError, - multi_repo_updater.get_valid_targetinfo, 'file1.txt') - - - - - - def test_get_updater(self): - multi_repo_updater = updater.MultiRepoUpdater(self.map_file_path) - - # Test for a non-existent repository name. - self.assertEqual(None, multi_repo_updater.get_updater('bad_repo_name')) - - # Test get_updater indirectly via the "private" _update_from_repository(). - self.assertRaises(tuf.exceptions.Error, multi_repo_updater._update_from_repository, 'bad_repo_name', 'file3.txt') - - # Test for a repository that doesn't exist. - multi_repo_updater.map_file['repositories']['bad_repo_name'] = ['https://bogus:30002'] - self.assertEqual(None, multi_repo_updater.get_updater('bad_repo_name')) - - -class TestUpdaterRolenames(unittest_toolbox.Modified_TestCase): - def setUp(self): - unittest_toolbox.Modified_TestCase.setUp(self) - - repo_dir = os.path.join(os.getcwd(), 'repository_data', 'fishy_rolenames') - - self.client_dir = self.make_temp_directory() - os.makedirs(os.path.join(self.client_dir, "fishy_rolenames", "metadata", "current")) - os.makedirs(os.path.join(self.client_dir, "fishy_rolenames", "metadata", "previous")) - shutil.copy( - os.path.join(repo_dir, 'metadata', '1.root.json'), - os.path.join(self.client_dir, "fishy_rolenames", "metadata", "current", "root.json") - ) - - simple_server_path = os.path.join(os.getcwd(), 'simple_server.py') - self.server_process_handler = utils.TestServerProcess(log=logger, - server=simple_server_path) - - url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \ - + str(self.server_process_handler.port) + "/repository_data/fishy_rolenames" - - tuf.settings.repositories_directory = self.client_dir - mirrors = {'mirror1': { - 'url_prefix': url_prefix, - 'metadata_path': 'metadata/', - 'targets_path': '' - }} - self.updater = updater.Updater("fishy_rolenames", mirrors) - - def tearDown(self): - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - self.server_process_handler.flush_log() - self.server_process_handler.clean() - unittest_toolbox.Modified_TestCase.tearDown(self) - - def test_unusual_rolenames(self): - """Test rolenames that may be tricky to handle as filenames - - The test data in repository_data/fishy_rolenames has been produced - semi-manually using RepositorySimulator: using the RepositorySimulator - in these tests directly (like test_updater_with_simulator.py does for - ngclient) might make more sense... but would require some integration work - """ - - # Make a target search that fetches the delegated targets - self.updater.refresh() - with self.assertRaises(tuf.exceptions.UnknownTargetError): - self.updater.get_one_valid_targetinfo("anything") - - # Assert that the metadata files are in the client metadata directory - metadata_dir = os.path.join( - self.client_dir, "fishy_rolenames", "metadata", "current" - ) - local_metadata = os.listdir(metadata_dir) - for fname in ['%C3%B6.json', '..%2Fa.json', '..json']: - self.assertTrue(fname in local_metadata) - - -def _load_role_keys(keystore_directory): - - # Populating 'self.role_keys' by importing the required public and private - # keys of 'tuf/tests/repository_data/'. The role keys are needed when - # modifying the remote repository used by the test cases in this unit test. - - # The pre-generated key files in 'repository_data/keystore' are all encrypted with - # a 'password' passphrase. 
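- # (For example, importing a single key pair with the repo_tool helpers
- # used below; the filenames refer to the pre-generated keystore:
- #
- #   public = repo_tool.import_ed25519_publickey_from_file('targets_key.pub')
- #   private = repo_tool.import_ed25519_privatekey_from_file(
- #       'targets_key', 'password')
- # )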
- EXPECTED_KEYFILE_PASSWORD = 'password'
-
- # Store and return the cryptography keys of the top-level roles, including
- # one delegated role.
- role_keys = {}
-
- root_key_file = os.path.join(keystore_directory, 'root_key')
- targets_key_file = os.path.join(keystore_directory, 'targets_key')
- snapshot_key_file = os.path.join(keystore_directory, 'snapshot_key')
- timestamp_key_file = os.path.join(keystore_directory, 'timestamp_key')
- delegation_key_file = os.path.join(keystore_directory, 'delegation_key')
-
- role_keys = {'root': {}, 'targets': {}, 'snapshot': {}, 'timestamp': {},
- 'role1': {}}
-
- # Import the top-level and delegated role public keys.
- role_keys['root']['public'] = \
- repo_tool.import_rsa_publickey_from_file(root_key_file+'.pub')
- role_keys['targets']['public'] = \
- repo_tool.import_ed25519_publickey_from_file(targets_key_file+'.pub')
- role_keys['snapshot']['public'] = \
- repo_tool.import_ed25519_publickey_from_file(snapshot_key_file+'.pub')
- role_keys['timestamp']['public'] = \
- repo_tool.import_ed25519_publickey_from_file(timestamp_key_file+'.pub')
- role_keys['role1']['public'] = \
- repo_tool.import_ed25519_publickey_from_file(delegation_key_file+'.pub')
-
- # Import the private keys of the top-level and delegated roles.
- role_keys['root']['private'] = \
- repo_tool.import_rsa_privatekey_from_file(root_key_file,
- EXPECTED_KEYFILE_PASSWORD)
- role_keys['targets']['private'] = \
- repo_tool.import_ed25519_privatekey_from_file(targets_key_file,
- EXPECTED_KEYFILE_PASSWORD)
- role_keys['snapshot']['private'] = \
- repo_tool.import_ed25519_privatekey_from_file(snapshot_key_file,
- EXPECTED_KEYFILE_PASSWORD)
- role_keys['timestamp']['private'] = \
- repo_tool.import_ed25519_privatekey_from_file(timestamp_key_file,
- EXPECTED_KEYFILE_PASSWORD)
- role_keys['role1']['private'] = \
- repo_tool.import_ed25519_privatekey_from_file(delegation_key_file,
- EXPECTED_KEYFILE_PASSWORD)
-
- return role_keys
-
-
-if __name__ == '__main__':
- utils.configure_test_logging(sys.argv)
- unittest.main()
diff --git a/tests/test_updater_root_rotation_integration_old.py b/tests/test_updater_root_rotation_integration_old.py
deleted file mode 100755
index b8f93043ba..0000000000
--- a/tests/test_updater_root_rotation_integration_old.py
+++ /dev/null
@@ -1,685 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2016 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-<Program Name>
- test_updater_root_rotation_integration_old.py
-
-<Author>
- Evan Cordell.
-
-<Started>
- August 8, 2016.
-
-<Copyright>
- See LICENSE-MIT OR LICENSE for licensing information.
-
-<Purpose>
- 'test_updater_root_rotation_integration_old.py' provides a collection of
- methods that test root key rotation in the example client.
-
-
- Test cases here should follow a specific order (i.e., independent methods are
- tested before dependent methods). More accurately, least dependent methods
- are tested before most dependent methods. There is no reason to rewrite or
- construct other methods that replicate already-tested methods solely for
- testing purposes. This is possible because the 'unittest.TestCase' class
- guarantees the order of unit tests. The 'test_something_A' method would
- be tested before 'test_something_B'. To ensure the expected order of tests,
- a number is placed after 'test' and before the method's name, like so:
- 'test_1_check_directory'. The number is a measure of dependence, where 1 is
- less dependent than 2.
-""" - -import os -import shutil -import tempfile -import logging -import unittest -import filecmp -import sys - -import tuf -import tuf.log -import tuf.keydb -import tuf.roledb -import tuf.exceptions -import tuf.repository_tool as repo_tool -import tuf.unittest_toolbox as unittest_toolbox -import tuf.client.updater as updater -import tuf.settings - -from tests import utils - -import securesystemslib - -logger = logging.getLogger(__name__) -repo_tool.disable_console_log_messages() - - -class TestUpdater(unittest_toolbox.Modified_TestCase): - - @classmethod - def setUpClass(cls): - # Create a temporary directory to store the repository, metadata, and target - # files. 'temporary_directory' must be deleted in TearDownModule() so that - # temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). Test - # cases will request metadata and target files that have been pre-generated - # in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of - # 'test_updater_root_rotation_integration_old.py' assume the - # pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.server_process_handler = utils.TestServerProcess(log=logger) - - - - - @classmethod - def tearDownClass(cls): - # Cleans the resources and flush the logged lines (if any). - cls.server_process_handler.clean() - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated for the test cases. - shutil.rmtree(cls.temporary_directory) - - - - - def setUp(self): - # We are inheriting from custom class. - unittest_toolbox.Modified_TestCase.setUp(self) - - self.repository_name = 'test_repository1' - - # Copy the original repository files provided in the test folder so that - # any modifications made to repository files are restricted to the copies. - # The 'repository_data' directory is expected to exist in 'tuf.tests/'. - original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = \ - self.make_temp_directory(directory=self.temporary_directory) - - # The original repository, keystore, and client directories will be copied - # for each test case. - original_repository = os.path.join(original_repository_files, 'repository') - original_keystore = os.path.join(original_repository_files, 'keystore') - original_client = os.path.join(original_repository_files, 'client') - - # Save references to the often-needed client repository directories. - # Test cases need these references to access metadata and target files. - self.repository_directory = \ - os.path.join(temporary_repository_root, 'repository') - self.keystore_directory = \ - os.path.join(temporary_repository_root, 'keystore') - self.client_directory = os.path.join(temporary_repository_root, 'client') - self.client_metadata = os.path.join(self.client_directory, - self.repository_name, 'metadata') - self.client_metadata_current = os.path.join(self.client_metadata, 'current') - self.client_metadata_previous = os.path.join(self.client_metadata, 'previous') - - # Copy the original 'repository', 'client', and 'keystore' directories - # to the temporary repository the test cases can use. 
- shutil.copytree(original_repository, self.repository_directory)
- shutil.copytree(original_client, self.client_directory)
- shutil.copytree(original_keystore, self.keystore_directory)
-
- # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
- repository_basepath = self.repository_directory[len(os.getcwd()):]
- url_prefix = 'http://' + utils.TEST_HOST_ADDRESS + ':' \
- + str(self.server_process_handler.port) + repository_basepath
-
- # Setting 'tuf.settings.repositories_directory' with the temporary client
- # directory copied from the original repository files.
- tuf.settings.repositories_directory = self.client_directory
-
- self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
- 'metadata_path': 'metadata',
- 'targets_path': 'targets'}}
-
- # Creating a repository instance. The test cases will use this client
- # updater to refresh metadata, fetch target files, etc.
- self.repository_updater = updater.Updater(self.repository_name,
- self.repository_mirrors)
-
- # Metadata role keys are needed by the test cases to make changes to the
- # repository (e.g., adding a new target file to 'targets.json' and then
- # requesting a refresh()).
- self.role_keys = _load_role_keys(self.keystore_directory)
-
-
-
- def tearDown(self):
- tuf.roledb.clear_roledb(clear_all=True)
- tuf.keydb.clear_keydb(clear_all=True)
-
- # Logs stdout and stderr from the server subprocess.
- self.server_process_handler.flush_log()
-
- # Remove the temporary directory
- unittest_toolbox.Modified_TestCase.tearDown(self)
-
-
- # UNIT TESTS.
- def test_root_rotation(self):
- repository = repo_tool.load_repository(self.repository_directory)
- repository.root.threshold = 2
-
- repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
- repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
-
- # Errors: not enough signing keys to satisfy root's threshold.
- self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall)
-
- repository.root.add_verification_key(self.role_keys['role1']['public'])
- repository.root.load_signing_key(self.role_keys['root']['private'])
- repository.root.load_signing_key(self.role_keys['role1']['private'])
- repository.writeall()
-
- repository.root.add_verification_key(self.role_keys['snapshot']['public'])
- repository.root.load_signing_key(self.role_keys['snapshot']['private'])
- repository.root.threshold = 3
- repository.writeall()
-
- # Move the staged metadata to the "live" metadata.
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
- self.repository_updater.refresh()
-
-
-
- def test_verify_root_with_current_keyids_and_threshold(self):
- """
- Each root file is signed by the current root threshold of keys as well
- as the previous root threshold of keys.
Test that a root file which is
- not 'self-signed' with the current root threshold of keys causes the
- update to fail.
- """
- # Load repository with root.json == 1.root.json (available on client)
- # Signing key: "root", Threshold: 1
- repository = repo_tool.load_repository(self.repository_directory)
-
- # Rotate keys and update root: 1.root.json --> 2.root.json
- # Signing key: "root" (previous) and "root2" (current)
- # Threshold (for both): 1
- repository.root.load_signing_key(self.role_keys['root']['private'])
- repository.root.add_verification_key(self.role_keys['root2']['public'])
- repository.root.load_signing_key(self.role_keys['root2']['private'])
- # Remove the previous "root" key from the list of current
- # verification keys
- repository.root.remove_verification_key(self.role_keys['root']['public'])
- repository.writeall()
-
- # Move staged metadata to "live" metadata
- shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
- shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
- os.path.join(self.repository_directory, 'metadata'))
-
- # Intercept 2.root.json and tamper with the "root2" (current) key signature
- root2_path_live = os.path.join(
- self.repository_directory, 'metadata', '2.root.json')
- root2 = securesystemslib.util.load_json_file(root2_path_live)
-
- for idx, sig in enumerate(root2['signatures']):
- if sig['keyid'] == self.role_keys['root2']['public']['keyid']:
- sig_len = len(root2['signatures'][idx]['sig'])
- root2['signatures'][idx]['sig'] = "deadbeef".ljust(sig_len, '0')
-
- root2_fobj = tempfile.TemporaryFile()
- root2_fobj.write(tuf.repository_lib._get_written_metadata(root2))
- securesystemslib.util.persist_temp_file(root2_fobj, root2_path_live)
-
- # Update 1.root.json -> 2.root.json
- # Signature verification with the current keys should fail because we
- # replaced the "root2" signature with an invalid one.
- with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
- self.repository_updater.refresh()
-
- for mirror_url, mirror_error in cm.exception.mirror_errors.items():
- self.assertTrue(mirror_url.endswith('/2.root.json'))
- self.assertTrue(isinstance(mirror_error,
- securesystemslib.exceptions.BadSignatureError))
-
- # Assert that the current 'root.json' on the client side is the verified one
- self.assertTrue(filecmp.cmp(
- os.path.join(self.repository_directory, 'metadata', '1.root.json'),
- os.path.join(self.client_metadata_current, 'root.json')))
-
-
-
-
-
- def test_verify_root_with_duplicate_current_keyids(self):
- """
- Each root file is signed by the current root threshold of keys as well
- as the previous root threshold of keys. In each case, a keyid must only
- count once towards the threshold. Test that the root-specific signature
- verification implemented in _verify_root_self_signed() only counts one
- signature per keyid towards the threshold.
- """ - # Load repository with root.json == 1.root.json (available on client) - # Signing key: "root", Threshold: 1 - repository = repo_tool.load_repository(self.repository_directory) - - # Add an additional signing key and bump the threshold to 2 - repository.root.load_signing_key(self.role_keys['root']['private']) - repository.root.add_verification_key(self.role_keys['root2']['public']) - repository.root.load_signing_key(self.role_keys['root2']['private']) - repository.root.threshold = 2 - repository.writeall() - - # Move staged metadata to "live" metadata - shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) - shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), - os.path.join(self.repository_directory, 'metadata')) - - # Modify 2.root.json and list two signatures with the same keyid - root2_path_live = os.path.join( - self.repository_directory, 'metadata', '2.root.json') - root2 = securesystemslib.util.load_json_file(root2_path_live) - - signatures = [] - signatures.append(root2['signatures'][0]) - signatures.append(root2['signatures'][0]) - - root2['signatures'] = signatures - - root2_fobj = tempfile.TemporaryFile() - root2_fobj.write(tuf.repository_lib._get_written_metadata(root2)) - securesystemslib.util.persist_temp_file(root2_fobj, root2_path_live) - - # Update 1.root.json -> 2.root.json - # Signature verification with new keys should fail because the threshold - # can only be met by two signatures with the same keyid - with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm: - self.repository_updater.refresh() - - for mirror_url, mirror_error in cm.exception.mirror_errors.items(): - self.assertTrue(mirror_url.endswith('/2.root.json')) - self.assertTrue(isinstance(mirror_error, - securesystemslib.exceptions.BadSignatureError)) - - # Assert that the current 'root.json' on the client side is the verified one - self.assertTrue(filecmp.cmp( - os.path.join(self.repository_directory, 'metadata', '1.root.json'), - os.path.join(self.client_metadata_current, 'root.json'))) - - - - - - def test_root_rotation_full(self): - """Test that a client whose root is outdated by multiple versions and who - has none of the latest nor next-to-latest root keys can still update and - does so by incrementally verifying all roots until the most recent one. """ - # Load initial repository with 1.root.json == root.json, signed by "root" - # key. This is the root.json that is already on the client. 
-    repository = repo_tool.load_repository(self.repository_directory)
-
-    # 1st rotation: 1.root.json --> 2.root.json
-    # 2.root.json will be signed by the previous "root" key and the new
-    # "root2" key.
-    repository.root.load_signing_key(self.role_keys['root']['private'])
-    repository.root.add_verification_key(self.role_keys['root2']['public'])
-    repository.root.load_signing_key(self.role_keys['root2']['private'])
-    repository.writeall()
-
-    # 2nd rotation: 2.root.json --> 3.root.json
-    # 3.root.json will be signed by the previous "root2" key and the new
-    # "root3" key.
-    repository.root.unload_signing_key(self.role_keys['root']['private'])
-    repository.root.remove_verification_key(self.role_keys['root']['public'])
-    repository.root.add_verification_key(self.role_keys['root3']['public'])
-    repository.root.load_signing_key(self.role_keys['root3']['private'])
-    repository.writeall()
-
-    # Move staged metadata to "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # Update on the client: 1.root.json --> 2.root.json --> 3.root.json.
-    self.repository_updater.refresh()
-
-    # Assert that the client updated to the latest root from the repository.
-    self.assertTrue(filecmp.cmp(
-      os.path.join(self.repository_directory, 'metadata', '3.root.json'),
-      os.path.join(self.client_metadata_current, 'root.json')))
-
-
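test_root_rotation_max below depends on the update walk being bounded: the client fetches one numbered root version at a time and gives up after tuf.settings.MAX_NUMBER_ROOT_ROTATIONS steps past its trusted version. A hedged sketch of that bounded loop, where download_version() and verify_new_root() are placeholder callables rather than real updater APIs:

    def update_root_chain(trusted_root, max_rotations,
                          download_version, verify_new_root):
        # Walk forward one root version at a time, at most max_rotations steps.
        current = trusted_root['version']
        for version in range(current + 1, current + max_rotations + 1):
            new_root = download_version(version)  # e.g. '<version>.root.json'
            if new_root is None:  # the next version is not published yet
                break
            verify_new_root(trusted_root, new_root)  # previous- and self-threshold
            trusted_root = new_root  # only now does trust move forward
        return trusted_root

With max_rotations = 2 and a trusted version of 1, the loop stops at 3.root.json even if 4.root.json exists, which is the behavior the test asserts.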
-  def test_root_rotation_max(self):
-    """Test that the client does not rotate beyond a configured upper bound,
-    i.e. `current_version + MAX_NUMBER_ROOT_ROTATIONS`."""
-    # NOTE: The nature of the root changes below is irrelevant. Here we only
-    # want the client to update, but not beyond a configured upper bound.
-
-    # 1.root.json --> 2.root.json (add root2 and root3 keys)
-    repository = repo_tool.load_repository(self.repository_directory)
-    repository.root.load_signing_key(self.role_keys['root']['private'])
-    repository.root.add_verification_key(self.role_keys['root2']['public'])
-    repository.root.load_signing_key(self.role_keys['root2']['private'])
-    repository.root.add_verification_key(self.role_keys['root3']['public'])
-    repository.root.load_signing_key(self.role_keys['root3']['private'])
-    repository.writeall()
-
-    # 2.root.json --> 3.root.json (change threshold)
-    repository.root.threshold = 2
-    repository.writeall()
-
-    # 3.root.json --> 4.root.json (change threshold again)
-    repository.root.threshold = 3
-    repository.writeall()
-
-    # Move staged metadata to "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # Assert that the repo indeed has "4.root.json" and that it is the
-    # latest root.
-    self.assertTrue(filecmp.cmp(
-      os.path.join(self.repository_directory, 'metadata', '4.root.json'),
-      os.path.join(self.repository_directory, 'metadata', 'root.json')))
-
-    # Lower the max root rotation cap so that the client stops updating early.
-    max_rotation_backup = tuf.settings.MAX_NUMBER_ROOT_ROTATIONS
-    tuf.settings.MAX_NUMBER_ROOT_ROTATIONS = 2
-
-    # Update on the client: 1.root.json --> 2.root.json --> 3.root.json,
-    # but stop before updating to 4.root.json.
-    self.repository_updater.refresh()
-
-    # Assert that the client indeed only updated up to 3.root.json.
-    self.assertTrue(filecmp.cmp(
-      os.path.join(self.repository_directory, 'metadata', '3.root.json'),
-      os.path.join(self.client_metadata_current, 'root.json')))
-
-    # Reset the rotation cap.
-    tuf.settings.MAX_NUMBER_ROOT_ROTATIONS = max_rotation_backup
-
-
-
-  def test_root_rotation_missing_keys(self):
-    repository = repo_tool.load_repository(self.repository_directory)
-
-    # A partially written root.json (threshold = 2, but signed with only 1
-    # key) causes an invalid root chain later.
-    repository.root.threshold = 2
-    repository.root.load_signing_key(self.role_keys['root']['private'])
-    repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
-    repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
-
-    repository.write('root')
-    repository.write('snapshot')
-    repository.write('timestamp')
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # Create a new root.json. It is still not valid, because it is not signed
-    # by a threshold (2) of previous keys.
-    repository.root.add_verification_key(self.role_keys['role1']['public'])
-    repository.root.load_signing_key(self.role_keys['role1']['private'])
-
-    repository.writeall()
-
-    repository.root.add_verification_key(self.role_keys['snapshot']['public'])
-    repository.root.load_signing_key(self.role_keys['snapshot']['private'])
-    repository.root.threshold = 3
-    repository.writeall()
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
-      self.repository_updater.refresh()
-
-    for mirror_url, mirror_error in cm.exception.mirror_errors.items():
-      self.assertTrue(mirror_url.endswith('/2.root.json'))
-      self.assertTrue(isinstance(mirror_error,
-          securesystemslib.exceptions.BadSignatureError))
-
-    # Assert that the current 'root.json' on the client side is the verified one.
-    self.assertTrue(filecmp.cmp(
-      os.path.join(self.repository_directory, 'metadata', '1.root.json'),
-      os.path.join(self.client_metadata_current, 'root.json')))
-
-
-
-
-  def test_root_rotation_unmet_last_version_threshold(self):
-    """Test that the client detects a root.json version that is not signed
-    by a previous threshold of signatures."""
-
-    repository = repo_tool.load_repository(self.repository_directory)
-
-    # Add verification keys.
-    repository.root.add_verification_key(self.role_keys['root']['public'])
-    repository.root.add_verification_key(self.role_keys['role1']['public'])
-
-    repository.targets.add_verification_key(self.role_keys['targets']['public'])
-    repository.snapshot.add_verification_key(self.role_keys['snapshot']['public'])
-    repository.timestamp.add_verification_key(self.role_keys['timestamp']['public'])
-
-    repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
-    repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
-
-    # Add signing keys.
-    repository.root.load_signing_key(self.role_keys['root']['private'])
-    repository.root.load_signing_key(self.role_keys['role1']['private'])
-
-    # Set the root threshold.
-    repository.root.threshold = 2
-    repository.writeall()
-
-    # Unload root's previous signing keys to ensure that these keys are not
-    # used by mistake.
-    repository.root.unload_signing_key(self.role_keys['role1']['private'])
-    repository.root.unload_signing_key(self.role_keys['root']['private'])
-
-    # Add a new verification key.
-    repository.root.add_verification_key(self.role_keys['snapshot']['public'])
-
-    # Remove one of the original verification keys.
-    repository.root.remove_verification_key(self.role_keys['role1']['public'])
-
-    # Set the threshold for the new root file, but note that the previous
-    # threshold of 2 must still be met.
-    repository.root.threshold = 1
-
-    repository.root.load_signing_key(self.role_keys['role1']['private'])
-    repository.root.load_signing_key(self.role_keys['snapshot']['private'])
-
-    repository.snapshot.load_signing_key(self.role_keys['snapshot']['private'])
-    repository.timestamp.load_signing_key(self.role_keys['timestamp']['private'])
-
-    # We use write() rather than writeall() because the latter should fail due
-    # to the missing self.role_keys['root'] signature.
-    repository.write('root', increment_version_number=True)
-    repository.write('snapshot', increment_version_number=True)
-    repository.write('timestamp', increment_version_number=True)
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # The following refresh should fail because root must be signed by the
-    # previous self.role_keys['root'] key, which wasn't loaded.
-    with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
-      self.repository_updater.refresh()
-
-    for mirror_url, mirror_error in cm.exception.mirror_errors.items():
-      self.assertTrue(mirror_url.endswith('/3.root.json'))
-      self.assertTrue(isinstance(mirror_error,
-          securesystemslib.exceptions.BadSignatureError))
-
-    # Assert that the current 'root.json' on the client side is the verified one.
-    self.assertTrue(filecmp.cmp(
-      os.path.join(self.repository_directory, 'metadata', '2.root.json'),
-      os.path.join(self.client_metadata_current, 'root.json')))
-
-
-
-  def test_root_rotation_unmet_new_threshold(self):
-    """Test that the client detects a root.json version that is not signed
-    by a current threshold of signatures."""
-    repository = repo_tool.load_repository(self.repository_directory)
-
-    # Create a new, valid root.json.
-    repository.root.threshold = 2
-    repository.root.load_signing_key(self.role_keys['root']['private'])
-    repository.root.add_verification_key(self.role_keys['root2']['public'])
-    repository.root.load_signing_key(self.role_keys['root2']['private'])
-
-    repository.writeall()
-
-    # Increase the threshold and add a new verification key without
-    # actually loading the signing key.
-    repository.root.threshold = 3
-    repository.root.add_verification_key(self.role_keys['root3']['public'])
-
-    # writeall() fails as expected since the third signature is missing.
-    self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall)
-    # Write an invalid '3.root.json' as partially signed.
-    repository.write('root')
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-
-    # The following refresh should fail because root must be signed by the
-    # current self.role_keys['root3'] key, which wasn't loaded.
-    with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
-      self.repository_updater.refresh()
-
-    for mirror_url, mirror_error in cm.exception.mirror_errors.items():
-      self.assertTrue(mirror_url.endswith('/3.root.json'))
-      self.assertTrue(isinstance(mirror_error,
-          securesystemslib.exceptions.BadSignatureError))
-
-    # Assert that the current 'root.json' on the client side is the verified one.
-    self.assertTrue(filecmp.cmp(
-      os.path.join(self.repository_directory, 'metadata', '2.root.json'),
-      os.path.join(self.client_metadata_current, 'root.json')))
-
-
-
-  def test_root_rotation_discard_untrusted_version(self):
-    """Test that the client discards a root.json version that failed the
-    signature verification."""
-    repository = repo_tool.load_repository(self.repository_directory)
-
-    # Rotate the root key without signing with the previous version's
-    # "root" key.
-    repository.root.remove_verification_key(self.role_keys['root']['public'])
-    repository.root.add_verification_key(self.role_keys['root2']['public'])
-    repository.root.load_signing_key(self.role_keys['root2']['private'])
-
-    # 2.root.json
-    repository.writeall()
-
-    # Move the staged metadata to the "live" metadata.
-    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
-    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
-                    os.path.join(self.repository_directory, 'metadata'))
-
-    # Refresh on the client side should fail because 2.root.json is not signed
-    # with a threshold of previous keys.
-    with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
-      self.repository_updater.refresh()
-
-    for mirror_url, mirror_error in cm.exception.mirror_errors.items():
-      self.assertTrue(mirror_url.endswith('/2.root.json'))
-      self.assertTrue(isinstance(mirror_error,
-          securesystemslib.exceptions.BadSignatureError))
-
-    # Assert that the current 'root.json' on the client side is the trusted
-    # one and that 2.root.json is discarded.
-    self.assertTrue(filecmp.cmp(
-      os.path.join(self.repository_directory, 'metadata', '1.root.json'),
-      os.path.join(self.client_metadata_current, 'root.json')))
-
-
-
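The helper below returns a two-level dictionary mapping each role name to its 'public' and 'private' key objects. A short usage sketch, with an illustrative keystore path (the password matches the test fixtures):

    # Illustrative use of _load_role_keys(); not part of the deleted file.
    role_keys = _load_role_keys(os.path.join('repository_data', 'keystore'))
    root_signing_key = role_keys['root']['private']   # RSA key, signs root.json
    root2_public_key = role_keys['root2']['public']   # ed25519 verification key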
-def _load_role_keys(keystore_directory):
-
-  # Populate the returned 'role_keys' dictionary by importing the required
-  # public and private keys of 'tuf/tests/repository_data/'. The role keys
-  # are needed when modifying the remote repository used by the test cases in
-  # this unit test.
-
-  # The pre-generated key files in 'repository_data/keystore' are all
-  # encrypted with a 'password' passphrase.
-  EXPECTED_KEYFILE_PASSWORD = 'password'
-
-  # Store and return the cryptographic keys of the top-level roles, including
-  # 1 delegated role.
-  root_key_file = os.path.join(keystore_directory, 'root_key')
-  root2_key_file = os.path.join(keystore_directory, 'root_key2')
-  root3_key_file = os.path.join(keystore_directory, 'root_key3')
-  targets_key_file = os.path.join(keystore_directory, 'targets_key')
-  snapshot_key_file = os.path.join(keystore_directory, 'snapshot_key')
-  timestamp_key_file = os.path.join(keystore_directory, 'timestamp_key')
-  delegation_key_file = os.path.join(keystore_directory, 'delegation_key')
-
-  role_keys = {'root': {}, 'root2': {}, 'root3': {}, 'targets': {},
-      'snapshot': {}, 'timestamp': {}, 'role1': {}}
-
-  # Import the top-level and delegated role public keys.
-  role_keys['root']['public'] = \
-    repo_tool.import_rsa_publickey_from_file(root_key_file+'.pub')
-  role_keys['root2']['public'] = \
-    repo_tool.import_ed25519_publickey_from_file(root2_key_file+'.pub')
-  role_keys['root3']['public'] = \
-    repo_tool.import_ecdsa_publickey_from_file(root3_key_file+'.pub')
-  role_keys['targets']['public'] = \
-    repo_tool.import_ed25519_publickey_from_file(targets_key_file+'.pub')
-  role_keys['snapshot']['public'] = \
-    repo_tool.import_ed25519_publickey_from_file(snapshot_key_file+'.pub')
-  role_keys['timestamp']['public'] = \
-    repo_tool.import_ed25519_publickey_from_file(timestamp_key_file+'.pub')
-  role_keys['role1']['public'] = \
-    repo_tool.import_ed25519_publickey_from_file(delegation_key_file+'.pub')
-
-  # Import the private keys of the top-level and delegated roles.
-  role_keys['root']['private'] = \
-    repo_tool.import_rsa_privatekey_from_file(root_key_file,
-        EXPECTED_KEYFILE_PASSWORD)
-  role_keys['root2']['private'] = \
-    repo_tool.import_ed25519_privatekey_from_file(root2_key_file,
-        EXPECTED_KEYFILE_PASSWORD)
-  role_keys['root3']['private'] = \
-    repo_tool.import_ecdsa_privatekey_from_file(root3_key_file,
-        EXPECTED_KEYFILE_PASSWORD)
-  role_keys['targets']['private'] = \
-    repo_tool.import_ed25519_privatekey_from_file(targets_key_file,
-        EXPECTED_KEYFILE_PASSWORD)
-  role_keys['snapshot']['private'] = \
-    repo_tool.import_ed25519_privatekey_from_file(snapshot_key_file,
-        EXPECTED_KEYFILE_PASSWORD)
-  role_keys['timestamp']['private'] = \
-    repo_tool.import_ed25519_privatekey_from_file(timestamp_key_file,
-        EXPECTED_KEYFILE_PASSWORD)
-  role_keys['role1']['private'] = \
-    repo_tool.import_ed25519_privatekey_from_file(delegation_key_file,
-        EXPECTED_KEYFILE_PASSWORD)
-
-  return role_keys
-
-
-if __name__ == '__main__':
-  utils.configure_test_logging(sys.argv)
-  unittest.main()
diff --git a/tests/test_utils.py b/tests/test_utils.py
index df4d06e667..2fefeedbdc 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -21,7 +21,6 @@
 """
 
 import logging
-import os
 import socket
 import sys
 import unittest
@@ -57,50 +56,6 @@ def test_simple_server_startup(self) -> None:
         self.assertTrue(can_connect(server_process_handler.port))
         server_process_handler.clean()
 
-    def test_simple_https_server_startup(self) -> None:
-        # Test normal case
-        good_cert_path = os.path.join("ssl_certs", "ssl_cert.crt")
-        server_process_handler = utils.TestServerProcess(
-            log=logger,
-            server="simple_https_server_old.py",
-            extra_cmd_args=[good_cert_path],
-        )
-
-        # Make sure we can connect to the server.
-        self.assertTrue(can_connect(server_process_handler.port))
-        server_process_handler.clean()
-
-        # Test when no cert file is provided.
-        server_process_handler = utils.TestServerProcess(
-            log=logger, server="simple_https_server_old.py"
-        )
-
-        # Make sure we can connect to the server.
-        self.assertTrue(can_connect(server_process_handler.port))
-        server_process_handler.clean()
-
-        # Test with a non-existing cert file.
-        non_existing_cert_path = os.path.join("ssl_certs", "non_existing.crt")
-        server_process_handler = utils.TestServerProcess(
-            log=logger,
-            server="simple_https_server_old.py",
-            extra_cmd_args=[non_existing_cert_path],
-        )
-
-        # Make sure we can connect to the server.
-        self.assertTrue(can_connect(server_process_handler.port))
-        server_process_handler.clean()
-
-    def test_slow_retrieval_server_startup(self) -> None:
-        # Test normal case
-        server_process_handler = utils.TestServerProcess(
-            log=logger, server="slow_retrieval_server_old.py"
-        )
-
-        # Make sure we can connect to the server.
-        self.assertTrue(can_connect(server_process_handler.port))
-        server_process_handler.clean()
-
     def test_cleanup(self) -> None:
         # Test normal case
         server_process_handler = utils.TestServerProcess(
diff --git a/tuf/unittest_toolbox.py b/tuf/unittest_toolbox.py
deleted file mode 100755
index ac1305918b..0000000000
--- a/tuf/unittest_toolbox.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2012 - 2017, New York University and the TUF contributors
-# SPDX-License-Identifier: MIT OR Apache-2.0
-
-"""
-
-  unittest_toolbox.py
-
-
-  Konstantin Andrianov.
-
-
-  March 26, 2012.
-
-
-  See LICENSE-MIT OR LICENSE for licensing information.
-
-
-  Provides various helper methods for unit testing. Use it instead of the
-  plain unittest module; this module builds on unittest. Specifically,
-  Modified_TestCase is a class derived from unittest.TestCase.
-"""
-
-import os
-import shutil
-import unittest
-import tempfile
-import random
-import string
-
-from typing import Optional
-
-class Modified_TestCase(unittest.TestCase):
-  """
-
-  Provide additional test-setup methods to make testing
-  of a module's methods-under-test as independent as possible.
-
-  If you want to modify setUp()/tearDown(), do:
-    class Your_Test_Class(Modified_TestCase):
-      def setUp():
-        your setup modification
-        your setup modification
-        ...
-        Modified_TestCase.setUp(self)
-
-
-  make_temp_directory(self, directory=None):
-    Creates and returns an absolute path of a temporary directory.
-
-  make_temp_file(self, suffix='.txt', directory=None):
-    Creates and returns an absolute path of an empty temp file.
-
-  make_temp_data_file(self, suffix='', directory=None, data='junk data'):
-    Returns an absolute path of a temp file containing some data.
-
-  random_path(self, length=7):
-    Generate a 'random' path consisting of n-length strings of random chars.
-
-
-  Static Methods:
-  --------------
-  The following methods are static because they do not operate on any
-  instance of the class, so it is possible to call them without
-  instantiating it.
-
-  random_string(length=15):
-    Generate a string of 'length' random characters.
-  """
-
-
-  def setUp(self) -> None:
-    self._cleanup = []
-
-
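The _cleanup list populated in setUp() and drained in tearDown() below is the same pattern that unittest now ships as TestCase.addCleanup(). A minimal sketch of the modern equivalent, using a hypothetical test class:

    import shutil
    import tempfile
    import unittest

    class ExampleCleanupTest(unittest.TestCase):
        def test_with_temp_directory(self):
            temp_directory = tempfile.mkdtemp()
            # Registered callbacks run after tearDown(), in reverse order of
            # registration, even if the test body fails.
            self.addCleanup(shutil.rmtree, temp_directory, ignore_errors=True)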
-  def tearDown(self) -> None:
-    # Perform cleanup by executing the registered clean-up functions.
-    for cleanup_function in self._cleanup:
-      try:
-        # OSError will occur if the directory was already removed.
-        cleanup_function()
-
-      except OSError:
-        pass
-
-
-
-  def make_temp_directory(self, directory: Optional[str] = None) -> str:
-    """Creates and returns an absolute path of a temporary directory."""
-
-    prefix = self.__class__.__name__ + '_'
-    temp_directory = tempfile.mkdtemp(prefix=prefix, dir=directory)
-
-    def _destroy_temp_directory():
-      shutil.rmtree(temp_directory)
-
-    self._cleanup.append(_destroy_temp_directory)
-
-    return temp_directory
-
-
-
-  def make_temp_file(
-      self, suffix: str = '.txt', directory: Optional[str] = None
-  ) -> str:
-    """Creates and returns an absolute path of an empty file."""
-    prefix = 'tmp_file_' + self.__class__.__name__ + '_'
-    fd, temp_file_path = tempfile.mkstemp(suffix=suffix, prefix=prefix,
-        dir=directory)
-    # Close the file descriptor returned by mkstemp() to avoid leaking it.
-    os.close(fd)
-
-    def _destroy_temp_file():
-      os.unlink(temp_file_path)
-
-    self._cleanup.append(_destroy_temp_file)
-    return temp_file_path
-
-
-
-  def make_temp_data_file(
-      self, suffix: str = '', directory: Optional[str] = None,
-      data: str = 'junk data'
-  ) -> str:
-    """Returns an absolute path of a temp file containing data."""
-    temp_file_path = self.make_temp_file(suffix=suffix, directory=directory)
-    with open(temp_file_path, 'wt', encoding='utf8') as temp_file:
-      temp_file.write(data)
-    return temp_file_path
-
-
-
-  def random_path(self, length: int = 7) -> str:
-    """Generate a 'random' path consisting of random n-length strings."""
-
-    rand_path = '/' + self.random_string(length)
-
-    for junk in range(2):
-      rand_path = os.path.join(rand_path, self.random_string(length))
-
-    return rand_path
-
-
-
-  @staticmethod
-  def random_string(length: int = 15) -> str:
-    """Generate a random string of specified length."""
-
-    rand_str = ''
-    for junk in range(length):
-      rand_str += random.SystemRandom().choice('abcdefABCDEF' + string.digits)
-
-    return rand_str
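For reference, this is roughly how the removed toolbox was consumed by the old tests. ExampleTest is a hypothetical illustration, not a file from the repository:

    import unittest

    from tuf import unittest_toolbox  # module removed by this change

    class ExampleTest(unittest_toolbox.Modified_TestCase):
        def test_temp_helpers(self):
            # Both paths are registered for removal in
            # Modified_TestCase.tearDown().
            temp_directory = self.make_temp_directory()
            data_file = self.make_temp_data_file(directory=temp_directory,
                                                 data='hello')
            with open(data_file, encoding='utf8') as fileobj:
                self.assertEqual(fileobj.read(), 'hello')

    if __name__ == '__main__':
        unittest.main()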