diff --git a/README.rst b/README.rst index 156ea2da08..20858997c1 100644 --- a/README.rst +++ b/README.rst @@ -25,6 +25,14 @@ A Framework for Securing Software Update Systems :target: https://bestpractices.coreinfrastructure.org/projects/1351 :alt: CII +.. image:: https://pyup.io/repos/github/theupdateframework/tuf/shield.svg + :target: https://pyup.io/repos/github/theupdateframework/tuf/ + :alt: Updates + +.. image:: https://pyup.io/repos/github/theupdateframework/tuf/python-3-shield.svg + :target: https://pyup.io/repos/github/theupdateframework/tuf/ + :alt: Python 3 + .. image:: /docs/images/banner_readme.JPG The Update Framework (TUF) helps developers to secure new or existing diff --git a/pylintrc b/pylintrc index 5cb1bcc23a..358bd43151 100644 --- a/pylintrc +++ b/pylintrc @@ -50,7 +50,7 @@ confidence= # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=parameter-unpacking, unpacking-in-except, long-suffix, old-ne-operator, old-octal-literal, import-star-module-level, raw-checker-failed, bad-inline-option, locally-disabled, locally-enabled, file-ignored, suppressed-message, useless-suppression, deprecated-pragma, apply-builtin, basestring-builtin, buffer-builtin, cmp-builtin, coerce-builtin, execfile-builtin, file-builtin, long-builtin, raw_input-builtin, reduce-builtin, standarderror-builtin, unicode-builtin, xrange-builtin, coerce-method, delslice-method, getslice-method, setslice-method, no-absolute-import, old-division, dict-iter-method, dict-view-method, next-method-called, metaclass-assignment, indexing-exception, raising-string, reload-builtin, oct-method, hex-method, nonzero-method, cmp-method, input-builtin, round-builtin, intern-builtin, unichr-builtin, map-builtin-not-iterating, zip-builtin-not-iterating, range-builtin-not-iterating, filter-builtin-not-iterating, using-cmp-argument, eq-without-hash, div-method, idiv-method, rdiv-method, exception-message-attribute, invalid-str-codec, sys-max-int, deprecated-str-translate-call, global-statement, broad-except, C, R +disable=parameter-unpacking, unpacking-in-except, long-suffix, old-ne-operator, old-octal-literal, import-star-module-level, raw-checker-failed, bad-inline-option, locally-disabled, locally-enabled, file-ignored, suppressed-message, useless-suppression, deprecated-pragma, apply-builtin, basestring-builtin, buffer-builtin, cmp-builtin, coerce-builtin, execfile-builtin, file-builtin, long-builtin, raw_input-builtin, reduce-builtin, standarderror-builtin, unicode-builtin, xrange-builtin, coerce-method, delslice-method, getslice-method, setslice-method, no-absolute-import, old-division, dict-iter-method, dict-view-method, next-method-called, metaclass-assignment, indexing-exception, raising-string, reload-builtin, oct-method, hex-method, nonzero-method, cmp-method, input-builtin, round-builtin, intern-builtin, unichr-builtin, map-builtin-not-iterating, zip-builtin-not-iterating, range-builtin-not-iterating, filter-builtin-not-iterating, using-cmp-argument, eq-without-hash, div-method, idiv-method, rdiv-method, exception-message-attribute, invalid-str-codec, sys-max-int, deprecated-str-translate-call, global-statement, broad-except, logging-not-lazy, C, R # Enable the message, report, category or checker with the given id(s). 
You can # either give multiple identifier separated by comma (,) or put this option diff --git a/setup.py b/setup.py index 1396f0d29b..2d92fb5652 100755 --- a/setup.py +++ b/setup.py @@ -108,9 +108,10 @@ 'Topic :: Security', 'Topic :: Software Development' ], - install_requires = ['iso8601>=0.1.12', 'six>=1.11.0', 'securesystemslib>=0.10.8'], + install_requires = ['iso8601>=0.1.12', 'six>=1.11.0', 'securesystemslib>=0.10.8'], packages = find_packages(exclude=['tests']), scripts = [ 'tuf/scripts/basic_client.py', + 'tuf/scripts/simple_server.py' ] ) diff --git a/tests/repository_data/README.md b/tests/repository_data/README.md new file mode 100644 index 0000000000..d214100f12 --- /dev/null +++ b/tests/repository_data/README.md @@ -0,0 +1,48 @@ +# Unit and integration testing + +## Running the tests +The unit and integration tests can be executed by invoking `tox` from any +path under the project directory. + +``` +$ tox +``` + +Or by invoking `aggregate_tests.py` from the +[tests](https://github.com/theupdateframework/tuf/tree/develop/tests) +directory. + +``` +$ python aggregate_tests.py +``` + +Note: integration tests end in `_integration.py`. + +If you wish to run a particular unit test, navigate to the tests directory and +run it directly. For example: + +``` +$ python test_updater.py +``` + +It is also possible to run individual test cases of a unit test. For instance: + +``` +$ python -m unittest test_updater.TestMultiRepoUpdater.test_get_one_valid_targetinfo +``` + +## Setup +The unit and integration tests operate on static metadata available in the +[repository_data +directory](https://github.com/theupdateframework/tuf/tree/develop/tests/repository_data/). +Before running the tests, static metadata is first copied to temporary +directories and modified, as needed, by the tests. + +The test modules typically spawn HTTP(S) servers that serve metadata and target +files for the unit tests. The [map +file](https://github.com/theupdateframework/tuf/tree/develop/tests/repository_data) +specifies the location of the test repositories and other properties. For +specific targets and metadata provided by the test repositories, please +inspect their [respective +metadata](https://github.com/theupdateframework/tuf/tree/develop/tests/repository_data/repository). 
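For orientation, the sketch below shows how a client would consume the map file introduced in this change. It mirrors the `MultiRepoUpdater` calls exercised in `test_multiple_repositories_integration.py` further down in this diff; the paths are placeholders, and the sketch assumes the repository servers listed in `map.json` (ports 30001 and 30002) are already serving metadata and targets.

```python
# Minimal sketch (placeholder paths), following the API used by the tests in
# this change: resolve and download a target through the map file.
import tuf.settings
import tuf.client.updater as updater

# Parent directory holding the per-repository client metadata.
tuf.settings.repositories_directory = '/path/to/client'  # placeholder

# map.json names the repositories and maps target paths to them.
multi_repo_updater = updater.MultiRepoUpdater('/path/to/client/map.json')

# Returns a dict mapping each updater that can verify 'file3.txt' to the
# validated target info for that file.
valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file3.txt')

for my_updater, my_targetinfo in valid_targetinfo.items():
  my_updater.download_target(my_targetinfo, '/path/to/destination')
```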
+ diff --git a/tests/repository_data/client/map.json b/tests/repository_data/client/map.json new file mode 100644 index 0000000000..d683880441 --- /dev/null +++ b/tests/repository_data/client/map.json @@ -0,0 +1,33 @@ +{ + "mapping": [ + { + "paths": [ + "*1.txt" + ], + "repositories": [ + "test_repository1", + "test_repository2" + ], + "terminating": false, + "threshold": 1 + }, + { + "paths": [ + "*3.txt" + ], + "repositories": [ + "test_repository2" + ], + "terminating": true, + "threshold": 1 + } + ], + "repositories": { + "test_repository1": [ + "http://localhost:30001" + ], + "test_repository2": [ + "http://localhost:30002" + ] + } +} diff --git a/tests/repository_data/client/test_repository/metadata/current/1.root.json b/tests/repository_data/client/test_repository1/metadata/current/1.root.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/current/1.root.json rename to tests/repository_data/client/test_repository1/metadata/current/1.root.json diff --git a/tests/repository_data/client/test_repository/metadata/current/role1.json b/tests/repository_data/client/test_repository1/metadata/current/role1.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/current/role1.json rename to tests/repository_data/client/test_repository1/metadata/current/role1.json diff --git a/tests/repository_data/client/test_repository/metadata/current/role2.json b/tests/repository_data/client/test_repository1/metadata/current/role2.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/current/role2.json rename to tests/repository_data/client/test_repository1/metadata/current/role2.json diff --git a/tests/repository_data/client/test_repository/metadata/current/root.json b/tests/repository_data/client/test_repository1/metadata/current/root.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/current/root.json rename to tests/repository_data/client/test_repository1/metadata/current/root.json diff --git a/tests/repository_data/client/test_repository/metadata/current/snapshot.json b/tests/repository_data/client/test_repository1/metadata/current/snapshot.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/current/snapshot.json rename to tests/repository_data/client/test_repository1/metadata/current/snapshot.json diff --git a/tests/repository_data/client/test_repository/metadata/current/targets.json b/tests/repository_data/client/test_repository1/metadata/current/targets.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/current/targets.json rename to tests/repository_data/client/test_repository1/metadata/current/targets.json diff --git a/tests/repository_data/client/test_repository/metadata/current/timestamp.json b/tests/repository_data/client/test_repository1/metadata/current/timestamp.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/current/timestamp.json rename to tests/repository_data/client/test_repository1/metadata/current/timestamp.json diff --git a/tests/repository_data/client/test_repository/metadata/previous/1.root.json b/tests/repository_data/client/test_repository1/metadata/previous/1.root.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/previous/1.root.json rename to tests/repository_data/client/test_repository1/metadata/previous/1.root.json diff --git 
a/tests/repository_data/client/test_repository/metadata/previous/role1.json b/tests/repository_data/client/test_repository1/metadata/previous/role1.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/previous/role1.json rename to tests/repository_data/client/test_repository1/metadata/previous/role1.json diff --git a/tests/repository_data/client/test_repository/metadata/previous/role2.json b/tests/repository_data/client/test_repository1/metadata/previous/role2.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/previous/role2.json rename to tests/repository_data/client/test_repository1/metadata/previous/role2.json diff --git a/tests/repository_data/client/test_repository/metadata/previous/root.json b/tests/repository_data/client/test_repository1/metadata/previous/root.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/previous/root.json rename to tests/repository_data/client/test_repository1/metadata/previous/root.json diff --git a/tests/repository_data/client/test_repository/metadata/previous/snapshot.json b/tests/repository_data/client/test_repository1/metadata/previous/snapshot.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/previous/snapshot.json rename to tests/repository_data/client/test_repository1/metadata/previous/snapshot.json diff --git a/tests/repository_data/client/test_repository/metadata/previous/targets.json b/tests/repository_data/client/test_repository1/metadata/previous/targets.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/previous/targets.json rename to tests/repository_data/client/test_repository1/metadata/previous/targets.json diff --git a/tests/repository_data/client/test_repository/metadata/previous/timestamp.json b/tests/repository_data/client/test_repository1/metadata/previous/timestamp.json similarity index 100% rename from tests/repository_data/client/test_repository/metadata/previous/timestamp.json rename to tests/repository_data/client/test_repository1/metadata/previous/timestamp.json diff --git a/tests/repository_data/client/test_repository2/metadata/current/1.root.json b/tests/repository_data/client/test_repository2/metadata/current/1.root.json new file mode 100644 index 0000000000..ccce5381b6 --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/current/1.root.json @@ -0,0 +1,87 @@ +{ + "signatures": [ + { + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" + } + ], + "signed": { + "_type": "root", + "consistent_snapshot": false, + "expires": "2030-01-01T00:00:00Z", + "keys": { + 
"4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + }, + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" + }, + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" + }, + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" + } + }, + "roles": { + "root": { + "keyids": [ + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" + ], + "threshold": 1 + }, + "targets": { + "keyids": [ + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" + ], + "threshold": 1 + } + }, + "spec_version": "1.0", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/current/role1.json b/tests/repository_data/client/test_repository2/metadata/current/role1.json new file mode 100644 index 0000000000..332cd8050f --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/current/role1.json @@ -0,0 +1,49 @@ +{ + "signatures": [ + { + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "749d6373835e9e89a269168d9af22bf7692ee7059a1db5ff1162e07b495ba47ae223e9ece6c27b2981d5d8bc046788d3fad9c2ba83d4be9b6547ed1f909c6204" + } + ], + "signed": { + "_type": "targets", + "delegations": { + "keys": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" + } + }, + "roles": [ + { + "keyids": [ + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" + ], + "name": "role2", + "paths": [], + "terminating": false, + "threshold": 1 + } + ] + }, + "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", + "targets": { + "/file3.txt": { + "hashes": { + "sha256": 
"141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b", + "sha512": "ef5beafa16041bcdd2937140afebd485296cd54f7348ecd5a4d035c09759608de467a7ac0eb58753d0242df873c305e8bffad2454aa48f44480f15efae1cacd0" + }, + "length": 28 + } + }, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/current/role2.json b/tests/repository_data/client/test_repository2/metadata/current/role2.json new file mode 100644 index 0000000000..46e415a789 --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/current/role2.json @@ -0,0 +1,19 @@ +{ + "signatures": [ + { + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "34ae1e3c897062419722c1747970a632e12060f3aef57314e6e6aa96c3a510a25ec5a8b12022058c768724607dd58106293089c87a7ee4b2ce5b3a8d44deeb06" + } + ], + "signed": { + "_type": "targets", + "delegations": { + "keys": {}, + "roles": [] + }, + "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", + "targets": {}, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/current/root.json b/tests/repository_data/client/test_repository2/metadata/current/root.json new file mode 100644 index 0000000000..ccce5381b6 --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/current/root.json @@ -0,0 +1,87 @@ +{ + "signatures": [ + { + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" + } + ], + "signed": { + "_type": "root", + "consistent_snapshot": false, + "expires": "2030-01-01T00:00:00Z", + "keys": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + }, + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" + }, + 
"65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" + }, + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" + } + }, + "roles": { + "root": { + "keyids": [ + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" + ], + "threshold": 1 + }, + "targets": { + "keyids": [ + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" + ], + "threshold": 1 + } + }, + "spec_version": "1.0", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/current/snapshot.json b/tests/repository_data/client/test_repository2/metadata/current/snapshot.json new file mode 100644 index 0000000000..a713f807b6 --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/current/snapshot.json @@ -0,0 +1,28 @@ +{ + "signatures": [ + { + "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", + "sig": "d78e9013bab1da2a8425caa48143cd79a21632dce021ad7e1b883d83775035df333a8b26c9c952d832edaf9dc7be2ef612bdb21326fcc9849346d7e3a162050e" + } + ], + "signed": { + "_type": "snapshot", + "expires": "2030-01-01T00:00:00Z", + "meta": { + "role1.json": { + "version": 1 + }, + "role2.json": { + "version": 1 + }, + "root.json": { + "version": 1 + }, + "targets.json": { + "version": 1 + } + }, + "spec_version": "1.0", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/current/targets.json b/tests/repository_data/client/test_repository2/metadata/current/targets.json new file mode 100644 index 0000000000..972034d03f --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/current/targets.json @@ -0,0 +1,61 @@ +{ + "signatures": [ + { + "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", + "sig": "3b1a1fcb912ea8e03b6f9ad0da29166149d4a6f038b552c204ccee1d396d2dd4095a3ce3c565581f08fa37dddc418b0aee40743a121b1f47c89d51da11f1dc02" + } + ], + "signed": { + "_type": "targets", + "delegations": { + "keys": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" + } + }, + "roles": [ + { + "keyids": [ + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" + ], + "name": "role1", + "paths": [ + "/file3.txt" + ], + "terminating": false, + "threshold": 1 + } + ] + }, + "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", + "targets": { + "/file1.txt": { + "custom": { + "file_permissions": "644" + }, + "hashes": { + "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", + "sha512": 
"467430a68afae8e9f9c0771ea5d78bf0b3a0d79a2d3d3b40c69fde4dd42c461448aef76fcef4f5284931a1ffd0ac096d138ba3a0d6ca83fa8d7285a47a296f77" + }, + "length": 31 + }, + "/file2.txt": { + "hashes": { + "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99", + "sha512": "052b49a21e03606b28942db69aa597530fe52d47ee3d748ba65afcd14b857738e36bc1714c4f4adde46c3e683548552fe5c96722e0e0da3acd9050c2524902d8" + }, + "length": 39 + } + }, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/current/timestamp.json b/tests/repository_data/client/test_repository2/metadata/current/timestamp.json new file mode 100644 index 0000000000..2cc752b30c --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/current/timestamp.json @@ -0,0 +1,23 @@ +{ + "signatures": [ + { + "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", + "sig": "7dddbfe94d6d80253433551700ea6dfe4171a33f1227a07830e951900b8325d67c3dce6410b9cf55abefa3dfca0b57814a4965c2d6ee60bb0336755cd0557e03" + } + ], + "signed": { + "_type": "timestamp", + "expires": "2030-01-01T00:00:00Z", + "meta": { + "snapshot.json": { + "hashes": { + "sha256": "6990b6586ed545387c6a51db62173b903a5dff46b17b1bc3fe1e6ca0d0844f2f" + }, + "length": 554, + "version": 1 + } + }, + "spec_version": "1.0", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/previous/1.root.json b/tests/repository_data/client/test_repository2/metadata/previous/1.root.json new file mode 100644 index 0000000000..ccce5381b6 --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/previous/1.root.json @@ -0,0 +1,87 @@ +{ + "signatures": [ + { + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" + } + ], + "signed": { + "_type": "root", + "consistent_snapshot": false, + "expires": "2030-01-01T00:00:00Z", + "keys": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, 
+ "scheme": "rsassa-pss-sha256" + }, + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" + }, + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" + }, + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" + } + }, + "roles": { + "root": { + "keyids": [ + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" + ], + "threshold": 1 + }, + "targets": { + "keyids": [ + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" + ], + "threshold": 1 + } + }, + "spec_version": "1.0", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/previous/role1.json b/tests/repository_data/client/test_repository2/metadata/previous/role1.json new file mode 100644 index 0000000000..332cd8050f --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/previous/role1.json @@ -0,0 +1,49 @@ +{ + "signatures": [ + { + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "749d6373835e9e89a269168d9af22bf7692ee7059a1db5ff1162e07b495ba47ae223e9ece6c27b2981d5d8bc046788d3fad9c2ba83d4be9b6547ed1f909c6204" + } + ], + "signed": { + "_type": "targets", + "delegations": { + "keys": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" + } + }, + "roles": [ + { + "keyids": [ + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" + ], + "name": "role2", + "paths": [], + "terminating": false, + "threshold": 1 + } + ] + }, + "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", + "targets": { + "/file3.txt": { + "hashes": { + "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b", + "sha512": "ef5beafa16041bcdd2937140afebd485296cd54f7348ecd5a4d035c09759608de467a7ac0eb58753d0242df873c305e8bffad2454aa48f44480f15efae1cacd0" + }, + "length": 28 + } + }, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/previous/role2.json b/tests/repository_data/client/test_repository2/metadata/previous/role2.json new file mode 100644 index 0000000000..46e415a789 --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/previous/role2.json @@ -0,0 +1,19 @@ +{ + "signatures": [ + { + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": 
"34ae1e3c897062419722c1747970a632e12060f3aef57314e6e6aa96c3a510a25ec5a8b12022058c768724607dd58106293089c87a7ee4b2ce5b3a8d44deeb06" + } + ], + "signed": { + "_type": "targets", + "delegations": { + "keys": {}, + "roles": [] + }, + "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", + "targets": {}, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/previous/root.json b/tests/repository_data/client/test_repository2/metadata/previous/root.json new file mode 100644 index 0000000000..ccce5381b6 --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/previous/root.json @@ -0,0 +1,87 @@ +{ + "signatures": [ + { + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" + } + ], + "signed": { + "_type": "root", + "consistent_snapshot": false, + "expires": "2030-01-01T00:00:00Z", + "keys": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" + }, + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" + }, + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" + }, + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" + } + }, + "roles": { + "root": { + "keyids": [ + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + 
"59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" + ], + "threshold": 1 + }, + "targets": { + "keyids": [ + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" + ], + "threshold": 1 + } + }, + "spec_version": "1.0", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/previous/snapshot.json b/tests/repository_data/client/test_repository2/metadata/previous/snapshot.json new file mode 100644 index 0000000000..a713f807b6 --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/previous/snapshot.json @@ -0,0 +1,28 @@ +{ + "signatures": [ + { + "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", + "sig": "d78e9013bab1da2a8425caa48143cd79a21632dce021ad7e1b883d83775035df333a8b26c9c952d832edaf9dc7be2ef612bdb21326fcc9849346d7e3a162050e" + } + ], + "signed": { + "_type": "snapshot", + "expires": "2030-01-01T00:00:00Z", + "meta": { + "role1.json": { + "version": 1 + }, + "role2.json": { + "version": 1 + }, + "root.json": { + "version": 1 + }, + "targets.json": { + "version": 1 + } + }, + "spec_version": "1.0", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/previous/targets.json b/tests/repository_data/client/test_repository2/metadata/previous/targets.json new file mode 100644 index 0000000000..972034d03f --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/previous/targets.json @@ -0,0 +1,61 @@ +{ + "signatures": [ + { + "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", + "sig": "3b1a1fcb912ea8e03b6f9ad0da29166149d4a6f038b552c204ccee1d396d2dd4095a3ce3c565581f08fa37dddc418b0aee40743a121b1f47c89d51da11f1dc02" + } + ], + "signed": { + "_type": "targets", + "delegations": { + "keys": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" + } + }, + "roles": [ + { + "keyids": [ + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" + ], + "name": "role1", + "paths": [ + "/file3.txt" + ], + "terminating": false, + "threshold": 1 + } + ] + }, + "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", + "targets": { + "/file1.txt": { + "custom": { + "file_permissions": "644" + }, + "hashes": { + "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", + "sha512": "467430a68afae8e9f9c0771ea5d78bf0b3a0d79a2d3d3b40c69fde4dd42c461448aef76fcef4f5284931a1ffd0ac096d138ba3a0d6ca83fa8d7285a47a296f77" + }, + "length": 31 + }, + "/file2.txt": { + "hashes": { + "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99", + "sha512": "052b49a21e03606b28942db69aa597530fe52d47ee3d748ba65afcd14b857738e36bc1714c4f4adde46c3e683548552fe5c96722e0e0da3acd9050c2524902d8" + }, + "length": 39 + } + }, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/client/test_repository2/metadata/previous/timestamp.json b/tests/repository_data/client/test_repository2/metadata/previous/timestamp.json new file mode 100644 index 0000000000..2cc752b30c --- /dev/null +++ b/tests/repository_data/client/test_repository2/metadata/previous/timestamp.json @@ -0,0 +1,23 @@ 
+{ + "signatures": [ + { + "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", + "sig": "7dddbfe94d6d80253433551700ea6dfe4171a33f1227a07830e951900b8325d67c3dce6410b9cf55abefa3dfca0b57814a4965c2d6ee60bb0336755cd0557e03" + } + ], + "signed": { + "_type": "timestamp", + "expires": "2030-01-01T00:00:00Z", + "meta": { + "snapshot.json": { + "hashes": { + "sha256": "6990b6586ed545387c6a51db62173b903a5dff46b17b1bc3fe1e6ca0d0844f2f" + }, + "length": 554, + "version": 1 + } + }, + "spec_version": "1.0", + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/map.json b/tests/repository_data/map.json new file mode 100644 index 0000000000..d683880441 --- /dev/null +++ b/tests/repository_data/map.json @@ -0,0 +1,33 @@ +{ + "mapping": [ + { + "paths": [ + "*1.txt" + ], + "repositories": [ + "test_repository1", + "test_repository2" + ], + "terminating": false, + "threshold": 1 + }, + { + "paths": [ + "*3.txt" + ], + "repositories": [ + "test_repository2" + ], + "terminating": true, + "threshold": 1 + } + ], + "repositories": { + "test_repository1": [ + "http://localhost:30001" + ], + "test_repository2": [ + "http://localhost:30002" + ] + } +} diff --git a/tests/test_arbitrary_package_attack.py b/tests/test_arbitrary_package_attack.py index b4e981e0f9..feed8d1f88 100755 --- a/tests/test_arbitrary_package_attack.py +++ b/tests/test_arbitrary_package_attack.py @@ -113,7 +113,7 @@ def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository' + self.repository_name = 'test_repository1' # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. @@ -162,7 +162,7 @@ def tearDown(self): # Modified_TestCase.tearDown() automatically deletes temporary files and # directories that may have been created during each test case. unittest_toolbox.Modified_TestCase.tearDown(self) - # updater.Updater() populates the roledb with the name "test_repository" + # updater.Updater() populates the roledb with the name "test_repository1" tuf.roledb.clear_roledb(clear_all=True) tuf.keydb.clear_keydb(clear_all=True) diff --git a/tests/test_endless_data_attack.py b/tests/test_endless_data_attack.py index 61420c1749..fa26b932b3 100755 --- a/tests/test_endless_data_attack.py +++ b/tests/test_endless_data_attack.py @@ -115,7 +115,7 @@ def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository' + self.repository_name = 'test_repository1' # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. diff --git a/tests/test_extraneous_dependencies_attack.py b/tests/test_extraneous_dependencies_attack.py index caa61021e3..9b16b48665 100755 --- a/tests/test_extraneous_dependencies_attack.py +++ b/tests/test_extraneous_dependencies_attack.py @@ -119,7 +119,7 @@ def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository' + self.repository_name = 'test_repository1' # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. 
diff --git a/tests/test_indefinite_freeze_attack.py b/tests/test_indefinite_freeze_attack.py index 95573a64ad..975bea8c45 100755 --- a/tests/test_indefinite_freeze_attack.py +++ b/tests/test_indefinite_freeze_attack.py @@ -125,7 +125,7 @@ def tearDownClass(cls): def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository' + self.repository_name = 'test_repository1' # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. diff --git a/tests/test_key_revocation_integration.py b/tests/test_key_revocation_integration.py index ad1de8f848..243573c744 100755 --- a/tests/test_key_revocation_integration.py +++ b/tests/test_key_revocation_integration.py @@ -115,7 +115,7 @@ def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository' + self.repository_name = 'test_repository1' # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. diff --git a/tests/test_mix_and_match_attack.py b/tests/test_mix_and_match_attack.py index 2cabb0c2cc..cc37026a07 100755 --- a/tests/test_mix_and_match_attack.py +++ b/tests/test_mix_and_match_attack.py @@ -120,7 +120,7 @@ def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository' + self.repository_name = 'test_repository1' # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. diff --git a/tests/test_multiple_repositories_integration.py b/tests/test_multiple_repositories_integration.py index bddd6254b8..4c2800c8ca 100755 --- a/tests/test_multiple_repositories_integration.py +++ b/tests/test_multiple_repositories_integration.py @@ -17,10 +17,8 @@ See LICENSE-MIT.txt OR LICENSE-APACHE.txt for licensing information. - Verify that clients are able to keep track of multiple repositories and - separate sets of metadata for each. - - TODO: Verify that multiple repositories can be set for the repository tool. + Verify that clients and the repository tools are able to keep track of + multiple repositories and separate sets of metadata for each. """ # Help with Python 3 compatibility, where the print statement is a function, an @@ -40,108 +38,70 @@ import time import shutil import unittest +import json import tuf import tuf.log import tuf.roledb import tuf.client.updater as updater import tuf.settings +import securesystemslib import tuf.unittest_toolbox as unittest_toolbox import tuf.repository_tool as repo_tool +import six +import securesystemslib + logger = logging.getLogger('test_multiple_repositories_integration') + repo_tool.disable_console_log_messages() class TestMultipleRepositoriesIntegration(unittest_toolbox.Modified_TestCase): - @classmethod - def setUpClass(cls): - # setUpClass() is called before any of the test cases are executed. - - # Create a temporary directory to store the repository, metadata, and - # target files. 'temporary_directory' must be deleted in TearDownModule() - # so that temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). 
- # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.SERVER_PORT = random.randint(30000, 45000) - cls.SERVER_PORT2 = random.randint(30000, 45000) - command = ['python', 'simple_server.py', str(cls.SERVER_PORT)] - command2 = ['python', 'simple_server.py', str(cls.SERVER_PORT2)] - cls.server_process = subprocess.Popen(command, stderr=subprocess.PIPE) - cls.server_process2 = subprocess.Popen(command2, stderr=subprocess.PIPE) - logger.info('Server processes started.') - logger.info('Server process id: ' + str(cls.server_process.pid)) - logger.info('Serving on port: ' + str(cls.SERVER_PORT)) - logger.info('Server 2 process id: ' + str(cls.server_process2.pid)) - logger.info('Serving 2 on port: ' + str(cls.SERVER_PORT2)) - cls.url = 'http://localhost:' + str(cls.SERVER_PORT) + os.path.sep - cls.url2 = 'http://localhost:' + str(cls.SERVER_PORT2) + os.path.sep - - # NOTE: Following error is raised if a delay is not applied: - # - time.sleep(1) - - - - @classmethod - def tearDownClass(cls): - # tearDownModule() is called after all the test cases have run. - # http://docs.python.org/2/library/unittest.html#class-and-module-fixtures - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - # Kill the SimpleHTTPServer process. - if cls.server_process.returncode is None: - logger.info('Server process ' + str(cls.server_process.pid) + ' terminated.') - cls.server_process.kill() - - if cls.server_process2.returncode is None: - logger.info('Server 2 process ' + str(cls.server_process2.pid) + ' terminated.') - cls.server_process2.kill() - - - def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) + self.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) + # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. # The 'repository_data' directory is expected to exist in 'tuf/tests/'. original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = self.make_temp_directory(directory= + + self.temporary_repository_root = self.make_temp_directory(directory= self.temporary_directory) # The original repository, keystore, and client directories will be copied # for each test case. original_repository = os.path.join(original_repository_files, 'repository') - original_client = os.path.join(original_repository_files, 'client', 'test_repository') + original_client = os.path.join(original_repository_files, 'client', 'test_repository1') + original_keystore = os.path.join(original_repository_files, 'keystore') + original_map_file = os.path.join(original_repository_files, 'map.json') # Save references to the often-needed client repository directories. # Test cases need these references to access metadata and target files. 
- self.repository_directory = os.path.join(temporary_repository_root, + self.repository_directory = os.path.join(self.temporary_repository_root, 'repository_server1') - self.repository_directory2 = os.path.join(temporary_repository_root, + self.repository_directory2 = os.path.join(self.temporary_repository_root, 'repository_server2') # Setting 'tuf.settings.repositories_directory' with the temporary client # directory copied from the original repository files. - tuf.settings.repositories_directory = temporary_repository_root + tuf.settings.repositories_directory = self.temporary_repository_root + + self.repository_name = 'test_repository1' + self.repository_name2 = 'test_repository2' + + self.client_directory = os.path.join(self.temporary_repository_root, + self.repository_name) + self.client_directory2 = os.path.join(self.temporary_repository_root, + self.repository_name2) - repository_name = 'repository1' - repository_name2 = 'repository2' - self.client_directory = os.path.join(temporary_repository_root, repository_name) - self.client_directory2 = os.path.join(temporary_repository_root, repository_name2) + self.keystore_directory = os.path.join(self.temporary_repository_root, 'keystore') + self.map_file = os.path.join(self.client_directory, 'map.json') + self.map_file2 = os.path.join(self.client_directory2, 'map.json') # Copy the original 'repository', 'client', and 'keystore' directories # to the temporary repository the test cases can use. @@ -149,14 +109,51 @@ def setUp(self): shutil.copytree(original_repository, self.repository_directory2) shutil.copytree(original_client, self.client_directory) shutil.copytree(original_client, self.client_directory2) + shutil.copyfile(original_map_file, self.map_file) + shutil.copyfile(original_map_file, self.map_file2) + shutil.copytree(original_keystore, self.keystore_directory) - # Set the url prefix required by the 'tuf/client/updater.py' updater. - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. - repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = \ - 'http://localhost:' + str(self.SERVER_PORT) + repository_basepath - url_prefix2 = \ - 'http://localhost:' + str(self.SERVER_PORT2) + repository_basepath + # Launch a SimpleHTTPServer (serves files in the current directory). + # Test cases will request metadata and target files that have been + # pre-generated in 'tuf/tests/repository_data', which will be served by the + # SimpleHTTPServer launched here. The test cases of this unit test assume + # the pre-generated metadata files have a specific structure, such + # as a delegated role 'targets/role1', three target files, five key files, + # etc. + self.SERVER_PORT = random.SystemRandom().randint(30000, 45000) + self.SERVER_PORT2 = random.SystemRandom().randint(30000, 45000) + + # Avoid duplicate port numbers, so that the two servers do not attempt to + # listen on the same port. 
+ while self.SERVER_PORT == self.SERVER_PORT2: + self.SERVER_PORT2 = random.SystemRandom().randint(30000, 45000) + + command = ['simple_server.py', str(self.SERVER_PORT)] + command2 = ['simple_server.py', str(self.SERVER_PORT2)] + + self.server_process = subprocess.Popen(command, stderr=subprocess.PIPE, + cwd=self.repository_directory) + + logger.debug('Server process started.') + logger.debug('Server process id: ' + str(self.server_process.pid)) + logger.debug('Serving on port: ' + str(self.SERVER_PORT)) + + self.server_process2 = subprocess.Popen(command2, stderr=subprocess.PIPE, + cwd=self.repository_directory2) + + + logger.debug('Server process 2 started.') + logger.debug('Server 2 process id: ' + str(self.server_process2.pid)) + logger.debug('Server 2 serving on port: ' + str(self.SERVER_PORT2)) + self.url = 'http://localhost:' + str(self.SERVER_PORT) + os.path.sep + self.url2 = 'http://localhost:' + str(self.SERVER_PORT2) + os.path.sep + + # NOTE: The following error is raised if a delay is not applied: + # + time.sleep(.8) + + url_prefix = 'http://localhost:' + str(self.SERVER_PORT) + url_prefix2 = 'http://localhost:' + str(self.SERVER_PORT2) self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, 'metadata_path': 'metadata', @@ -170,9 +167,9 @@ def setUp(self): # Create the repository instances. The test cases will use these client # updaters to refresh metadata, fetch target files, etc. - self.repository_updater = updater.Updater(repository_name, + self.repository_updater = updater.Updater(self.repository_name, self.repository_mirrors) - self.repository_updater2 = updater.Updater(repository_name2, + self.repository_updater2 = updater.Updater(self.repository_name2, self.repository_mirrors2) @@ -181,49 +178,118 @@ def tearDown(self): # directories that may have been created during each test case. unittest_toolbox.Modified_TestCase.tearDown(self) - # updater.Updater() populates the roledb with the name "test_repository" + # Remove the temporary repository directory, which should contain all the + # metadata, targets, and key files generated by all the test cases. + shutil.rmtree(self.temporary_directory) + + # Kill the SimpleHTTPServer process. 
+ if self.server_process.returncode is None: + logger.info('Server process ' + str(self.server_process.pid) + ' terminated.') + self.server_process.kill() + + if self.server_process2.returncode is None: + logger.info('Server 2 process ' + str(self.server_process2.pid) + ' terminated.') + self.server_process2.kill() + + # updater.Updater() populates the roledb with the name "test_repository1" tuf.roledb.clear_roledb(clear_all=True) tuf.keydb.clear_keydb(clear_all=True) + def test_update(self): - self.assertEqual('repository1', str(self.repository_updater)) - self.assertEqual('repository2', str(self.repository_updater2)) + self.assertEqual('test_repository1', str(self.repository_updater)) + self.assertEqual('test_repository2', str(self.repository_updater2)) self.assertEqual(sorted(['role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('repository1'))) + sorted(tuf.roledb.get_rolenames('test_repository1'))) self.assertEqual(sorted(['role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('repository2'))) + sorted(tuf.roledb.get_rolenames('test_repository2'))) self.repository_updater.refresh() self.assertEqual(sorted(['role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('repository1'))) + sorted(tuf.roledb.get_rolenames('test_repository1'))) self.assertEqual(sorted(['role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('repository2'))) + sorted(tuf.roledb.get_rolenames('test_repository2'))) # 'role1.json' should be downloaded, because it provides info for the # requested 'file3.txt'. - valid_targetinfo = self.repository_updater.get_one_valid_targetinfo('file3.txt') + valid_targetinfo = self.repository_updater.get_one_valid_targetinfo('/file3.txt') self.assertEqual(sorted(['role2', 'role1', 'root', 'snapshot', 'targets', 'timestamp']), - sorted(tuf.roledb.get_rolenames('repository1'))) + sorted(tuf.roledb.get_rolenames('test_repository1'))) + + def test_repository_tool(self): - repository_name1 = 'repository1' - repository_name2 = 'repository2' - self.assertEqual(repository_name1, str(self.repository_updater)) - self.assertEqual(repository_name2, str(self.repository_updater2)) + self.assertEqual(self.repository_name, str(self.repository_updater)) + self.assertEqual(self.repository_name2, str(self.repository_updater2)) + + repository = repo_tool.load_repository(self.repository_directory, + self.repository_name) + repository2 = repo_tool.load_repository(self.repository_directory2, + self.repository_name2) + + repository.timestamp.version = 88 + self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles( + self.repository_name)) + self.assertEqual([], tuf.roledb.get_dirty_roles(self.repository_name2)) + + repository2.timestamp.version = 100 + self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles( + self.repository_name2)) + + key_file = os.path.join(self.keystore_directory, 'timestamp_key') + timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file, "password") + + repository.timestamp.load_signing_key(timestamp_private) + repository2.timestamp.load_signing_key(timestamp_private) + + repository.write('timestamp', increment_version_number=False) + repository2.write('timestamp', increment_version_number=False) + + # And move the staged metadata to the "live" metadata. 
+ shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) + shutil.rmtree(os.path.join(self.repository_directory2, 'metadata')) + + shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), + os.path.join(self.repository_directory, 'metadata')) + shutil.copytree(os.path.join(self.repository_directory2, 'metadata.staged'), + os.path.join(self.repository_directory2, 'metadata')) + + # Verify that the client retrieves the expected updates. + logger.info('Downloading timestamp from server 1.') + self.repository_updater.refresh() + + self.assertEqual( + 88, self.repository_updater.metadata['current']['timestamp']['version']) + logger.info('Downloading timestamp from server 2.') + self.repository_updater2.refresh() + + self.assertEqual( + 100, self.repository_updater2.metadata['current']['timestamp']['version']) + + # Test the behavior of the multi-repository updater. + map_file = securesystemslib.util.load_json_file(self.map_file) + map_file['repositories'][self.repository_name] = ['http://localhost:' + str(self.SERVER_PORT)] + map_file['repositories'][self.repository_name2] = ['http://localhost:' + str(self.SERVER_PORT2)] + with open(self.map_file, 'w') as file_object: + file_object.write(json.dumps(map_file)) + + # Try to load a non-existent map file. + self.assertRaises(tuf.exceptions.Error, updater.MultiRepoUpdater, 'bad_path') + + multi_repo_updater = updater.MultiRepoUpdater(self.map_file) + valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file3.txt') - repository1 = repo_tool.load_repository(self.repository_directory, repository_name1) - repository2 = repo_tool.load_repository(self.repository_directory2, repository_name2) + for my_updater, my_targetinfo in six.iteritems(valid_targetinfo): + my_updater.download_target(my_targetinfo, self.temporary_directory) + self.assertTrue(os.path.exists(os.path.join(self.temporary_directory, 'file3.txt'))) - repository2.timestamp.version = 2 - self.assertEqual([], tuf.roledb.get_dirty_roles(repository_name1)) - self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles(repository_name2)) if __name__ == '__main__': diff --git a/tests/test_replay_attack.py b/tests/test_replay_attack.py index 05d4c26830..fdf0895f3e 100755 --- a/tests/test_replay_attack.py +++ b/tests/test_replay_attack.py @@ -119,7 +119,7 @@ def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository' + self.repository_name = 'test_repository1' # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. diff --git a/tests/test_slow_retrieval_attack.py b/tests/test_slow_retrieval_attack.py index 4cc9abffad..34ca3a809c 100755 --- a/tests/test_slow_retrieval_attack.py +++ b/tests/test_slow_retrieval_attack.py @@ -126,7 +126,7 @@ def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository' + self.repository_name = 'test_repository1' # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. 
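The single-repository tests above all share the same client pattern: point `tuf.settings.repositories_directory` at the client files, describe a mirror, and instantiate `updater.Updater` with the repository name now spelled `test_repository1`. A condensed sketch of that flow follows; the paths are placeholders, and the `targets_path` and `confined_target_dirs` mirror fields are assumed here because the diff context truncates the mirror dictionary.

```python
# Condensed single-repository client flow, as exercised by the tests above.
# Paths are placeholders; a server must already expose 'metadata/' and
# 'targets/' under the url_prefix.
import tuf.settings
import tuf.client.updater as updater

tuf.settings.repositories_directory = '/path/to/client'  # placeholder

repository_mirrors = {'mirror1': {'url_prefix': 'http://localhost:30001',
                                  'metadata_path': 'metadata',
                                  'targets_path': 'targets',
                                  'confined_target_dirs': ['']}}

repository_updater = updater.Updater('test_repository1', repository_mirrors)

# Refresh the top-level roles (root, timestamp, snapshot, targets), then
# fetch verified target info and download the file itself.
repository_updater.refresh()
targetinfo = repository_updater.get_one_valid_targetinfo('file3.txt')
repository_updater.download_target(targetinfo, '/path/to/destination')
```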
diff --git a/tests/test_updater.py b/tests/test_updater.py index d72cfa6bc9..add90e2d2d 100644 --- a/tests/test_updater.py +++ b/tests/test_updater.py @@ -133,7 +133,7 @@ def setUp(self): tuf.roledb.clear_roledb(clear_all=True) tuf.keydb.clear_keydb(clear_all=True) - self.repository_name = 'test_repository' + self.repository_name = 'test_repository1' # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. @@ -228,7 +228,7 @@ def test_1__init__exceptions(self): # 'tuf.client.updater.py' requires that the client's repositories directory # be configured in 'tuf.settings.py'. tuf.settings.repositories_directory = None - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository', + self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', self.repository_mirrors) # Restore 'tuf.settings.repositories_directory' to the original client # directory. @@ -238,7 +238,7 @@ def test_1__init__exceptions(self): # Test: empty client repository (i.e., no metadata directory). metadata_backup = self.client_metadata + '.backup' shutil.move(self.client_metadata, metadata_backup) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository', + self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', self.repository_mirrors) # Restore the client's metadata directory. shutil.move(metadata_backup, self.client_metadata) @@ -251,7 +251,7 @@ def test_1__init__exceptions(self): shutil.move(self.client_metadata_current, current_backup) shutil.move(self.client_metadata_previous, previous_backup) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository', + self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', self.repository_mirrors) # Restore the client's previous directory. The required 'current' directory @@ -260,7 +260,7 @@ def test_1__init__exceptions(self): # Test: repository with only a '{repository_directory}/metadata/previous' # directory. - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository', + self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', self.repository_mirrors) # Restore the client's current directory. shutil.move(current_backup, self.client_metadata_current) @@ -268,7 +268,7 @@ def test_1__init__exceptions(self): # Test: repository with a '{repository_directory}/metadata/current' # directory, but the 'previous' directory is missing. shutil.move(self.client_metadata_previous, previous_backup) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository', + self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', self.repository_mirrors) shutil.move(previous_backup, self.client_metadata_previous) @@ -276,13 +276,13 @@ def test_1__init__exceptions(self): client_root_file = os.path.join(self.client_metadata_current, 'root.json') backup_root_file = client_root_file + '.backup' shutil.move(client_root_file, backup_root_file) - self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository', + self.assertRaises(tuf.exceptions.RepositoryError, updater.Updater, 'test_repository1', self.repository_mirrors) # Restore the client's 'root.json' file. shutil.move(backup_root_file, client_root_file) # Test: Normal 'tuf.client.updater.Updater' instantiation.
- updater.Updater('test_repository', self.repository_mirrors) + updater.Updater('test_repository1', self.repository_mirrors) @@ -1059,8 +1059,7 @@ def test_6_get_one_valid_targetinfo(self): # Test: invalid target path. self.assertRaises(tuf.exceptions.UnknownTargetError, - self.repository_updater.get_one_valid_targetinfo, - self.random_path()) + self.repository_updater.get_one_valid_targetinfo, self.random_path()) # Test updater.get_one_valid_targetinfo() backtracking behavior (enabled by # default). @@ -1628,8 +1627,6 @@ def verify_target_file(targets_path): self.repository_updater._get_file('targets.json', verify_target_file, file_type, file_size, download_safely=False) - - def test_14__targets_of_role(self): # Test case where a list of targets is given. By default, the 'targets' # parameter is None. @@ -1639,6 +1636,316 @@ def test_14__targets_of_role(self): + +class TestMultiRepoUpdater(unittest_toolbox.Modified_TestCase): + + def setUp(self): + # We are inheriting from custom class. + unittest_toolbox.Modified_TestCase.setUp(self) + + self.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) + + # Copy the original repository files provided in the test folder so that + # any modifications made to repository files are restricted to the copies. + # The 'repository_data' directory is expected to exist in 'tuf/tests/'. + original_repository_files = os.path.join(os.getcwd(), 'repository_data') + + self.temporary_repository_root = self.make_temp_directory(directory= + self.temporary_directory) + + # The original repository, keystore, and client directories will be copied + # for each test case. + original_repository = os.path.join(original_repository_files, 'repository') + original_client = os.path.join(original_repository_files, 'client', 'test_repository1') + original_keystore = os.path.join(original_repository_files, 'keystore') + original_map_file = os.path.join(original_repository_files, 'map.json') + + # Save references to the often-needed client repository directories. + # Test cases need these references to access metadata and target files. + self.repository_directory = os.path.join(self.temporary_repository_root, + 'repository_server1') + self.repository_directory2 = os.path.join(self.temporary_repository_root, + 'repository_server2') + + # Set 'tuf.settings.repositories_directory' to the temporary repository + # root that holds the directories copied from the original repository + # files. + tuf.settings.repositories_directory = self.temporary_repository_root + + repository_name = 'test_repository1' + repository_name2 = 'test_repository2' + + self.client_directory = os.path.join(self.temporary_repository_root, + repository_name) + self.client_directory2 = os.path.join(self.temporary_repository_root, + repository_name2) + + self.keystore_directory = os.path.join(self.temporary_repository_root, + 'keystore') + self.map_file = os.path.join(self.client_directory, 'map.json') + self.map_file2 = os.path.join(self.client_directory2, 'map.json') + + # Copy the original 'repository', 'client', and 'keystore' directories + # to the temporary repository root for the test cases to use.
+ shutil.copytree(original_repository, self.repository_directory) + shutil.copytree(original_repository, self.repository_directory2) + shutil.copytree(original_client, self.client_directory) + shutil.copytree(original_client, self.client_directory2) + shutil.copyfile(original_map_file, self.map_file) + shutil.copyfile(original_map_file, self.map_file2) + shutil.copytree(original_keystore, self.keystore_directory) + + # Launch a SimpleHTTPServer (serves files in the current directory). + # Test cases will request metadata and target files that have been + # pre-generated in 'tuf/tests/repository_data', which will be served by the + # SimpleHTTPServer launched here. The test cases of this unit test assume + # the pre-generated metadata files have a specific structure, such + # as a delegated role 'targets/role1', three target files, five key files, + # etc. + self.SERVER_PORT = 30001 + self.SERVER_PORT2 = 30002 + + command = ['simple_server.py', str(self.SERVER_PORT)] + command2 = ['simple_server.py', str(self.SERVER_PORT2)] + + self.server_process = subprocess.Popen(command, stderr=subprocess.PIPE, + cwd=self.repository_directory) + + logger.debug('Server process started.') + logger.debug('Server process id: ' + str(self.server_process.pid)) + logger.debug('Serving on port: ' + str(self.SERVER_PORT)) + + self.server_process2 = subprocess.Popen(command2, stderr=subprocess.PIPE, + cwd=self.repository_directory2) + + logger.debug('Server process 2 started.') + logger.debug('Server 2 process id: ' + str(self.server_process2.pid)) + logger.debug('Server 2 serving on port: ' + str(self.SERVER_PORT2)) + self.url = 'http://localhost:' + str(self.SERVER_PORT) + os.path.sep + self.url2 = 'http://localhost:' + str(self.SERVER_PORT2) + os.path.sep + + # NOTE: The following error is raised if a delay is not applied: + # + time.sleep(.8) + + url_prefix = 'http://localhost:' + str(self.SERVER_PORT) + url_prefix2 = 'http://localhost:' + str(self.SERVER_PORT2) + + self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, + 'metadata_path': 'metadata', 'targets_path': 'targets', + 'confined_target_dirs': ['']}} + + self.repository_mirrors2 = {'mirror1': {'url_prefix': url_prefix2, + 'metadata_path': 'metadata', 'targets_path': 'targets', + 'confined_target_dirs': ['']}} + + # Create the repository instances. The test cases will use these client + # updaters to refresh metadata, fetch target files, etc. + self.repository_updater = updater.Updater(repository_name, + self.repository_mirrors) + self.repository_updater2 = updater.Updater(repository_name2, + self.repository_mirrors2) + + # Create a multi-repository updater instance. The test cases will use it + # to request targetinfo from the repositories listed in the map file. + self.multi_repo_updater = updater.MultiRepoUpdater(self.map_file) + + # Metadata role keys are needed by the test cases to make changes to the + # repository (e.g., adding a new target file to 'targets.json' and then + # requesting a refresh()). + self.role_keys = _load_role_keys(self.keystore_directory) + + + + def tearDown(self): + # Modified_TestCase.tearDown() automatically deletes temporary files and + # directories that may have been created during each test case. + unittest_toolbox.Modified_TestCase.tearDown(self) + + # Remove the temporary repository directory, which should contain all the + # metadata, targets, and key files generated by the test cases. + shutil.rmtree(self.temporary_directory) + + # Kill the SimpleHTTPServer process.
+ if self.server_process.returncode is None: + logger.info('Server process ' + str(self.server_process.pid) + ' terminated.') + self.server_process.kill() + + if self.server_process2.returncode is None: + logger.info('Server 2 process ' + str(self.server_process2.pid) + ' terminated.') + self.server_process2.kill() + + # updater.Updater() populates the roledb with the name "test_repository1" + tuf.roledb.clear_roledb(clear_all=True) + tuf.keydb.clear_keydb(clear_all=True) + + + + # UNIT TESTS. + def test__init__(self): + # The client's repository requires a metadata directory (and the 'current' + # and 'previous' sub-directories), and at least the 'root.json' file. + # setUp(), called before each test case, instantiates the required updater + # objects and keys. The needed objects/data are available in + # 'self.repository_updater', 'self.client_directory', etc. + + # Test: Invalid arguments. + # Invalid 'updater_name' argument. String expected. + self.assertRaises(securesystemslib.exceptions.FormatError, + updater.MultiRepoUpdater, 8) + + # Restore 'tuf.settings.repositories_directory' to the original client + # directory. + tuf.settings.repositories_directory = self.client_directory + + # Test for a non-existent map file. + self.assertRaises(tuf.exceptions.Error, updater.MultiRepoUpdater, + 'non-existent.json') + + # Test for a map file that doesn't contain the required fields. + root_filepath = os.path.join( + self.repository_directory, 'metadata', 'root.json') + self.assertRaises(securesystemslib.exceptions.FormatError, + updater.MultiRepoUpdater, root_filepath) + + # Test for a valid instantiation. + map_file = os.path.join(self.client_directory, 'map.json') + multi_repo_updater = updater.MultiRepoUpdater(map_file) + + + + def test__target_matches_path_pattern(self): + map_file = os.path.join(self.client_directory, 'map.json') + multi_repo_updater = updater.MultiRepoUpdater(map_file) + paths = ['foo*.tgz', 'bar*.tgz', 'file1.txt'] + self.assertTrue( + multi_repo_updater._target_matches_path_pattern('bar-1.0.tgz', paths)) + self.assertTrue( + multi_repo_updater._target_matches_path_pattern('file1.txt', paths)) + self.assertFalse( + multi_repo_updater._target_matches_path_pattern('baz-1.0.tgz', paths)) + + + + def test_get_valid_targetinfo(self): + map_file = os.path.join(self.client_directory, 'map.json') + multi_repo_updater = updater.MultiRepoUpdater(map_file) + + # Verify the multi-repo updater refuses to save targetinfo if + # required local repositories are missing. + repo_dir = os.path.join(tuf.settings.repositories_directory, + 'test_repository1') + backup_repo_dir = os.path.join(tuf.settings.repositories_directory, + 'test_repository1.backup') + shutil.move(repo_dir, backup_repo_dir) + self.assertRaises(tuf.exceptions.Error, + multi_repo_updater.get_valid_targetinfo, 'file3.txt') + + # Restore the client's repository directory. + shutil.move(backup_repo_dir, repo_dir) + + # Verify that the Root file must exist. + root_filepath = os.path.join(repo_dir, 'metadata', 'current', 'root.json') + backup_root_filepath = root_filepath + '.backup' + shutil.move(root_filepath, backup_root_filepath) + self.assertRaises(tuf.exceptions.Error, + multi_repo_updater.get_valid_targetinfo, 'file3.txt') + + # Restore the Root file. + shutil.move(backup_root_filepath, root_filepath) + + # Test that the first mapping is skipped if it's irrelevant to the target + # file.
+ self.assertRaises(tuf.exceptions.UnknownTargetError, + multi_repo_updater.get_valid_targetinfo, 'non-existent.txt') + + # Verify that a targetinfo is not returned for a non-existent target. + multi_repo_updater.map_file['mapping'][1]['terminating'] = False + self.assertRaises(tuf.exceptions.UnknownTargetError, + multi_repo_updater.get_valid_targetinfo, 'non-existent.txt') + multi_repo_updater.map_file['mapping'][1]['terminating'] = True + + # Test for a mapping that sets terminating = True, and that appears before + # the final mapping. + multi_repo_updater.map_file['mapping'][0]['terminating'] = True + self.assertRaises(tuf.exceptions.UnknownTargetError, + multi_repo_updater.get_valid_targetinfo, 'bad3.txt') + multi_repo_updater.map_file['mapping'][0]['terminating'] = False + + # Test for the case where multiple repos sign for the same target. + valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file1.txt') + + multi_repo_updater.map_file['mapping'][0]['threshold'] = 2 + valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file1.txt') + + # Verify that valid targetinfo is matched for two repositories that provide + # different custom fields. Make sure to set the 'match_custom_field' + # argument to 'False' when calling get_valid_targetinfo(). + repository = repo_tool.load_repository(self.repository_directory2) + target1 = os.path.join(self.repository_directory2, 'targets', 'file1.txt') + repository.targets.remove_target(target1) + custom_field = {"custom": "my_custom_data"} + repository.targets.add_target(target1, custom_field) + repository.targets.load_signing_key(self.role_keys['targets']['private']) + repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) + repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) + repository.writeall() + + # Move the staged metadata to the "live" metadata. + shutil.rmtree(os.path.join(self.repository_directory2, 'metadata')) + shutil.copytree(os.path.join(self.repository_directory2, 'metadata.staged'), + os.path.join(self.repository_directory2, 'metadata')) + + # Do we get the expected match for the two targetinfo that only differ + # by the custom field? + valid_targetinfo = multi_repo_updater.get_valid_targetinfo( + 'file1.txt', match_custom_field=False) + + # Verify the case where two repositories provide different targetinfo. + # Modify file1.txt so that different length and hashes are reported by the + # two repositories. + repository = repo_tool.load_repository(self.repository_directory2) + target1 = os.path.join(self.repository_directory2, 'targets', 'file1.txt') + with open(target1, 'ab') as file_object: + file_object.write(b'append extra text') + repository.targets.remove_target(target1) + repository.targets.add_target(target1) + repository.targets.load_signing_key(self.role_keys['targets']['private']) + repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) + repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) + repository.writeall() + + # Move the staged metadata to the "live" metadata. + shutil.rmtree(os.path.join(self.repository_directory2, 'metadata')) + shutil.copytree(os.path.join(self.repository_directory2, 'metadata.staged'), + os.path.join(self.repository_directory2, 'metadata')) + + # Ensure the threshold is modified to 2 (assumed to be 1, by default) and + # verify that get_valid_targetinfo() raises an UnknownTargetError + # despite both repos signing for file1.txt.
+ multi_repo_updater.map_file['mapping'][0]['threshold'] = 2 + self.assertRaises(tuf.exceptions.UnknownTargetError, + multi_repo_updater.get_valid_targetinfo, 'file1.txt') + + + + + + def test_get_updater(self): + map_file = os.path.join(self.client_directory, 'map.json') + multi_repo_updater = updater.MultiRepoUpdater(map_file) + + # Test for a non-existent repository name. + self.assertEqual(None, multi_repo_updater.get_updater('bad_repo_name')) + + # Test get_updater indirectly via the "private" _update_from_repository(). + self.assertRaises(tuf.exceptions.Error, multi_repo_updater._update_from_repository, 'bad_repo_name', 'file3.txt') + + # Test for a repository that is listed in the map file but cannot be + # reached. + multi_repo_updater.map_file['repositories']['bad_repo_name'] = ['https://bogus:30002'] + self.assertEqual(None, multi_repo_updater.get_updater('bad_repo_name')) + + + def _load_role_keys(keystore_directory): # Populating 'self.role_keys' by importing the required public and private diff --git a/tests/test_updater_root_rotation_integration.py b/tests/test_updater_root_rotation_integration.py index 32a1e0ca72..bc806945cd 100755 --- a/tests/test_updater_root_rotation_integration.py +++ b/tests/test_updater_root_rotation_integration.py @@ -120,7 +120,7 @@ def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) - self.repository_name = 'test_repository' + self.repository_name = 'test_repository1' # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 295b64a347..f406574bbb 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -118,6 +118,7 @@ import shutil import time import fnmatch +import copy import tuf import tuf.download @@ -161,6 +162,388 @@ # For example, "1.4.3" and "1.0.0" are supported. "2.0.0" is not supported. SUPPORTED_MAJOR_VERSION = 1 + +class MultiRepoUpdater(object): + """ + <Purpose> + Provide a way for clients to request a target file from multiple + repositories. Which repositories to query is determined by the map + file (i.e., map.json). + + See TAP 4 for more information on the map file and how to request updates + from multiple repositories. TAP 4 describes how users may specify that a + particular threshold of repositories be used for some targets, while a + different threshold of repositories be used for others. + + <Arguments> + map_file: + The path of the map file. The map file is needed to determine which + repositories to query given a target file. + + <Exceptions> + securesystemslib.exceptions.FormatError, if the map file is improperly + formatted. + + tuf.exceptions.Error, if the map file cannot be loaded. + + <Side Effects> + None. + + <Returns> + None. + """ + + def __init__(self, map_file): + # Is 'map_file' a path? If not, raise + # 'securesystemslib.exceptions.FormatError'. The actual content of the map + # file is validated later on in this method. + securesystemslib.formats.PATH_SCHEMA.check_match(map_file) + + # A dictionary mapping repositories to TUF updaters. + self.repository_names_to_updaters = {} + + try: + # The map file dictionary that associates targets with repositories. + self.map_file = securesystemslib.util.load_json_file(map_file) + + except (securesystemslib.exceptions.Error, IOError) as e: + raise tuf.exceptions.Error('Cannot load the map file: ' + str(e)) + + # Raise securesystemslib.exceptions.FormatError if the map file is + # improperly formatted.
+ tuf.formats.MAPFILE_SCHEMA.check_match(self.map_file) + + # Save the "repositories" entry of the map file, with the following + # example format: + # + # "repositories": { + # "Django": ["https://djangoproject.com/"], + # "PyPI": ["https://pypi.python.org/"] + # } + self.repository_names_to_mirrors = self.map_file['repositories'] + + + + def get_valid_targetinfo(self, target_filename, match_custom_field=True): + """ + <Purpose> + Get valid targetinfo, if any, for the given 'target_filename'. The map + file controls the targetinfo returned (see TAP 4). Return a dict of the + form {updater1: targetinfo, updater2: targetinfo, ...}, where the dict + keys are updater objects, and the dict values the matching targetinfo for + 'target_filename'. + + <Arguments> + target_filename: + The relative path of the target file to update. + + match_custom_field: + Boolean that indicates whether the optional custom field in targetinfo + should match across the targetinfo provided by the threshold of + repositories. + + <Exceptions> + tuf.exceptions.FormatError, if the argument is improperly formatted. + + tuf.exceptions.Error, if the required local metadata directory or the + Root file does not exist. + + tuf.exceptions.UnknownTargetError, if the repositories in the map file do + not agree on the target, or none of them have signed for the target. + + <Side Effects> + None. + + <Returns> + A dict of the form: {updater1: targetinfo, updater2: targetinfo, ...}. + The targetinfo (conformant with tuf.formats.TARGETINFO_SCHEMA) is for + 'target_filename'. + """ + + # Is the argument properly formatted? If not, raise + # 'tuf.exceptions.FormatError'. + tuf.formats.RELPATH_SCHEMA.check_match(target_filename) + + # TAP 4 requires that the following attributes be present in mappings: + # "paths", "repositories", "terminating", and "threshold". + tuf.formats.MAPPING_SCHEMA.check_match(self.map_file['mapping']) + + # Set the top-level directory containing the metadata for each repository. + repositories_directory = tuf.settings.repositories_directory + + # Verify that the required local directories exist for each repository. + self._verify_metadata_directories(repositories_directory) + + # Iterate mappings. + # [{"paths": [], "repositories": [], "terminating": Boolean, "threshold": + # NUM}, ...] + for mapping in self.map_file['mapping']: + + logger.debug('Interrogating mapping: ' + repr(mapping)) + if not self._target_matches_path_pattern( + target_filename, mapping['paths']): + # The mapping is irrelevant to the target file. Try the next one, if + # any. + continue + + # The mapping is relevant to the target... + else: + # Do the repositories in the mapping provide a threshold of matching + # targetinfo? + valid_targetinfo = self._matching_targetinfo(target_filename, + mapping, match_custom_field) + + if valid_targetinfo: + return valid_targetinfo + + else: + # If we are here, it means either (1) the mapping is irrelevant to + # the target, (2) the targets were missing from all repositories in + # this mapping, or (3) the targets on all repositories did not match. + # Whatever the case may be, are we allowed to continue to the next + # mapping? Let's check the terminating entry! + if not mapping['terminating']: + logger.debug('The mapping was irrelevant to the target, and' + ' "terminating" was set to False. Trying the next mapping...') + continue + + else: + raise tuf.exceptions.UnknownTargetError('The repositories in the' + ' mapping do not agree on the target, or none of them have' + ' signed for the target, and "terminating" was set to True.') + + # If we are here, it means either there were no mappings, or none of the + # mappings provided the target. + logger.debug('Did not find valid targetinfo for ' + repr(target_filename)) + raise tuf.exceptions.UnknownTargetError('The repositories in the map' + ' file do not agree on the target, or none of them have signed' + ' for the target.') + + + + + + def _verify_metadata_directories(self, repositories_directory): + # Iterate 'self.repository_names_to_mirrors' and verify that the expected + # local files and directories exist. TAP 4 requires a separate local + # directory for each repository. + for repository_name in self.repository_names_to_mirrors: + + logger.debug('Interrogating repository: ' + repr(repository_name)) + # Each repository must cache its metadata in a separate location. + repository_directory = os.path.join(repositories_directory, + repository_name) + + if not os.path.isdir(repository_directory): + raise tuf.exceptions.Error('The metadata directory' + ' for ' + repr(repository_name) + ' must exist' + ' at ' + repr(repository_directory)) + + else: + logger.debug('Found local directory for ' + repr(repository_name)) + + # The latest known root metadata file must also exist on disk. + root_file = os.path.join( + repository_directory, 'metadata', 'current', 'root.json') + + if not os.path.isfile(root_file): + raise tuf.exceptions.Error( + 'The Root file must exist at ' + repr(root_file)) + + else: + logger.debug('Found local Root file at ' + repr(root_file)) + + + + + + def _matching_targetinfo( + self, target_filename, mapping, match_custom_field=True): + valid_targetinfo = {} + + # Retrieve the targetinfo from each repository using the underlying + # Updater() instance. + for repository_name in mapping['repositories']: + logger.debug('Retrieving targetinfo for ' + repr(target_filename) + + ' from repository...') + + try: + targetinfo, updater = self._update_from_repository( + repository_name, target_filename) + + except (tuf.exceptions.UnknownTargetError, tuf.exceptions.Error): + continue + + valid_targetinfo[updater] = targetinfo + + matching_targetinfo = {} + logger.debug('Verifying that a threshold of targetinfo match...') + + # Iterate 'valid_targetinfo', counting how many of the retrieved + # targetinfo match 'targetinfo' (the most recently retrieved one). + # 'targetinfo' is returned once the number of matches reaches the required + # threshold. For example, given the retrieved targetinfo + # [A, B, C, B, A, C] + # and a threshold of 2, targetinfo C is returned. + for valid_updater, compared_targetinfo in six.iteritems(valid_targetinfo): + + if not self._targetinfo_match( + targetinfo, compared_targetinfo, match_custom_field): + continue + + else: + + matching_targetinfo[valid_updater] = targetinfo + + if not len(matching_targetinfo) >= mapping['threshold']: + continue + + else: + logger.debug('Found a threshold of matching targetinfo!') + # We now have a targetinfo (that matches across a threshold of + # repositories as instructed by the map file), along with the + # updaters that sign for it.
+ logger.debug( + 'Returning updaters for targetinfo: ' + repr(targetinfo)) + + return matching_targetinfo + + return None + + + + + + def _targetinfo_match(self, targetinfo1, targetinfo2, match_custom_field=True): + if match_custom_field: + return (targetinfo1 == targetinfo2) + + else: + targetinfo1_without_custom = copy.deepcopy(targetinfo1) + targetinfo2_without_custom = copy.deepcopy(targetinfo2) + targetinfo1_without_custom['fileinfo'].pop('custom', None) + targetinfo2_without_custom['fileinfo'].pop('custom', None) + + return (targetinfo1_without_custom == targetinfo2_without_custom) + + + + + + def _target_matches_path_pattern(self, target_filename, path_patterns): + for path_pattern in path_patterns: + logger.debug('Interrogating pattern ' + repr(path_pattern) + ' for' + ' target: ' + repr(target_filename)) + + if fnmatch.fnmatch(target_filename, path_pattern): + logger.debug('Found a match for ' + repr(target_filename)) + return True + + else: + logger.debug('Continue searching for relevant paths.') + continue + + # If we are here, then none of the paths are relevant to the target. + logger.debug('None of the paths are relevant.') + return False + + + + + + + def get_updater(self, repository_name): + """ + <Purpose> + Get the updater instance corresponding to 'repository_name'. + + <Arguments> + repository_name: + The name of the repository as it appears in the map file. For example, + "Django" and "PyPI" in the "repositories" entry of the map file. + + "repositories": { + "Django": ["https://djangoproject.com/"], + "PyPI": ["https://pypi.python.org/"] + } + + <Exceptions> + tuf.exceptions.FormatError, if any of the arguments are improperly + formatted. + + <Side Effects> + None. + + <Returns> + Returns the Updater() instance for 'repository_name'. If the instance + does not exist, return None. + """ + + # Are the arguments properly formatted? If not, raise + # 'tuf.exceptions.FormatError'. + tuf.formats.NAME_SCHEMA.check_match(repository_name) + + updater = self.repository_names_to_updaters.get(repository_name) + + if not updater: + + if repository_name not in self.repository_names_to_mirrors: + return None + + else: + # Create the repository mirrors object needed by + # tuf.client.updater.Updater(). Each 'repository_name' can have more + # than one mirror. + mirrors = {} + + for url in self.repository_names_to_mirrors[repository_name]: + mirrors[url] = { + 'url_prefix': url, + 'metadata_path': 'metadata', + 'targets_path': 'targets', + 'confined_target_dirs': ['']} + + try: + # NOTE: State (e.g., keys) should NOT be shared across different + # updater instances. + logger.debug('Adding updater for ' + repr(repository_name)) + updater = tuf.client.updater.Updater(repository_name, mirrors) + + except Exception: + return None + + else: + self.repository_names_to_updaters[repository_name] = updater + + else: + logger.debug('Found an updater for ' + repr(repository_name)) + + # Ensure the updater's metadata is the latest before returning it. + updater.refresh() + return updater + + + + + + def _update_from_repository(self, repository_name, target_filename): + + updater = self.get_updater(repository_name) + + if not updater: + raise tuf.exceptions.Error( + 'Cannot load updater for ' + repr(repository_name)) + + else: + # Get one valid target info from the Updater object. + # 'tuf.exceptions.UnknownTargetError' is raised by + # get_one_valid_targetinfo if a valid target cannot be found.
+ return updater.get_one_valid_targetinfo(target_filename), updater + + + + + class Updater(object): """ @@ -433,7 +816,8 @@ def _load_metadata_from_file(self, metadata_set, metadata_role): # Ensure we have a valid metadata set. if metadata_set not in ['current', 'previous']: - raise securesystemslib.exceptions.Error('Invalid metadata set: ' + repr(metadata_set)) + raise securesystemslib.exceptions.Error( + 'Invalid metadata set: ' + repr(metadata_set)) # Save and construct the full metadata path. metadata_directory = self.metadata_directory[metadata_set] @@ -445,7 +829,8 @@ def _load_metadata_from_file(self, metadata_set, metadata_role): # Load the file. The loaded object should conform to # 'tuf.formats.SIGNABLE_SCHEMA'. try: - metadata_signable = securesystemslib.util.load_json_file(metadata_filepath) + metadata_signable = securesystemslib.util.load_json_file( + metadata_filepath) # Although the metadata file may exist locally, it may not # be a valid json file. On the next refresh cycle, it will be @@ -593,7 +978,7 @@ def _import_delegations(self, parent_role): except tuf.exceptions.RoleAlreadyExistsError: logger.warning('Role already exists: ' + rolename) - except: + except Exception: logger.exception('Failed to add delegated role: ' + repr(rolename) + '.') raise @@ -653,7 +1038,8 @@ def refresh(self, unsafely_update_root_if_necessary=True): # number of objects and object types, and that all dict # keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(unsafely_update_root_if_necessary) + securesystemslib.formats.BOOLEAN_SCHEMA.check_match( + unsafely_update_root_if_necessary) # Update the top-level metadata. The _update_metadata_if_changed() and # _update_metadata() calls below do NOT perform an update if there @@ -690,7 +1076,7 @@ def refresh(self, unsafely_update_root_if_necessary=True): # fileinfo referenced there matches what was fetched earlier in # _update_root_metadata() or make another attempt to download root.json. self._update_metadata_if_changed('snapshot', - referenced_metadata='timestamp') + referenced_metadata='timestamp') self._update_metadata_if_changed('targets') @@ -720,11 +1106,11 @@ def _update_root_metadata(self, current_root_metadata): """ # Retrieve the latest, remote root.json. - latest_root_metadata_file = \ - self._get_metadata_file('root', 'root.json', DEFAULT_ROOT_UPPERLENGTH, - None) - latest_root_metadata = \ - securesystemslib.util.load_json_string(latest_root_metadata_file.read().decode('utf-8')) + latest_root_metadata_file = self._get_metadata_file( + 'root', 'root.json', DEFAULT_ROOT_UPPERLENGTH, None) + + latest_root_metadata = securesystemslib.util.load_json_string( + latest_root_metadata_file.read().decode('utf-8')) next_version = current_root_metadata['version'] + 1 @@ -755,8 +1141,9 @@ def _check_hashes(self, file_object, trusted_hashes): file_object: - A 'securesystemslib.util.TempFile' file-like object. 'file_object' ensures that a - read() without a size argument properly reads the entire file. + A 'securesystemslib.util.TempFile' file-like object. 'file_object' + ensures that a read() without a size argument properly reads the entire + file. trusted_hashes: A dictionary with hash-algorithm names as keys and hashes as dict values. @@ -782,9 +1169,12 @@ def _check_hashes(self, file_object, trusted_hashes): # Raise an exception if any of the hashes are incorrect.
if trusted_hash != computed_hash: - raise securesystemslib.exceptions.BadHashError(trusted_hash, computed_hash) + raise securesystemslib.exceptions.BadHashError(trusted_hash, + computed_hash) + else: - logger.info('The file\'s ' + algorithm + ' hash is correct: ' + trusted_hash) + logger.info('The file\'s ' + algorithm + ' hash is' + ' correct: ' + trusted_hash) @@ -818,8 +1208,9 @@ def _hard_check_file_length(self, file_object, trusted_file_length): None. """ - # Read the entire contents of 'file_object', a 'securesystemslib.util.TempFile' file-like - # object that ensures the entire file is read. + # Read the entire contents of 'file_object', a + # 'securesystemslib.util.TempFile' file-like object that ensures the entire + # file is read. observed_length = len(file_object.read()) # Return and log a message if the length 'file_object' is equal to @@ -828,10 +1219,11 @@ def _hard_check_file_length(self, file_object, trusted_file_length): # file length. if observed_length != trusted_file_length: raise tuf.exceptions.DownloadLengthMismatchError(trusted_file_length, - observed_length) + observed_length) + else: - logger.debug('Observed length ('+str(observed_length)+\ - ') == trusted length ('+str(trusted_file_length)+')') + logger.debug('Observed length (' + str(observed_length) +\ + ') == trusted length (' + str(trusted_file_length) + ')') @@ -877,10 +1269,11 @@ def _soft_check_file_length(self, file_object, trusted_file_length): # ensures that an upper bound restricts how large a file is downloaded. if observed_length > trusted_file_length: raise tuf.exceptions.DownloadLengthMismatchError(trusted_file_length, - observed_length) + observed_length) + else: - logger.debug('Observed length ('+str(observed_length)+\ - ') <= trusted length ('+str(trusted_file_length)+')') + logger.debug('Observed length (' + str(observed_length) +\ + ') <= trusted length (' + str(trusted_file_length) + ')') @@ -889,9 +1282,9 @@ def _soft_check_file_length(self, file_object, trusted_file_length): def _get_target_file(self, target_filepath, file_length, file_hashes): """ - Non-public method that safely (i.e., the file length and hash are strictly - equal to the trusted) downloads a target file up to a certain length, and - checks its hashes thereafter. + Non-public method that safely (i.e., the file length and hash are + strictly equal to the trusted) downloads a target file up to a certain + length, and checks its hashes thereafter. target_filepath: @@ -943,7 +1336,7 @@ def verify_target_file(target_file_object): def _verify_uncompressed_metadata_file(self, metadata_file_object, - metadata_role): + metadata_role): """ Non-public method that verifies an uncompressed metadata file. An @@ -952,8 +1345,9 @@ def _verify_uncompressed_metadata_file(self, metadata_file_object, metadata_file_object: - A 'securesystemslib.util.TempFile' instance containing the metadata file. - 'metadata_file_object' ensures the entire file is returned with read(). + A 'securesystemslib.util.TempFile' instance containing the metadata + file. 'metadata_file_object' ensures the entire file is returned with + read(). metadata_role: The role name of the metadata (e.g., 'root', 'targets', @@ -1004,8 +1398,8 @@ def _verify_uncompressed_metadata_file(self, metadata_file_object, # metadata. # Verify the signature on the downloaded metadata object. 
- - valid = tuf.sig.verify(metadata_signable, metadata_role, self.repository_name) + valid = tuf.sig.verify(metadata_signable, metadata_role, + self.repository_name) if not valid: raise securesystemslib.exceptions.BadSignatureError(metadata_role) @@ -1049,11 +1443,13 @@ def _get_metadata_file(self, metadata_role, remote_filename, file and returned. - A 'securesystemslib.util.TempFile' file-like object containing the metadata. + A 'securesystemslib.util.TempFile' file-like object containing the + metadata. """ file_mirrors = tuf.mirrors.get_list_of_mirrors('meta', remote_filename, - self.mirrors) + self.mirrors) + # file_mirror (URL): error (Exception) file_mirror_errors = {} file_object = None @@ -1061,7 +1457,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, for file_mirror in file_mirrors: try: file_object = tuf.download.unsafe_download(file_mirror, - upperbound_filelength) + upperbound_filelength) # Verify 'file_object' according to the callable function. # 'file_object' is also verified if decompressed above (i.e., the @@ -1100,7 +1496,8 @@ def _get_metadata_file(self, metadata_role, remote_filename, ' number MUST be: ' + repr(expected_version)) # The caller does not know which version to download. Verify that the - # downloaded version is at least greater than the one locally available. + # downloaded version is at least greater than the one locally + # available. else: # Verify that the version number of the locally stored # 'timestamp.json', if available, is less than what was downloaded. @@ -1112,8 +1509,8 @@ def _get_metadata_file(self, metadata_role, remote_filename, self.metadata['current'][metadata_role]['version'] if version_downloaded < current_version: - raise tuf.exceptions.ReplayedMetadataError(metadata_role, version_downloaded, - current_version) + raise tuf.exceptions.ReplayedMetadataError(metadata_role, + version_downloaded, current_version) except KeyError: logger.info(metadata_role + ' not available locally.') @@ -1159,8 +1556,8 @@ def _verify_root_chain_link(self, rolename, current_root_metadata, - def _get_file(self, filepath, verify_file_function, file_type, - file_length, download_safely=True): + def _get_file(self, filepath, verify_file_function, file_type, file_length, + download_safely=True): """ Non-public method that tries downloading, up to a certain length, a @@ -1201,12 +1598,13 @@ def _get_file(self, filepath, verify_file_function, file_type, file and returned. - A 'securesystemslib.util.TempFile' file-like object containing the metadata - or target. + A 'securesystemslib.util.TempFile' file-like object containing the + metadata or target. """ file_mirrors = tuf.mirrors.get_list_of_mirrors(file_type, filepath, - self.mirrors) + self.mirrors) + # file_mirror (URL): error (Exception) file_mirror_errors = {} file_object = None @@ -1218,11 +1616,10 @@ def _get_file(self, filepath, verify_file_function, file_type, # other one for "unsafe" download? This should induce safer and more # readable code. if download_safely: - file_object = tuf.download.safe_download(file_mirror, - file_length) + file_object = tuf.download.safe_download(file_mirror, file_length) + else: - file_object = tuf.download.unsafe_download(file_mirror, - file_length) + file_object = tuf.download.unsafe_download(file_mirror, file_length) # Verify 'file_object' according to the callable function. # 'file_object' is also verified if decompressed above (i.e., the @@ -1287,7 +1684,8 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None): None. 
""" - # Construct the metadata filename as expected by the download/mirror modules. + # Construct the metadata filename as expected by the download/mirror + # modules. metadata_filename = metadata_role + '.json' metadata_filename = metadata_filename @@ -1313,7 +1711,8 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None): if self.consistent_snapshot and version: filename_version = version dirname, basename = os.path.split(remote_filename) - remote_filename = os.path.join(dirname, str(filename_version) + '.' + basename) + remote_filename = os.path.join( + dirname, str(filename_version) + '.' + basename) metadata_file_object = \ self._get_metadata_file(metadata_role, remote_filename, @@ -1328,7 +1727,7 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None): securesystemslib.util.ensure_parent_dir(current_filepath) previous_filepath = os.path.join(self.metadata_directory['previous'], - metadata_filename) + metadata_filename) previous_filepath = os.path.abspath(previous_filepath) if os.path.exists(current_filepath): @@ -1379,10 +1778,10 @@ def _update_metadata_if_changed(self, metadata_role, Non-public method that updates the metadata for 'metadata_role' if it has changed. With the exception of the 'timestamp' role, all the top-level roles are updated by this method. The 'timestamp' role is always - downloaded from a mirror without first checking if it has been updated; it - is updated in refresh() by calling _update_metadata('timestamp'). This - method is also called for delegated role metadata, which are referenced by - 'snapshot'. + downloaded from a mirror without first checking if it has been updated; + it is updated in refresh() by calling _update_metadata('timestamp'). + This method is also called for delegated role metadata, which are + referenced by 'snapshot'. If the metadata needs to be updated but an update cannot be obtained, this method will delete the file (with the exception of the root @@ -1413,8 +1812,8 @@ def _update_metadata_if_changed(self, metadata_role, tuf.exceptions.NoWorkingMirrorError: - If 'metadata_role' could not be downloaded after determining that it had - changed. + If 'metadata_role' could not be downloaded after determining that it + had changed. tuf.exceptions.RepositoryError: If the referenced metadata is missing. @@ -1453,11 +1852,10 @@ def _update_metadata_if_changed(self, metadata_role, # metadata. The metadata is considered updated if its version number is # strictly greater than its currently trusted version number. expected_versioninfo = self.metadata['current'][referenced_metadata] \ - ['meta'] \ - [metadata_filename] + ['meta'][metadata_filename] if not self._versioninfo_has_been_updated(metadata_filename, - expected_versioninfo): + expected_versioninfo): logger.info(repr(metadata_filename) + ' up-to-date.') # Since we have not downloaded a new version of this metadata, we should @@ -1465,7 +1863,7 @@ def _update_metadata_if_changed(self, metadata_role, # This raises tuf.exceptions.ExpiredMetadataError if the metadata we have # is expired. Resolves issue #322. 
self._ensure_not_expired(self.metadata['current'][metadata_role], - metadata_role) + metadata_role) # TODO: If 'metadata_role' is root or snapshot, we should verify that # root's hash matches what's in snapshot, and that snapshot hash matches @@ -1493,7 +1891,7 @@ def _update_metadata_if_changed(self, metadata_role, self._update_metadata(metadata_role, upperbound_filelength, expected_versioninfo['version']) - except: + except Exception: # The current metadata we have is not current but we couldn't get new # metadata. We shouldn't use the old metadata anymore. This will get rid # of in-memory knowledge of the role and delegated roles, but will leave @@ -1503,7 +1901,8 @@ def _update_metadata_if_changed(self, metadata_role, # need to, but we need to check the trust implications of the current # implementation. self._delete_metadata(metadata_role) - logger.error('Metadata for ' + repr(metadata_role) + ' cannot be updated.') + logger.error('Metadata for ' + repr(metadata_role) + ' cannot' + ' be updated.') raise else: @@ -1568,6 +1967,10 @@ def _versioninfo_has_been_updated(self, metadata_filename, new_versioninfo): current_versioninfo = self.versioninfo[metadata_filename] + logger.debug('New version for ' + repr(metadata_filename) + + ': ' + repr(new_versioninfo['version']) + '. Old version: ' + + repr(current_versioninfo['version'])) + if new_versioninfo['version'] > current_versioninfo['version']: return True @@ -1609,7 +2012,8 @@ def _update_versioninfo(self, metadata_filename): # Save the path to the current metadata file for 'metadata_filename'. current_filepath = os.path.join(self.metadata_directory['current'], - metadata_filename) + metadata_filename) + # If the path is invalid, simply return and leave versioninfo unset. if not os.path.exists(current_filepath): self.versioninfo[metadata_filename] = None @@ -1634,7 +2038,8 @@ def _update_versioninfo(self, metadata_filename): # client's copy of snapshot.json. try: timestamp_version_number = self.metadata['current']['snapshot']['version'] - trusted_versioninfo = tuf.formats.make_versioninfo(timestamp_version_number) + trusted_versioninfo = tuf.formats.make_versioninfo( + timestamp_version_number) except KeyError: trusted_versioninfo = \ @@ -1765,7 +2170,8 @@ def _update_fileinfo(self, metadata_filename): # Save the path to the current metadata file for 'metadata_filename'. current_filepath = os.path.join(self.metadata_directory['current'], - metadata_filename) + metadata_filename) + # If the path is invalid, simply return and leave fileinfo unset. if not os.path.exists(current_filepath): self.fileinfo[metadata_filename] = None @@ -1773,7 +2179,8 @@ def _update_fileinfo(self, metadata_filename): # Extract the file information from the actual file and save it # to the fileinfo store. - file_length, hashes = securesystemslib.util.get_file_details(current_filepath) + file_length, hashes = securesystemslib.util.get_file_details( + current_filepath) metadata_fileinfo = tuf.formats.make_fileinfo(file_length, hashes) self.fileinfo[metadata_filename] = metadata_fileinfo @@ -2264,10 +2671,11 @@ def _preorder_depth_first_walk(self, target_filepath): number_of_delegations = tuf.settings.MAX_NUMBER_OF_DELEGATIONS # Ensure the client has the most up-to-date version of 'targets.json'. - # Raise 'tuf.exceptions.NoWorkingMirrorError' if the changed metadata cannot be - # successfully downloaded and 'tuf.exceptions.RepositoryError' if the referenced - # metadata is missing. 
Target methods such as this one are called after - # the top-level metadata have been refreshed (i.e., updater.refresh()). + # Raise 'tuf.exceptions.NoWorkingMirrorError' if the changed metadata + # cannot be successfully downloaded and 'tuf.exceptions.RepositoryError' if + # the referenced metadata is missing. Target methods such as this one are + # called after the top-level metadata have been refreshed (i.e., + # updater.refresh()). self._update_metadata_if_changed('targets') # Preorder depth-first traversal of the graph of target delegations. @@ -2287,7 +2695,8 @@ def _preorder_depth_first_walk(self, target_filepath): # _refresh_targets_metadata() does not refresh 'targets.json', it # expects _update_metadata_if_changed() to have already refreshed it, # which this function has checked above. - self._refresh_targets_metadata(role_name, refresh_all_delegated_roles=False) + self._refresh_targets_metadata(role_name, + refresh_all_delegated_roles=False) role_metadata = current_metadata[role_name] targets = role_metadata['targets'] @@ -2332,8 +2741,8 @@ def _preorder_depth_first_walk(self, target_filepath): if target is None and number_of_delegations == 0 and len(role_names) > 0: logger.debug(repr(len(role_names)) + ' roles left to visit, ' + - 'but allowed to visit at most ' + - repr(tuf.settings.MAX_NUMBER_OF_DELEGATIONS) + ' delegations.') + 'but allowed to visit at most ' + + repr(tuf.settings.MAX_NUMBER_OF_DELEGATIONS) + ' delegations.') return target @@ -2721,14 +3130,14 @@ def download_target(self, target, destination_directory): # '_get_target_file()' checks every mirror and returns the first target # that passes verification. target_file_object = self._get_target_file(target_filepath, trusted_length, - trusted_hashes) + trusted_hashes) # We acquired a target file object from a mirror. Move the file into place # (i.e., locally to 'destination_directory'). Note: join() discards # 'destination_directory' if 'target_path' contains a leading path # separator (i.e., is treated as an absolute path). destination = os.path.join(destination_directory, - target_filepath.lstrip(os.sep)) + target_filepath.lstrip(os.sep)) destination = os.path.abspath(destination) target_dirpath = os.path.dirname(destination) diff --git a/tuf/formats.py b/tuf/formats.py index 329a59163f..b3df5d9855 100755 --- a/tuf/formats.py +++ b/tuf/formats.py @@ -258,6 +258,28 @@ # A list of TARGETINFO_SCHEMA. TARGETINFOS_SCHEMA = SCHEMA.ListOf(TARGETINFO_SCHEMA) +# A string representing a named object. +NAME_SCHEMA = SCHEMA.AnyString() + +# A dict of repository names to mirrors. +REPO_NAMES_TO_MIRRORS_SCHEMA = SCHEMA.DictOf( + key_schema = NAME_SCHEMA, + value_schema = SCHEMA.ListOf(securesystemslib.formats.URL_SCHEMA)) + +# An object containing the map file's "mapping" attribute. +MAPPING_SCHEMA = SCHEMA.ListOf(SCHEMA.Object( + paths = RELPATHS_SCHEMA, + repositories = SCHEMA.ListOf(NAME_SCHEMA), + terminating = BOOLEAN_SCHEMA, + threshold = THRESHOLD_SCHEMA)) + +# A dict containing the map file (named 'map.json', by default). The format of +# the map file is covered in TAP 4: Multiple repository consensus on entrusted +# targets. +MAPFILE_SCHEMA = SCHEMA.Object( + repositories = REPO_NAMES_TO_MIRRORS_SCHEMA, + mapping = MAPPING_SCHEMA) + # Like ROLEDICT_SCHEMA, except that ROLE_SCHEMA instances are stored in order.
ROLELIST_SCHEMA = SCHEMA.ListOf(ROLE_SCHEMA) diff --git a/tuf/scripts/simple_server.py b/tuf/scripts/simple_server.py new file mode 100755 index 0000000000..68b3ac6f6f --- /dev/null +++ b/tuf/scripts/simple_server.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright 2012 - 2017, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +""" +<Program Name> + simple_server.py + +<Author> + Konstantin Andrianov. + +<Started> + February 15, 2012. + +<Copyright> + See LICENSE for licensing information. + +<Purpose> + This is a basic server that was designed to be used in conjunction with + test_download.py to test the download.py module. + +<Reference> + SimpleHTTPServer: + https://docs.python.org/2/library/simplehttpserver.html +""" + +# Help with Python 3 compatibility, where the print statement is a function, an +# implicit relative import is invalid, and the '/' operator performs true +# division. Example: print 'hello world' raises a 'SyntaxError' exception. +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +import sys +import random + +import six + +PORT = 0 + +def _port_gen(): + return random.SystemRandom().randint(30000, 45000) + +if len(sys.argv) > 1: + try: + PORT = int(sys.argv[1]) + + # Enforce arbitrarily chosen port range. + if PORT < 30000 or PORT > 45000: + raise ValueError + + except ValueError: + PORT = _port_gen() + +else: + PORT = _port_gen() + + +if __name__ == '__main__': + + Handler = six.moves.SimpleHTTPServer.SimpleHTTPRequestHandler + httpd = six.moves.socketserver.TCPServer(('', PORT), Handler) + + httpd.serve_forever()
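Note: putting the new pieces together, a client might drive the MultiRepoUpdater roughly as follows. This is an illustrative sketch drawn from the tests above, not part of the patch: the repositories directory, map file path, target name, and destination directory are assumptions, and per-repository metadata caches are expected to already exist under tuf.settings.repositories_directory (one subdirectory per repository named in map.json, each with a metadata/current/root.json):

```
# Sketch: request a target through a threshold of repositories (TAP 4).
import six

import tuf.settings
import tuf.client.updater

# The per-repository metadata caches live under this directory.
tuf.settings.repositories_directory = '/path/to/repositories'  # hypothetical

# The map file determines which repositories to query for which targets.
multi_repo_updater = tuf.client.updater.MultiRepoUpdater(
    '/path/to/map.json')  # hypothetical map file location

# Returns {updater: targetinfo, ...} once a threshold of repositories
# agree on the target, per the "mapping" entries in map.json.
valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file3.txt')

for my_updater, my_targetinfo in six.iteritems(valid_targetinfo):
  my_updater.download_target(my_targetinfo, '/path/to/destination')
```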