From 28a60340ca1f97e4e6a2adc8a23ec80e1826ca65 Mon Sep 17 00:00:00 2001
From: Brice Schaffner
Date: Wed, 17 Mar 2021 13:01:29 +0100
Subject: [PATCH 001/105] Updated pylint and fixed duplicate-code, raise-missing-from

In PR #249, Django was updated, which triggered a lot of no-member pylint
errors when referencing Django models. This can be solved by updating pylint
to a version >= 2.6. Pylint 2.6 added a new raise-missing-from check which
flagged some errors in our current code. As this check is very useful (see
also https://stefan.sofa-rockers.org/2020/10/28/raise-from/), I fixed these
errors properly. Note that I used `from None` for the ValidationError cases:
these exceptions are caught and translated into HTTP 400 errors, so they
don't need any backtrace.

The pylint versions 2.7.0-2.7.2 fixed an issue with the similarities check,
where the check was not run at all when running pylint with --jobs=2 or more,
which is what we do. This revealed several duplicate-code issues, some of
them very relevant, like the duplicated pagination unittest code and the bbox
conversion to geometry that I moved into a function.

Unfortunately these pylint versions still suffer from a bug in the
similarities check: the min-similarity-lines config is ignored! In our case
the default value of 4 is too small and causes some false positives. Testing
pylint 2.6.2 with --jobs=1 showed that min-similarity-lines=8 produces no
false positives, so I changed it to this value, which seems suitable. We then
need to check in the next 2.7 release whether the bug is fixed, or whether we
need to either add that release to the ignore list or disable the
duplicate-code checker entirely.

I personally find this checker very valuable as it helps improve the code,
and I would like to keep it. Unfortunately, the only way to have it working
correctly at the moment would be to set --jobs=1, and then linting takes ~50s
instead of ~15s...
---
 .pylintrc | 4 +-
 Pipfile | 10 +-
 Pipfile.lock | 472 +++++------------
 app/config/settings_prod.py | 8 +-
 app/stac_api/collection_spatial_extent.py | 2 +-
 .../commands/profile_cursor_paginator.py | 1 +
 app/stac_api/managers.py | 17 +-
 app/stac_api/pagination.py | 2 +-
 app/stac_api/utils.py | 27 +
 app/stac_api/validators.py | 2 +-
 app/stac_api/validators_serializer.py | 16 +-
 app/tests/data_factory.py | 2 +-
 app/tests/sample_data/collection_samples.py | 3 +-
 app/tests/sample_data/item_samples.py | 3 +-
 app/tests/test_collections_endpoint.py | 29 --
 app/tests/test_generic_api.py | 169 ++++---
 app/tests/test_items_endpoint.py | 2 +-
 app/tests/test_serializer.py | 2 +-
 app/wsgi.py | 2 +-
 19 files changed, 288 insertions(+), 485 deletions(-)

diff --git a/.pylintrc b/.pylintrc
index 5322561d..871414b9 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -384,10 +384,10 @@ ignore-comments=yes
 ignore-docstrings=yes
 
 # Ignore imports when computing similarities.
-ignore-imports=no
+ignore-imports=yes
 
 # Minimum lines number of a similarity.
-min-similarity-lines=4 +min-similarity-lines=8 [VARIABLES] diff --git a/Pipfile b/Pipfile index faa566d5..0a9caf5b 100644 --- a/Pipfile +++ b/Pipfile @@ -5,14 +5,14 @@ verify_ssl = true [dev-packages] yapf = "~=0.30.0" -pylint = "~=2.5.3" isort = "~=4.3.21" -pylint-django = "~=2.3.0" -django-extensions = "~=3.0.9" -pip = "*" +pylint = "!=2.7.2,!=2.7.1,!=2.7.0" # These version of pylint have issues with similarities config +pylint-django = "*" +django-extensions = "*" django-debug-toolbar = "*" -mock = "==4.0.2" +pip = "*" tblib = "*" # needed for traceback when running tests in parallel +mock = "~=4.0.2" responses = "!=0.12.1" # skip version 0.12.1 which has a bug see https://github.com/getsentry/responses/issues/358 moto = {extras = [ "s3",], version = "*"} requests-mock = "*" diff --git a/Pipfile.lock b/Pipfile.lock index a18d62b0..c5109443 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "3722e461bea60e7ba7bd6875f1ef15e063189d9fa0c233d6f3e935648c0fda57" + "sha256": "f697d717d9aab05ad9a74092f464fb51906de745d7500cfe240fad81a0b59742" }, "pipfile-spec": 6, "requires": { @@ -21,7 +21,6 @@ "sha256:5ee950735509d04eb673bd7f7120f8fa1c9e2df495394992c73234d526907e17", "sha256:7162a3cb30ab0609f1a4c95938fd73e8604f63bdba516a7f7d64b83ff09478f0" ], - "markers": "python_version >= '3.5'", "version": "==3.3.1" }, "base58": { @@ -29,23 +28,22 @@ "sha256:171a547b4a3c61e1ae3807224a6f7aec75e364c4395e7562649d7335768001a2", "sha256:8225891d501b68c843ffe30b86371f844a21c6ba00da76f52f9b998ba771fb48" ], - "markers": "python_version >= '3.5'", "version": "==2.1.0" }, "boto3": { "hashes": [ - "sha256:550a513315194292651bb6cc96e94185bfc4dc6b299c3cf1594882bdd16b3905", - "sha256:f8a2f0bf929af92c4d254d1e495f6642dd335818cc7172e1bdc3dfe28655fb94" + "sha256:1c0003609e63e8cff51dee7a49e904bcdb20e140b5f7a10a03006289fd8c8dc1", + "sha256:c919dac9773115025e1e2a7e462f60ca082e322bb6f4354247523e4226133b0b" ], "index": "pypi", - "version": "==1.16.59" + "version": "==1.16.63" }, "botocore": { "hashes": [ - "sha256:33959aa19cb6d336c47495c871b00d8670de0023b53bbbbd25790ba0bc5cefe9", - "sha256:67d273b5dcc5033edb2def244ecab51ca24351becf5c1644de279e5653e4e932" + "sha256:ad4adfcc195b5401d84b0c65d3a89e507c1d54c201879c8761ff10ef5c361e21", + "sha256:d3694f6ef918def8082513e5ef309cd6cd83b612e9984e3a66e8adc98c650a92" ], - "version": "==1.19.59" + "version": "==1.19.63" }, "certifi": { "hashes": [ @@ -59,16 +57,15 @@ "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.0.0" }, "django": { "hashes": [ - "sha256:2d78425ba74c7a1a74b196058b261b9733a8570782f4e2828974777ccca7edf7", - "sha256:efa2ab96b33b20c2182db93147a0c3cd7769d418926f9e9f140a60dca7c64ca9" + "sha256:32ce792ee9b6a0cbbec340123e229ac9f765dff8c2a4ae9247a14b2ba3a365a7", + "sha256:baf099db36ad31f970775d0be5587cc58a6256a6771a44eb795b554d45f211b8" ], "index": "pypi", - "version": "==3.1.5" + "version": "==3.1.7" }, "django-prometheus": { "hashes": [ @@ -209,7 +206,6 @@ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, "jmespath": { @@ -217,16 +213,15 @@ "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", 
"sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.0" }, "logging-utilities": { "hashes": [ - "sha256:4f5f257d87362e1b20c8f2218b223cc7594115b9454594438709b48a524a2e20", - "sha256:fd0e53b3031f768a3155ae004a4bc4ab5746d45c438e745996c3a6e66c5c752d" + "sha256:00f8630c7a6966ff7a7654346d70ca96baa42f054edd8fe8d28a2cf647ad7d4a", + "sha256:bff5f522f4e1c421697dd9e70d3c332242b06d4d3c5d336ccf0df1a4e1b77974" ], "index": "pypi", - "version": "==1.2.0" + "version": "==1.2.2" }, "morphys": { "hashes": [ @@ -355,10 +350,10 @@ }, "pytz": { "hashes": [ - "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4", - "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5" + "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da", + "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798" ], - "version": "==2020.5" + "version": "==2021.1" }, "pyyaml": { "hashes": [ @@ -399,7 +394,6 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "sqlparse": { @@ -407,16 +401,15 @@ "sha256:017cde379adbd6a1f15a61873f43e8274179378e95ef3fede90b5aa64d304ed0", "sha256:0f91fd2e829c44362cbcfab3e9ae12e22badaa8a29ad5ff599f9ec109f0454e8" ], - "markers": "python_version >= '3.5'", "version": "==0.4.1" }, "urllib3": { "hashes": [ - "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08", - "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473" + "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", + "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" ], "markers": "python_version != '3.4'", - "version": "==1.26.2" + "version": "==1.26.4" }, "varint": { "hashes": [ @@ -494,7 +487,6 @@ "sha256:f37d45fab14ffef9d33a0dc3bc59ce0c5313e2253323312d47739192da94f5fd", "sha256:f44906f70205d456d503105023041f1e63aece7623b31c390a0103db4de17537" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==5.2.0" } }, @@ -504,7 +496,6 @@ "sha256:5ee950735509d04eb673bd7f7120f8fa1c9e2df495394992c73234d526907e17", "sha256:7162a3cb30ab0609f1a4c95938fd73e8604f63bdba516a7f7d64b83ff09478f0" ], - "markers": "python_version >= '3.5'", "version": "==3.3.1" }, "astroid": { @@ -512,53 +503,22 @@ "sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703", "sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386" ], - "markers": "python_version >= '3.5'", "version": "==2.4.2" }, - "attrs": { - "hashes": [ - "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", - "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.3.0" - }, - "aws-sam-translator": { - "hashes": [ - "sha256:505d18b0bad8702bfba80fc5bc78d9c4b003ab009a9e42648561bdf1fd67bf01", - "sha256:89c5c997164231b847634d8034d3534d3a048d88f4b66b2897f6251366e640f5", - "sha256:9f3767614746a38300ee988ef70d6f862e71e59ea536252bbf9a319daaac1fff" - ], - "version": "==1.33.0" - }, - "aws-xray-sdk": { - "hashes": [ - "sha256:076f7c610cd3564bbba3507d43e328fb6ff4a2e841d3590f39b2c3ce99d41e1d", - 
"sha256:abf5b90f740e1f402e23414c9670e59cb9772e235e271fef2bce62b9100cbc77" - ], - "version": "==2.6.0" - }, - "boto": { - "hashes": [ - "sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8", - "sha256:ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a" - ], - "version": "==2.49.0" - }, "boto3": { "hashes": [ - "sha256:550a513315194292651bb6cc96e94185bfc4dc6b299c3cf1594882bdd16b3905", - "sha256:f8a2f0bf929af92c4d254d1e495f6642dd335818cc7172e1bdc3dfe28655fb94" + "sha256:1c0003609e63e8cff51dee7a49e904bcdb20e140b5f7a10a03006289fd8c8dc1", + "sha256:c919dac9773115025e1e2a7e462f60ca082e322bb6f4354247523e4226133b0b" ], "index": "pypi", - "version": "==1.16.59" + "version": "==1.16.63" }, "botocore": { "hashes": [ - "sha256:33959aa19cb6d336c47495c871b00d8670de0023b53bbbbd25790ba0bc5cefe9", - "sha256:67d273b5dcc5033edb2def244ecab51ca24351becf5c1644de279e5653e4e932" + "sha256:ad4adfcc195b5401d84b0c65d3a89e507c1d54c201879c8761ff10ef5c361e21", + "sha256:d3694f6ef918def8082513e5ef309cd6cd83b612e9984e3a66e8adc98c650a92" ], - "version": "==1.19.59" + "version": "==1.19.63" }, "certifi": { "hashes": [ @@ -569,94 +529,77 @@ }, "cffi": { "hashes": [ - "sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e", - "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d", - "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a", - "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec", - "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362", - "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668", - "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c", - "sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b", - "sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06", - "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698", - "sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2", - "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c", - "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7", - "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009", - "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03", - "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b", - "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909", - "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53", - "sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35", - "sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26", - "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b", - "sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01", - "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb", - "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293", - "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd", - "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d", - "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3", - "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d", - "sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e", - "sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca", - 
"sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d", - "sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775", - "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375", - "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b", - "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b", - "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f" - ], - "version": "==1.14.4" - }, - "cfn-lint": { - "hashes": [ - "sha256:1966fc96d2c70db70b525d495a6a912e223802b4d33bfd9876992cdb9bdaaf44", - "sha256:6889c171eb2bbbe9e175149d8bada8ae627137748c42b04581e79469dc6b35e7" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.44.5" + "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813", + "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06", + "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea", + "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee", + "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396", + "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73", + "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315", + "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1", + "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49", + "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892", + "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482", + "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058", + "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5", + "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53", + "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045", + "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3", + "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5", + "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e", + "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c", + "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369", + "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827", + "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053", + "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa", + "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4", + "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322", + "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132", + "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62", + "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa", + "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0", + "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396", + "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e", + "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991", + "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6", + "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1", + "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406", + 
"sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d", + "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c" + ], + "version": "==1.14.5" }, "chardet": { "hashes": [ "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.0.0" }, "cryptography": { "hashes": [ - "sha256:0003a52a123602e1acee177dc90dd201f9bb1e73f24a070db7d36c588e8f5c7d", - "sha256:0e85aaae861d0485eb5a79d33226dd6248d2a9f133b81532c8f5aae37de10ff7", - "sha256:594a1db4511bc4d960571536abe21b4e5c3003e8750ab8365fafce71c5d86901", - "sha256:69e836c9e5ff4373ce6d3ab311c1a2eed274793083858d3cd4c7d12ce20d5f9c", - "sha256:788a3c9942df5e4371c199d10383f44a105d67d401fb4304178020142f020244", - "sha256:7e177e4bea2de937a584b13645cab32f25e3d96fc0bc4a4cf99c27dc77682be6", - "sha256:83d9d2dfec70364a74f4e7c70ad04d3ca2e6a08b703606993407bf46b97868c5", - "sha256:84ef7a0c10c24a7773163f917f1cb6b4444597efd505a8aed0a22e8c4780f27e", - "sha256:9e21301f7a1e7c03dbea73e8602905a4ebba641547a462b26dd03451e5769e7c", - "sha256:9f6b0492d111b43de5f70052e24c1f0951cb9e6022188ebcb1cc3a3d301469b0", - "sha256:a69bd3c68b98298f490e84519b954335154917eaab52cf582fa2c5c7efc6e812", - "sha256:b4890d5fb9b7a23e3bf8abf5a8a7da8e228f1e97dc96b30b95685df840b6914a", - "sha256:c366df0401d1ec4e548bebe8f91d55ebcc0ec3137900d214dd7aac8427ef3030", - "sha256:dc42f645f8f3a489c3dd416730a514e7a91a59510ddaadc09d04224c098d3302" - ], - "version": "==3.3.1" - }, - "decorator": { - "hashes": [ - "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760", - "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7" - ], - "version": "==4.4.2" + "sha256:066bc53f052dfeda2f2d7c195cf16fb3e5ff13e1b6b7415b468514b40b381a5b", + "sha256:0923ba600d00718d63a3976f23cab19aef10c1765038945628cd9be047ad0336", + "sha256:2d32223e5b0ee02943f32b19245b61a62db83a882f0e76cc564e1cec60d48f87", + "sha256:4169a27b818de4a1860720108b55a2801f32b6ae79e7f99c00d79f2a2822eeb7", + "sha256:57ad77d32917bc55299b16d3b996ffa42a1c73c6cfa829b14043c561288d2799", + "sha256:5ecf2bcb34d17415e89b546dbb44e73080f747e504273e4d4987630493cded1b", + "sha256:600cf9bfe75e96d965509a4c0b2b183f74a4fa6f5331dcb40fb7b77b7c2484df", + "sha256:66b57a9ca4b3221d51b237094b0303843b914b7d5afd4349970bb26518e350b0", + "sha256:93cfe5b7ff006de13e1e89830810ecbd014791b042cbe5eec253be11ac2b28f3", + "sha256:9e98b452132963678e3ac6c73f7010fe53adf72209a32854d55690acac3f6724", + "sha256:df186fcbf86dc1ce56305becb8434e4b6b7504bc724b71ad7a3239e0c9d14ef2", + "sha256:fec7fb46b10da10d9e1d078d1ff8ed9e05ae14f431fdbd11145edd0550b9a964" + ], + "version": "==3.4.6" }, "django": { "hashes": [ - "sha256:2d78425ba74c7a1a74b196058b261b9733a8570782f4e2828974777ccca7edf7", - "sha256:efa2ab96b33b20c2182db93147a0c3cd7769d418926f9e9f140a60dca7c64ca9" + "sha256:32ce792ee9b6a0cbbec340123e229ac9f765dff8c2a4ae9247a14b2ba3a365a7", + "sha256:baf099db36ad31f970775d0be5587cc58a6256a6771a44eb795b554d45f211b8" ], "index": "pypi", - "version": "==3.1.5" + "version": "==3.1.7" }, "django-debug-toolbar": { "hashes": [ @@ -668,51 +611,19 @@ }, "django-extensions": { "hashes": [ - "sha256:6809c89ca952f0e08d4e0766bc0101dfaf508d7649aced1180c091d737046ea7", - "sha256:dc663652ac9460fd06580a973576820430c6d428720e874ae46b041fa63e0efa" + "sha256:674ad4c3b1587a884881824f40212d51829e662e52f85b012cd83d83fe1271d9", + 
"sha256:9507f8761ee760748938fd8af766d0608fb2738cf368adfa1b2451f61c15ae35" ], "index": "pypi", - "version": "==3.0.9" - }, - "docker": { - "hashes": [ - "sha256:0604a74719d5d2de438753934b755bfcda6f62f49b8e4b30969a4b0a2a8a1220", - "sha256:e455fa49aabd4f22da9f4e1c1f9d16308286adc60abaf64bf3e1feafaed81d06" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==4.4.1" - }, - "ecdsa": { - "hashes": [ - "sha256:64c613005f13efec6541bb0a33290d0d03c27abab5f15fbab20fb0ee162bdd8e", - "sha256:e108a5fe92c67639abae3260e43561af914e7fd0d27bae6d2ec1312ae7934dfe" - ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.14.1" - }, - "future": { - "hashes": [ - "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" - ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.18.2" + "version": "==3.1.1" }, "idna": { "hashes": [ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.10" }, - "importlib-metadata": { - "hashes": [ - "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771", - "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d" - ], - "markers": "python_version < '3.8'", - "version": "==3.4.0" - }, "isort": { "hashes": [ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", @@ -723,63 +634,18 @@ }, "jinja2": { "hashes": [ - "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", - "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" + "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", + "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==2.11.2" + "version": "==2.11.3" }, "jmespath": { "hashes": [ "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.0" }, - "jsondiff": { - "hashes": [ - "sha256:34941bc431d10aa15828afe1cbb644977a114e75eef6cc74fb58951312326303" - ], - "version": "==1.2.0" - }, - "jsonpatch": { - "hashes": [ - "sha256:da3831be60919e8c98564acfc1fa918cb96e7c9750b0428388483f04d0d1c5a7", - "sha256:e930adc932e4d36087dbbf0f22e1ded32185dfb20662f2e3dd848677a5295a14" - ], - "markers": "python_version != '3.4'", - "version": "==1.28" - }, - "jsonpickle": { - "hashes": [ - "sha256:1bd34a2ae8e51d3adbcafe83dc2d5cc81be53ada8bb16959ca6aca499bceada2", - "sha256:423d7b5e6c606d4c0efd93819913191e375f3a23c0874f39df94d2e20dd21c93" - ], - "markers": "python_version >= '2.7'", - "version": "==1.5.0" - }, - "jsonpointer": { - "hashes": [ - "sha256:c192ba86648e05fdae4f08a17ec25180a9aef5008d973407b581798a83975362", - "sha256:ff379fa021d1b81ab539f5ec467c7745beb1a5671463f9dcc2b2d458bd361c1e" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.0" - }, - "jsonschema": { - "hashes": [ - "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163", - "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a" 
- ], - "version": "==3.2.0" - }, - "junit-xml": { - "hashes": [ - "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732" - ], - "version": "==1.9" - }, "lazy-object-proxy": { "hashes": [ "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d", @@ -804,7 +670,6 @@ "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4", "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.4.3" }, "markupsafe": { @@ -814,8 +679,12 @@ "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", + "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f", + "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39", "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014", + "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f", "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", @@ -824,26 +693,40 @@ "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85", + "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1", "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850", + "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0", "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb", "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1", + "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2", "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7", "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8", "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193", 
"sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b", "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", + "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5", + "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c", + "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032", "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", - "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" + "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", + "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "mccabe": { @@ -855,80 +738,49 @@ }, "mock": { "hashes": [ - "sha256:3f9b2c0196c60d21838f307f5825a7b86b678cedc58ab9e50a8988187b4d81e0", - "sha256:dd33eb70232b6118298d516bbcecd26704689c386594f0f3c4f13867b2c56f72" + "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62", + "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc" ], "index": "pypi", - "version": "==4.0.2" + "version": "==4.0.3" }, "more-itertools": { "hashes": [ - "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330", - "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf" + "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced", + "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713" ], - "markers": "python_version >= '3.5'", - "version": "==8.6.0" + "version": "==8.7.0" }, "moto": { - "extras": [ - "s3" - ], "hashes": [ - "sha256:6c686b1f117563391957ce47c2106bc3868783d59d0e004d2446dce875bec07f", - "sha256:f51903b6b532f6c887b111b3343f6925b77eef0505a914138d98290cf3526df9" + "sha256:4610d27ead9124eaa84a78eca7dfa25a8ccb66cf6a7cb8a8889b5ca0c7796889", + "sha256:f5db62e50a5377da4457307675281198e9ffbe9425866a88f523bef0c6e8d463" ], "index": "pypi", - "version": "==1.3.16" - }, - "networkx": { - "hashes": [ - "sha256:7978955423fbc9639c10498878be59caf99b44dc304c2286162fd24b458c1602", - "sha256:8c5812e9f798d37c50570d15c4a69d5710a18d77bafc903ee9c5fba7454c616c" - ], - "markers": "python_version >= '3.5'", - "version": "==2.5" - }, - "pyasn1": { - "hashes": [ - "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359", - "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576", - "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf", - "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7", - "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", - "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00", - "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8", - "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86", - "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12", - "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776", - "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", - "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2", - "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3" - ], - "version": "==0.4.8" + "version": "==2.0.2" }, 
"pycparser": { "hashes": [ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pylint": { "hashes": [ - "sha256:7dd78437f2d8d019717dbf287772d0b2dbdfd13fc016aa7faa08d67bccc46adc", - "sha256:d0ece7d223fe422088b0e8f13fa0a1e8eb745ebffcb8ed53d3e95394b6101a1c" + "sha256:718b74786ea7ed07aa0c58bf572154d4679f960d26e9641cc1de204a30b87fc9", + "sha256:e71c2e9614a4f06e36498f310027942b0f4f2fde20aebb01655b31edc63b9eaf" ], "index": "pypi", - "version": "==2.5.3" + "version": "==2.6.2" }, "pylint-django": { "hashes": [ - "sha256:770e0c55fb054c6378e1e8bb3fe22c7032a2c38ba1d1f454206ee9c6591822d7", - "sha256:b8dcb6006ae9fa911810aba3bec047b9410b7d528f89d5aca2506b03c9235a49" + "sha256:355dddb25ef07dbdb77a818b0860ada722aab654c24da34aab916ec26d6390ba", + "sha256:f8d77f7da47a7019cda5cb669c214f03033208f9e945094661299d2637c0da06" ], "index": "pypi", - "version": "==2.3.0" + "version": "==2.4.2" }, "pylint-plugin-utils": { "hashes": [ @@ -937,13 +789,6 @@ ], "version": "==0.6" }, - "pyrsistent": { - "hashes": [ - "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e" - ], - "markers": "python_version >= '3.5'", - "version": "==0.17.3" - }, "python-dateutil": { "hashes": [ "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", @@ -952,22 +797,12 @@ "index": "pypi", "version": "==2.8.1" }, - "python-jose": { - "extras": [ - "cryptography" - ], - "hashes": [ - "sha256:4e4192402e100b5fb09de5a8ea6bcc39c36ad4526341c123d401e2561720335b", - "sha256:67d7dfff599df676b04a996520d9be90d6cdb7e6dd10b4c7cacc0c3e2e92f2be" - ], - "version": "==3.2.0" - }, "pytz": { "hashes": [ - "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4", - "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5" + "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da", + "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798" ], - "version": "==2020.5" + "version": "==2021.1" }, "pyyaml": { "hashes": [ @@ -1006,19 +841,11 @@ }, "responses": { "hashes": [ - "sha256:0de50fbf600adf5ef9f0821b85cc537acca98d66bc7776755924476775c1989c", - "sha256:e80d5276011a4b79ecb62c5f82ba07aa23fb31ecbc95ee7cad6de250a3c97444" + "sha256:27d8822d65dc8875a039301831de8ac17db2473ae2a8fabd4e6599b25ce2f353", + "sha256:a4a90c8244006c01f4246aecf532fbb5429c4031df4adcc7638061f0f3ce4ceb" ], "index": "pypi", - "version": "==0.12.0" - }, - "rsa": { - "hashes": [ - "sha256:69805d6b69f56eb05b62daea3a7dbd7aa44324ad1306445e05da8060232d00f4", - "sha256:a8774e55b59fd9fc893b0d05e9bfc6f47081f46ff5b46f39ccf24631b7be356b" - ], - "markers": "python_version >= '3.5' and python_version < '4'", - "version": "==4.7" + "version": "==0.13.0" }, "s3transfer": { "hashes": [ @@ -1032,7 +859,6 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "sqlparse": { @@ -1040,17 +866,8 @@ "sha256:017cde379adbd6a1f15a61873f43e8274179378e95ef3fede90b5aa64d304ed0", "sha256:0f91fd2e829c44362cbcfab3e9ae12e22badaa8a29ad5ff599f9ec109f0454e8" ], - "markers": "python_version >= '3.5'", "version": "==0.4.1" }, - "sshpubkeys": { - "hashes": [ - 
"sha256:41fbaf0e57bc0cf7e0139b71146de59b80aa9e14a97d2278417571e120d6b13e", - "sha256:89e10a0caf38407426a05e3f5b5243d6e2f9575d6af45e9321291d20bcfca8f7" - ], - "markers": "python_version >= '3.1'", - "version": "==3.3.0" - }, "tblib": { "hashes": [ "sha256:059bd77306ea7b419d4f76016aef6d7027cc8a0785579b5aad198803435f882c", @@ -1064,7 +881,6 @@ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.2" }, "typed-ast": { @@ -1100,39 +916,22 @@ "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166", "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10" ], - "markers": "python_version < '3.8' and implementation_name == 'cpython'", + "markers": "implementation_name == 'cpython' and python_version < '3.8'", "version": "==1.4.2" }, - "typing-extensions": { - "hashes": [ - "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", - "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", - "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" - ], - "markers": "python_version < '3.8'", - "version": "==3.7.4.3" - }, "urllib3": { "hashes": [ - "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08", - "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473" + "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", + "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" ], "markers": "python_version != '3.4'", - "version": "==1.26.2" - }, - "websocket-client": { - "hashes": [ - "sha256:0fc45c961324d79c781bab301359d5a1b00b13ad1b10415a4780229ef71a5549", - "sha256:d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010" - ], - "version": "==0.57.0" + "version": "==1.26.4" }, "werkzeug": { "hashes": [ "sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43", "sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==1.0.1" }, "wrapt": { @@ -1158,11 +957,10 @@ }, "zipp": { "hashes": [ - "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108", - "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb" + "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", + "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" ], - "markers": "python_version >= '3.6'", - "version": "==3.4.0" + "version": "==3.4.1" } } } diff --git a/app/config/settings_prod.py b/app/config/settings_prod.py index c28fe46c..c213b17f 100644 --- a/app/config/settings_prod.py +++ b/app/config/settings_prod.py @@ -108,7 +108,7 @@ try: CACHE_MIDDLEWARE_SECONDS = int(os.environ.get('HTTP_CACHE_SECONDS', '600')) except ValueError as error: - raise ValueError('Invalid HTTP_CACHE_SECONDS environment value: must be an integer') + raise ValueError('Invalid HTTP_CACHE_SECONDS environment value: must be an integer') from error ROOT_URLCONF = 'config.urls' API_BASE = 'api' @@ -194,7 +194,9 @@ try: WHITENOISE_MAX_AGE = int(os.environ.get('HTTP_STATIC_CACHE_SECONDS', '3600')) except ValueError as error: - raise ValueError('Invalid HTTP_STATIC_CACHE_SECONDS environment value: must be an integer') + raise ValueError( + 'Invalid HTTP_STATIC_CACHE_SECONDS environment value: must 
be an integer' + ) from error WHITENOISE_MIMETYPES = { # These sets the mime types for the api/stac/static/spec/v0.9/openapi.yaml static file # otherwise a default application/octet-stream is used. @@ -230,7 +232,7 @@ try: STORAGE_ASSETS_CACHE_SECONDS = int(os.environ.get('HTTP_ASSETS_CACHE_SECONDS', '7200')) except ValueError as err: - raise ValueError('Invalid HTTP_ASSETS_CACHE_SECONDS, must be an integer') + raise ValueError('Invalid HTTP_ASSETS_CACHE_SECONDS, must be an integer') from err # Logging # https://docs.djangoproject.com/en/3.1/topics/logging/ diff --git a/app/stac_api/collection_spatial_extent.py b/app/stac_api/collection_spatial_extent.py index ae276ea3..6dbf7089 100644 --- a/app/stac_api/collection_spatial_extent.py +++ b/app/stac_api/collection_spatial_extent.py @@ -159,5 +159,5 @@ def update_bbox_extent(self, trigger, geometry, original_geometry, item): raise GEOSException( f'Failed to update spatial extend in colletion {self.name} with item ' f'{item.name}: {error}' - ) + ) from error return updated diff --git a/app/stac_api/management/commands/profile_cursor_paginator.py b/app/stac_api/management/commands/profile_cursor_paginator.py index ad679cce..9b02266f 100644 --- a/app/stac_api/management/commands/profile_cursor_paginator.py +++ b/app/stac_api/management/commands/profile_cursor_paginator.py @@ -36,6 +36,7 @@ def profiling(self): f'{settings.BASE_DIR}/logs/stats-file', sort=self.options['sort'] ) + # pylint: disable=duplicate-code stats = pstats.Stats(f'{settings.BASE_DIR}/logs/stats-file') stats.sort_stats(self.options['sort']).print_stats() diff --git a/app/stac_api/managers.py b/app/stac_api/managers.py index dc72a01f..fa61be97 100644 --- a/app/stac_api/managers.py +++ b/app/stac_api/managers.py @@ -1,16 +1,14 @@ import logging from datetime import datetime -from decimal import Decimal from django.contrib.gis.db import models from django.contrib.gis.geos import GEOSGeometry -from django.contrib.gis.geos import Point -from django.contrib.gis.geos import Polygon from django.db.models import Q from django.utils.translation import gettext_lazy as _ from rest_framework.exceptions import ValidationError +from stac_api.utils import geometry_from_bbox from stac_api.validators import validate_geometry logger = logging.getLogger(__name__) @@ -38,14 +36,7 @@ def filter_by_bbox(self, bbox): ''' try: logger.debug('Query parameter bbox = %s', bbox) - list_bbox_values = bbox.split(',') - if ( - Decimal(list_bbox_values[0]) == Decimal(list_bbox_values[2]) and - Decimal(list_bbox_values[1]) == Decimal(list_bbox_values[3]) - ): - bbox_geometry = Point(float(list_bbox_values[0]), float(list_bbox_values[1])) - else: - bbox_geometry = Polygon.from_bbox(list_bbox_values) + bbox_geometry = geometry_from_bbox(bbox) validate_geometry(bbox_geometry) except (ValueError, ValidationError, IndexError) as error: @@ -60,7 +51,7 @@ def filter_by_bbox(self, bbox): _('Invalid bbox query parameter, ' ' has to contain 4 values. f.ex. 
bbox=5.96,45.82,10.49,47.81'), code='bbox-invalid' - ) + ) from None return self.filter(geometry__intersects=bbox_geometry) @@ -144,7 +135,7 @@ def _parse_datetime_query(self, date_time): raise ValidationError( _('Invalid datetime query parameter, must be isoformat'), code='datetime' - ) + ) from None if end == '': end = None diff --git a/app/stac_api/pagination.py b/app/stac_api/pagination.py index c1e7bfc3..ee8339dd 100644 --- a/app/stac_api/pagination.py +++ b/app/stac_api/pagination.py @@ -68,7 +68,7 @@ def get_page_size(self, request): raise ValidationError( _('invalid limit query parameter: must be an integer'), code='limit' - ) + ) from None if page_size <= 0: logger.error( diff --git a/app/stac_api/utils.py b/app/stac_api/utils.py index 7e03eae5..e6364ecd 100644 --- a/app/stac_api/utils.py +++ b/app/stac_api/utils.py @@ -3,6 +3,7 @@ import logging from datetime import datetime from datetime import timezone +from decimal import Decimal from urllib import parse import boto3 @@ -10,6 +11,8 @@ from botocore.client import Config from django.conf import settings +from django.contrib.gis.geos import Point +from django.contrib.gis.geos import Polygon logger = logging.getLogger(__name__) @@ -267,3 +270,27 @@ def print_success(self, message, *args, level=1): def print_error(self, message, *args): self.stderr.write(self.style.ERROR(message % (args))) + + +def geometry_from_bbox(bbox): + '''Returns a Geometry from a bbox + + Args: + bbox: string + bbox as string comma separated or as float list + + Returns: + Geometry + + Raises: + ValueError, IndexError, GDALException + ''' + list_bbox_values = bbox.split(',') + if ( + Decimal(list_bbox_values[0]) == Decimal(list_bbox_values[2]) and + Decimal(list_bbox_values[1]) == Decimal(list_bbox_values[3]) + ): + bbox_geometry = Point(float(list_bbox_values[0]), float(list_bbox_values[1])) + else: + bbox_geometry = Polygon.from_bbox(list_bbox_values) + return bbox_geometry diff --git a/app/stac_api/validators.py b/app/stac_api/validators.py index f664cda7..3d2bf383 100644 --- a/app/stac_api/validators.py +++ b/app/stac_api/validators.py @@ -226,4 +226,4 @@ def validate_asset_multihash(value): code='checksum:multihash', message=_('Invalid multihash value; %(error)s'), params={'error': error} - ) + ) from None diff --git a/app/stac_api/validators_serializer.py b/app/stac_api/validators_serializer.py index 5455b2c1..3bad12a0 100644 --- a/app/stac_api/validators_serializer.py +++ b/app/stac_api/validators_serializer.py @@ -1,6 +1,5 @@ import json import logging -from decimal import Decimal import botocore import multihash @@ -10,8 +9,6 @@ from django.conf import settings from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.geos import GEOSGeometry -from django.contrib.gis.geos import Point -from django.contrib.gis.geos import Polygon from django.utils.translation import gettext_lazy as _ from rest_framework.exceptions import APIException @@ -20,6 +17,7 @@ from stac_api.utils import create_multihash from stac_api.utils import create_multihash_string from stac_api.utils import fromisoformat +from stac_api.utils import geometry_from_bbox from stac_api.utils import get_asset_path from stac_api.utils import get_s3_resource from stac_api.utils import harmonize_post_get_for_search @@ -288,7 +286,7 @@ def validate_query(self, query): message = f"The application could not decode the query parameter" \ f"Please check the syntax ({error})." 
\ f"{query}" - raise ValidationError(code='query-invalid', detail=_(message)) + raise ValidationError(code='query-invalid', detail=_(message)) from None self._query_validate_length_of_query(query_dict) for attribute in query_dict: @@ -477,15 +475,7 @@ def validate_bbox(self, bbox): float values. F. ex.: 5.96,45.82,10.49,47.81 ''' try: - list_bbox_values = bbox.split(',') - if ( - Decimal(list_bbox_values[0]) == Decimal(list_bbox_values[2]) and - Decimal(list_bbox_values[1]) == Decimal(list_bbox_values[3]) - ): - bbox_geometry = Point(float(list_bbox_values[0]), float(list_bbox_values[1])) - else: - bbox_geometry = Polygon.from_bbox(list_bbox_values) - validate_geometry(bbox_geometry) + validate_geometry(geometry_from_bbox(bbox)) except (ValueError, ValidationError, IndexError, GDALException) as error: message = f"Invalid bbox query parameter: " \ diff --git a/app/tests/data_factory.py b/app/tests/data_factory.py index ca18286f..fe165c04 100644 --- a/app/tests/data_factory.py +++ b/app/tests/data_factory.py @@ -132,7 +132,7 @@ def __init__(self, sample, required_only=False, **kwargs): try: sample = self.samples_dict[sample] except KeyError as error: - raise KeyError(f'Unknown {self.sample_name} sample: {error}') + raise KeyError(f'Unknown {self.sample_name} sample: {error}') from None # Sets attributes from sample for key, value in sample.items(): diff --git a/app/tests/sample_data/collection_samples.py b/app/tests/sample_data/collection_samples.py index 7185e997..19d5d60d 100644 --- a/app/tests/sample_data/collection_samples.py +++ b/app/tests/sample_data/collection_samples.py @@ -26,13 +26,14 @@ 'link-1': { 'rel': 'describedBy', 'href': 'https://www.example.com/described-by', - 'title': 'This is an extra link', + 'title': 'This is an extra collection link', 'link_type': 'description' } } links_invalid = { 'link-invalid': { + 'title': 'invalid collection link relation', 'rel': 'invalid relation', 'href': 'not a url', } diff --git a/app/tests/sample_data/item_samples.py b/app/tests/sample_data/item_samples.py index b07bba23..55babe4b 100644 --- a/app/tests/sample_data/item_samples.py +++ b/app/tests/sample_data/item_samples.py @@ -67,13 +67,14 @@ 'link-1': { 'rel': 'describedBy', 'href': 'https://www.example.com/described-by', - 'title': 'This is an extra link', + 'title': 'This is an extra item link', 'link_type': 'description' } } links_invalid = { 'link-invalid': { + 'title': 'invalid item link relation', 'rel': 'invalid relation', 'href': 'not a url', } diff --git a/app/tests/test_collections_endpoint.py b/app/tests/test_collections_endpoint.py index 9b83ff46..c8a24cc4 100644 --- a/app/tests/test_collections_endpoint.py +++ b/app/tests/test_collections_endpoint.py @@ -43,35 +43,6 @@ def test_single_collection_endpoint(self): self.check_stac_collection(self.collection_1.json, response_json) - def test_collections_limit_query(self): - response = self.client.get(f"/{STAC_BASE_V}/collections?limit=1") - self.assertStatusCode(200, response) - self.assertLessEqual(1, len(response.json()['collections'])) - - response = self.client.get(f"/{STAC_BASE_V}/collections?limit=0") - self.assertStatusCode(400, response) - self.assertEqual(['limit query parameter too small, must be in range 1..100'], - response.json()['description'], - msg='Unexpected error message') - - response = self.client.get(f"/{STAC_BASE_V}/collections?limit=test") - self.assertStatusCode(400, response) - self.assertEqual(['invalid limit query parameter: must be an integer'], - response.json()['description'], - msg='Unexpected 
error message') - - response = self.client.get(f"/{STAC_BASE_V}/collections?limit=-1") - self.assertStatusCode(400, response) - self.assertEqual(['limit query parameter too small, must be in range 1..100'], - response.json()['description'], - msg='Unexpected error message') - - response = self.client.get(f"/{STAC_BASE_V}/collections?limit=1000") - self.assertStatusCode(400, response) - self.assertEqual(['limit query parameter too big, must be in range 1..100'], - response.json()['description'], - msg='Unexpected error message') - class CollectionsWriteEndpointTestCase(StacBaseTestCase): diff --git a/app/tests/test_generic_api.py b/app/tests/test_generic_api.py index a9e8201e..e14e6317 100644 --- a/app/tests/test_generic_api.py +++ b/app/tests/test_generic_api.py @@ -39,95 +39,116 @@ class ApiPaginationTestCase(StacBaseTestCase): @classmethod def setUpTestData(cls): - Factory().create_collection_samples(3, db_create=True) + cls.factory = Factory() + cls.collections = cls.factory.create_collection_samples(3, db_create=True) def setUp(self): self.client = Client() def test_invalid_limit_query(self): - response = self.client.get(f"/{STAC_BASE_V}/collections?limit=0") - self.assertStatusCode(400, response) - self.assertEqual(['limit query parameter too small, must be in range 1..100'], - response.json()['description'], - msg='Unexpected error message') - - response = self.client.get(f"/{STAC_BASE_V}/collections?limit=test") - self.assertStatusCode(400, response) - self.assertEqual(['invalid limit query parameter: must be an integer'], - response.json()['description'], - msg='Unexpected error message') - - response = self.client.get(f"/{STAC_BASE_V}/collections?limit=-1") - self.assertStatusCode(400, response) - self.assertEqual(['limit query parameter too small, must be in range 1..100'], - response.json()['description'], - msg='Unexpected error message') - - response = self.client.get(f"/{STAC_BASE_V}/collections?limit=1000") - self.assertStatusCode(400, response) - self.assertEqual(['limit query parameter too big, must be in range 1..100'], - response.json()['description'], - msg='Unexpected error message') + items = self.factory.create_item_samples(3, self.collections[0].model, db_create=True) + for endpoint in ['collections', f'collections/{self.collections[0]["name"]}/items']: + with self.subTest(endpoint=endpoint): + response = self.client.get(f"/{STAC_BASE_V}/{endpoint}?limit=0") + self.assertStatusCode(400, response) + self.assertEqual(['limit query parameter too small, must be in range 1..100'], + response.json()['description'], + msg='Unexpected error message') + + response = self.client.get(f"/{STAC_BASE_V}/{endpoint}?limit=test") + self.assertStatusCode(400, response) + self.assertEqual(['invalid limit query parameter: must be an integer'], + response.json()['description'], + msg='Unexpected error message') + + response = self.client.get(f"/{STAC_BASE_V}/{endpoint}?limit=-1") + self.assertStatusCode(400, response) + self.assertEqual(['limit query parameter too small, must be in range 1..100'], + response.json()['description'], + msg='Unexpected error message') + + response = self.client.get(f"/{STAC_BASE_V}/{endpoint}?limit=1000") + self.assertStatusCode(400, response) + self.assertEqual(['limit query parameter too big, must be in range 1..100'], + response.json()['description'], + msg='Unexpected error message') def test_pagination(self): + items = self.factory.create_item_samples(3, self.collections[0].model, db_create=True) + for endpoint in [ + 'collections', + 
f'collections/{self.collections[0]["name"]}/items', + ]: + with self.subTest(endpoint=endpoint): + response = self.client.get(f"/{STAC_BASE_V}/{endpoint}?limit=1") + json_data = response.json() + self.assertEqual( + 200, response.status_code, msg=get_http_error_description(json_data) + ) - response = self.client.get(f"/{STAC_BASE_V}/collections?limit=1") - json_data = response.json() - self.assertEqual(200, response.status_code, msg=get_http_error_description(json_data)) - - # Check next link - next_link = get_link(json_data['links'], 'next') - self.assertIsNotNone(next_link, msg='Pagination next link missing') - self.assertTrue(isinstance(next_link['href'], str), msg='href is not a string') - self.assertTrue( - next_link['href'].startswith('http://testserver/api/stac/v0.9/collections?cursor='), - msg='Invalid href link pagination string' - ) + # Check next link + next_link = get_link(json_data['links'], 'next') + self.assertIsNotNone(next_link, msg='Pagination next link missing') + self.assertTrue(isinstance(next_link['href'], str), msg='href is not a string') + self.assertTrue( + next_link['href']. + startswith(f'http://testserver/api/stac/v0.9/{endpoint}?cursor='), + msg='Invalid href link pagination string' + ) - # Check previous link - previous_link = get_link(json_data['links'], 'previous') - self.assertIsNone(previous_link, msg='Pagination previous link present for initial query') + # Check previous link + previous_link = get_link(json_data['links'], 'previous') + self.assertIsNone( + previous_link, msg='Pagination previous link present for initial query' + ) - # Get the next page - response = self.client.get(next_link['href'].replace('http://testserver', '')) - json_data = response.json() - self.assertEqual(200, response.status_code, msg=get_http_error_description(json_data)) + # Get the next page + response = self.client.get(next_link['href'].replace('http://testserver', '')) + json_data = response.json() + self.assertEqual( + 200, response.status_code, msg=get_http_error_description(json_data) + ) - # Check next link - next_link = get_link(json_data['links'], 'next') - self.assertIsNotNone(next_link, msg='Pagination next link missing') - self.assertTrue(isinstance(next_link['href'], str), msg='href is not a string') - self.assertTrue( - next_link['href'].startswith('http://testserver/api/stac/v0.9/collections?cursor='), - msg='Invalid href link pagination string' - ) + # Check next link + next_link = get_link(json_data['links'], 'next') + self.assertIsNotNone(next_link, msg='Pagination next link missing') + self.assertTrue(isinstance(next_link['href'], str), msg='href is not a string') + self.assertTrue( + next_link['href']. + startswith(f'http://testserver/api/stac/v0.9/{endpoint}?cursor='), + msg='Invalid href link pagination string' + ) - # Check previous link - previous_link = get_link(json_data['links'], 'previous') - self.assertIsNotNone(previous_link, msg='Pagination previous link is missing') - self.assertTrue(isinstance(previous_link['href'], str), msg='href is not a string') - self.assertTrue( - previous_link['href'].startswith('http://testserver/api/stac/v0.9/collections?cursor='), - msg='Invalid href link pagination string' - ) + # Check previous link + previous_link = get_link(json_data['links'], 'previous') + self.assertIsNotNone(previous_link, msg='Pagination previous link is missing') + self.assertTrue(isinstance(previous_link['href'], str), msg='href is not a string') + self.assertTrue( + previous_link['href']. 
+ startswith(f'http://testserver/api/stac/v0.9/{endpoint}?cursor='), + msg='Invalid href link pagination string' + ) - # Get the next page - response = self.client.get(next_link['href'].replace('http://testserver', '')) - json_data = response.json() - self.assertEqual(200, response.status_code, msg=get_http_error_description(json_data)) + # Get the next page + response = self.client.get(next_link['href'].replace('http://testserver', '')) + json_data = response.json() + self.assertEqual( + 200, response.status_code, msg=get_http_error_description(json_data) + ) - # Check next link - next_link = get_link(json_data['links'], 'next') - self.assertIsNone(next_link, msg='Pagination next link is present') + # Check next link + next_link = get_link(json_data['links'], 'next') + self.assertIsNone(next_link, msg='Pagination next link is present') - # Check previous link - previous_link = get_link(json_data['links'], 'previous') - self.assertIsNotNone(previous_link, msg='Pagination previous link is missing') - self.assertTrue(isinstance(previous_link['href'], str), msg='href is not a string') - self.assertTrue( - previous_link['href'].startswith('http://testserver/api/stac/v0.9/collections?cursor='), - msg='Invalid href link pagination string' - ) + # Check previous link + previous_link = get_link(json_data['links'], 'previous') + self.assertIsNotNone(previous_link, msg='Pagination previous link is missing') + self.assertTrue(isinstance(previous_link['href'], str), msg='href is not a string') + self.assertTrue( + previous_link['href']. + startswith(f'http://testserver/api/stac/v0.9/{endpoint}?cursor='), + msg='Invalid href link pagination string' + ) class ApiETagPreconditionTestCase(StacBaseTestCase): diff --git a/app/tests/test_items_endpoint.py b/app/tests/test_items_endpoint.py index b1cf7009..47098392 100644 --- a/app/tests/test_items_endpoint.py +++ b/app/tests/test_items_endpoint.py @@ -362,7 +362,7 @@ def setUpTestData(cls): 'item-switzerland-east', 'item-switzerland-north', 'item-switzerland-south', - 'item-paris' + 'item-paris', ], cls.collection, db_create=True, diff --git a/app/tests/test_serializer.py b/app/tests/test_serializer.py index 1b11b984..64252878 100644 --- a/app/tests/test_serializer.py +++ b/app/tests/test_serializer.py @@ -99,7 +99,7 @@ def test_collection_serialization(self): ('href', 'https://www.example.com/described-by'), ('rel', 'describedBy'), ('type', 'description'), - ('title', 'This is an extra link'), + ('title', 'This is an extra collection link'), ]) ], 'providers': [ diff --git a/app/wsgi.py b/app/wsgi.py index 0799d031..ded19ec8 100755 --- a/app/wsgi.py +++ b/app/wsgi.py @@ -45,7 +45,7 @@ class StandaloneApplication(BaseApplication): # pylint: disable=abstract-method def __init__(self, app, options=None): # pylint: disable=redefined-outer-name self.options = options or {} self.application = app - super(StandaloneApplication, self).__init__() + super().__init__() def load_config(self): config = { From fbff8f3b6855a48a662ff6911772f26e66489ae3 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 18 Mar 2021 09:34:23 +0100 Subject: [PATCH 002/105] Improved pagination unittest based on code review Now the pagination unittest code is a bit clearer and the test has been improved where we test navigation up and down making sure that different page repsonse are different and that previous pages are equal to initial ones. 
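For illustration only (not part of the change set below), the navigation check
described above boils down to the following self-contained sketch, in which a
hypothetical fetch() helper and a hard-coded pages dict stand in for the Django
test client and the paginated API responses:

    # Hypothetical stand-in data: three pages linked by next/previous cursors.
    pages = {
        'page1': {'features': ['item-1'], 'links': {'next': 'page2', 'previous': None}},
        'page2': {'features': ['item-2'], 'links': {'next': 'page3', 'previous': 'page1'}},
        'page3': {'features': ['item-3'], 'links': {'next': None, 'previous': 'page2'}},
    }

    def fetch(cursor):
        # Stand-in for self.client.get(link['href']).json() in the real test.
        return pages[cursor]

    page_1 = fetch('page1')
    page_2 = fetch(page_1['links']['next'])
    page_3 = fetch(page_2['links']['next'])

    # Walking forward must yield a result that differs from the previous page ...
    assert page_1['features'] != page_2['features'] != page_3['features']
    # ... and walking back via the previous links must reproduce the earlier pages.
    assert fetch(page_3['links']['previous'])['features'] == page_2['features']
    assert fetch(page_2['links']['previous'])['features'] == page_1['features']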
--- app/tests/test_generic_api.py | 136 ++++++++++++++++++++-------------- 1 file changed, 79 insertions(+), 57 deletions(-) diff --git a/app/tests/test_generic_api.py b/app/tests/test_generic_api.py index e14e6317..66f565b4 100644 --- a/app/tests/test_generic_api.py +++ b/app/tests/test_generic_api.py @@ -44,6 +44,32 @@ def setUpTestData(cls): def setUp(self): self.client = Client() + self.maxDiff = None # pylint: disable=invalid-name + + def _get_check_link(self, links, rel, endpoint): + link = get_link(links, rel) + self.assertIsNotNone(link, msg=f'Pagination {rel} link missing') + self.assertTrue(isinstance(link['href'], str), msg='href is not a string') + self.assertTrue( + link['href'].startswith(f'http://testserver/api/stac/v0.9/{endpoint}?cursor='), + msg='Invalid href link pagination string' + ) + return link + + def _read_link(self, link, rel, other_pages, result_attribute): + # Read the link page + response = self.client.get(link['href'].replace('http://testserver', '')) + json_data = response.json() + self.assertEqual(200, response.status_code, msg=get_http_error_description(json_data)) + + # Make sure next page is different from others + for page in other_pages: + self.assertNotEqual( + page[result_attribute], + json_data[result_attribute], + msg=f"{rel} page is not different from initial" + ) + return json_data def test_invalid_limit_query(self): items = self.factory.create_item_samples(3, self.collections[0].model, db_create=True) @@ -74,80 +100,76 @@ def test_invalid_limit_query(self): msg='Unexpected error message') def test_pagination(self): + # pylint: disable=too-many-locals items = self.factory.create_item_samples(3, self.collections[0].model, db_create=True) - for endpoint in [ - 'collections', - f'collections/{self.collections[0]["name"]}/items', + for endpoint, result_attribute in [ + ('collections', 'collections'), + (f'collections/{self.collections[0]["name"]}/items', 'features') ]: with self.subTest(endpoint=endpoint): + # Page 1: response = self.client.get(f"/{STAC_BASE_V}/{endpoint}?limit=1") - json_data = response.json() - self.assertEqual( - 200, response.status_code, msg=get_http_error_description(json_data) - ) - - # Check next link - next_link = get_link(json_data['links'], 'next') - self.assertIsNotNone(next_link, msg='Pagination next link missing') - self.assertTrue(isinstance(next_link['href'], str), msg='href is not a string') - self.assertTrue( - next_link['href']. 
- startswith(f'http://testserver/api/stac/v0.9/{endpoint}?cursor='), - msg='Invalid href link pagination string' - ) + page_1 = response.json() + self.assertEqual(200, response.status_code, msg=get_http_error_description(page_1)) - # Check previous link - previous_link = get_link(json_data['links'], 'previous') + # Make sure previous link is not present self.assertIsNone( - previous_link, msg='Pagination previous link present for initial query' + get_link(page_1['links'], 'previous'), + msg='Pagination previous link present for initial query' ) - # Get the next page - response = self.client.get(next_link['href'].replace('http://testserver', '')) - json_data = response.json() - self.assertEqual( - 200, response.status_code, msg=get_http_error_description(json_data) - ) + # Get and check next link + next_link_2 = self._get_check_link(page_1['links'], 'next', endpoint) - # Check next link - next_link = get_link(json_data['links'], 'next') - self.assertIsNotNone(next_link, msg='Pagination next link missing') - self.assertTrue(isinstance(next_link['href'], str), msg='href is not a string') - self.assertTrue( - next_link['href']. - startswith(f'http://testserver/api/stac/v0.9/{endpoint}?cursor='), - msg='Invalid href link pagination string' + # PAGE 2: + # Read the next page + page_2 = self._read_link(next_link_2, 'next', [page_1], result_attribute) + + # get and check next link + next_link_3 = self._get_check_link(page_2['links'], 'next', endpoint) + + # Get and check previous link + previous_link_1 = self._get_check_link(page_2['links'], 'previous', endpoint) + + # PAGE 3: + # Read the next page + page_3 = self._read_link(next_link_3, 'next', [page_1, page_2], result_attribute) + + # Make sure next link is not present + self.assertIsNone( + get_link(page_3['links'], 'next'), + msg='Pagination next link present for last page' ) - # Check previous link - previous_link = get_link(json_data['links'], 'previous') - self.assertIsNotNone(previous_link, msg='Pagination previous link is missing') - self.assertTrue(isinstance(previous_link['href'], str), msg='href is not a string') - self.assertTrue( - previous_link['href']. - startswith(f'http://testserver/api/stac/v0.9/{endpoint}?cursor='), - msg='Invalid href link pagination string' + # Get and check previous link + previous_link_2 = self._get_check_link(page_3['links'], 'previous', endpoint) + + # Navigate back with previous links + # PAGE: 2 + _page_2 = self._read_link( + previous_link_2, 'previous', [page_1, page_3], result_attribute ) - # Get the next page - response = self.client.get(next_link['href'].replace('http://testserver', '')) - json_data = response.json() self.assertEqual( - 200, response.status_code, msg=get_http_error_description(json_data) + page_2[result_attribute], + _page_2[result_attribute], + msg="Previous link for page 2 is not equal to next link to page 2" ) - # Check next link - next_link = get_link(json_data['links'], 'next') - self.assertIsNone(next_link, msg='Pagination next link is present') + # get and check next link + _next_link_3 = self._get_check_link(_page_2['links'], 'next', endpoint) - # Check previous link - previous_link = get_link(json_data['links'], 'previous') - self.assertIsNotNone(previous_link, msg='Pagination previous link is missing') - self.assertTrue(isinstance(previous_link['href'], str), msg='href is not a string') - self.assertTrue( - previous_link['href']. 
- startswith(f'http://testserver/api/stac/v0.9/{endpoint}?cursor='), - msg='Invalid href link pagination string' + # Get and check previous link + _previous_link_1 = self._get_check_link(_page_2['links'], 'previous', endpoint) + + # PAGE 1: + _page_1 = self._read_link( + _previous_link_1, 'previous', [_page_2, page_2, page_3], result_attribute + ) + self.assertEqual( + page_1[result_attribute], + _page_1[result_attribute], + msg="Previous link for page 1 is not equal to initial page 1" ) From 6e89195994fdb7f1615bcb5de435d40a553b51c8 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Mon, 15 Mar 2021 16:37:25 +0100 Subject: [PATCH 003/105] BGDIINF_SB-1646 using 3rd party package for filtering items per collection using https://github.com/farhan0581/django-admin-autocomplete-filter to allow filtering for items of a specified collection. This adds an dropdown list with available collection names as well as an autocomplete textfield for the search. --- app/config/settings_prod.py | 1 + app/stac_api/admin.py | 7 +++++++ 2 files changed, 8 insertions(+) diff --git a/app/config/settings_prod.py b/app/config/settings_prod.py index c213b17f..35b593da 100644 --- a/app/config/settings_prod.py +++ b/app/config/settings_prod.py @@ -82,6 +82,7 @@ 'django_prometheus', 'config.apps.StacAdminConfig', 'stac_api.apps.StacApiConfig', + 'admin_auto_filters', ] # Middlewares are executed in order, once for the incoming diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index 49ebcfb4..ad4491a2 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -16,6 +16,7 @@ from stac_api.models import LandingPageLink from stac_api.models import Provider from stac_api.utils import build_asset_href +from admin_auto_filters.filters import AutocompleteFilter class LandingPageLinkInline(admin.TabularInline): @@ -82,6 +83,11 @@ class ItemLinkInline(admin.TabularInline): extra = 0 +class CollectionFilter(AutocompleteFilter): + title = 'Collection name' # display title + field_name = 'collection' # name of the foreign key field + + @admin.register(Item) class ItemAdmin(admin.GeoModelAdmin): @@ -113,6 +119,7 @@ class Media: wms_layer = 'ch.swisstopo.pixelkarte-farbe-pk1000.noscale' wms_url = 'https://wms.geo.admin.ch/' list_display = ['name', 'collection'] + list_filter = [CollectionFilter] def get_search_results(self, request, queryset, search_term): queryset, use_distinct = super().get_search_results(request, queryset, search_term) From 11454018fe50c034a98419aed82ce56eef6cbda6 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Mon, 15 Mar 2021 16:42:09 +0100 Subject: [PATCH 004/105] BGDIINF_SB-1646 added Pipfile and Pipfile.lock --- Pipfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Pipfile b/Pipfile index 0a9caf5b..6ce35878 100644 --- a/Pipfile +++ b/Pipfile @@ -38,6 +38,7 @@ requests = "~=2.25.0" py-multihash = "~=2.0.1" pypatch = "*" django-prometheus = "*" +django-admin-autocomplete-filter = "~=0.6.1" [requires] python_version = "3.7" From 86eed2c7878f30073e6ad1b7fc0f6d7d5f9c89a5 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Tue, 16 Mar 2021 08:47:34 +0100 Subject: [PATCH 005/105] BGDIINF_SB-1646 linting and formatting --- app/stac_api/admin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index ad4491a2..2cf07f10 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -1,3 +1,5 @@ +from admin_auto_filters.filters import AutocompleteFilter + from django.contrib.gis import admin from 
django.contrib.postgres.fields import ArrayField from django.contrib.staticfiles import finders @@ -16,7 +18,6 @@ from stac_api.models import LandingPageLink from stac_api.models import Provider from stac_api.utils import build_asset_href -from admin_auto_filters.filters import AutocompleteFilter class LandingPageLinkInline(admin.TabularInline): From 6c6944023988c20dd03c987fa3e5e728d8545011 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Tue, 16 Mar 2021 10:37:26 +0100 Subject: [PATCH 006/105] BGDIINF_SB-1646 ordering of INSTALLED_APPS in settings corrected --- app/config/settings_prod.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/config/settings_prod.py b/app/config/settings_prod.py index 35b593da..00344566 100644 --- a/app/config/settings_prod.py +++ b/app/config/settings_prod.py @@ -76,13 +76,13 @@ 'rest_framework.authtoken', # Note: If you use TokenAuthentication in production you must ensure # that your API is only available over https. + 'admin_auto_filters', 'solo.apps.SoloAppConfig', 'storages', 'whitenoise.runserver_nostatic', 'django_prometheus', 'config.apps.StacAdminConfig', 'stac_api.apps.StacApiConfig', - 'admin_auto_filters', ] # Middlewares are executed in order, once for the incoming From 6ab1b392cb8cdf7b810937c59002608768f78b09 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Thu, 18 Mar 2021 12:01:15 +0100 Subject: [PATCH 007/105] BGDIINF_SB-1646 added updated Pipfile.lock after rebasing --- Pipfile.lock | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index c5109443..846f8dc3 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "f697d717d9aab05ad9a74092f464fb51906de745d7500cfe240fad81a0b59742" + "sha256": "a7fcba27d693e9f1b737e27611e9755be5102950979660ce0eb4afcdf1ed38e6" }, "pipfile-spec": 6, "requires": { @@ -67,6 +67,14 @@ "index": "pypi", "version": "==3.1.7" }, + "django-admin-autocomplete-filter": { + "hashes": [ + "sha256:0358594110e5e498306388a9e4a0a2b57dde3abd302f9945337396003345af04", + "sha256:5016a8cb9aff7a7e7a796870ac572ac248a17d36339215878df9ba24761ce65f" + ], + "index": "pypi", + "version": "==0.6.1" + }, "django-prometheus": { "hashes": [ "sha256:c338d6efde1ca336e90c540b5e87afe9287d7bcc82d651a778f302b0be17a933", @@ -841,11 +849,11 @@ }, "responses": { "hashes": [ - "sha256:27d8822d65dc8875a039301831de8ac17db2473ae2a8fabd4e6599b25ce2f353", - "sha256:a4a90c8244006c01f4246aecf532fbb5429c4031df4adcc7638061f0f3ce4ceb" + "sha256:3b1ea9cf026edaaf25e853abc4d3b2687d25467e9d8d41e77ee525cad0673f3e", + "sha256:cf62ab0f4119b81d485521b2c950d8aa55a885c90126488450b7acb8ee3f77ac" ], "index": "pypi", - "version": "==0.13.0" + "version": "==0.13.1" }, "s3transfer": { "hashes": [ From b517ceed98aae9e266d93db1de37adfca3b39edf Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 11 Mar 2021 16:47:40 +0100 Subject: [PATCH 008/105] BGDIINF_SB-1566: Added Asset media type name extension validation Now the asset name extension is validated against the media type --- app/stac_api/models.py | 7 +++- app/stac_api/serializers.py | 12 +++++- app/stac_api/validators.py | 44 +++++++++++++++++++++- app/tests/data_factory.py | 49 ++++++++++++++++++------- app/tests/sample_data/asset_samples.py | 12 +++--- app/tests/test_admin_page.py | 2 +- app/tests/test_asset_model.py | 1 - app/tests/test_assets_endpoint.py | 17 ++++++--- app/tests/test_collections_summaries.py | 22 +++++------ app/tests/test_generic_api.py | 6 +-- 10 files changed, 124 
insertions(+), 48 deletions(-) diff --git a/app/stac_api/models.py b/app/stac_api/models.py index 3a5f36f5..c9f53420 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -25,6 +25,8 @@ from stac_api.utils import get_asset_path # from stac_api.utils import get_s3_resource # Un-comment with BGDIINF_SB-1625 from stac_api.validators import MEDIA_TYPES +from stac_api.validators import validate_asset_name +from stac_api.validators import validate_asset_name_with_media_type from stac_api.validators import validate_geoadmin_variant from stac_api.validators import validate_geometry from stac_api.validators import validate_item_properties_datetimes @@ -548,7 +550,7 @@ class Meta: help_text=_(SEARCH_TEXT_HELP_ITEM) ) # using "name" instead of "id", as "id" has a default meaning in django - name = models.CharField('id', max_length=255, validators=[validate_name]) + name = models.CharField('id', max_length=255, validators=[validate_asset_name]) file = models.FileField(upload_to=upload_asset_to_path_hook, max_length=255) @property @@ -733,3 +735,6 @@ def delete(self, *args, **kwargs): # pylint: disable=signature-differs self.item.collection.save() self.item.save() # We save the item to update its ETag super().delete(*args, **kwargs) + + def clean(self): + validate_asset_name_with_media_type(self.name, self.media_type) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index 8d3a4d4a..d5c2ae2f 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -24,6 +24,8 @@ from stac_api.utils import isoformat from stac_api.validators import MEDIA_TYPES_MIMES from stac_api.validators import validate_asset_multihash +from stac_api.validators import validate_asset_name +from stac_api.validators import validate_asset_name_with_media_type from stac_api.validators import validate_geoadmin_variant from stac_api.validators import validate_item_properties_datetimes from stac_api.validators import validate_name @@ -570,7 +572,7 @@ class Meta: id = serializers.CharField( source='name', max_length=255, - validators=[validate_name, UniqueValidator(queryset=Asset.objects.all())] + validators=[validate_asset_name, UniqueValidator(queryset=Asset.objects.all())] ) title = serializers.CharField( required=False, max_length=255, allow_null=True, allow_blank=False @@ -615,6 +617,14 @@ class Meta: updated = serializers.DateTimeField(read_only=True) def validate(self, attrs): + if not self.partial: + validate_asset_name_with_media_type(attrs.get('name'), attrs.get('media_type')) + if self.partial and ('name' in attrs or 'media_type' in attrs): + validate_asset_name_with_media_type( + attrs.get('name', self.instance.name), + attrs.get('media_type', self.instance.media_type) + ) + validate_json_payload(self) if not self.partial: diff --git a/app/stac_api/validators.py b/app/stac_api/validators.py index 3d2bf383..1150a53a 100644 --- a/app/stac_api/validators.py +++ b/app/stac_api/validators.py @@ -60,7 +60,9 @@ ('text/plain', 'Text', ['.txt']), ('text/x.plain+zip', 'Zipped text', ['.zip']), ] -MEDIA_TYPES_MIMES = [x[0] for x in iter(MEDIA_TYPES)] +MEDIA_TYPES_MIMES = [x[0] for x in MEDIA_TYPES] +MEDIA_TYPES_EXTENSIONS = [ext for media_type in MEDIA_TYPES for ext in media_type[2]] +MEDIA_TYPES_BY_TYPE = {media[0]: media for media in MEDIA_TYPES} def validate_name(name): @@ -69,7 +71,45 @@ def validate_name(name): if not re.match(r'^[0-9a-z-_.]+$', name): logger.error('Invalid name %s, only the following characters are allowed: 0-9a-z-_.', name) raise ValidationError( - _('Invalid 
name, only the following characters are allowed: 0-9a-z-_.'), + _('Invalid id, only the following characters are allowed: 0-9a-z-_.'), + code='id' + ) + + +def validate_asset_name(name): + '''Validate Asset name used in URL + ''' + if not name: + logger.error('Invalid asset name, must not be empty') + raise ValidationError({'id': _("Invalid id must not be empty")}, code='id') + validate_name(name) + ext = name.rsplit('.', maxsplit=1)[-1] + if f'.{ext}' not in MEDIA_TYPES_EXTENSIONS: + logger.error( + 'Invalid name %s extension %s, name must ends with a valid file extension', name, ext + ) + raise ValidationError( + _(f"Invalid id extension '.{ext}', id must have a valid file extension"), + code='id' + ) + + +def validate_asset_name_with_media_type(name, media_type): + '''Validate Asset name against the media type + ''' + ext = f".{name.rsplit('.', maxsplit=1)[-1]}" + if media_type not in MEDIA_TYPES_BY_TYPE: + logger.error("Invalid media_type %s for asset %s", media_type, name) + raise ValidationError(_(f"Invalid media type {media_type}"), code='type') + if ext not in MEDIA_TYPES_BY_TYPE[media_type][2]: + logger.error( + "Invalid name %s extension %s, don't match the media type %s", + name, + ext, + MEDIA_TYPES_BY_TYPE[media_type], + ) + raise ValidationError( + _(f"Invalid id extension '{ext}', id must match its media type {media_type}"), code='id' ) diff --git a/app/tests/data_factory.py b/app/tests/data_factory.py index fe165c04..1d95d3a9 100644 --- a/app/tests/data_factory.py +++ b/app/tests/data_factory.py @@ -106,6 +106,7 @@ from stac_api.utils import get_s3_resource from stac_api.utils import get_sha256_multihash from stac_api.utils import isoformat +from stac_api.validators import MEDIA_TYPES_BY_TYPE from tests.sample_data.asset_samples import assets as asset_samples from tests.sample_data.collection_samples import collections as collection_samples @@ -806,13 +807,15 @@ def __init__(self): self.last = None @classmethod - def get_last_name(cls, last): + def get_last_name(cls, last, extension=''): '''Return a factory name incremented by one (e.g. 'collection-1') ''' if last is None: - last = f'{cls.factory_name}-0' - last = '{}-{}'.format( - cls.factory_name, int(re.match(fr"{cls.factory_name}-(\d+)", last).group(1)) + 1 + last = f'{cls.factory_name}-0{extension}' + last = '{}-{}{}'.format( + cls.factory_name, + int(re.match(fr"{cls.factory_name}-(\d+)(\.\w+)?", last).group(1)) + 1, + extension ) return last @@ -1071,19 +1074,22 @@ def create_sample( Returns: The data sample ''' - sample = super().create_sample( - sample, - name=name, - item=item, - db_create=db_create, - required_only=required_only, - **kwargs - ) + if name: + data_sample = AssetSample( + item, sample=sample, name=name, required_only=required_only, **kwargs + ) + else: + self.last = self.get_last_name(self.last, extension=self.get_extension(sample, kwargs)) + data_sample = AssetSample( + item, sample=sample, name=self.last, required_only=required_only, **kwargs + ) + if db_create: + data_sample.create() if not db_create and create_asset_file: # when db_create is true, the asset file automatically created therefore it is not # necessary to explicitely create it again. 
- sample.create_asset_file() - return sample + data_sample.create_asset_file() + return data_sample def create_samples(self, samples, item, db_create=False, create_asset_file=False, **kwargs): '''Creates several Asset samples @@ -1109,6 +1115,21 @@ def create_samples(self, samples, item, db_create=False, create_asset_file=False samples, item=item, db_create=db_create, create_asset_file=create_asset_file, **kwargs ) + def get_extension(self, sample_name, kwargs): + media = 'text/plain' + if 'media_type' in kwargs: + media = kwargs['media_type'] + else: + try: + sample = AssetSample.samples_dict[sample_name] + except KeyError as error: + raise KeyError(f'Unknown {sample_name} sample: {error}') + if 'media_type' in sample: + media = sample['media_type'] + if media not in MEDIA_TYPES_BY_TYPE: + media = 'text/plain' + return MEDIA_TYPES_BY_TYPE[media][2][0] + class Factory: '''Factory for data samples (Collection, Item and Asset) diff --git a/app/tests/sample_data/asset_samples.py b/app/tests/sample_data/asset_samples.py index 8a49ed20..81778817 100644 --- a/app/tests/sample_data/asset_samples.py +++ b/app/tests/sample_data/asset_samples.py @@ -6,7 +6,7 @@ assets = { 'asset-1': { - 'name': 'asset-1', + 'name': 'asset-1.tiff', 'title': 'Asset 1 Title', 'description': 'This is a full description of asset 1', 'eo_gsd': 3.4, @@ -18,7 +18,7 @@ 'file': FILE_CONTENT_1 }, 'asset-1-updated': { - 'name': 'asset-2', + 'name': 'asset-2.txt', 'title': 'Asset 2 Title', 'description': 'This is a full description of asset 2', 'eo_gsd': 4, @@ -28,7 +28,7 @@ 'media_type': "text/plain" }, 'asset-2': { - 'name': 'asset-2', + 'name': 'asset-2.txt', 'title': 'Asset 2 Title', 'description': 'This is a full description of asset 2', 'eo_gsd': 4, @@ -40,7 +40,7 @@ 'file': FILE_CONTENT_2 }, 'asset-3': { - 'name': 'asset-3', + 'name': 'asset-3.pdf', 'title': 'Asset 3 Title', 'description': 'This is a full description of asset 3', 'eo_gsd': 5.4, @@ -66,7 +66,7 @@ 'name': 'asset-missing-required', }, 'asset-valid-geoadmin-variant': { - 'name': 'geodadmin-variant', + 'name': 'geodadmin-variant.txt', 'title': 'Asset Variant', 'description': 'This asset should pass the test', 'eo_gsd': 4, @@ -77,7 +77,7 @@ 'file': b'Asset with long geoadmin:variant' }, 'asset-invalid-geoadmin-variant': { - 'name': 'invalid-geodadmin-variant', + 'name': 'invalid-geodadmin-variant.txt', 'title': 'Asset Variant Testing', 'description': 'This asset shouldn\'t pass the test', 'eo_gsd': 4, diff --git a/app/tests/test_admin_page.py b/app/tests/test_admin_page.py index 48f1f941..537d6508 100644 --- a/app/tests/test_admin_page.py +++ b/app/tests/test_admin_page.py @@ -226,7 +226,7 @@ def _create_asset(self, item, extra=None): data = { "item": item.id, - "name": "test_asset", + "name": "test_asset.zip", "description": "This is a description", "eo_gsd": 10, "geoadmin_lang": "en", diff --git a/app/tests/test_asset_model.py b/app/tests/test_asset_model.py index 4db6fa7b..001fb4d2 100644 --- a/app/tests/test_asset_model.py +++ b/app/tests/test_asset_model.py @@ -71,7 +71,6 @@ def test_create_asset_only_required_attributes(self): # Should not raise any errors. 
self.factory.create_asset_sample( item=self.item, - name="asset-required-only", sample="asset-valid-geoadmin-variant", db_create=True, required_only=True diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index 1d75299d..fef34caf 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -342,7 +342,7 @@ def test_asset_endpoint_post_asset_file_dont_exists(self): description = response.json()['description'] self.assertIn('href', description, msg=f'Unexpected field error {description}') self.assertEqual( - "Asset doesn't exists at href http://testserver/collection-1/item-1/asset-1", + "Asset doesn't exists at href http://testserver/collection-1/item-1/asset-1.tiff", description['href'][0], msg="Unexpected error message" ) @@ -393,8 +393,8 @@ def test_asset_endpoint_post_s3_without_sha256(self): description = response.json()['description'] self.assertIn('non_field_errors', description, msg=f'Unexpected field error {description}') self.assertEqual( - "Asset at href http://testserver/collection-1/item-1/asset-1 has a md5 multihash while " - "a sha2-256 multihash is defined in the checksum:multihash attribute", + "Asset at href http://testserver/collection-1/item-1/asset-1.tiff has a md5 multihash " + "while a sha2-256 multihash is defined in the checksum:multihash attribute", description['non_field_errors'][0], msg="Unexpected error message" ) @@ -420,7 +420,7 @@ def test_asset_endpoint_post_wrong_checksum(self): description = response.json()['description'] self.assertIn('non_field_errors', description, msg=f'Unexpected field error {description}') self.assertEqual( - "Asset at href http://testserver/collection-1/item-1/asset-1 with sha2-256 hash " + "Asset at href http://testserver/collection-1/item-1/asset-1.tiff with sha2-256 hash " "a7f5e7ca03b0f80a2fcfe5142642377e7654df2dfa736fe4d925322d8a651efe doesn't match the " "checksum:multihash 3db85f41709d08bf1f2907042112bf483b28e12db4b3ffb5428a1f28308847ba", description['non_field_errors'][0], @@ -546,6 +546,7 @@ def test_asset_endpoint_put(self): item=self.item.model, name=asset_name, sample='asset-1-updated', + media_type=self.asset['media_type'], checksum_multihash=self.asset['checksum_multihash'], create_asset_file=False ) @@ -576,6 +577,7 @@ def test_asset_endpoint_put_extra_payload(self): item=self.item.model, name=asset_name, sample='asset-1-updated', + media_type=self.asset['media_type'], checksum_multihash=self.asset['checksum_multihash'], extra_attribute='not allowed', create_asset_file=False @@ -599,6 +601,7 @@ def test_asset_endpoint_put_read_only_in_payload(self): name=asset_name, checksum_multihash=self.asset['checksum_multihash'], sample='asset-1-updated', + media_type=self.asset['media_type'], created=utc_aware(datetime.utcnow()), create_asset_file=False ) @@ -618,7 +621,7 @@ def test_asset_endpoint_put_rename_asset(self): collection_name = self.collection['name'] item_name = self.item['name'] asset_name = self.asset['name'] - new_asset_name = "new-asset-name" + new_asset_name = "new-asset-name.txt" changed_asset = self.factory.create_asset_sample( item=self.item.model, name=new_asset_name, @@ -652,7 +655,7 @@ def test_asset_endpoint_patch_rename_asset(self): collection_name = self.collection['name'] item_name = self.item['name'] asset_name = self.asset['name'] - new_asset_name = "new-asset-name" + new_asset_name = "new-asset-name.txt" changed_asset = self.factory.create_asset_sample( item=self.item.model, name=new_asset_name, sample='asset-1-updated' ) @@ -688,6 +691,7 
@@ def test_asset_endpoint_patch_extra_payload(self): item=self.item.model, name=asset_name, sample='asset-1-updated', + media_type=self.asset['media_type'], extra_payload='invalid' ) @@ -708,6 +712,7 @@ def test_asset_endpoint_patch_read_only_in_payload(self): item=self.item.model, name=asset_name, sample='asset-1-updated', + media_type=self.asset['media_type'], created=utc_aware(datetime.utcnow()) ) diff --git a/app/tests/test_collections_summaries.py b/app/tests/test_collections_summaries.py index 2586a774..070d4260 100644 --- a/app/tests/test_collections_summaries.py +++ b/app/tests/test_collections_summaries.py @@ -48,13 +48,9 @@ def add_single_datetime_item(self, datetime_val, name): ).model return item - def add_asset(self, item, name, eo_gsd, geoadmin_variant, proj_epsg): + def add_asset(self, item, eo_gsd, geoadmin_variant, proj_epsg): asset = self.data_factory.create_asset_sample( - item=item, - name=name, - eo_gsd=eo_gsd, - geoadmin_variant=geoadmin_variant, - proj_epsg=proj_epsg + item=item, eo_gsd=eo_gsd, geoadmin_variant=geoadmin_variant, proj_epsg=proj_epsg ).model return asset @@ -65,7 +61,7 @@ def test_update_collection_summaries_asset_insertion(self): item1 = self.add_range_item(self.y200, self.y8000, "item1") item2 = self.add_range_item(self.y200, self.y8000, "item2") - self.add_asset(item1, "asset1", 1.2, "kgrs", 1234) + self.add_asset(item1, 1.2, "kgrs", 1234) self.assertEqual( self.collection.summaries["eo:gsd"], [1.2], @@ -83,7 +79,7 @@ def test_update_collection_summaries_asset_insertion(self): "after asset has been inserted." ) - self.add_asset(item2, "asset2", 2.1, "komb", 4321) + self.add_asset(item2, 2.1, "komb", 4321) self.assertEqual( self.collection.summaries["eo:gsd"], [1.2, 2.1], "Collection's summaries[eo:gsd] has not been correctly updated " @@ -106,8 +102,8 @@ def test_update_collection_summaries_asset_deletion(self): item1 = self.add_range_item(self.y200, self.y8000, "item1") - asset1 = self.add_asset(item1, "asset1", 1.2, "kgrs", 1234) - asset2 = self.add_asset(item1, "asset2", 2.1, "komb", 4321) + asset1 = self.add_asset(item1, 1.2, "kgrs", 1234) + asset2 = self.add_asset(item1, 2.1, "komb", 4321) asset2.delete() @@ -190,8 +186,8 @@ def test_update_collection_summaries_asset_update(self): # Tests if collection's summaries are updated correctly after an # asset was updated item1 = self.add_range_item(self.y200, self.y8000, "item1") - asset1 = self.add_asset(item1, "asset1", 1.2, "kgrs", 1234) - asset2 = self.add_asset(item1, "asset2", 2.1, "komb", 4321) + asset1 = self.add_asset(item1, 1.2, "kgrs", 1234) + asset2 = self.add_asset(item1, 2.1, "komb", 4321) asset1.eo_gsd = 12.34 asset1.geoadmin_variant = "krel" @@ -218,7 +214,7 @@ def test_update_collection_summaries_asset_update(self): def test_update_collection_summaries_none_values(self): # update a variant, that as been None as a start value item = self.data_factory.create_item_sample(collection=self.collection).model - asset = self.add_asset(item, 'asset-1', None, None, None) + asset = self.add_asset(item, None, None, None) self.assertEqual( self.collection.summaries, { 'eo:gsd': [], 'proj:epsg': [], 'geoadmin:variant': [] diff --git a/app/tests/test_generic_api.py b/app/tests/test_generic_api.py index 66f565b4..f16c93aa 100644 --- a/app/tests/test_generic_api.py +++ b/app/tests/test_generic_api.py @@ -189,7 +189,7 @@ def setUp(self): ) self.asset = self.factory.create_asset_sample( item=self.item.model, - name='asset-1', + name='asset-1.tiff', db_create=True, ) @@ -247,7 +247,8 @@ def 
test_put_precondition(self): item=self.item.model, name=self.asset["name"], sample='asset-1-updated', - checksum_multihash=self.asset.model.checksum_multihash + media_type=self.asset['media_type'], + checksum_multihash=self.asset["checksum_multihash"] ) ), ]: @@ -377,7 +378,6 @@ def setUpTestData(cls): ) cls.asset = cls.factory.create_asset_sample( item=cls.item.model, - name='asset-1', db_create=True, ) From 0a626027e3213db4c63a4abbf090a6921c477169 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 16 Mar 2021 08:45:27 +0100 Subject: [PATCH 009/105] BGDIINF_SB-1566: Make uses of django urls.reverse in admin test This method was already used by some tests, use it every where in admin test to be consistent. --- app/tests/test_admin_page.py | 38 ++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/app/tests/test_admin_page.py b/app/tests/test_admin_page.py index 537d6508..0c9fdeca 100644 --- a/app/tests/test_admin_page.py +++ b/app/tests/test_admin_page.py @@ -76,7 +76,7 @@ def _create_collection(self, with_link=False, with_provider=False, extra=None): }) if extra is not None: data.update(extra) - response = self.client.post("/api/stac/admin/stac_api/collection/add/", data) + response = self.client.post(reverse('admin:stac_api_collection_add'), data) # Status code for successful creation is 302, since in the admin UI # you're redirected to the list view after successful creation @@ -134,7 +134,7 @@ def _create_item(self, collection, with_link=False, extra=None): }) if extra: data.update(extra) - response = self.client.post("/api/stac/admin/stac_api/item/add/", data) + response = self.client.post(reverse('admin:stac_api_item_add'), data) # Status code for successful creation is 302, since in the admin UI # you're redirected to the list view after successful creation @@ -187,7 +187,7 @@ def _create_asset_minimal(self, item): "file": filelike } - response = self.client.post("/api/stac/admin/stac_api/asset/add/", data) + response = self.client.post(reverse('admin:stac_api_asset_add'), data) logger.debug('Asset created in %fs', time.time() - start) # Status code for successful creation is 302, since in the admin UI @@ -236,7 +236,7 @@ def _create_asset(self, item, extra=None): "media_type": "application/x.filegdb+zip", "file": filelike } - response = self.client.post("/api/stac/admin/stac_api/asset/add/", data) + response = self.client.post(reverse('admin:stac_api_asset_add'), data) logger.debug('Asset created in %fs', time.time() - start) # Status code for successful creation is 302, since in the admin UI @@ -295,7 +295,7 @@ def test_add_update_collection(self): # update some data data['title'] = "New title" response = self.client.post( - f"/api/stac/admin/stac_api/collection/{collection.id}/change/", data + reverse('admin:stac_api_collection_change', args=[collection.id]), data ) # Status code for successful creation is 302, since in the admin UI @@ -318,7 +318,7 @@ def test_add_update_collection_with_provider(self): data["providers-0-collection"] = collection.id data["providers-0-roles"] = "licensor,producer" response = self.client.post( - f"/api/stac/admin/stac_api/collection/{collection.id}/change/", data + reverse('admin:stac_api_collection_change', args=[collection.id]), data ) # Status code for successful creation is 302, since in the admin UI @@ -344,7 +344,7 @@ def test_add_update_collection_with_link(self): data["links-0-collection"] = collection.id data["links-0-title"] = "New Title" response = self.client.post( - 
f"/api/stac/admin/stac_api/collection/{collection.id}/change/", data + reverse('admin:stac_api_collection_change', args=[collection.id]), data ) # Status code for successful update is 302, since in the admin UI @@ -377,7 +377,7 @@ def test_add_collection_with_invalid_data(self): "links-0-link_type": "example", "links-0-title": "Example test", } - response = self.client.post("/api/stac/admin/stac_api/collection/add/", data) + response = self.client.post(reverse('admin:stac_api_collection_add'), data) # Status code for unsuccessful creation is 200, since in the admin UI # is returning an error message @@ -399,7 +399,7 @@ def test_add_update_collection_remove_provider(self): data["providers-0-collection"] = collection.id data["providers-0-DELETE"] = "on" response = self.client.post( - f"/api/stac/admin/stac_api/collection/{collection.id}/change/", data + reverse('admin:stac_api_collection_change', args=[collection.id]), data ) # Status code for successful creation is 302, since in the admin UI @@ -422,7 +422,7 @@ def test_add_update_collection_remove_link(self): data["links-0-collection"] = collection.id data["links-0-DELETE"] = "on" response = self.client.post( - f"/api/stac/admin/stac_api/collection/{collection.id}/change/", data + reverse('admin:stac_api_collection_change', args=[collection.id]), data ) # Status code for successful creation is 302, since in the admin UI @@ -447,7 +447,7 @@ def test_add_remove_collection(self): # remove collection with links and providers response = self.client.post( - f"/api/stac/admin/stac_api/collection/{collection.id}/delete/", {"post": "yes"} + reverse('admin:stac_api_collection_delete', args=[collection.id]), {"post": "yes"} ) # Status code for successful creation is 302, since in the admin UI @@ -491,7 +491,7 @@ def test_add_update_item(self): # update some data data['properties_title'] = "New title" - response = self.client.post(f"/api/stac/admin/stac_api/item/{item.id}/change/", data) + response = self.client.post(reverse('admin:stac_api_item_change', args=[item.id]), data) # Status code for successful creation is 302, since in the admin UI # you're redirected to the list view after successful creation @@ -511,7 +511,7 @@ def test_add_update_item_remove_title(self): # remove the title data['properties_title'] = "" - response = self.client.post(f"/api/stac/admin/stac_api/item/{item.id}/change/", data) + response = self.client.post(reverse('admin:stac_api_item_change', args=[item.id]), data) # Status code for successful creation is 302, since in the admin UI # you're redirected to the list view after successful creation @@ -533,7 +533,7 @@ def test_add_update_item_with_link(self): data["links-0-id"] = link.id data["links-0-item"] = item.id data["links-0-link_type"] = "New type" - response = self.client.post(f"/api/stac/admin/stac_api/item/{item.id}/change/", data) + response = self.client.post(reverse('admin:stac_api_item_change', args=[item.id]), data) # Status code for successful creation is 302, since in the admin UI # you're redirected to the list view after successful creation @@ -564,7 +564,7 @@ def test_add_item_with_invalid_data(self): "links-TOTAL_FORMS": "0", "links-INITIAL_FORMS": "0", } - response = self.client.post("/api/stac/admin/stac_api/item/add/", data) + response = self.client.post(reverse('admin:stac_api_item_add'), data) # Status code for unsuccessful creation is 200, since in the admin UI # is returning an error message @@ -585,7 +585,7 @@ def test_add_update_item_remove_link(self): data["links-0-id"] = link.id 
data["links-0-item"] = item.id data["links-0-DELETE"] = "on" - response = self.client.post(f"/api/stac/admin/stac_api/item/{item.id}/change/", data) + response = self.client.post(reverse('admin:stac_api_item_change', args=[item.id]), data) # Status code for successful creation is 302, since in the admin UI # you're redirected to the list view after successful creation @@ -605,7 +605,7 @@ def test_add_remove_item(self): # remove item with links response = self.client.post( - f"/api/stac/admin/stac_api/item/{item.id}/delete/", {"post": "yes"} + reverse('admin:stac_api_item_delete', args=[item.id]), {"post": "yes"} ) # Status code for successful creation is 302, since in the admin UI @@ -657,7 +657,7 @@ def test_add_update_asset(self): data["title"] = "New Asset for test" data["media_type"] = "application/x.ascii-grid+zip" data["file"] = filelike - response = self.client.post(f"/api/stac/admin/stac_api/asset/{asset.id}/change/", data) + response = self.client.post(reverse('admin:stac_api_asset_change', args=[asset.id]), data) # Status code for successful creation is 302, since in the admin UI # you're redirected to the list view after successful creation @@ -703,7 +703,7 @@ def test_add_asset_with_invalid_data(self): "item": self.item.id, "name": "test asset invalid name", } - response = self.client.post("/api/stac/admin/stac_api/asset/add/", data) + response = self.client.post(reverse('admin:stac_api_asset_add'), data) # Status code for unsuccessful creation is 200, since in the admin UI # is returning an error message From bdd065283f2051b2a02d751a2de4d91626f0a20f Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 16 Mar 2021 08:52:45 +0100 Subject: [PATCH 010/105] BGDIINF_SB-1566: Improved admin test code Moved the login to the setup phase to reduce code size. 
--- app/tests/test_admin_page.py | 55 ++++-------------------------------- 1 file changed, 6 insertions(+), 49 deletions(-) diff --git a/app/tests/test_admin_page.py b/app/tests/test_admin_page.py index 0c9fdeca..566f31c1 100644 --- a/app/tests/test_admin_page.py +++ b/app/tests/test_admin_page.py @@ -286,10 +286,11 @@ def test_login(self): class AdminCollectionTestCase(AdminBaseTestCase): - def test_add_update_collection(self): - # Login the user first + def setUp(self): + super().setUp() self.client.login(username=self.username, password=self.password) + def test_add_update_collection(self): collection, data = self._create_collection()[:2] # update some data @@ -307,9 +308,6 @@ def test_add_update_collection(self): ) def test_add_update_collection_with_provider(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - collection, data, link, provider = self._create_collection(with_provider=True) # update some data in provider @@ -333,9 +331,6 @@ def test_add_update_collection_with_provider(self): ) def test_add_update_collection_with_link(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - collection, data, link = self._create_collection(with_link=True)[:3] # update some data in link @@ -357,9 +352,6 @@ def test_add_update_collection_with_link(self): ) def test_add_collection_with_invalid_data(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - # Post data to create a new collection # Note: the *-*_FORMS fields are necessary management form fields # originating from the AdminInline and must be present @@ -388,9 +380,6 @@ def test_add_collection_with_invalid_data(self): ) def test_add_update_collection_remove_provider(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - collection, data, link, provider = self._create_collection(with_provider=True) # remove provider @@ -411,9 +400,6 @@ def test_add_update_collection_remove_provider(self): ) def test_add_update_collection_remove_link(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - collection, data, link = self._create_collection(with_link=True)[:3] # remove provider @@ -435,9 +421,6 @@ def test_add_update_collection_remove_link(self): @mock_s3_asset_file def test_add_remove_collection(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - collection, data, link, provider = self._create_collection( with_link=True, with_provider=True, @@ -482,11 +465,9 @@ class AdminItemTestCase(AdminBaseTestCase): def setUp(self): super().setUp() self._setup(create_collection=True) - - def test_add_update_item(self): - # Login the user first self.client.login(username=self.username, password=self.password) + def test_add_update_item(self): item, data = self._create_item(self.collection)[:2] # update some data @@ -504,9 +485,6 @@ def test_add_update_item(self): ) def test_add_update_item_remove_title(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - item, data = self._create_item(self.collection)[:2] # remove the title @@ -522,9 +500,6 @@ def test_add_update_item_remove_title(self): ) def test_add_update_item_with_link(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - item, data, link = self._create_item(self.collection, with_link=True) # update some data @@ -552,9 
+527,6 @@ def test_add_update_item_with_link(self): ) def test_add_item_with_invalid_data(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - # Post data to create a new item # Note: the *-*_FORMS fields are necessary management form fields # originating from the AdminInline and must be present @@ -575,9 +547,6 @@ def test_add_item_with_invalid_data(self): ) def test_add_update_item_remove_link(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - item, data, link = self._create_item(self.collection, with_link=True) # remove provider @@ -597,9 +566,6 @@ def test_add_update_item_remove_link(self): @mock_s3_asset_file def test_add_remove_item(self): - # Login the user first - self.client.login(username=self.username, password=self.password) - item, data, link = self._create_item(self.collection, with_link=True) asset = self._create_asset(item)[0] @@ -630,17 +596,14 @@ def setUp(self): super().setUp() self._setup(create_collection=True, create_item=True) - @mock_s3_asset_file - def test_add_asset_minimal(self): - # Login the user first self.client.login(username=self.username, password=self.password) + @mock_s3_asset_file + def test_add_asset_minimal(self): self._create_asset_minimal(self.item) @mock_s3_asset_file def test_add_update_asset(self): - # Login the user first - self.client.login(username=self.username, password=self.password) asset, data = self._create_asset(self.item) @@ -673,8 +636,6 @@ def test_add_update_asset(self): self.assertEqual(filecontent, fd.read()) def test_rename_asset(self): - # Login the user first - self.client.login(username=self.username, password=self.password) asset, data = self._create_asset(self.item) @@ -696,8 +657,6 @@ def test_rename_asset(self): # self.assertEqual(asset.file.name, new_path) def test_add_asset_with_invalid_data(self): - # Login the user first - self.client.login(username=self.username, password=self.password) data = { "item": self.item.id, @@ -715,8 +674,6 @@ def test_add_asset_with_invalid_data(self): @mock_s3_asset_file def test_add_remove_asset(self): - # Login the user first - self.client.login(username=self.username, password=self.password) asset, data = self._create_asset(self.item) path = f"{asset.item.collection.name}/{asset.item.name}/{data['name']}" From 33f260cccabc6950ba374e97f80baa130f1698ce Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 16 Mar 2021 10:51:29 +0100 Subject: [PATCH 011/105] BGDIINF_SB-1566: Added application/vnd.mapbox-vector-tile extension This extension is taken from https://github.com/mapbox/vector-tile-spec/tree/master/2.1#21-file-extension --- app/stac_api/validators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/stac_api/validators.py b/app/stac_api/validators.py index 1150a53a..966d1ba9 100644 --- a/app/stac_api/validators.py +++ b/app/stac_api/validators.py @@ -56,7 +56,7 @@ ('application/x.netcdf+zip', 'Zipped NetCDF', ['.zip']), ('application/xml', 'XML', ['.xml']), ('application/x.xml+zip', 'Zipped XML', ['.zip']), - ('application/vnd.mapbox-vector-tile', 'mbtiles', ['???']), + ('application/vnd.mapbox-vector-tile', 'mbtiles', ['.mvt']), ('text/plain', 'Text', ['.txt']), ('text/x.plain+zip', 'Zipped text', ['.zip']), ] From 5a33e8acbe8a5fef83b6c0dadad1a469a501bd10 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 16 Mar 2021 10:53:56 +0100 Subject: [PATCH 012/105] BGDIINF_SB-1566: Added unittest for asset media type validation --- app/tests/test_admin_page.py 
| 20 ++++++ app/tests/test_asset_model.py | 118 ++++++++++++++++++++++++++++++++++ 2 files changed, 138 insertions(+) diff --git a/app/tests/test_admin_page.py b/app/tests/test_admin_page.py index 566f31c1..21c10128 100644 --- a/app/tests/test_admin_page.py +++ b/app/tests/test_admin_page.py @@ -14,6 +14,7 @@ from stac_api.models import ItemLink from stac_api.models import Provider +from tests.data_factory import Factory from tests.utils import S3TestMixin from tests.utils import mock_s3_asset_file @@ -25,6 +26,7 @@ class AdminBaseTestCase(TestCase): def setUp(self): + self.factory = Factory() self.password = 'sesame' self.username = 'admin_user' self.admin_user = get_user_model().objects.create_superuser( @@ -236,6 +238,8 @@ def _create_asset(self, item, extra=None): "media_type": "application/x.filegdb+zip", "file": filelike } + if extra: + data.update(extra) response = self.client.post(reverse('admin:stac_api_asset_add'), data) logger.debug('Asset created in %fs', time.time() - start) @@ -692,3 +696,19 @@ def test_add_remove_asset(self): ) # self.assertS3ObjectNotExists(path) # Un-comment with BGDIINF_SB-1625 + + @mock_s3_asset_file + def test_add_update_asset_invalid_media_type(self): + sample = self.factory.create_asset_sample( + self.item, name='asset.txt', media_type='image/tiff; application=geotiff' + ).attributes + # Admin page doesn't uses the name for foreign key but the internal db id. + sample['item'] = self.item.id + response = self.client.post(reverse('admin:stac_api_asset_add'), sample) + # Status code for unsuccessful creation is 200, since in the admin UI + # is returning an error message + self.assertEqual(response.status_code, 200) + self.assertFalse( + Asset.objects.filter(item=self.item, name=sample["name"]).exists(), + msg="Asset with invalid data has been added to db" + ) diff --git a/app/tests/test_asset_model.py b/app/tests/test_asset_model.py index 001fb4d2..46ecc9eb 100644 --- a/app/tests/test_asset_model.py +++ b/app/tests/test_asset_model.py @@ -75,3 +75,121 @@ def test_create_asset_only_required_attributes(self): db_create=True, required_only=True ) + + def test_create_update_asset_invalid_media_type(self): + # try to create an asset with invalid media type + with self.assertRaises( + ValidationError, msg="asset with invalid media type was accepted." + ) as context: + self.factory.create_asset_sample( + item=self.item, + name='my-asset.yaml', + media_type="application/vnd.oai.openapi+yaml;version=3.0", + db_create=True, + ) + exception = context.exception + self.assertIn( + "Invalid id extension '.yaml', id must have a valid file extension", exception.messages + ) + self.assertIn( + "Value 'application/vnd.oai.openapi+yaml;version=3.0' is not a valid choice.", + exception.messages + ) + self.assertIn( + 'Invalid media type application/vnd.oai.openapi+yaml;version=3.0', exception.messages + ) + + with self.assertRaises( + ValidationError, msg="asset with name missmatch media type was accepted." + ) as context: + self.factory.create_asset_sample( + item=self.item, + name='my-asset.txt', + media_type="application/json", + db_create=True, + ) + exception = context.exception + self.assertIn( + "Invalid id extension '.txt', id must match its media type application/json", + exception.messages + ) + + # Test invalid media type/name update + asset = self.factory.create_asset_sample( + item=self.item, name='asset.xml', media_type='application/gml+xml' + ).model + with self.assertRaises( + ValidationError, msg="asset with name missmatch media type was accepted." 
+ ) as context: + asset.name = 'asset.zip' + asset.full_clean() + asset.save() + asset.refresh_from_db() + exception = context.exception + self.assertIn( + "Invalid id extension '.zip', id must match its media type application/gml+xml", + exception.messages + ) + with self.assertRaises( + ValidationError, msg="asset with name missmatch media type was accepted." + ) as context: + asset.media_type = 'text/plain' + asset.full_clean() + asset.save() + asset.refresh_from_db() + exception = context.exception + self.assertIn( + "Invalid id extension '.xml', id must match its media type text/plain", + exception.messages + ) + with self.assertRaises( + ValidationError, msg="asset with name missmatch media type was accepted." + ) as context: + asset.media_type = 'invalid/media-type' + asset.full_clean() + asset.save() + asset.refresh_from_db() + exception = context.exception + self.assertIn("Value 'invalid/media-type' is not a valid choice.", exception.messages) + + def test_create_asset_media_type_validation(self): + # try to create an asset of media type with several extensions + self.factory.create_asset_sample( + item=self.item, name='asset.xml', media_type='application/gml+xml', db_create=True + ) + self.factory.create_asset_sample( + item=self.item, name='asset.gml', media_type='application/gml+xml', db_create=True + ) + + def test_create_update_asset_media_type_validation(self): + # try to create an asset of media type with several extensions + asset = self.factory.create_asset_sample( + item=self.item, name='asset.xml', media_type='application/gml+xml' + ).model + + # correct the extension + asset.name = 'asset.gml' + asset.full_clean() + asset.save() + self.assertEqual(Asset.objects.get(pk=asset.pk).name, asset.name) + + # Change media type with same extension + asset = self.factory.create_asset_sample( + item=self.item, name='asset.xml', media_type='application/gml+xml' + ).model + asset.media_type = 'application/x.interlis; version=2.3' + asset.full_clean() + asset.save() + self.assertEqual(Asset.objects.get(pk=asset.pk).media_type, asset.media_type) + + # Change media type and extension + asset = self.factory.create_asset_sample( + item=self.item, name='asset.json', media_type='application/json' + ).model + asset.name = 'asset.zip' + asset.media_type = 'text/x.plain+zip' + asset.full_clean() + asset.save() + _asset = Asset.objects.get(pk=asset.pk) + self.assertEqual(_asset.name, asset.name) + self.assertEqual(_asset.media_type, asset.media_type) From 95fc23a2cc7884e150cc76a866116f806e0a3e2f Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 18 Mar 2021 08:36:19 +0100 Subject: [PATCH 013/105] BGDIINF_SB-1566: Changed mbtiles extension Changed this extension based on Chris Boecklin comment, see ticket. 
--- app/stac_api/validators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/stac_api/validators.py b/app/stac_api/validators.py index 966d1ba9..b6426a4f 100644 --- a/app/stac_api/validators.py +++ b/app/stac_api/validators.py @@ -56,7 +56,7 @@ ('application/x.netcdf+zip', 'Zipped NetCDF', ['.zip']), ('application/xml', 'XML', ['.xml']), ('application/x.xml+zip', 'Zipped XML', ['.zip']), - ('application/vnd.mapbox-vector-tile', 'mbtiles', ['.mvt']), + ('application/vnd.mapbox-vector-tile', 'mbtiles', ['.mbtiles']), ('text/plain', 'Text', ['.txt']), ('text/x.plain+zip', 'Zipped text', ['.zip']), ] From c149095193bd8fb7371a902e01e39d0027d003e4 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 18 Mar 2021 11:05:49 +0100 Subject: [PATCH 014/105] BGDIINF_SB-1566: renamed private method --- app/tests/data_factory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/tests/data_factory.py b/app/tests/data_factory.py index 1d95d3a9..ecd97e31 100644 --- a/app/tests/data_factory.py +++ b/app/tests/data_factory.py @@ -1079,7 +1079,7 @@ def create_sample( item, sample=sample, name=name, required_only=required_only, **kwargs ) else: - self.last = self.get_last_name(self.last, extension=self.get_extension(sample, kwargs)) + self.last = self.get_last_name(self.last, extension=self._get_extension(sample, kwargs)) data_sample = AssetSample( item, sample=sample, name=self.last, required_only=required_only, **kwargs ) @@ -1115,7 +1115,7 @@ def create_samples(self, samples, item, db_create=False, create_asset_file=False samples, item=item, db_create=db_create, create_asset_file=create_asset_file, **kwargs ) - def get_extension(self, sample_name, kwargs): + def _get_extension(self, sample_name, kwargs): media = 'text/plain' if 'media_type' in kwargs: media = kwargs['media_type'] From 6d626f76b5b3dc092230378b42a476e651234301 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 18 Mar 2021 11:35:22 +0100 Subject: [PATCH 015/105] BGDIINF_SB-1566: Fixed linting raise-missing-from --- app/tests/data_factory.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/tests/data_factory.py b/app/tests/data_factory.py index ecd97e31..b6d4b144 100644 --- a/app/tests/data_factory.py +++ b/app/tests/data_factory.py @@ -1123,7 +1123,7 @@ def _get_extension(self, sample_name, kwargs): try: sample = AssetSample.samples_dict[sample_name] except KeyError as error: - raise KeyError(f'Unknown {sample_name} sample: {error}') + raise KeyError(f'Unknown {sample_name} sample: {error}') from None if 'media_type' in sample: media = sample['media_type'] if media not in MEDIA_TYPES_BY_TYPE: From eb48bf3c1fc5825750634d13522663d92b1ef2f6 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 16 Mar 2021 10:56:15 +0100 Subject: [PATCH 016/105] Added a performance profiling comparaison command --- .../commands/profile_serializer_vs_no_drf.py | 107 ++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100644 app/stac_api/management/commands/profile_serializer_vs_no_drf.py diff --git a/app/stac_api/management/commands/profile_serializer_vs_no_drf.py b/app/stac_api/management/commands/profile_serializer_vs_no_drf.py new file mode 100644 index 00000000..4427f8bb --- /dev/null +++ b/app/stac_api/management/commands/profile_serializer_vs_no_drf.py @@ -0,0 +1,107 @@ +import json +import logging +from timeit import timeit + +from django.conf import settings +from django.core.management.base import BaseCommand + +from rest_framework.test import APIRequestFactory 
+ +from stac_api.models import Item +from stac_api.utils import CommandHandler + +logger = logging.getLogger(__name__) + +STAC_BASE_V = settings.STAC_BASE_V + + +class Handler(CommandHandler): + + def profiling(self): + # pylint: disable=import-outside-toplevel,possibly-unused-variable + + self.print('Starting profiling') + from stac_api.serializers import ItemSerializer + + def serialize(qs): + return { + 'features': [{ + "id": item.name, + "collection": item.collection.name, + "geometry": str(item.geometry), + "created": item.created.isoformat(), + "updated": item.updated.isoformat(), + "properties": { + "datetime": + item.properties_datetime.isoformat() + if item.properties_datetime else '', + "properties_start_datetime": + item.properties_start_datetime.isoformat() + if item.properties_start_datetime else '', + "properties_end_datetime": + item.properties_end_datetime.isoformat() + if item.properties_end_datetime else '', + "properties_title": item.properties_title, + }, + "type": "feature", + "stac_version": "0.9.0", + "assets": { + asset.name: { + "id": asset.name, + "title": asset.title, + "checksum_multihash": asset.checksum_multihash, + "description": asset.description, + "eo_gsd": asset.eo_gsd, + "geoadmin_lang": asset.geoadmin_lang, + "geoadmin_variant": asset.geoadmin_variant, + "proj_epsg": asset.proj_epsg, + "type": asset.media_type, + "created": asset.created.isoformat(), + "updated": asset.updated.isoformat() + } for asset in item.assets.all() + } + } for item in qs] + } + + collection_id = self.options["collection"] + qs = Item.objects.filter(collection__name=collection_id + ).prefetch_related('assets', 'links')[:self.options['limit']] + serialize(qs) + context = { + 'request': APIRequestFactory().get(f'{STAC_BASE_V}/collections/{collection_id}/items') + } + # self.print(json.dumps(ItemSerializer(qs, context=context, many=True).data)) + serializer_time = timeit( + stmt='ItemSerializer(qs, context=context, many=True).data', + number=self.options['repeat'], + globals=locals() + ) + + self.print('=' * 80) + self.print(json.dumps(serialize(qs), indent=2)) + no_drf_time = timeit(stmt='serialize(qs)', number=self.options['repeat'], globals=locals()) + + self.print_success('DRF time: %fms', serializer_time / self.options['repeat'] * 1000) + self.print_success('NO DRF time: %fms', no_drf_time / self.options['repeat'] * 1000) + + +class Command(BaseCommand): + help = """ItemSerializer vs simple serializer profiling command + + Profiling of the serialization of many items using DRF vs using a simple function. + + See https://docs.python.org/3.7/library/profile.html + """ + + def add_arguments(self, parser): + parser.add_argument( + '--collection', + type=str, + default='perftest-collection-0', + help="Collection ID to use for the ItemSerializer profiling" + ) + parser.add_argument('--limit', type=int, default=100, help="Limit to use for the query") + parser.add_argument('--repeat', type=int, default=100, help="Repeat the measurement") + + def handle(self, *args, **options): + Handler(self, options).profiling() From d695d76d22c9862cd6f58cb8cf71d61896813918 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Mon, 22 Mar 2021 12:08:20 +0100 Subject: [PATCH 017/105] BGDIINF_SB-1723: Moved TestHttp500 in its own module This class is only for unittest purpose therefore move it into its own module. 
--- app/config/urls.py | 2 +- app/stac_api/views.py | 9 --------- app/stac_api/views_test.py | 16 ++++++++++++++++ 3 files changed, 17 insertions(+), 10 deletions(-) create mode 100644 app/stac_api/views_test.py diff --git a/app/config/urls.py b/app/config/urls.py index f1583f5b..69b75c35 100644 --- a/app/config/urls.py +++ b/app/config/urls.py @@ -37,7 +37,7 @@ def checker(request): if settings.DEBUG: import debug_toolbar - from stac_api.views import TestHttp500 + from stac_api.views_test import TestHttp500 urlpatterns = [ path('__debug__/', include(debug_toolbar.urls)), diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 4e946d4b..3ae8a27c 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -520,12 +520,3 @@ def patch(self, request, *args, **kwargs): @etag(get_asset_etag) def delete(self, request, *args, **kwargs): return self.destroy(request, *args, **kwargs) - - -class TestHttp500(generics.GenericAPIView): - queryset = LandingPage.objects.all() - - def get(self, request, *args, **kwargs): - logger.debug('Test request that raises an exception') - - raise AttributeError('test exception') diff --git a/app/stac_api/views_test.py b/app/stac_api/views_test.py new file mode 100644 index 00000000..fea487e9 --- /dev/null +++ b/app/stac_api/views_test.py @@ -0,0 +1,16 @@ +import logging + +from rest_framework import generics + +from stac_api.models import LandingPage + +logger = logging.getLogger(__name__) + + +class TestHttp500(generics.GenericAPIView): + queryset = LandingPage.objects.all() + + def get(self, request, *args, **kwargs): + logger.debug('Test request that raises an exception') + + raise AttributeError('test exception') From 60c3d9077fe90010618bd77f677c18418dfac435 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 23 Mar 2021 07:34:17 +0100 Subject: [PATCH 018/105] BGDIINF_SB-1723: Split the unittest STAC base class Now we have two unittest STAC base class, one which inherit from django TestCase and one that inherit from django TransactionTestCase. The latter one is need for some test cases, for example for testing race condition using separate threads. 
--- app/tests/base_test.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/app/tests/base_test.py b/app/tests/base_test.py index 900f5a0d..68d544f8 100644 --- a/app/tests/base_test.py +++ b/app/tests/base_test.py @@ -6,6 +6,7 @@ from django.contrib.gis.geos.geometry import GEOSGeometry from django.test import TestCase +from django.test import TransactionTestCase from stac_api.utils import fromisoformat from stac_api.utils import get_link @@ -19,7 +20,9 @@ TEST_LINK_ROOT = {'rel': 'root', 'href': f'{TEST_LINK_ROOT_HREF}/'} -class StacBaseTestCase(TestCase): +class StacTestMixin: + """Adds some useful checks for STAC API unittesting + """ # we keep the TestCase nomenclature here therefore we disable the pylint invalid-name def assertStatusCode(self, code, response, msg=None): # pylint: disable=invalid-name @@ -407,3 +410,12 @@ def _check_value(self, path, key, value, current, ignore): self.assertEqual( value, current[key], msg=f'{path}: current value is not equal to the expected' ) + + +class StacBaseTestCase(TestCase, StacTestMixin): + """Django TestCase with additional STAC check methods""" + + +class StacBaseTransactionTestCase(TransactionTestCase, StacTestMixin): + """Django TransactionTestCase with additional STAC check methods + """ From 79215f376ae6a6ee1ea9bc34f160d21f81fcc6d2 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 23 Mar 2021 08:26:28 +0100 Subject: [PATCH 019/105] BGDIINF_SB-1723: Added disableLogger context manager This avoid having a CRITICAL log when testing exceptions. --- app/tests/test_generic_api.py | 3 ++- app/tests/utils.py | 27 +++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/app/tests/test_generic_api.py b/app/tests/test_generic_api.py index f16c93aa..0875f37b 100644 --- a/app/tests/test_generic_api.py +++ b/app/tests/test_generic_api.py @@ -11,6 +11,7 @@ from tests.base_test import StacBaseTestCase from tests.data_factory import Factory from tests.utils import client_login +from tests.utils import disableLogger from tests.utils import get_http_error_description from tests.utils import mock_s3_asset_file @@ -29,7 +30,7 @@ def test_http_error_collection_not_found(self): self.assertStatusCode(404, response) def test_http_error_500_exception(self): - with self.settings(DEBUG_PROPAGATE_API_EXCEPTIONS=True): + with self.settings(DEBUG_PROPAGATE_API_EXCEPTIONS=True), disableLogger('stac_api.apps'): response = self.client.get("/tests/test_http_500") self.assertStatusCode(500, response) self.assertEqual(response.json()['description'], "AttributeError('test exception')") diff --git a/app/tests/utils.py b/app/tests/utils.py index a1e662a2..ec7200d8 100644 --- a/app/tests/utils.py +++ b/app/tests/utils.py @@ -169,3 +169,30 @@ def client_login(client): username, 'test_e_mail1234@some_fantasy_domainname.com', password ) client.login(username=username, password=password) + + +class disableLogger: # pylint: disable=invalid-name + """Disable temporarily a logger with a with statement + + Args: + logger_name: str | None + logger name to disable, by default use the root logger (None) + + Example: + with disableLogger('stac_api.apps'): + # the stac_api.apps logger is totally disable within the with statement + logger = logging.getLogger('stac_api.apps') + logger.critical('This log will not be printed anywhere') + """ + + def __init__(self, logger_name=None): + if logger_name: + self.logger = logging.getLogger(logger_name) + else: + self.logger = logging.getLogger() + + def __enter__(self): + 
self.logger.disabled = True + + def __exit__(self, exc_type, exc_val, exc_tb): + self.logger.disabled = False From bf27d2b0754f519a2b179c67139192ae33d5074d Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 23 Mar 2021 08:28:19 +0100 Subject: [PATCH 020/105] BGDIINF_SB-1723: Added unittesting for Collection Upsert We also test the db atomic transaction and for race condition using separate threads. --- app/config/urls.py | 6 + app/stac_api/views_test.py | 8 ++ app/tests/test_collections_endpoint.py | 159 +++++++++++++++++++++---- 3 files changed, 153 insertions(+), 20 deletions(-) diff --git a/app/config/urls.py b/app/config/urls.py index 69b75c35..7eb7f981 100644 --- a/app/config/urls.py +++ b/app/config/urls.py @@ -38,8 +38,14 @@ def checker(request): if settings.DEBUG: import debug_toolbar from stac_api.views_test import TestHttp500 + from stac_api.views_test import TestCollectionUpsertHttp500 urlpatterns = [ path('__debug__/', include(debug_toolbar.urls)), path('tests/test_http_500', TestHttp500.as_view()), + path( + 'tests/test_collection_upsert_http_500/', + TestCollectionUpsertHttp500.as_view(), + name='test-collection-detail-http-500' + ), ] + urlpatterns diff --git a/app/stac_api/views_test.py b/app/stac_api/views_test.py index fea487e9..d3f9f1a4 100644 --- a/app/stac_api/views_test.py +++ b/app/stac_api/views_test.py @@ -3,6 +3,7 @@ from rest_framework import generics from stac_api.models import LandingPage +from stac_api.views import CollectionDetail logger = logging.getLogger(__name__) @@ -14,3 +15,10 @@ def get(self, request, *args, **kwargs): logger.debug('Test request that raises an exception') raise AttributeError('test exception') + + +class TestCollectionUpsertHttp500(CollectionDetail): + + def perform_upsert(self, serializer, lookup): + serializer.upsert(**lookup) + raise AttributeError('test exception') diff --git a/app/tests/test_collections_endpoint.py b/app/tests/test_collections_endpoint.py index c8a24cc4..4dd7b1ef 100644 --- a/app/tests/test_collections_endpoint.py +++ b/app/tests/test_collections_endpoint.py @@ -1,14 +1,21 @@ import logging +from concurrent.futures import ThreadPoolExecutor +from concurrent.futures import as_completed from datetime import datetime from django.conf import settings +from django.contrib.auth import get_user_model +from django.db import connections from django.test import Client +from django.urls import reverse from stac_api.utils import utc_aware from tests.base_test import StacBaseTestCase +from tests.base_test import StacBaseTransactionTestCase from tests.data_factory import CollectionFactory from tests.utils import client_login +from tests.utils import disableLogger logger = logging.getLogger(__name__) @@ -44,7 +51,7 @@ def test_single_collection_endpoint(self): self.check_stac_collection(self.collection_1.json, response_json) -class CollectionsWriteEndpointTestCase(StacBaseTestCase): +class CollectionsWriteEndpointTestCase(StacBaseTransactionTestCase): def setUp(self): # pylint: disable=invalid-name self.client = Client() @@ -162,10 +169,10 @@ def setUp(self): # pylint: disable=invalid-name self.client = Client() client_login(self.client) self.collection_factory = CollectionFactory() - self.collection = self.collection_factory.create_sample().model + self.collection = self.collection_factory.create_sample(db_create=True) self.maxDiff = None # pylint: disable=invalid-name - def test_collection_put_dont_exists(self): + def test_collection_upsert_create(self): sample = 
self.collection_factory.create_sample(sample='collection-2') # the dataset to update does not exist yet @@ -174,11 +181,13 @@ def test_collection_put_dont_exists(self): data=sample.get_json('put'), content_type='application/json' ) - self.assertStatusCode(404, response) + self.assertStatusCode(201, response) + + self.check_stac_collection(sample.json, response.json()) def test_collections_put(self): sample = self.collection_factory.create_sample( - name=self.collection.name, sample='collection-2' + name=self.collection['name'], sample='collection-2' ) response = self.client.put( @@ -197,7 +206,7 @@ def test_collections_put(self): def test_collections_put_extra_payload(self): sample = self.collection_factory.create_sample( - name=self.collection.name, sample='collection-2', extra_payload='not valid' + name=self.collection['name'], sample='collection-2', extra_payload='not valid' ) response = self.client.put( @@ -212,7 +221,9 @@ def test_collections_put_extra_payload(self): def test_collections_put_read_only_in_payload(self): sample = self.collection_factory.create_sample( - name=self.collection.name, sample='collection-2', created=utc_aware(datetime.utcnow()) + name=self.collection['name'], + sample='collection-2', + created=utc_aware(datetime.utcnow()) ) response = self.client.put( @@ -231,9 +242,9 @@ def test_collection_put_change_id(self): ) # for the start, the id have to be different - self.assertNotEqual(self.collection.name, sample['name']) + self.assertNotEqual(self.collection['name'], sample['name']) response = self.client.put( - f"/{STAC_BASE_V}/collections/{self.collection.name}", + f"/{STAC_BASE_V}/collections/{self.collection['name']}", data=sample.get_json('put'), content_type='application/json' ) @@ -245,17 +256,17 @@ def test_collection_put_change_id(self): self.check_stac_collection(sample.json, response.json()) # the old collection shouldn't exist any more - response = self.client.get(f"/{STAC_BASE_V}/collections/{self.collection.name}") + response = self.client.get(f"/{STAC_BASE_V}/collections/{self.collection['name']}") self.assertStatusCode(404, response) def test_collection_put_remove_optional_fields(self): - collection_name = self.collection.name # get a name that is registered in the service + collection_name = self.collection['name'] # get a name that is registered in the service sample = self.collection_factory.create_sample( name=collection_name, sample='collection-1', required_only=True ) # for the start, the collection[1] has to have a title - self.assertNotEqual('', f'{self.collection.title}') + self.assertNotEqual('', f'{self.collection["title"]}') response = self.client.put( f"/{STAC_BASE_V}/collections/{sample['name']}", data=sample.get_json('put'), @@ -267,10 +278,10 @@ def test_collection_put_remove_optional_fields(self): self.assertNotIn('providers', response_json.keys()) # key does not exist def test_collection_patch(self): - collection_name = self.collection.name # get a name that is registered in the service + collection_name = self.collection['name'] # get a name that is registered in the service payload_json = {'license': 'open-source'} # for the start, the collection[1] has to have a different licence than the payload - self.assertNotEqual(self.collection.license, payload_json['license']) + self.assertNotEqual(self.collection["license"], payload_json['license']) # for start the payload has no description self.assertNotIn('title', payload_json.keys()) @@ -285,13 +296,13 @@ def test_collection_patch(self): self.assertEqual(payload_json['license'], 
response_json['license']) # description not affected by patch - self.assertEqual(self.collection.description, response_json['description']) + self.assertEqual(self.collection["description"], response_json['description']) def test_collection_patch_extra_payload(self): - collection_name = self.collection.name # get a name that is registered in the service + collection_name = self.collection['name'] # get a name that is registered in the service payload_json = {'license': 'open-source', 'extra_payload': True} # for the start, the collection[1] has to have a different licence than the payload - self.assertNotEqual(self.collection.license, payload_json['license']) + self.assertNotEqual(self.collection['license'], payload_json['license']) # for start the payload has no description response = self.client.patch( f"/{STAC_BASE_V}/collections/{collection_name}", @@ -304,10 +315,10 @@ def test_collection_patch_extra_payload(self): msg='Unexpected error message') def test_collection_patch_read_only_in_payload(self): - collection_name = self.collection.name # get a name that is registered in the service + collection_name = self.collection['name'] # get a name that is registered in the service payload_json = {'license': 'open-source', 'created': utc_aware(datetime.utcnow())} # for the start, the collection[1] has to have a different licence than the payload - self.assertNotEqual(self.collection.license, payload_json['license']) + self.assertNotEqual(self.collection['license'], payload_json['license']) response = self.client.patch( f"/{STAC_BASE_V}/collections/{collection_name}", data=payload_json, @@ -319,13 +330,121 @@ def test_collection_patch_read_only_in_payload(self): msg='Unexpected error message') def test_authorized_collection_delete(self): - path = f'/{STAC_BASE_V}/collections/{self.collection.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}' response = self.client.delete(path) # Collection delete is not implemented (and currently not foreseen), hence # the status code should be 405. If it should get implemented in future # an unauthorized delete should get a status code of 401 (see test above). self.assertStatusCode(405, response, msg="unimplemented collection delete was permitted.") + def test_collection_atomic_upsert_create_500(self): + sample = self.collection_factory.create_sample(sample='collection-2') + + # the dataset to update does not exist yet + with self.settings(DEBUG_PROPAGATE_API_EXCEPTIONS=True), disableLogger('stac_api.apps'): + response = self.client.put( + reverse('test-collection-detail-http-500', args=[sample['name']]), + data=sample.get_json('put'), + content_type='application/json' + ) + self.assertStatusCode(500, response) + self.assertEqual(response.json()['description'], "AttributeError('test exception')") + + # Make sure that the ressource has not been created + response = self.client.get(reverse('collection-detail', args=[sample['name']])) + self.assertStatusCode(404, response) + + def test_collection_atomic_upsert_update_500(self): + sample = self.collection_factory.create_sample( + sample='collection-2', name=self.collection['name'] + ) + + # Make sure samples is different from actual data + self.assertNotEqual(sample.attributes, self.collection.attributes) + + # the dataset to update does not exist yet + with self.settings(DEBUG_PROPAGATE_API_EXCEPTIONS=True), disableLogger('stac_api.apps'): + # because we explicitely test a crash here we don't want to print a CRITICAL log on the + # console therefore disable it. 
+ response = self.client.put( + reverse('test-collection-detail-http-500', args=[sample['name']]), + data=sample.get_json('put'), + content_type='application/json' + ) + self.assertStatusCode(500, response) + self.assertEqual(response.json()['description'], "AttributeError('test exception')") + + # Make sure that the ressource has not been created + response = self.client.get(reverse('collection-detail', args=[sample['name']])) + self.assertStatusCode(200, response) + self.check_stac_collection(self.collection.json, response.json()) + + +class CollectionUpsertAtomicTest(StacBaseTransactionTestCase): + + def setUp(self): + self.username = 'user' + self.password = 'dummy-password' + get_user_model().objects.create_superuser(self.username, password=self.password) + + def test_collection_atomic_upsert(self): + workers = 10 + status_201 = 0 + sample = CollectionFactory().create_sample(sample='collection-2') + + def on_done(future): + # Because each thread has a db connection, we call close_all() when the thread is + # terminated. This is needed because the thread are not managed by django here but + # by us. + connections.close_all() + + def collection_atomic_upsert_test(worker): + # This method run on separate thread therefore it requires to create a new client and + # to login it for each call. + client = Client() + client.login(username=self.username, password=self.password) + return client.put( + reverse('collection-detail', args=[sample['name']]), + data=sample.get_json('put'), + content_type='application/json' + ) + + # We call the PUT collection several times in parallel with the same data to make sure + # that we don't have any race condition. + errors = [] + responses = [] + with ThreadPoolExecutor(max_workers=workers) as executor: + futures = {} + for worker in range(workers): + future = executor.submit(collection_atomic_upsert_test, worker) + future.add_done_callback(on_done) + futures[future] = worker + for future in as_completed(futures): + try: + response = future.result() + except Exception as exc: # pylint: disable=broad-except + errors.append((futures[future], str(exc))) + else: + responses.append((futures[future], response)) + + self.assertEqual( + len(responses) + len(errors), + workers, + msg='Number of responses/errors doesn\'t match the number of worker' + ) + + for worker, response in responses: + if response.status_code == 201: + status_201 += 1 + self.assertIn( + response.status_code, [200, 201], + msg=f'Unexpected response status code {response.status_code} for worker {worker}' + ) + self.check_stac_collection(sample.json, response.json()) + self.assertEqual(status_201, 1, msg="Not only one upsert did a create !") + for worker, error in errors: + self.fail(msg=f'Worker {worker} failed: {error}') + class CollectionsUnauthorizeEndpointTestCase(StacBaseTestCase): From b68656876d0ee6147030547f8bdade7682c1792a Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 23 Mar 2021 08:39:22 +0100 Subject: [PATCH 021/105] BGDIINF_SB-1723: Implemented Collection upsert We also make use of the DB atomic transaction to do proper DB rollback in case of errors. The get_object() of the views have been removed in order to use the default implementation using the lookup_field. Also made all endpoints atomic to assure proper db rollback in case of error. 
--- app/stac_api/serializers.py | 50 +++++++++++++++--- app/stac_api/validators_serializer.py | 32 ++++++++++++ app/stac_api/views.py | 32 ++++-------- app/stac_api/views_mixins.py | 54 +++++++++++++++++--- app/tests/base_test.py | 38 ++++++++++++++ app/tests/test_collections_endpoint.py | 71 +++++++++++++------------- 6 files changed, 204 insertions(+), 73 deletions(-) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index d5c2ae2f..4d63933b 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -31,6 +31,7 @@ from stac_api.validators import validate_name from stac_api.validators_serializer import validate_asset_file from stac_api.validators_serializer import validate_json_payload +from stac_api.validators_serializer import validate_uniqueness_and_create logger = logging.getLogger(__name__) @@ -90,6 +91,30 @@ def update_or_create_links(model, instance, instance_type, links_data): ) +class UpsertModelSerializerMixin: + """Add support for Upsert in serializer + """ + + def upsert(self, **kwargs): + """ + Update or insert an instance and return it. + """ + self.instance, created = self.update_or_create(self.validated_data.copy(), **kwargs) + return self.instance, created + + def update_or_create(self, validated_data, **kwargs): + """This method must be implemented by the serializer and must make use of the DB + objects.update_or_create() method. + + Args: + validated_data: dict + Copy of the validated_data to be used in the objects.update_or_create() method. + **kwargs: + Must be a unique query to be used in the objects.update_or_create() method. + """ + raise NotImplementedError("update_or_create() not implemented") + + class NonNullModelSerializer(serializers.ModelSerializer): """Filter fields with null value @@ -315,7 +340,7 @@ class Meta: ) -class CollectionSerializer(NonNullModelSerializer): +class CollectionSerializer(NonNullModelSerializer, UpsertModelSerializerMixin): class Meta: model = Collection @@ -340,10 +365,7 @@ class Meta: # NOTE: when explicitely declaring fields, we need to add the validation as for the field # in model ! id = serializers.CharField( - required=True, - max_length=255, - source="name", - validators=[validate_name, UniqueValidator(queryset=Collection.objects.all())] + required=True, max_length=255, source="name", validators=[validate_name] ) title = serializers.CharField(required=False, allow_blank=False, default=None, max_length=255) # Also links are required in the spec, the main links (self, root, items) are automatically @@ -414,7 +436,7 @@ def create(self, validated_data): """ providers_data = validated_data.pop('providers', []) links_data = validated_data.pop('links', []) - collection = Collection.objects.create(**validated_data) + collection = validate_uniqueness_and_create(Collection, validated_data) self._update_or_create_providers(collection=collection, providers_data=providers_data) update_or_create_links( instance_type="collection", @@ -439,6 +461,22 @@ def update(self, instance, validated_data): ) return super().update(instance, validated_data) + def update_or_create(self, validated_data, **kwargs): + """ + Create and return a new `Collection` instance, given the validated data. 
+ """ + providers_data = validated_data.pop('providers', []) + links_data = validated_data.pop('links', []) + collection, created = Collection.objects.update_or_create(**kwargs, defaults=validated_data) + self._update_or_create_providers(collection=collection, providers_data=providers_data) + update_or_create_links( + instance_type="collection", + model=CollectionLink, + instance=collection, + links_data=links_data + ) + return collection, created + def to_representation(self, instance): name = instance.name stac_base_v = settings.STAC_BASE_V diff --git a/app/stac_api/validators_serializer.py b/app/stac_api/validators_serializer.py index 3bad12a0..db4c596d 100644 --- a/app/stac_api/validators_serializer.py +++ b/app/stac_api/validators_serializer.py @@ -9,6 +9,8 @@ from django.conf import settings from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.geos import GEOSGeometry +from django.db import IntegrityError +from django.db import transaction from django.utils.translation import gettext_lazy as _ from rest_framework.exceptions import APIException @@ -527,3 +529,33 @@ def validate_query_parameters_post_search(self, query_param): logger.error( 'Query contains the non-allowed parameter(s): %s', list(wrong_query_parameters) ) + + +def validate_uniqueness_and_create(model_class, validated_data): + """Validate for uniqueness and create object + + Try to create an object and if it fails with db IntegrityError due to non unique object by name + re-raise a ValidationError(), otherwise re-raise the IntegrityError + + Args: + model_class: Model + A model Class to use for the create() + validated_data: dict + Validated data to use for the create method + + Returns: + the object created + + Raises: + ValidationError: when the new object is not unique by name in db. 
+ IntegrityError: for any other DB errors + """ + try: + with transaction.atomic(): + return model_class.objects.create(**validated_data) + except IntegrityError as error: + if model_class.objects.all().filter(name=validated_data['name']).exists(): + raise ValidationError( + code='unique', detail={'id': ['This field must be unique.']} + ) from None + raise diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 3ae8a27c..b12efe17 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -5,7 +5,6 @@ from django.conf import settings from django.http import Http404 -from django.shortcuts import get_object_or_404 from rest_framework import generics from rest_framework import mixins @@ -244,9 +243,12 @@ def get(self, request, *args, **kwargs): return Response(data) -class CollectionDetail(generics.GenericAPIView, mixins.RetrieveModelMixin, mixins.UpdateModelMixin): +class CollectionDetail( + generics.GenericAPIView, mixins.RetrieveModelMixin, views_mixins.UpdateInsertModelMixin +): serializer_class = CollectionSerializer lookup_url_kwarg = "collection_name" + lookup_field = "name" queryset = Collection.objects.all().prefetch_related('providers', 'links') @etag(get_collection_etag) @@ -256,19 +258,13 @@ def get(self, request, *args, **kwargs): # Here the etag is only added to support pre-conditional If-Match and If-Not-Match @etag(get_collection_etag) def put(self, request, *args, **kwargs): - return self.update(request, *args, **kwargs) + return self.upsert(request, *args, **kwargs) # Here the etag is only added to support pre-conditional If-Match and If-Not-Match @etag(get_collection_etag) def patch(self, request, *args, **kwargs): return self.partial_update(request, *args, **kwargs) - def get_object(self): - collection_name = self.kwargs.get(self.lookup_url_kwarg) - queryset = self.get_queryset().filter(name=collection_name) - obj = get_object_or_404(queryset) - return obj - class ItemsList(generics.GenericAPIView, views_mixins.CreateModelMixin): serializer_class = ItemSerializer @@ -345,11 +341,12 @@ def post(self, request, *args, **kwargs): class ItemDetail( generics.GenericAPIView, mixins.RetrieveModelMixin, - views_mixins.UpdateModelMixin, + views_mixins.UpdateInsertModelMixin, views_mixins.DestroyModelMixin ): serializer_class = ItemSerializer lookup_url_kwarg = "item_name" + lookup_field = "name" def get_queryset(self): # filter based on the url @@ -370,12 +367,6 @@ def get_write_request_data(self, request, *args, partial=False, **kwargs): data['collection'] = kwargs['collection_name'] return data - def get_object(self): - item_name = self.kwargs.get(self.lookup_url_kwarg) - queryset = self.get_queryset().filter(name=item_name) - obj = get_object_or_404(queryset) - return obj - @etag(get_item_etag) def get(self, request, *args, **kwargs): return self.retrieve(request, *args, **kwargs) @@ -468,11 +459,12 @@ def get(self, request, *args, **kwargs): class AssetDetail( generics.GenericAPIView, mixins.RetrieveModelMixin, - views_mixins.UpdateModelMixin, + views_mixins.UpdateInsertModelMixin, views_mixins.DestroyModelMixin ): serializer_class = AssetSerializer lookup_url_kwarg = "asset_name" + lookup_field = "name" def get_write_request_data(self, request, *args, partial=False, **kwargs): data = request.data.copy() @@ -491,12 +483,6 @@ def get_queryset(self): item__name=self.kwargs['item_name'] ) - def get_object(self): - asset_name = self.kwargs.get(self.lookup_url_kwarg) - queryset = self.get_queryset().filter(name=asset_name) - obj = get_object_or_404(queryset) - return 
obj - def get_serializer(self, *args, **kwargs): serializer_class = self.get_serializer_class() kwargs.setdefault('context', self.get_serializer_context()) diff --git a/app/stac_api/views_mixins.py b/app/stac_api/views_mixins.py index 04c5a552..efea2882 100644 --- a/app/stac_api/views_mixins.py +++ b/app/stac_api/views_mixins.py @@ -1,5 +1,7 @@ import logging +from django.db import transaction + from rest_framework import status from rest_framework.response import Response @@ -8,6 +10,14 @@ logger = logging.getLogger(__name__) +def get_success_headers(data): + try: + return {'Location': get_link(data['links'], 'self', raise_exception=True)['href']} + except KeyError as err: + logger.error('Failed to set the Location header for model creation %s: %s', err, data) + return {} + + class CreateModelMixin: """ Create a model instance. @@ -24,6 +34,7 @@ class CreateModelMixin: def get_write_request_data(self, request, *args, **kwargs): return request.data + @transaction.atomic def create(self, request, *args, **kwargs): data = self.get_write_request_data(request, *args, **kwargs) serializer = self.get_serializer(data=data) @@ -36,27 +47,26 @@ def perform_create(self, serializer): serializer.save() def get_success_headers(self, data): - try: - return {'Location': get_link(data['links'], 'self', raise_exception=True)['href']} - except KeyError as err: - logger.error('Failed to set the Location header for item creation: %s', err) - return {} + return get_success_headers(data) -class UpdateModelMixin: +class UpdateInsertModelMixin: """ - Update a model instance. + Update/insert a model instance. - This is a copy of the original UpdateModelMixin, but the request.data needs to be patched with + This is a copy of the original UpdateMixin, but the request.data needs to be patched with the collection_name and/or item_name depending on the view. This patching cannot be done with the original mixin because the request.data is immutable. This new mixin allow this patching through the `get_write_request_data` method. + + It also add the upsert method to perform update_or_create operation """ def get_write_request_data(self, request, *args, partial=False, **kwargs): return request.data + @transaction.atomic def update(self, request, *args, **kwargs): partial = kwargs.pop('partial', False) serializer_kwargs = {'partial': partial} @@ -73,13 +83,40 @@ def update(self, request, *args, **kwargs): return Response(serializer.data) + @transaction.atomic + def upsert(self, request, *args, **kwargs): + data = self.get_write_request_data(request, *args, **kwargs) + serializer = self.get_serializer(data=data) + serializer.is_valid(raise_exception=True) + lookup = {} + if self.lookup_url_kwarg: + lookup = {self.lookup_field: self.kwargs[self.lookup_url_kwarg]} + instance, created = self.perform_upsert(serializer, lookup) + + if getattr(instance, '_prefetched_objects_cache', None): + # If 'prefetch_related' has been applied to a queryset, we need to + # forcibly invalidate the prefetch cache on the instance. 
+ instance._prefetched_objects_cache = {} # pylint: disable=protected-access + + return Response( + serializer.data, + status=status.HTTP_201_CREATED if created else status.HTTP_200_OK, + headers=self.get_success_headers(serializer.data) + ) + def perform_update(self, serializer): serializer.save() + def perform_upsert(self, serializer, lookup): + return serializer.upsert(**lookup) + def partial_update(self, request, *args, **kwargs): kwargs['partial'] = True return self.update(request, *args, **kwargs) + def get_success_headers(self, data): + return get_success_headers(data) + class DestroyModelMixin: """ @@ -89,6 +126,7 @@ class DestroyModelMixin: instead of 204 No Content. """ + @transaction.atomic def destroy(self, request, *args, **kwargs): instance = self.get_object() self.perform_destroy(instance) diff --git a/app/tests/base_test.py b/app/tests/base_test.py index 68d544f8..84634257 100644 --- a/app/tests/base_test.py +++ b/app/tests/base_test.py @@ -1,10 +1,13 @@ import json import logging +from concurrent.futures import ThreadPoolExecutor +from concurrent.futures import as_completed from datetime import timedelta from pprint import pformat from urllib.parse import urlparse from django.contrib.gis.geos.geometry import GEOSGeometry +from django.db import connections from django.test import TestCase from django.test import TransactionTestCase @@ -419,3 +422,38 @@ class StacBaseTestCase(TestCase, StacTestMixin): class StacBaseTransactionTestCase(TransactionTestCase, StacTestMixin): """Django TransactionTestCase with additional STAC check methods """ + + @staticmethod + def on_done(future): + # Because each thread has a db connection, we call close_all() when the thread is + # terminated. This is needed because the thread are not managed by django here but + # by us. 
+ connections.close_all() + + def run_parallel(self, workers, func): + errors = [] + responses = [] + with ThreadPoolExecutor(max_workers=workers) as executor: + futures = {} + for worker in range(workers): + future = executor.submit(func, worker) + future.add_done_callback(self.on_done) + futures[future] = worker + for future in as_completed(futures): + try: + response = future.result() + except Exception as exc: # pylint: disable=broad-except + errors.append((futures[future], str(exc))) + else: + responses.append((futures[future], response)) + + self.assertEqual( + len(responses) + len(errors), + workers, + msg='Number of responses/errors doesn\'t match the number of worker' + ) + + for worker, error in errors: + self.fail(msg=f'Worker {worker} failed: {error}') + + return responses, errors diff --git a/app/tests/test_collections_endpoint.py b/app/tests/test_collections_endpoint.py index 4dd7b1ef..ac0272f2 100644 --- a/app/tests/test_collections_endpoint.py +++ b/app/tests/test_collections_endpoint.py @@ -1,11 +1,8 @@ import logging -from concurrent.futures import ThreadPoolExecutor -from concurrent.futures import as_completed from datetime import datetime from django.conf import settings from django.contrib.auth import get_user_model -from django.db import connections from django.test import Client from django.urls import reverse @@ -51,7 +48,7 @@ def test_single_collection_endpoint(self): self.check_stac_collection(self.collection_1.json, response_json) -class CollectionsWriteEndpointTestCase(StacBaseTransactionTestCase): +class CollectionsWriteEndpointTestCase(StacBaseTestCase): def setUp(self): # pylint: disable=invalid-name self.client = Client() @@ -380,24 +377,18 @@ def test_collection_atomic_upsert_update_500(self): self.check_stac_collection(self.collection.json, response.json()) -class CollectionUpsertAtomicTest(StacBaseTransactionTestCase): +class CollectionRaceConditionTest(StacBaseTransactionTestCase): def setUp(self): self.username = 'user' self.password = 'dummy-password' get_user_model().objects.create_superuser(self.username, password=self.password) - def test_collection_atomic_upsert(self): - workers = 10 + def test_collection_upsert_race_condition(self): + workers = 5 status_201 = 0 sample = CollectionFactory().create_sample(sample='collection-2') - def on_done(future): - # Because each thread has a db connection, we call close_all() when the thread is - # terminated. This is needed because the thread are not managed by django here but - # by us. - connections.close_all() - def collection_atomic_upsert_test(worker): # This method run on separate thread therefore it requires to create a new client and # to login it for each call. @@ -411,27 +402,7 @@ def collection_atomic_upsert_test(worker): # We call the PUT collection several times in parallel with the same data to make sure # that we don't have any race condition. 
- errors = [] - responses = [] - with ThreadPoolExecutor(max_workers=workers) as executor: - futures = {} - for worker in range(workers): - future = executor.submit(collection_atomic_upsert_test, worker) - future.add_done_callback(on_done) - futures[future] = worker - for future in as_completed(futures): - try: - response = future.result() - except Exception as exc: # pylint: disable=broad-except - errors.append((futures[future], str(exc))) - else: - responses.append((futures[future], response)) - - self.assertEqual( - len(responses) + len(errors), - workers, - msg='Number of responses/errors doesn\'t match the number of worker' - ) + responses, errors = self.run_parallel(workers, collection_atomic_upsert_test) for worker, response in responses: if response.status_code == 201: @@ -442,8 +413,36 @@ def collection_atomic_upsert_test(worker): ) self.check_stac_collection(sample.json, response.json()) self.assertEqual(status_201, 1, msg="Not only one upsert did a create !") - for worker, error in errors: - self.fail(msg=f'Worker {worker} failed: {error}') + + def test_collection_post_race_condition(self): + workers = 5 + status_201 = 0 + sample = CollectionFactory().create_sample(sample='collection-2') + + def collection_atomic_post_test(worker): + # This method run on separate thread therefore it requires to create a new client and + # to login it for each call. + client = Client() + client.login(username=self.username, password=self.password) + return client.post( + reverse('collection-list'), + data=sample.get_json('post'), + content_type='application/json' + ) + + # We call the PUT collection several times in parallel with the same data to make sure + # that we don't have any race condition. + responses, errors = self.run_parallel(workers, collection_atomic_post_test) + + for worker, response in responses: + self.assertIn(response.status_code, [201, 400]) + if response.status_code == 201: + self.check_stac_collection(sample.json, response.json()) + status_201 += 1 + else: + self.assertIn('id', response.json()['description'].keys()) + self.assertIn('This field must be unique.', response.json()['description']['id']) + self.assertEqual(status_201, 1, msg="Not only one POST was successfull") class CollectionsUnauthorizeEndpointTestCase(StacBaseTestCase): From cef81b9aa901510fb17321573efa42dd47352907 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 23 Mar 2021 14:56:46 +0100 Subject: [PATCH 022/105] BGDIINF_SB-1723: Docstring correction --- app/stac_api/serializers.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index 4d63933b..ef33ed52 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -463,7 +463,19 @@ def update(self, instance, validated_data): def update_or_create(self, validated_data, **kwargs): """ - Create and return a new `Collection` instance, given the validated data. + Update or create the collection object selected by kwargs and return the instance. + + When no collection object matching the kwargs selection, a new object is created. 
+ + Args: + validated_data: dict + Copy of the validated_data to use for update + kwargs: dict + Object selection arguments (NOTE: the selection arguments must match a unique + object in DB otherwise an IntegrityError will be raised) + + Returns: tuple + Collection instance and True if created otherwise false """ providers_data = validated_data.pop('providers', []) links_data = validated_data.pop('links', []) From 39956e70ad69d81068c27f2abb4e1015886e4f0e Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Wed, 24 Mar 2021 11:52:22 +0100 Subject: [PATCH 023/105] BGDIINF_SB-1625: Removed asset file verification during POST/PUT/PATCH asset These checks were removed in order to prepare for the new asset management with presigned url were we first need to create the asset metadata before uploading the asset file. See BGDIINF_SB-1739. --- app/stac_api/serializers.py | 11 ----- app/tests/test_assets_endpoint.py | 81 ------------------------------- 2 files changed, 92 deletions(-) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index ef33ed52..ae18898f 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -29,7 +29,6 @@ from stac_api.validators import validate_geoadmin_variant from stac_api.validators import validate_item_properties_datetimes from stac_api.validators import validate_name -from stac_api.validators_serializer import validate_asset_file from stac_api.validators_serializer import validate_json_payload from stac_api.validators_serializer import validate_uniqueness_and_create @@ -680,16 +679,6 @@ def validate(self, attrs): if not self.partial: attrs['file'] = get_asset_path(attrs['item'], attrs['name']) - # Check if the asset exits for non partial update or when the checksum is available - if not self.partial or 'checksum_multihash' in attrs: - original_name = attrs['name'] - if self.instance: - original_name = self.instance.name - path = get_asset_path(attrs['item'], original_name) - request = self.context.get("request") - href = build_asset_href(request, path) - attrs = validate_asset_file(href, original_name, attrs) - return attrs def get_fields(self): diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index fef34caf..3d977f30 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -329,30 +329,6 @@ def setUp(self): # pylint: disable=invalid-name client_login(self.client) self.maxDiff = None # pylint: disable=invalid-name - def test_asset_endpoint_post_asset_file_dont_exists(self): - collection_name = self.collection.name - item_name = self.item.name - asset = self.factory.create_asset_sample(item=self.item, create_asset_file=False) - - path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' - response = self.client.post( - path, data=asset.get_json('post'), content_type="application/json" - ) - self.assertStatusCode(400, response) - description = response.json()['description'] - self.assertIn('href', description, msg=f'Unexpected field error {description}') - self.assertEqual( - "Asset doesn't exists at href http://testserver/collection-1/item-1/asset-1.tiff", - description['href'][0], - msg="Unexpected error message" - ) - - # Make sure that the asset is not found in DB - self.assertFalse( - Asset.objects.filter(name=asset.json['id']).exists(), - msg="Invalid asset has been created in DB" - ) - # NOTE: Unfortunately this test cannot be done with the moto mocking. 
# def test_asset_endpoint_post_s3_not_answering(self): # collection_name = self.collection.name @@ -376,63 +352,6 @@ def test_asset_endpoint_post_asset_file_dont_exists(self): # msg="Invalid asset has been created in DB" # ) - def test_asset_endpoint_post_s3_without_sha256(self): - collection_name = self.collection.name - item_name = self.item.name - asset = self.factory.create_asset_sample(item=self.item, create_asset_file=False) - - upload_file_on_s3( - f'{collection_name}/{item_name}/{asset["name"]}', asset["file"], params={} - ) - - path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' - response = self.client.post( - path, data=asset.get_json('post'), content_type="application/json" - ) - self.assertStatusCode(400, response) - description = response.json()['description'] - self.assertIn('non_field_errors', description, msg=f'Unexpected field error {description}') - self.assertEqual( - "Asset at href http://testserver/collection-1/item-1/asset-1.tiff has a md5 multihash " - "while a sha2-256 multihash is defined in the checksum:multihash attribute", - description['non_field_errors'][0], - msg="Unexpected error message" - ) - - # Make sure that the asset is not found in DB - self.assertFalse( - Asset.objects.filter(name=asset.json['id']).exists(), - msg="Invalid asset has been created in DB" - ) - - def test_asset_endpoint_post_wrong_checksum(self): - collection_name = self.collection.name - item_name = self.item.name - asset = self.factory.create_asset_sample(item=self.item, create_asset_file=True) - asset_json = asset.get_json('post') - asset_json['checksum:multihash'] = get_sha256_multihash( - b'new dummy content that do not match real checksum' - ) - - path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' - response = self.client.post(path, data=asset_json, content_type="application/json") - self.assertStatusCode(400, response) - description = response.json()['description'] - self.assertIn('non_field_errors', description, msg=f'Unexpected field error {description}') - self.assertEqual( - "Asset at href http://testserver/collection-1/item-1/asset-1.tiff with sha2-256 hash " - "a7f5e7ca03b0f80a2fcfe5142642377e7654df2dfa736fe4d925322d8a651efe doesn't match the " - "checksum:multihash 3db85f41709d08bf1f2907042112bf483b28e12db4b3ffb5428a1f28308847ba", - description['non_field_errors'][0], - msg="Unexpected error message" - ) - - # Make sure that the asset is not found in DB - self.assertFalse( - Asset.objects.filter(name=asset.json['id']).exists(), - msg="Invalid asset has been created in DB" - ) - class AssetsUpdateEndpointAssetFileTestCase(StacBaseTestCase): From 95ded408a25016b8d2e3dfa26eeedab5a49a79c8 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 25 Mar 2021 11:04:09 +0100 Subject: [PATCH 024/105] BGDIINF_SB-1566: Added missing migration --- .../migrations/0003_auto_20210325_1001.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 app/stac_api/migrations/0003_auto_20210325_1001.py diff --git a/app/stac_api/migrations/0003_auto_20210325_1001.py b/app/stac_api/migrations/0003_auto_20210325_1001.py new file mode 100644 index 00000000..8e132b55 --- /dev/null +++ b/app/stac_api/migrations/0003_auto_20210325_1001.py @@ -0,0 +1,30 @@ +# Generated by Django 3.1.7 on 2021-03-25 10:01 + +from django.db import migrations, models +import django.db.models.deletion +import stac_api.validators + + +class Migration(migrations.Migration): + + dependencies = [ + ('stac_api', '0002_auto_20210218_0726'), + ] + + 
+    operations = [
+        migrations.AlterField(
+            model_name='asset',
+            name='item',
+            field=models.ForeignKey(help_text='\n Search Usage:\n  • arg will make a non exact search checking if arg\n is part of the Item path\n  • Multiple arg can be used, separated by spaces. This will search\n for all elements containing all arguments in their path\n  • "collectionID/itemID" will make an exact search for the specified item.\n\n Examples :\n  • Searching for pixelkarte will return all items which have\n pixelkarte as a part of either their collection ID or their item ID\n  • Searching for pixelkarte 2016 4 will return all items\n which have pixelkarte, 2016 AND 4 as part of their collection ID or\n item ID\n  • Searching for "ch.swisstopo.pixelkarte.example/item2016-4-example"\n will yield only this item, if this item exists.\n', on_delete=django.db.models.deletion.CASCADE, related_name='assets', related_query_name='asset', to='stac_api.item'),
+        ),
+        migrations.AlterField(
+            model_name='asset',
+            name='name',
+            field=models.CharField(max_length=255, validators=[stac_api.validators.validate_asset_name], verbose_name='id'),
+        ),
+        migrations.AlterField(
+            model_name='item',
+            name='collection',
+            field=models.ForeignKey(help_text='\n Search Usage:\n  • arg will make a non exact search checking if arg is part of\n the collection ID\n  • Multiple arg can be used, separated by spaces. This will search for all\n collections ID containing all arguments.\n  • "collectionID" will make an exact search for the specified collection.\n\n Examples :\n  • Searching for pixelkarte will return all collections which have\n pixelkarte as a part of their collection ID\n  • Searching for pixelkarte 2016 4 will return all collection\n which have pixelkarte, 2016 AND 4 as part of their collection ID\n  • Searching for ch.swisstopo.pixelkarte.example will yield only this\n collection, if this collection exists. Please note that it would not return\n a collection named ch.swisstopo.pixelkarte.example.2.\n
', on_delete=django.db.models.deletion.CASCADE, to='stac_api.collection'), + ), + ] From f23175d23c8b548208688a8143075d5ad381da1c Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Thu, 25 Mar 2021 14:00:35 +0100 Subject: [PATCH 025/105] this should fix the django-not-configure warning --- .pylintrc | 1 + 1 file changed, 1 insertion(+) diff --git a/.pylintrc b/.pylintrc index 871414b9..20af64ed 100644 --- a/.pylintrc +++ b/.pylintrc @@ -33,6 +33,7 @@ limit-inference-results=100 # List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins=pylint_django +django-settings-module=stac_api.settings # Pickle collected data for later comparisons. persistent=yes From fb69373db25d214f45c27ab8df96703d7968dcaf Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 25 Mar 2021 09:14:43 +0100 Subject: [PATCH 026/105] Improved code for auto generated links The new code make use of django reverse url functionality. --- app/stac_api/serializers.py | 143 ++++++++++++------------- app/stac_api/urls.py | 2 +- app/tests/test_collections_endpoint.py | 2 +- 3 files changed, 68 insertions(+), 79 deletions(-) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index ae18898f..dc217512 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -4,6 +4,7 @@ from django.conf import settings from django.contrib.gis.geos import GEOSGeometry +from django.urls import reverse from rest_framework import serializers from rest_framework.utils.serializer_helpers import ReturnDict @@ -90,6 +91,44 @@ def update_or_create_links(model, instance, instance_type, links_data): ) +def get_relation_links(request, view, view_args): + '''Returns a list of auto generated relation links + + Returns the self, root and parent auto generated links. 
+ + Args: + request: HttpRequest + request object + view: string + name of the view that originate the call + view_args: list + args to construct the view path + + Returns: list + List of auto generated links + ''' + self_url = request.build_absolute_uri(reverse(view, args=view_args)) + return [ + OrderedDict([ + ('rel', 'self'), + ('href', self_url), + ]), + OrderedDict([ + ('rel', 'root'), + ('href', request.build_absolute_uri(reverse('landing-page'))), + ]), + OrderedDict([ + ('rel', 'parent'), + ('href', self_url.rsplit('/', maxsplit=1)[0]), + ]), + ] + + +def get_url(request, view, args=None): + '''Get an full url based on a view name''' + return request.build_absolute_uri(reverse(view, args=args)) + + class UpsertModelSerializerMixin: """Add support for Upsert in serializer """ @@ -218,7 +257,6 @@ def get_stac_version(self, obj): def to_representation(self, instance): representation = super().to_representation(instance) - stac_base_v = settings.STAC_BASE_V request = self.context.get("request") spec_base = urlparse(settings.STATIC_SPEC_URL).path.strip('/') @@ -229,7 +267,7 @@ def to_representation(self, instance): representation['links'][:0] = [ OrderedDict([ ('rel', 'self'), - ('href', request.build_absolute_uri(f'/{stac_base_v}/')), + ('href', get_url(request, 'landing-page')), ("type", "application/json"), ("title", "This document"), ]), @@ -247,25 +285,25 @@ def to_representation(self, instance): ]), OrderedDict([ ("rel", "conformance"), - ("href", request.build_absolute_uri(f'/{stac_base_v}/conformance')), + ("href", get_url(request, 'conformance')), ("type", "application/json"), ("title", "OGC API conformance classes implemented by this server"), ]), OrderedDict([ ('rel', 'data'), - ('href', request.build_absolute_uri(f'/{stac_base_v}/collections')), + ('href', get_url(request, 'collections-list')), ("type", "application/json"), ("title", "Information about the feature collections"), ]), OrderedDict([ - ("href", request.build_absolute_uri(f"/{stac_base_v}/search")), + ("href", get_url(request, 'search-list')), ("rel", "search"), ("method", "GET"), ("type", "application/json"), ("title", "Search across feature collections"), ]), OrderedDict([ - ("href", request.build_absolute_uri(f"/{stac_base_v}/search")), + ("href", get_url(request, 'search-list')), ("rel", "search"), ("method", "POST"), ("type", "application/json"), @@ -497,22 +535,10 @@ def to_representation(self, instance): # We use OrderedDict, although it is not necessary, because the default serializer/model for # links already uses OrderedDict, this way we keep consistency between auto link and user # link - representation['links'][:0] = [ - OrderedDict([ - ('rel', 'self'), - ('href', request.build_absolute_uri(f'/{stac_base_v}/collections/{name}')), - ]), - OrderedDict([ - ('rel', 'root'), - ('href', request.build_absolute_uri(f'/{stac_base_v}/')), - ]), - OrderedDict([ - ('rel', 'parent'), - ('href', request.build_absolute_uri(f'/{stac_base_v}/collections')), - ]), + representation['links'][:0] = get_relation_links(request, 'collection-detail', [name]) + [ OrderedDict([ ('rel', 'items'), - ('href', request.build_absolute_uri(f'/{stac_base_v}/collections/{name}/items')), + ('href', get_url(request, 'items-list', [name])), ]) ] return representation @@ -702,47 +728,24 @@ def to_representation(self, instance): collection = instance.item.collection.name item = instance.item.name name = instance.name - api = settings.STAC_BASE_V request = self.context.get("request") representation = super().to_representation(instance) # Add 
auto links # We use OrderedDict, although it is not necessary, because the default serializer/model for # links already uses OrderedDict, this way we keep consistency between auto link and user # link - representation['links'] = [ - OrderedDict([ - ('rel', 'self'), - ( - 'href', - request.build_absolute_uri( - f'/{api}/collections/{collection}/items/{item}/assets/{name}' - ) - ), - ]), - OrderedDict([ - ('rel', 'root'), - ('href', request.build_absolute_uri(f'/{api}/')), - ]), - OrderedDict([ - ('rel', 'parent'), - ( - 'href', - request. - build_absolute_uri(f'/{api}/collections/{collection}/items/{item}/assets') - ), - ]), - OrderedDict([ - ('rel', 'item'), - ( - 'href', - request.build_absolute_uri(f'/{api}/collections/{collection}/items/{item}') - ), - ]), - OrderedDict([ - ('rel', 'collection'), - ('href', request.build_absolute_uri(f'/{api}/collections/{collection}')), - ]) - ] + representation['links'] = \ + get_relation_links(request, 'asset-detail', [collection, item, name]) \ + + [ + OrderedDict([ + ('rel', 'item'), + ('href', get_url(request, 'item-detail', [collection, item])), + ]), + OrderedDict([ + ('rel', 'collection'), + ('href', get_url(request, 'collection-detail', [collection])), + ]) + ] return representation @@ -821,34 +824,20 @@ def get_stac_version(self, obj): def to_representation(self, instance): collection = instance.collection.name name = instance.name - api = settings.STAC_BASE_V request = self.context.get("request") representation = super().to_representation(instance) # Add auto links # We use OrderedDict, although it is not necessary, because the default serializer/model for # links already uses OrderedDict, this way we keep consistency between auto link and user # link - representation['links'][:0] = [ - OrderedDict([ - ('rel', 'self'), - ( - 'href', - request.build_absolute_uri(f'/{api}/collections/{collection}/items/{name}') - ), - ]), - OrderedDict([ - ('rel', 'root'), - ('href', request.build_absolute_uri(f'/{api}/')), - ]), - OrderedDict([ - ('rel', 'parent'), - ('href', request.build_absolute_uri(f'/{api}/collections/{collection}/items')), - ]), - OrderedDict([ - ('rel', 'collection'), - ('href', request.build_absolute_uri(f'/{api}/collections/{collection}')), - ]) - ] + representation['links'][:0] = \ + get_relation_links(request, 'item-detail', [collection, name]) \ + + [ + OrderedDict([ + ('rel', 'collection'), + ('href', get_url(request, 'collection-detail', [collection])), + ]) + ] return representation def create(self, validated_data): diff --git a/app/stac_api/urls.py b/app/stac_api/urls.py index 9ed94555..ec5e7485 100644 --- a/app/stac_api/urls.py +++ b/app/stac_api/urls.py @@ -22,7 +22,7 @@ path(f"{STAC_VERSION_SHORT}/", LandingPageDetail.as_view(), name='landing-page'), path(f"{STAC_VERSION_SHORT}/conformance", ConformancePageDetail.as_view(), name='conformance'), path(f"{STAC_VERSION_SHORT}/search", SearchList.as_view(), name='search-list'), - path(f"{STAC_VERSION_SHORT}/collections", CollectionList.as_view(), name='collection-list'), + path(f"{STAC_VERSION_SHORT}/collections", CollectionList.as_view(), name='collections-list'), path( f"{STAC_VERSION_SHORT}/collections/", CollectionDetail.as_view(), diff --git a/app/tests/test_collections_endpoint.py b/app/tests/test_collections_endpoint.py index ac0272f2..908ed1b5 100644 --- a/app/tests/test_collections_endpoint.py +++ b/app/tests/test_collections_endpoint.py @@ -425,7 +425,7 @@ def collection_atomic_post_test(worker): client = Client() client.login(username=self.username, 
password=self.password) return client.post( - reverse('collection-list'), + reverse('collections-list'), data=sample.get_json('post'), content_type='application/json' ) From ed39fefa539a2eacfc2961596b4bba397c7d3b06 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 23 Mar 2021 16:30:38 +0100 Subject: [PATCH 027/105] BGDIINF_SB-1723: Updated the spec for collection upsert Plus some small improvements using shorter summary for operations. --- spec/paths.yml | 6 ++-- spec/static/spec/v0.9/openapi.yaml | 6 ++-- .../spec/v0.9/openapitransactional.yaml | 28 +++++++++++++++---- spec/transaction/transaction.yml | 21 ++++++++++++-- 4 files changed, 48 insertions(+), 13 deletions(-) diff --git a/spec/paths.yml b/spec/paths.yml index 0041e59e..492aa106 100644 --- a/spec/paths.yml +++ b/spec/paths.yml @@ -23,7 +23,8 @@ paths: $ref: "#/components/responses/Collections" "500": $ref: "#/components/responses/ServerError" - summary: The feature collections in the dataset + summary: Fetch collections + description: The feature collections in the dataset tags: - Data /collections/{collectionId}: @@ -44,7 +45,8 @@ paths: $ref: "#/components/responses/PreconditionFailed" "500": $ref: "#/components/responses/ServerError" - summary: Describe the feature collection with id `collectionId` + summary: Fetch a single collection + description: Describe the feature collection with id `collectionId` tags: - Data /collections/{collectionId}/items: diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index 09dcf4a9..93e03073 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -1595,7 +1595,8 @@ paths: $ref: "#/components/responses/Collections" "500": $ref: "#/components/responses/ServerError" - summary: The feature collections in the dataset + summary: Fetch collections + description: The feature collections in the dataset tags: - Data /collections/{collectionId}: @@ -1616,7 +1617,8 @@ paths: $ref: "#/components/responses/PreconditionFailed" "500": $ref: "#/components/responses/ServerError" - summary: Describe the feature collection with id `collectionId` + summary: Fetch a single collection + description: Describe the feature collection with id `collectionId` tags: - Data /collections/{collectionId}/items: diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index 6a43a629..18794bd4 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -2021,7 +2021,8 @@ paths: $ref: "#/components/responses/Collections" "500": $ref: "#/components/responses/ServerError" - summary: The feature collections in the dataset + summary: Fetch collections + description: The feature collections in the dataset tags: - Data post: @@ -2091,15 +2092,17 @@ paths: $ref: "#/components/responses/PreconditionFailed" "500": $ref: "#/components/responses/ServerError" - summary: Describe the feature collection with id `collectionId` + summary: Fetch a single collection + description: Describe the feature collection with id `collectionId` tags: - Data put: tags: - Data Management - summary: >- - Update an existing collection with Id `collectionId` with a complete collection - definition + summary: Update or create a collection + description: >- + Update or create a collection with Id `collectionId` with a complete collection + definition. If the collection doesn't exists it is then created. 
operationId: updateCollection parameters: - $ref: "#/components/parameters/collectionId" @@ -2129,6 +2132,18 @@ paths: url: https://www.swisstopo.admin.ch title: National Map 1:200'000 responses: + "201": + description: Returns the created Collection + headers: + Location: + description: A link to the collection + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/collection" "200": description: Returns the updated Collection content: @@ -2146,7 +2161,8 @@ paths: patch: tags: - Data Management - summary: >- + summary: Partial update of a collection + description: >- Update an existing collection with Id `collectionId` with a partial collection definition operationId: partialUpdateCollection diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index 92cdc818..bbb39716 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -53,8 +53,10 @@ paths: put: tags: - Data Management - summary: >- - Update an existing collection with Id `collectionId` with a complete collection definition + summary: Update or create a collection + description: >- + Update or create a collection with Id `collectionId` with a complete collection definition. + If the collection doesn't exists it is then created. operationId: updateCollection parameters: - $ref: "#/components/parameters/collectionId" @@ -82,6 +84,18 @@ paths: url: https://www.swisstopo.admin.ch title: National Map 1:200'000 responses: + "201": + description: Returns the created Collection + headers: + Location: + description: A link to the collection + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/collection" "200": description: Returns the updated Collection content: @@ -99,7 +113,8 @@ paths: patch: tags: - Data Management - summary: >- + summary: Partial update of a collection + description: >- Update an existing collection with Id `collectionId` with a partial collection definition operationId: partialUpdateCollection parameters: From 1f8c3c497cb60d82f53430c08abf158c3126015b Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Fri, 19 Mar 2021 13:36:21 +0100 Subject: [PATCH 028/105] BGDIINF_SB-1647 filter assets for their collection and/or item added autocomplete search fields and dropdown list filters for filtering assets for their collection and/or items. Following behaviour is implemented: If a collection is defined in the collection filter, only those items will appear in the dropdown list of the item filter, that are in the currently selected collection. ItemAdmin's get_search_results() was overriden with a few hacky lines for this. Otherwise, all items would appear in the list, which does not make sense. Note: The responsiveness after clicking on those filters could be better. Its a bit slow when there are many items or collections. Probably will improve this soon. 
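As a rough standalone sketch of the idea (the real change is in the diff below, and is reworked
over the following two commits to use request.headers and a get_query_params helper), the item
autocomplete queryset gets narrowed to the collection pre-selected in the referring admin page.
The query parameter name `item__collection` is taken from the diff; everything else here is
illustrative:

    # Illustrative sketch only: narrow an item queryset to the collection
    # selected in the admin filter, based on the referring page's querystring.
    from urllib.parse import parse_qs, urlparse

    def filter_items_by_selected_collection(request, queryset):
        referer = request.headers.get('Referer', '')
        params = parse_qs(urlparse(referer).query)
        collection_pk = params.get('item__collection', [None])[0]
        if collection_pk is not None:
            queryset = queryset.filter(collection__pk__exact=collection_pk)
        return queryset
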
--- app/stac_api/admin.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index 2cf07f10..bfd559f9 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -1,4 +1,5 @@ from admin_auto_filters.filters import AutocompleteFilter +from admin_auto_filters.filters import AutocompleteFilterFactory from django.contrib.gis import admin from django.contrib.postgres.fields import ArrayField @@ -84,7 +85,7 @@ class ItemLinkInline(admin.TabularInline): extra = 0 -class CollectionFilter(AutocompleteFilter): +class CollectionFilterForItems(AutocompleteFilter): title = 'Collection name' # display title field_name = 'collection' # name of the foreign key field @@ -120,11 +121,26 @@ class Media: wms_layer = 'ch.swisstopo.pixelkarte-farbe-pk1000.noscale' wms_url = 'https://wms.geo.admin.ch/' list_display = ['name', 'collection'] - list_filter = [CollectionFilter] + list_filter = [CollectionFilterForItems] def get_search_results(self, request, queryset, search_term): queryset, use_distinct = super().get_search_results(request, queryset, search_term) - if search_term.startswith('"') and search_term.endswith('"'): + + # The following few lines are a bit hacky and are needed for the item dropdown list + # to depend on the currently selected collection in the collection dropdown filter. + # With this "hack", only those items appear in the "filter by item name" dropdown list, + # that belong to the currently selected collection in the "filter by collection name" + # dropdown list. Otherwise all items would appear in the dropdown list, which does not + # make sense. + + # this asserts that the request comes from the autocomplete filters. + if "autocomplete" in request.__dict__['environ']['PATH_INFO']: + if "item__collection" in request.__dict__['environ']['HTTP_REFERER']: + current_collection_pk = request.__dict__['environ']['HTTP_REFERER'].split( + 'item__collection=' + )[1].split("&")[0] + queryset = self.model.objects.filter(collection__pk__exact=current_collection_pk) + elif search_term.startswith('"') and search_term.endswith('"'): search_terms = search_term.strip('"').split('/', maxsplit=2) if len(search_terms) == 2: collection_name = search_terms[0] @@ -163,6 +179,10 @@ class Media: 'fields': ('eo_gsd', 'proj_epsg', 'geoadmin_variant', 'geoadmin_lang') }), ) + list_filter = [ + AutocompleteFilterFactory('Item name', 'item', use_pk_exact=True), + AutocompleteFilterFactory('Collection name', 'item__collection', use_pk_exact=True) + ] def get_search_results(self, request, queryset, search_term): queryset, use_distinct = super().get_search_results(request, queryset, search_term) From 4576d567515c8e0bebfc15966dd9f92958ffd7cb Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Fri, 19 Mar 2021 14:17:28 +0100 Subject: [PATCH 029/105] BGDIINF_SB-1647 now using request.environ.copy() instead of request.__dict__ directly --- app/stac_api/admin.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index bfd559f9..ef357c14 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -134,11 +134,11 @@ def get_search_results(self, request, queryset, search_term): # make sense. # this asserts that the request comes from the autocomplete filters. 
- if "autocomplete" in request.__dict__['environ']['PATH_INFO']: - if "item__collection" in request.__dict__['environ']['HTTP_REFERER']: - current_collection_pk = request.__dict__['environ']['HTTP_REFERER'].split( - 'item__collection=' - )[1].split("&")[0] + environ = request.environ.copy() + if "autocomplete" in environ['PATH_INFO']: + if "item__collection" in environ['HTTP_REFERER']: + current_collection_pk = environ['HTTP_REFERER'].split('item__collection=' + )[1].split("&")[0] queryset = self.model.objects.filter(collection__pk__exact=current_collection_pk) elif search_term.startswith('"') and search_term.endswith('"'): search_terms = search_term.strip('"').split('/', maxsplit=2) From 1642002e1990b882af2f42c5703197ec872caa2b Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Tue, 23 Mar 2021 14:36:45 +0100 Subject: [PATCH 030/105] BGDIINF_SB-1647 using request.headers and parsing the querystring with get_query_params() in get_search_results() in admin.py --- app/stac_api/admin.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index ef357c14..71f946ef 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -19,6 +19,7 @@ from stac_api.models import LandingPageLink from stac_api.models import Provider from stac_api.utils import build_asset_href +from stac_api.utils import get_query_params class LandingPageLinkInline(admin.TabularInline): @@ -87,7 +88,7 @@ class ItemLinkInline(admin.TabularInline): class CollectionFilterForItems(AutocompleteFilter): title = 'Collection name' # display title - field_name = 'collection' # name of the foreign key field + field_name = 'collection' # name of the foreign key @admin.register(Item) @@ -134,13 +135,13 @@ def get_search_results(self, request, queryset, search_term): # make sense. # this asserts that the request comes from the autocomplete filters. 
- environ = request.environ.copy() - if "autocomplete" in environ['PATH_INFO']: - if "item__collection" in environ['HTTP_REFERER']: - current_collection_pk = environ['HTTP_REFERER'].split('item__collection=' - )[1].split("&")[0] - queryset = self.model.objects.filter(collection__pk__exact=current_collection_pk) - elif search_term.startswith('"') and search_term.endswith('"'): + if request.path.endswith("/autocomplete/"): + collection_filter_param = get_query_params( + request.headers['Referer'], 'item__collection' + ) + if collection_filter_param: + queryset = queryset.filter(collection__pk__exact=collection_filter_param[0]) + if search_term.startswith('"') and search_term.endswith('"'): search_terms = search_term.strip('"').split('/', maxsplit=2) if len(search_terms) == 2: collection_name = search_terms[0] From 48d715eb352757693a368b37854e9fbbad4dd8e3 Mon Sep 17 00:00:00 2001 From: Marcel Clausen Date: Thu, 25 Mar 2021 08:20:21 +0100 Subject: [PATCH 031/105] remove md5 support --- Dockerfile | 4 +--- Makefile | 4 ---- README.md | 2 +- app/stac_api/utils.py | 4 ++-- app/stac_api/validators_serializer.py | 18 ++++++------------ multihash.patch | 10 ---------- spec/components/schemas.yml | 2 +- spec/static/spec/v0.9/openapi.yaml | 2 +- .../static/spec/v0.9/openapitransactional.yaml | 4 ++-- spec/transaction/transaction.yml | 2 +- 10 files changed, 15 insertions(+), 37 deletions(-) delete mode 100644 multihash.patch diff --git a/Dockerfile b/Dockerfile index b2fe9850..ac0c680e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -20,8 +20,6 @@ RUN groupadd -r geoadmin \ COPY Pipfile* multihash.patch /tmp/ RUN cd /tmp && \ pipenv install --system --deploy --ignore-pipfile && \ - # Patch multihash for md5 support - pipenv run pypatch apply ./multihash.patch multihash # Set the working dir and copy the app WORKDIR /app @@ -92,4 +90,4 @@ USER geoadmin EXPOSE $HTTP_PORT # Use a real WSGI server -ENTRYPOINT ["python3"] \ No newline at end of file +ENTRYPOINT ["python3"] diff --git a/Makefile b/Makefile index a2ed6765..80b5df41 100644 --- a/Makefile +++ b/Makefile @@ -104,8 +104,6 @@ $(SETTINGS_TIMESTAMP): $(TIMESTAMPS) setup: $(SETTINGS_TIMESTAMP) # Create virtual env with all packages for development pipenv install --dev - # Patch multihash for md5 support - pipenv run pypatch apply ./multihash.patch multihash # Create volume directories for postgres and minio # Note that the '/service_stac_local' part is already the bucket name mkdir -p .volumes/minio/service-stac-local @@ -119,8 +117,6 @@ setup: $(SETTINGS_TIMESTAMP) ci: $(SETTINGS_TIMESTAMP) # Create virtual env with all packages for development using the Pipfile.lock pipenv sync --dev - # Patch multihash for md5 support - pipenv run pypatch apply ./multihash.patch multihash # call yapf to make sure your code is easier to read and respects some conventions. diff --git a/README.md b/README.md index 261b251e..af9a860b 100644 --- a/README.md +++ b/README.md @@ -101,7 +101,7 @@ These steps will ensure you have everything needed to start working locally. ``` An alternative to ```pipenv install``` is to use the ```make setup``` command, which will install the environment, -apply a patch to the multihash package to support md5, create the volumes needed by the Postgres and MinIO containers +create the volumes needed by the Postgres and MinIO containers and run those containers. ```Make setup``` assume a standard local installation with a dev environment. 
### Setting up the local database diff --git a/app/stac_api/utils.py b/app/stac_api/utils.py index e6364ecd..29ebe648 100644 --- a/app/stac_api/utils.py +++ b/app/stac_api/utils.py @@ -153,7 +153,7 @@ def create_multihash(digest, hash_type): Args: digest: string hash_type: string - hash type (sha2-256, md5, ...) + hash type sha2-256 Returns: multihash multihash @@ -167,7 +167,7 @@ def create_multihash_string(digest, hash_code): Args: digest: string hash_code: string | int - hash code (sha2-256, md5, ...) + hash code sha2-256 Returns: string multihash string diff --git a/app/stac_api/validators_serializer.py b/app/stac_api/validators_serializer.py index db4c596d..c1b6f5f9 100644 --- a/app/stac_api/validators_serializer.py +++ b/app/stac_api/validators_serializer.py @@ -123,24 +123,18 @@ def validate_asset_file(href, original_name, attrs): # Get the hash from response asset_multihash = None asset_sha256 = obj.metadata.get('sha256', None) - asset_md5 = obj.e_tag.strip('"') - logger.debug( - 'Asset file %s checksums from headers: sha256=%s, md5=%s', href, asset_sha256, asset_md5 - ) + logger.debug('Asset file %s checksums from headers: sha256=%s', href, asset_sha256) if asset_sha256: asset_multihash = create_multihash(asset_sha256, 'sha2-256') - elif asset_md5: - asset_multihash = create_multihash(asset_md5, 'md5') if asset_multihash is None: logger.error( - 'Asset at href %s, doesn\'t provide a sha2-256 hash in header x-amz-meta-sha256 ' \ - 'or an ETag md5 checksum', href + f"Asset at href {href} doesn't provide a mandatory checksum header " + "(x-amz-meta-sha256) for validation", + href ) - raise APIException({ - 'href': _(f"Asset at href {href} doesn't provide a valid checksum header " - "(ETag or x-amz-meta-sha256) for validation") - }) + raise ValidationError(code='query-invalid', detail=_(f"Asset at href {href} doesn't provide a mandatory checksum header " + "(x-amz-meta-sha256) for validation")) from None expected_multihash = attrs.get('checksum_multihash', None) if expected_multihash is None: diff --git a/multihash.patch b/multihash.patch deleted file mode 100644 index 259589fb..00000000 --- a/multihash.patch +++ /dev/null @@ -1,10 +0,0 @@ ---- constants.py 2020-12-24 14:58:13.097432567 +0100 -+++ constants.py 2020-12-24 14:59:00.473144878 +0100 -@@ -16,6 +16,7 @@ - {'code': 0x22, 'length': 0x20, 'hash': 'murmur3-128'}, - {'code': 0x23, 'hash': 'murmur3-32'}, - {'code': 0x56, 'length': 0x20, 'hash': 'dbl-sha2-256'}, -+ {'code': 0xd5, 'length': 0x10, 'hash': 'md5'}, - {'code': 0xb201, 'length': 0x1, 'hash': 'blake2b-8'}, - {'code': 0xb202, 'length': 0x2, 'hash': 'blake2b-16'}, - {'code': 0xb203, 'length': 0x3, 'hash': 'blake2b-24'}, diff --git a/spec/components/schemas.yml b/spec/components/schemas.yml index 561d2b33..ae49cb67 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yml @@ -157,7 +157,7 @@ components: $ref: "#/components/schemas/bboxfilter" checksum:multihash: description: >- - `sha2-256` or `md5` checksum of the asset in [multihash](https://multiformats.io/multihash/) + `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. 
example: 90e402107a7f2588a85362b9beea2a12d4514d45 diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index 93e03073..08c81d00 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -385,7 +385,7 @@ components: $ref: "#/components/schemas/bboxfilter" checksum:multihash: description: >- - `sha2-256` or `md5` checksum of the asset in [multihash](https://multiformats.io/multihash/) + `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. example: 90e402107a7f2588a85362b9beea2a12d4514d45 pattern: ^[a-f0-9]+$ diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index 18794bd4..d7f3b25a 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -527,7 +527,7 @@ components: $ref: "#/components/schemas/bboxfilter" checksum:multihash: description: >- - `sha2-256` or `md5` checksum of the asset in [multihash](https://multiformats.io/multihash/) + `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. example: 90e402107a7f2588a85362b9beea2a12d4514d45 pattern: ^[a-f0-9]+$ @@ -1845,7 +1845,7 @@ components: $ref: "#/components/schemas/updateAssetId" writeChecksumMultihash: description: >- - `sha2-256` or `md5` checksum of the asset in [multihash](https://multiformats.io/multihash/) + `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index bbb39716..f0f5ae08 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -791,7 +791,7 @@ components: $ref: "#/components/schemas/updateAssetId" writeChecksumMultihash: description: >- - `sha2-256` or `md5` checksum of the asset in [multihash](https://multiformats.io/multihash/) + `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. 
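For illustration, the checksum flow that remains after this commit can be sketched as follows.
This is a hedged sketch, not the project code: `obj` stands in for a boto3 S3 Object,
`create_multihash` is the stac_api.utils helper documented earlier in this series, and the
function name itself is illustrative:

    # Sketch of the sha2-256-only checksum flow: the ETag/md5 fallback is gone,
    # so the x-amz-meta-sha256 metadata entry is now mandatory.
    from stac_api.utils import create_multihash

    def multihash_from_s3_object(obj, href):
        asset_sha256 = obj.metadata.get('sha256', None)
        if asset_sha256 is None:
            raise ValueError(
                f"Asset at href {href} doesn't provide a mandatory checksum "
                "header (x-amz-meta-sha256) for validation"
            )
        return create_multihash(asset_sha256, 'sha2-256')
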
From 9a03afa4ce6a08e1b34fbf7e87cb33b035af28fd Mon Sep 17 00:00:00 2001 From: Marcel Clausen Date: Thu, 25 Mar 2021 08:52:28 +0100 Subject: [PATCH 032/105] format and lint --- app/stac_api/validators_serializer.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/app/stac_api/validators_serializer.py b/app/stac_api/validators_serializer.py index c1b6f5f9..f47a11c9 100644 --- a/app/stac_api/validators_serializer.py +++ b/app/stac_api/validators_serializer.py @@ -129,12 +129,13 @@ def validate_asset_file(href, original_name, attrs): if asset_multihash is None: logger.error( - f"Asset at href {href} doesn't provide a mandatory checksum header " + "Asset at href %s doesn't provide a mandatory checksum header " "(x-amz-meta-sha256) for validation", href ) - raise ValidationError(code='query-invalid', detail=_(f"Asset at href {href} doesn't provide a mandatory checksum header " - "(x-amz-meta-sha256) for validation")) from None + raise ValidationError(code='query-invalid', detail=_( + f"Asset at href {href} doesn't provide a mandatory checksum header " + "(x-amz-meta-sha256) for validation")) from None expected_multihash = attrs.get('checksum_multihash', None) if expected_multihash is None: From e667e1aee99531bb868aa46c8c61eb79cf2f5693 Mon Sep 17 00:00:00 2001 From: Marcel Clausen Date: Thu, 25 Mar 2021 17:14:57 +0100 Subject: [PATCH 033/105] fix Dockerfile --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index ac0c680e..c180ebbb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -17,9 +17,9 @@ RUN groupadd -r geoadmin \ && pip3 install pipenv \ && pipenv --version -COPY Pipfile* multihash.patch /tmp/ +COPY Pipfile* /tmp/ RUN cd /tmp && \ - pipenv install --system --deploy --ignore-pipfile && \ + pipenv install --system --deploy --ignore-pipfile # Set the working dir and copy the app WORKDIR /app From 54192f0e920ff59ac081b92e72970cc4d5c4fdb3 Mon Sep 17 00:00:00 2001 From: Tobias Reber Date: Tue, 30 Mar 2021 17:00:57 +0200 Subject: [PATCH 034/105] BGDIINF_SB-1754: wrong media type with 451 wrong media type should be answered with a 415 Feedback @ltbsh: Test the write endpoints returning 415 --- app/stac_api/apps.py | 5 +- app/tests/test_generic_api.py | 94 ++++++++++++++++++++++++++--------- 2 files changed, 75 insertions(+), 24 deletions(-) diff --git a/app/stac_api/apps.py b/app/stac_api/apps.py index bc990ded..38b9f24c 100644 --- a/app/stac_api/apps.py +++ b/app/stac_api/apps.py @@ -45,7 +45,10 @@ def custom_exception_handler(exc, context): "request.query": context['request']._request.GET.urlencode() } - if context['request']._request.method.upper() in ["PATCH", "POST", "PUT"]: + if ( + context['request']._request.method.upper() in ["PATCH", "POST", "PUT"] and + 'application/json' in context['request']._request.headers['content-type'].lower() + ): extra["request.payload"] = context['request'].data logger.error("Response %s: %s", response.status_code, response.data, extra=extra) diff --git a/app/tests/test_generic_api.py b/app/tests/test_generic_api.py index 0875f37b..66b11cd9 100644 --- a/app/tests/test_generic_api.py +++ b/app/tests/test_generic_api.py @@ -194,6 +194,15 @@ def setUp(self): db_create=True, ) + def get_etag(self, endpoint): + # Get first the ETag + _response = self.client.get(f"/{STAC_BASE_V}/{endpoint}") + self.assertStatusCode(200, _response) + # The ETag change between each test call due to the created, + # updated time that are in the hash computation of the ETag + self.check_header_etag(None, 
_response) + return _response['ETag'] + def test_get_precondition(self): for endpoint in [ f'collections/{self.collection["name"]}', @@ -254,13 +263,6 @@ def test_put_precondition(self): ), ]: with self.subTest(endpoint=endpoint): - # Get first the ETag - response = self.client.get(f"/{STAC_BASE_V}/{endpoint}") - self.assertStatusCode(200, response) - # The ETag change between each test call due to the created, updated time that are - # in the hash computation of the ETag - self.check_header_etag(None, response) - etag1 = response['ETag'] response = self.client.put( f"/{STAC_BASE_V}/{endpoint}", @@ -274,10 +276,69 @@ def test_put_precondition(self): f"/{STAC_BASE_V}/{endpoint}", sample.get_json('put'), content_type="application/json", - HTTP_IF_MATCH=etag1 + HTTP_IF_MATCH=self.get_etag(endpoint) ) self.assertStatusCode(200, response) + def test_wrong_media_type(self): + client_login(self.client) + + for (request_methods, endpoint, data) in [ + ( + ['put', 'patch'], + f'collections/{self.collection["name"]}', + {}, + ), + ( + ['put', 'patch'], + f'collections/{self.collection["name"]}/items/{self.item["name"]}', + {}, + ), + ( + ['post'], + 'collections', + self.factory.create_collection_sample( + name=self.collection["name"], + sample='collection-2', + ), + ), + ( + ['post'], + f'collections/{self.collection["name"]}/items', + self.factory.create_item_sample( + collection=self.collection.model, + name=self.item["name"], + sample='item-2', + ), + ), + ( + ['post'], + f'collections/{self.collection["name"]}/items/{self.item["name"]}' + f'/assets', + self.factory.create_asset_sample( + item=self.item.model, + name=self.asset["name"], + sample='asset-1-updated', + media_type=self.asset['media_type'], + checksum_multihash=self.asset["checksum_multihash"] + ), + ), + (['post'], 'search', { + "query": { + "title": { + "eq": "My item 1" + } + } + }), + ]: + with self.subTest(endpoint=endpoint): + client_requests = [getattr(self.client, method) for method in request_methods] + for client_request in client_requests: + response = client_request( + f"/{STAC_BASE_V}/{endpoint}", data=data, content_type="plain/text" + ) + self.assertStatusCode(415, response) + def test_patch_precondition(self): client_login(self.client) for (endpoint, data) in [ @@ -304,13 +365,6 @@ def test_patch_precondition(self): ), ]: with self.subTest(endpoint=endpoint): - # Get first the ETag - response = self.client.get(f"/{STAC_BASE_V}/{endpoint}") - self.assertStatusCode(200, response) - # The ETag change between each test call due to the created, updated time that are - # in the hash computation of the ETag - self.check_header_etag(None, response) - etag1 = response['ETag'] response = self.client.patch( f"/{STAC_BASE_V}/{endpoint}", @@ -324,7 +378,7 @@ def test_patch_precondition(self): f"/{STAC_BASE_V}/{endpoint}", data, content_type="application/json", - HTTP_IF_MATCH=etag1 + HTTP_IF_MATCH=self.get_etag(endpoint) ) self.assertStatusCode(200, response) @@ -337,13 +391,7 @@ def test_delete_precondition(self): # f'collections/{self.collection["name"]}', ]: with self.subTest(endpoint=endpoint): - # Get first the ETag - response = self.client.get(f"/{STAC_BASE_V}/{endpoint}") - self.assertStatusCode(200, response) - # The ETag change between each test call due to the created, updated time that are - # in the hash computation of the ETag - self.check_header_etag(None, response) - etag1 = response['ETag'] + etag1 = self.get_etag(endpoint) response = self.client.delete( f"/{STAC_BASE_V}/{endpoint}", From 
c8ce251619796d5a478e037d1cd7caaccbc0eb3a Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 30 Mar 2021 13:40:24 +0200 Subject: [PATCH 035/105] BGDIINF_SB-1739: Reorganized URLS Reorganized the URLs using the django includes. --- app/stac_api/urls.py | 52 +++++++++++++++++++++----------------------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/app/stac_api/urls.py b/app/stac_api/urls.py index ec5e7485..5a002024 100644 --- a/app/stac_api/urls.py +++ b/app/stac_api/urls.py @@ -1,4 +1,5 @@ from django.conf import settings +from django.urls import include from django.urls import path from rest_framework.authtoken.views import obtain_auth_token @@ -16,36 +17,33 @@ STAC_VERSION_SHORT = settings.STAC_VERSION_SHORT HEALTHCHECK_ENDPOINT = settings.HEALTHCHECK_ENDPOINT +asset_urls = [ + path("", AssetDetail.as_view(), name='asset-detail'), +] + +item_urls = [ + path("", ItemDetail.as_view(), name='item-detail'), + path("/assets", AssetsList.as_view(), name='assets-list'), + path("/assets/", include(asset_urls)) +] + +collection_urls = [ + path("", CollectionDetail.as_view(), name='collection-detail'), + path("/items", ItemsList.as_view(), name='items-list'), + path("/items/", include(item_urls)) +] + urlpatterns = [ path(f"{HEALTHCHECK_ENDPOINT}", CollectionList.as_view(), name='health-check'), path("get-token", obtain_auth_token, name='get-token'), - path(f"{STAC_VERSION_SHORT}/", LandingPageDetail.as_view(), name='landing-page'), - path(f"{STAC_VERSION_SHORT}/conformance", ConformancePageDetail.as_view(), name='conformance'), - path(f"{STAC_VERSION_SHORT}/search", SearchList.as_view(), name='search-list'), - path(f"{STAC_VERSION_SHORT}/collections", CollectionList.as_view(), name='collections-list'), - path( - f"{STAC_VERSION_SHORT}/collections/", - CollectionDetail.as_view(), - name='collection-detail' - ), - path( - f"{STAC_VERSION_SHORT}/collections//items", - ItemsList.as_view(), - name='items-list' - ), - path( - f"{STAC_VERSION_SHORT}/collections//items/", - ItemDetail.as_view(), - name='item-detail' - ), - path( - f"{STAC_VERSION_SHORT}/collections//items//assets", - AssetsList.as_view(), - name='assets-list' - ), path( - f"{STAC_VERSION_SHORT}/collections//items//assets/", - AssetDetail.as_view(), - name='asset-detail' + f"{STAC_VERSION_SHORT}/", + include([ + path("", LandingPageDetail.as_view(), name='landing-page'), + path("conformance", ConformancePageDetail.as_view(), name='conformance'), + path("search", SearchList.as_view(), name='search-list'), + path("collections", CollectionList.as_view(), name='collections-list'), + path("collections/", include(collection_urls)) + ]) ) ] From 1f7a0996abee3ee82dbbe3e5f77db2e96d5c59e7 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 30 Mar 2021 13:43:25 +0200 Subject: [PATCH 036/105] BGDIINF_SB-1739: follow django Validator best practice Updated the django validators to follow the django best practice (see https://docs.djangoproject.com/en/3.1/ref/forms/validation/#raising-validationerror) Also changed the checksum_multihash validator to make sure that it is a sha256 checksum as we will only support those. 
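The convention adopted here mirrors the example in the linked Django documentation: raise
ValidationError with a placeholder message plus `params` and a `code` instead of interpolating
values with an f-string. A minimal, generic sketch (not code from this repository):

    from django.core.exceptions import ValidationError
    from django.utils.translation import gettext_lazy as _

    def validate_lowercase(value):
        # Placeholder message plus params/code keeps messages translatable
        # and lets callers and tests match errors on their code.
        if value != value.lower():
            raise ValidationError(
                _('Invalid value %(value)s, only lowercase is allowed'),
                params={'value': value},
                code='invalid',
            )
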
--- app/stac_api/serializers.py | 5 +-- app/stac_api/validators.py | 76 ++++++++++++++++++++++--------------- 2 files changed, 47 insertions(+), 34 deletions(-) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index dc217512..5b14df82 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -24,9 +24,9 @@ from stac_api.utils import build_asset_href from stac_api.utils import isoformat from stac_api.validators import MEDIA_TYPES_MIMES -from stac_api.validators import validate_asset_multihash from stac_api.validators import validate_asset_name from stac_api.validators import validate_asset_name_with_media_type +from stac_api.validators import validate_checksum_multihash_sha256 from stac_api.validators import validate_geoadmin_variant from stac_api.validators import validate_item_properties_datetimes from stac_api.validators import validate_name @@ -528,7 +528,6 @@ def update_or_create(self, validated_data, **kwargs): def to_representation(self, instance): name = instance.name - stac_base_v = settings.STAC_BASE_V request = self.context.get("request") representation = super().to_representation(instance) # Add auto links @@ -684,7 +683,7 @@ class Meta: max_length=255, required=False, allow_blank=False, - validators=[validate_asset_multihash] + validators=[validate_checksum_multihash_sha256] ) # read only fields href = HrefField(source='file', read_only=True) diff --git a/app/stac_api/validators.py b/app/stac_api/validators.py index b6426a4f..273ac1d1 100644 --- a/app/stac_api/validators.py +++ b/app/stac_api/validators.py @@ -3,6 +3,8 @@ from datetime import datetime import multihash +from multihash.constants import CODE_HASHES +from multihash.constants import HASH_CODES from django.contrib.gis.geos import GEOSGeometry from django.core.exceptions import ValidationError @@ -72,7 +74,7 @@ def validate_name(name): logger.error('Invalid name %s, only the following characters are allowed: 0-9a-z-_.', name) raise ValidationError( _('Invalid id, only the following characters are allowed: 0-9a-z-_.'), - code='id' + code='invalid' ) @@ -81,7 +83,7 @@ def validate_asset_name(name): ''' if not name: logger.error('Invalid asset name, must not be empty') - raise ValidationError({'id': _("Invalid id must not be empty")}, code='id') + raise ValidationError(_("Invalid id must not be empty"), code='missing') validate_name(name) ext = name.rsplit('.', maxsplit=1)[-1] if f'.{ext}' not in MEDIA_TYPES_EXTENSIONS: @@ -89,8 +91,9 @@ def validate_asset_name(name): 'Invalid name %s extension %s, name must ends with a valid file extension', name, ext ) raise ValidationError( - _(f"Invalid id extension '.{ext}', id must have a valid file extension"), - code='id' + _("Invalid id extension '.%(ext)s', id must have a valid file extension"), + params={'ext': ext}, + code='invalid' ) @@ -100,7 +103,11 @@ def validate_asset_name_with_media_type(name, media_type): ext = f".{name.rsplit('.', maxsplit=1)[-1]}" if media_type not in MEDIA_TYPES_BY_TYPE: logger.error("Invalid media_type %s for asset %s", media_type, name) - raise ValidationError(_(f"Invalid media type {media_type}"), code='type') + raise ValidationError( + _("Invalid media type %(media_type)s"), + params={'media_type': media_type}, + code='invalid' + ) if ext not in MEDIA_TYPES_BY_TYPE[media_type][2]: logger.error( "Invalid name %s extension %s, don't match the media type %s", @@ -109,8 +116,9 @@ def validate_asset_name_with_media_type(name, media_type): MEDIA_TYPES_BY_TYPE[media_type], ) raise ValidationError( - 
_(f"Invalid id extension '{ext}', id must match its media type {media_type}"), - code='id' + _("Invalid id extension '%(ext)s', id must match its media type %(media_type)s"), + params={'ext': ext, 'media_type': media_type}, + code='invalid' ) @@ -132,9 +140,10 @@ def validate_geoadmin_variant(variant): variant ) raise ValidationError( - _(f'Invalid geoadmin:variant "{variant}", ' + _('Invalid geoadmin:variant "%(variant)s", ' 'special characters beside one space are not allowed'), - code="geoadmin:variant" + params={'variant': variant}, + code="invalid" ) @@ -153,8 +162,9 @@ def validate_link_rel(value): if value in invalid_rel: logger.error("Link rel attribute %s is not allowed, it is a reserved attribute", value) raise ValidationError( - _(f'Invalid rel attribute, must not be in {invalid_rel}'), - code="rel" + _('Invalid rel attribute, must not be in %(invalid_rel)s'), + params={'invalid_rel': invalid_rel}, + code="invalid" ) @@ -173,13 +183,15 @@ def validate_geometry(geometry): ''' geos_geometry = GEOSGeometry(geometry) if geos_geometry.empty: - message = "The geometry is empty: %s" % geos_geometry.wkt - logger.error(message) - raise ValidationError(_(message), code='geometry') + message = "The geometry is empty: %(error)s" + params = {'error': geos_geometry.wkt} + logger.error(message, params) + raise ValidationError(_(message), params=params, code='invalid') if not geos_geometry.valid: - message = "The geometry is not valid: %s" % geos_geometry.valid_reason - logger.error(message) - raise ValidationError(_(message), code='geometry') + message = "The geometry is not valid: %s" + params = {'error': geos_geometry.valid_reason} + logger.error(message, params) + raise ValidationError(_(message), params=params, code='invalid') return geometry @@ -208,29 +220,31 @@ def validate_item_properties_datetimes_dependencies( properties_end_datetime = fromisoformat(properties_end_datetime) except ValueError as error: logger.error("Invalid datetime string %s", error) - raise ValidationError(f'Invalid datetime string {error}') from error + raise ValidationError( + _('Invalid datetime string %(error)s'), params={'error': error}, code='invalid' + ) from error if properties_datetime is not None: if (properties_start_datetime is not None or properties_end_datetime is not None): message = 'Cannot provide together property datetime with datetime range ' \ '(start_datetime, end_datetime)' logger.error(message) - raise ValidationError(_(message)) + raise ValidationError(_(message), code='invalid') else: if properties_end_datetime is None: message = "Property end_datetime can't be null when no property datetime is given" logger.error(message) - raise ValidationError(_(message)) + raise ValidationError(_(message), code='invalid') if properties_start_datetime is None: message = "Property start_datetime can't be null when no property datetime is given" logger.error(message) - raise ValidationError(_(message)) + raise ValidationError(_(message), code='invalid') if properties_datetime is None: if properties_end_datetime < properties_start_datetime: message = "Property end_datetime can't refer to a date earlier than property "\ "start_datetime" - raise ValidationError(_(message)) + raise ValidationError(_(message), code='invalid') def validate_item_properties_datetimes( @@ -246,10 +260,10 @@ def validate_item_properties_datetimes( ) -def validate_asset_multihash(value): - '''Validate the Asset multihash field +def validate_checksum_multihash_sha256(value): + '''Validate the checksum multihash field - The field value 
must be a multihash string + The field value must be a multihash sha256 string Args: value: string @@ -260,10 +274,10 @@ def validate_asset_multihash(value): ''' try: mhash = multihash.decode(multihash.from_hex_string(value)) - except ValueError as error: + except (ValueError, TypeError) as error: logger.error("Invalid multihash %s; %s", value, error) - raise ValidationError( - code='checksum:multihash', - message=_('Invalid multihash value; %(error)s'), - params={'error': error} - ) from None + raise ValidationError(_('Invalid multihash value; %(error)s'), + params={'error': error}, code='invalid') from None + if mhash.code != HASH_CODES['sha2-256']: + raise ValidationError(_('Invalid multihash value: must be sha2-256 but is %(code)s'), + params={'code': CODE_HASHES[mhash.code]}, code='invalid') From 8ca8afbc0886fbbc820c3044a11748b458bac934 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 30 Mar 2021 13:49:31 +0200 Subject: [PATCH 037/105] BGDIINF_SB-1739: Moved and reused view validation as well as etag fix The view validation code to check if parent exists in list views has been moved in a separate module in order to be better reused. The performance has been slightly improved due to the fact that we only do one DB hit by checking the direct parent which can only exists if its parent exists. The Asset Etag was also not computed correctly if two asset in two different items would have had the same name which is allowed. So now the query to compute the asset etag takes also the parents into account. --- app/stac_api/validators_view.py | 94 +++++++++++++++++++++++++++++++++ app/stac_api/views.py | 29 ++++------ 2 files changed, 105 insertions(+), 18 deletions(-) create mode 100644 app/stac_api/validators_view.py diff --git a/app/stac_api/validators_view.py b/app/stac_api/validators_view.py new file mode 100644 index 00000000..89d03d2f --- /dev/null +++ b/app/stac_api/validators_view.py @@ -0,0 +1,94 @@ +import logging + +from django.http import Http404 +from django.utils.translation import gettext_lazy as _ + +from rest_framework.exceptions import ValidationError + +from stac_api.models import Asset +from stac_api.models import Collection +from stac_api.models import Item + +logger = logging.getLogger(__name__) + + +def validate_collection(kwargs): + '''Validate that the collection given in request kwargs exists + + Args: + kwargs: dict + request kwargs dictionary + + Raises: + Http404: when the collection doesn't exists + ''' + if not Collection.objects.filter(name=kwargs['collection_name']).exists(): + logger.error("The collection %s does not exist", kwargs['collection_name']) + raise Http404(f"The collection {kwargs['collection_name']} does not exist") + + +def validate_item(kwargs): + '''Validate that the item given in request kwargs exists + + Args: + kwargs: dict + request kwargs dictionary + + Raises: + Http404: when the item doesn't exists + ''' + if not Item.objects.filter( + name=kwargs['item_name'], collection__name=kwargs['collection_name'] + ).exists(): + logger.error( + "The item %s is not part of the collection %s", + kwargs['item_name'], + kwargs['collection_name'] + ) + raise Http404( + f"The item {kwargs['item_name']} is not part of the collection " + f"{kwargs['collection_name']}" + ) + + +def validate_asset(kwargs): + '''Validate that the asset given in request kwargs exists + + Args: + kwargs: dict + request kwargs dictionary + + Raises: + Http404: when the asset doesn't exists + ''' + if not Asset.objects.filter( + name=kwargs['asset_name'], + 
item__name=kwargs['item_name'], + item__collection__name=kwargs['collection_name'] + ).exists(): + logger.error( + "The asset %s is not part of the item %s in collection %s", + kwargs['asset_name'], + kwargs['item_name'], + kwargs['collection_name'] + ) + raise Http404( + f"The asset {kwargs['asset_name']} is not part of " + f"the item {kwargs['item_name']} in collection {kwargs['collection_name']}" + ) + + +def validate_upload_parts(request): + '''Validate the multiparts upload parts from request + Args: + request: HttpRequest + + ''' + if 'parts' not in request.data: + message = 'Required "parts" attribute is missing' + logger.error(message, extra={'request': request}) + raise ValidationError({'parts': _(message)}, code='missing') + if not isinstance(request.data['parts'], list): + message = f'Required "parts" must be a list, not a {type(request.data["parts"])}' + logger.error(message, extra={'request': request}) + raise ValidationError({'parts': _(message)}, code='invalid') diff --git a/app/stac_api/views.py b/app/stac_api/views.py index b12efe17..85f1b04b 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -4,7 +4,6 @@ from datetime import datetime from django.conf import settings -from django.http import Http404 from rest_framework import generics from rest_framework import mixins @@ -27,6 +26,8 @@ from stac_api.utils import harmonize_post_get_for_search from stac_api.utils import utc_aware from stac_api.validators_serializer import ValidateSearchRequest +from stac_api.validators_view import validate_collection +from stac_api.validators_view import validate_item logger = logging.getLogger(__name__) @@ -89,7 +90,13 @@ def get_asset_etag(request, *args, **kwargs): The ETag is an UUID4 computed on each object changes ''' - tag = get_etag(Asset.objects.filter(item__name=kwargs['item_name'], name=kwargs['asset_name'])) + tag = get_etag( + Asset.objects.filter( + item__collection__name=kwargs['collection_name'], + item__name=kwargs['item_name'], + name=kwargs['asset_name'] + ) + ) if settings.DEBUG_ENABLE_DB_EXPLAIN_ANALYZE: logger.debug( @@ -297,9 +304,7 @@ def get_queryset(self): return queryset def list(self, request, *args, **kwargs): - if not Collection.objects.filter(name=self.kwargs['collection_name']).exists(): - logger.error("The collection %s does not exist", self.kwargs['collection_name']) - raise Http404(f"The collection {self.kwargs['collection_name']} does not exists.") + validate_collection(self.kwargs) queryset = self.filter_queryset(self.get_queryset()) page = self.paginate_queryset(queryset) if page is not None: @@ -411,19 +416,7 @@ def get_queryset(self): ) def get(self, request, *args, **kwargs): - if not Collection.objects.filter(name=self.kwargs['collection_name']).exists(): - logger.error("The collection %s does not exist", self.kwargs['collection_name']) - raise Http404(f"The collection {self.kwargs['collection_name']} does not exist") - if not Item.objects.filter(name=self.kwargs['item_name']).exists(): - logger.error( - "The item %s is not part of the collection, %s", - self.kwargs['item_name'], - self.kwargs['collection_name'] - ) - raise Http404( - f"The item {self.kwargs['item_name']} is not part of the collection " - f"{self.kwargs['collection_name']}" - ) + validate_item(self.kwargs) queryset = self.filter_queryset(self.get_queryset()) serializer = self.get_serializer(queryset, many=True) From 28887d56bb742ba5d9c3bfbd41c7543991152271 Mon Sep 17 00:00:00 2001 From: Tobias Reber Date: Tue, 30 Mar 2021 11:08:36 +0200 Subject: [PATCH 038/105] 
BGDIINF_SB-1740: Uncommented delete asset on S3 --- app/stac_api/signals.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/app/stac_api/signals.py b/app/stac_api/signals.py index af585df6..8446ec60 100644 --- a/app/stac_api/signals.py +++ b/app/stac_api/signals.py @@ -1,17 +1,18 @@ # Un-comment with BGDIINF_SB-1625 -# import logging +import logging -# from django.db.models.signals import pre_delete -# from django.dispatch import receiver +from django.db.models.signals import pre_delete +from django.dispatch import receiver -# from stac_api.models import Asset +from stac_api.models import Asset -# logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__) -# @receiver(pre_delete, sender=Asset) -# def delete_s3_asset(sender, instance, **kwargs): -# # The file is not automatically deleted by Django -# # when the object holding its reference is deleted -# # hence it has to be done here. -# logger.info("The asset %s is deleted from s3", instance.file.name) -# instance.file.delete(save=False) + +@receiver(pre_delete, sender=Asset) +def delete_s3_asset(sender, instance, **kwargs): + # The file is not automatically deleted by Django + # when the object holding its reference is deleted + # hence it has to be done here. + logger.info("The asset %s is deleted from s3", instance.file.name) + instance.file.delete(save=False) From 0ae7799b6730e0ee4b1d5ab815e8a428e60523b6 Mon Sep 17 00:00:00 2001 From: Tobias Reber Date: Tue, 30 Mar 2021 15:09:44 +0200 Subject: [PATCH 039/105] BGDIINF_SB-1740: Unittests on S3 after delete --- app/stac_api/signals.py | 1 - app/tests/test_admin_page.py | 2 +- app/tests/test_assets_endpoint.py | 7 ++++++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/app/stac_api/signals.py b/app/stac_api/signals.py index 8446ec60..4ac74032 100644 --- a/app/stac_api/signals.py +++ b/app/stac_api/signals.py @@ -1,4 +1,3 @@ -# Un-comment with BGDIINF_SB-1625 import logging from django.db.models.signals import pre_delete diff --git a/app/tests/test_admin_page.py b/app/tests/test_admin_page.py index 21c10128..706db20a 100644 --- a/app/tests/test_admin_page.py +++ b/app/tests/test_admin_page.py @@ -695,7 +695,7 @@ def test_add_remove_asset(self): Asset.objects.filter(name=data["name"]).exists(), msg="Admin page asset still in DB" ) - # self.assertS3ObjectNotExists(path) # Un-comment with BGDIINF_SB-1625 + self.assertS3ObjectNotExists(path) @mock_s3_asset_file def test_add_update_asset_invalid_media_type(self): diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index 3d977f30..8233f930 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -8,12 +8,14 @@ from django.test import Client from stac_api.models import Asset +from stac_api.utils import get_asset_path from stac_api.utils import get_sha256_multihash from stac_api.utils import utc_aware from tests.base_test import StacBaseTestCase from tests.data_factory import Factory from tests.utils import client_login +from tests.utils import S3TestMixin from tests.utils import mock_s3_asset_file from tests.utils import upload_file_on_s3 @@ -647,7 +649,7 @@ def test_asset_endpoint_patch_read_only_in_payload(self): msg='Unexpected error message') -class AssetsDeleteEndpointTestCase(StacBaseTestCase): +class AssetsDeleteEndpointTestCase(StacBaseTestCase, S3TestMixin): @mock_s3_asset_file def setUp(self): # pylint: disable=invalid-name @@ -664,10 +666,13 @@ def test_asset_endpoint_delete_asset(self): item_name = 
self.item.name asset_name = self.asset.name path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets/{asset_name}' + s3_path = get_asset_path(self.item, asset_name) + self.assertS3ObjectExists(s3_path) response = self.client.delete(path) self.assertStatusCode(200, response) # Check that is has really been deleted + self.assertS3ObjectNotExists(s3_path) response = self.client.get(path) self.assertStatusCode(404, response) From 5833383ff392d9b0de4773d77e893ea38a9bb35b Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Sun, 21 Mar 2021 14:28:25 +0100 Subject: [PATCH 040/105] BGDIINF_SB-1724 using UpsertModelSerializerMixin | more unit tests * using UpsertModelSerializerMixing for asset an item serializers * adapted validators for asset and item serializer * added update_or_create() of asset and item serializer * added create() with a call of validate_uniqueness_and_create() in asset serializer * added more unit tests --- app/stac_api/serializers.py | 57 ++++++++++++++++++++++++------- app/stac_api/views.py | 4 +-- app/tests/test_assets_endpoint.py | 43 ++++++++++++++++------- app/tests/test_items_endpoint.py | 18 ++++++++++ 4 files changed, 96 insertions(+), 26 deletions(-) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index 5b14df82..29eb05f5 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -616,7 +616,7 @@ def to_representation(self, value): return build_asset_href(request, path) -class AssetBaseSerializer(NonNullModelSerializer): +class AssetBaseSerializer(NonNullModelSerializer, UpsertModelSerializerMixin): '''Asset serializer base class ''' @@ -643,11 +643,7 @@ class Meta: item = serializers.SlugRelatedField( slug_field='name', write_only=True, queryset=Item.objects.all() ) - id = serializers.CharField( - source='name', - max_length=255, - validators=[validate_asset_name, UniqueValidator(queryset=Asset.objects.all())] - ) + id = serializers.CharField(source='name', max_length=255, validators=[validate_asset_name]) title = serializers.CharField( required=False, max_length=255, allow_null=True, allow_blank=False ) @@ -690,6 +686,26 @@ class Meta: created = serializers.DateTimeField(read_only=True) updated = serializers.DateTimeField(read_only=True) + def create(self, validated_data): + asset = validate_uniqueness_and_create(Asset, validated_data) + return asset + + def update_or_create(self, validated_data, **kwargs): + """ + Update or create the asset object selected by kwargs and return the instance. + When no asset object matching the kwargs selection, a new asset is created. + Args: + validated_data: dict + Copy of the validated_data to use for update + kwargs: dict + Object selection arguments (NOTE: the selection arguments must match a unique + object in DB otherwise an IntegrityError will be raised) + Returns: tuple + Asset instance and True if created otherwise false + """ + asset, created = Asset.objects.update_or_create(**kwargs, defaults=validated_data) + return asset, created + def validate(self, attrs): if not self.partial: validate_asset_name_with_media_type(attrs.get('name'), attrs.get('media_type')) @@ -775,7 +791,7 @@ class Meta: ] -class ItemSerializer(NonNullModelSerializer): +class ItemSerializer(NonNullModelSerializer, UpsertModelSerializerMixin): class Meta: model = Item @@ -796,10 +812,7 @@ class Meta: # in model ! 
collection = serializers.SlugRelatedField(slug_field='name', queryset=Collection.objects.all()) id = serializers.CharField( - source='name', - required=True, - max_length=255, - validators=[validate_name, UniqueValidator(queryset=Collection.objects.all())] + source='name', required=True, max_length=255, validators=[validate_name] ) properties = ItemsPropertiesSerializer(source='*', required=True) geometry = gis_serializers.GeometryField(required=True) @@ -841,7 +854,7 @@ def to_representation(self, instance): def create(self, validated_data): links_data = validated_data.pop('links', []) - item = Item.objects.create(**validated_data) + item = validate_uniqueness_and_create(Item, validated_data) update_or_create_links( instance_type="item", model=ItemLink, instance=item, links_data=links_data ) @@ -854,6 +867,26 @@ def update(self, instance, validated_data): ) return super().update(instance, validated_data) + def update_or_create(self, validated_data, **kwargs): + """ + Update or create the item object selected by kwargs and return the instance. + When no item object matching the kwargs selection, a new item is created. + Args: + validated_data: dict + Copy of the validated_data to use for update + kwargs: dict + Object selection arguments (NOTE: the selection arguments must match a unique + object in DB otherwise an IntegrityError will be raised) + Returns: tuple + Item instance and True if created otherwise false + """ + links_data = validated_data.pop('links', []) + item, created = Item.objects.update_or_create(**kwargs, defaults=validated_data) + update_or_create_links( + instance_type="item", model=ItemLink, instance=item, links_data=links_data + ) + return item, created + def validate(self, attrs): if ( not self.partial or \ diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 85f1b04b..18a666d5 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -379,7 +379,7 @@ def get(self, request, *args, **kwargs): # Here the etag is only added to support pre-conditional If-Match and If-Not-Match @etag(get_item_etag) def put(self, request, *args, **kwargs): - return self.update(request, *args, **kwargs) + return self.upsert(request, *args, **kwargs) # Here the etag is only added to support pre-conditional If-Match and If-Not-Match @etag(get_item_etag) @@ -488,7 +488,7 @@ def get(self, request, *args, **kwargs): # Here the etag is only added to support pre-conditional If-Match and If-Not-Match @etag(get_asset_etag) def put(self, request, *args, **kwargs): - return self.update(request, *args, **kwargs) + return self.upsert(request, *args, **kwargs) # Here the etag is only added to support pre-conditional If-Match and If-Not-Match @etag(get_asset_etag) diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index 8233f930..7270189d 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -445,20 +445,39 @@ def setUp(self): # pylint: disable=invalid-name client_login(self.client) self.maxDiff = None # pylint: disable=invalid-name - def test_asset_put_dont_exists(self): - collection_name = self.collection['name'] - item_name = self.item['name'] - payload_json = self.factory.create_asset_sample( - item=self.item.model, sample='asset-2', create_asset_file=False - ).get_json('put') - - # the dataset to update does not exist yet - path = \ - (f"/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets/" - f"{payload_json['id']}") - response = self.client.put(path, data=payload_json, 
content_type='application/json') + def test_put_non_existing_asset(self): + collection = self.collection.model + item = self.item.model + asset = self.factory.create_asset_sample(item=item, create_asset_file=True) + asset_name = asset['name'] + print(asset) + + path = f'/{STAC_BASE_V}/collections/{collection.name}/items/{item.name}/assets/{asset_name}' + + # Check that assert does not exist already + response = self.client.get(path) self.assertStatusCode(404, response) + # Check also, that the asset does not exist in the DB already + self.assertFalse( + Asset.objects.filter(name=asset_name).exists(), msg="Deleted asset still found in DB" + ) + + # Now use upsert to create the new assert + response = self.client.put( + path, data=asset.get_json('post'), content_type="application/json" + ) + json_data = response.json() + self.assertStatusCode(201, response) + self.check_header_location(f"{path}", response) + self.check_stac_asset(asset.json, json_data, collection.name, item.name, ignore=['item']) + + # Check the data by reading it back + response = self.client.get(response['Location']) + json_data = response.json() + self.assertStatusCode(200, response) + self.check_stac_asset(asset.json, json_data, collection.name, item.name, ignore=['item']) + def test_asset_endpoint_put(self): collection_name = self.collection['name'] item_name = self.item['name'] diff --git a/app/tests/test_items_endpoint.py b/app/tests/test_items_endpoint.py index 47098392..9943cb05 100644 --- a/app/tests/test_items_endpoint.py +++ b/app/tests/test_items_endpoint.py @@ -3,8 +3,10 @@ from datetime import timedelta from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.gis.geos.geometry import GEOSGeometry from django.test import Client +from django.urls import reverse from stac_api.models import BBOX_CH from stac_api.models import Item @@ -14,8 +16,10 @@ from stac_api.utils import utc_aware from tests.base_test import StacBaseTestCase +from tests.base_test import StacBaseTransactionTestCase from tests.data_factory import Factory from tests.utils import client_login +from tests.utils import disableLogger logger = logging.getLogger(__name__) @@ -667,6 +671,20 @@ def test_item_endpoint_patch_rename_item(self): self.assertStatusCode(200, response) self.assertEqual(data['id'], json_data['id']) + def test_item_upsert_create(self): + + sample = self.factory.create_item_sample(self.collection, required_only=True) + print(sample.json["id"]) + path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{sample.json["id"]}' + print(sample.get_json('post')) + response = self.client.put( + path, data=sample.get_json('post'), content_type="application/json" + ) + json_data = response.json() + print(json_data) + self.assertStatusCode(201, response) + self.check_stac_item(sample.json, json_data, self.collection.name) + class ItemsDeleteEndpointTestCase(StacBaseTestCase): From b16b1e5e175bd79d1edf866bc620c16502b2c6d8 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Fri, 26 Mar 2021 06:19:03 +0100 Subject: [PATCH 041/105] BGDIING_SB-1724 get instance for serializer when UPSERTing already existing objects In case of UPSERTing already existing objects, the serializer needs to be passed the correct instance. For this self.get_object() ineeds to be called inside the UpdateInsertModelMixin. In case self.get_object() fails with a Http404, instance will be set to None and the object will be created. Otherwise the instance will be passed to the serializer and the object will be updated. 
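Condensed, the flow described above looks roughly like this; it is a sketch of the logic only,
the actual mixin change follows in the diff below:

    from django.http import Http404

    def get_upsert_serializer(view, data):
        # Reuse the existing instance if the lookup succeeds so the serializer
        # performs an update; on Http404 fall back to a plain create serializer.
        try:
            instance = view.get_object()
        except Http404:
            instance = None
        if instance is not None:
            return view.get_serializer(instance, data=data)
        return view.get_serializer(data=data)
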
--- app/stac_api/views_mixins.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/app/stac_api/views_mixins.py b/app/stac_api/views_mixins.py index efea2882..d9f2b5fc 100644 --- a/app/stac_api/views_mixins.py +++ b/app/stac_api/views_mixins.py @@ -1,6 +1,7 @@ import logging from django.db import transaction +from django.http import Http404 from rest_framework import status from rest_framework.response import Response @@ -86,7 +87,18 @@ def update(self, request, *args, **kwargs): @transaction.atomic def upsert(self, request, *args, **kwargs): data = self.get_write_request_data(request, *args, **kwargs) - serializer = self.get_serializer(data=data) + try: + instance = self.get_object() + except Http404: + instance = None + + if instance: + partial = kwargs.pop('partial', False) + serializer_kwargs = {'partial': partial} + serializer = self.get_serializer(instance, data=data, **serializer_kwargs) + else: + serializer = self.get_serializer(data=data) + serializer.is_valid(raise_exception=True) lookup = {} if self.lookup_url_kwarg: From 9ac846839eb975f68a29d180203013e1841d92be Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Fri, 26 Mar 2021 11:35:05 +0100 Subject: [PATCH 042/105] BGDIINF_SB-1724 UpsertHttp500 views for unit tests | unit tests added TestItemUpsertHttp500 and TestAssetHttp500 views to force raising a 500 in unit tests adapted urls.py and views_test.py accordingly added more unit tests for items and assets formatting of already existing migrations file --- app/config/urls.py | 12 ++ .../migrations/0003_auto_20210325_1001.py | 26 ++- app/stac_api/views_test.py | 16 ++ app/tests/test_assets_endpoint.py | 159 +++++++++++++- app/tests/test_items_endpoint.py | 198 ++++++++++++++---- 5 files changed, 366 insertions(+), 45 deletions(-) diff --git a/app/config/urls.py b/app/config/urls.py index 7eb7f981..99ff0482 100644 --- a/app/config/urls.py +++ b/app/config/urls.py @@ -39,6 +39,8 @@ def checker(request): import debug_toolbar from stac_api.views_test import TestHttp500 from stac_api.views_test import TestCollectionUpsertHttp500 + from stac_api.views_test import TestItemUpsertHttp500 + from stac_api.views_test import TestAssetUpsertHttp500 urlpatterns = [ path('__debug__/', include(debug_toolbar.urls)), @@ -48,4 +50,14 @@ def checker(request): TestCollectionUpsertHttp500.as_view(), name='test-collection-detail-http-500' ), + path( + 'tests/test_item_upsert_http_500//', + TestItemUpsertHttp500.as_view(), + name='test-item-detail-http-500' + ), + path( + 'tests/test_asset_upsert_http_500///', + TestAssetUpsertHttp500.as_view(), + name='test-asset-detail-http-500' + ), ] + urlpatterns diff --git a/app/stac_api/migrations/0003_auto_20210325_1001.py b/app/stac_api/migrations/0003_auto_20210325_1001.py index 8e132b55..ba3fc497 100644 --- a/app/stac_api/migrations/0003_auto_20210325_1001.py +++ b/app/stac_api/migrations/0003_auto_20210325_1001.py @@ -1,7 +1,9 @@ # Generated by Django 3.1.7 on 2021-03-25 10:01 -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations +from django.db import models + import stac_api.validators @@ -15,16 +17,32 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='asset', name='item', - field=models.ForeignKey(help_text='\n
Search Usage:
- arg will make a non exact search checking if arg is part of the Item path
- Multiple arg can be used, separated by spaces. This will search for all elements containing all arguments in their path
- "collectionID/itemID" will make an exact search for the specified item.
Examples:
- Searching for pixelkarte will return all items which have pixelkarte as a part of either their collection ID or their item ID
- Searching for pixelkarte 2016 4 will return all items which have pixelkarte, 2016 AND 4 as part of their collection ID or item ID
- Searching for "ch.swisstopo.pixelkarte.example/item2016-4-example" will yield only this item, if this item exists.
', on_delete=django.db.models.deletion.CASCADE, related_name='assets', related_query_name='asset', to='stac_api.item'), + field=models.ForeignKey( + help_text= + '\n
Search Usage:
- arg will make a non exact search checking if arg is part of the Item path
- Multiple arg can be used, separated by spaces. This will search for all elements containing all arguments in their path
- "collectionID/itemID" will make an exact search for the specified item.
Examples:
- Searching for pixelkarte will return all items which have pixelkarte as a part of either their collection ID or their item ID
- Searching for pixelkarte 2016 4 will return all items which have pixelkarte, 2016 AND 4 as part of their collection ID or item ID
- Searching for "ch.swisstopo.pixelkarte.example/item2016-4-example" will yield only this item, if this item exists.
', + on_delete=django.db.models.deletion.CASCADE, + related_name='assets', + related_query_name='asset', + to='stac_api.item' + ), ), migrations.AlterField( model_name='asset', name='name', - field=models.CharField(max_length=255, validators=[stac_api.validators.validate_asset_name], verbose_name='id'), + field=models.CharField( + max_length=255, + validators=[stac_api.validators.validate_asset_name], + verbose_name='id' + ), ), migrations.AlterField( model_name='item', name='collection', - field=models.ForeignKey(help_text='\n
Search Usage:
- arg will make a non exact search checking if arg is part of the collection ID
- Multiple arg can be used, separated by spaces. This will search for all collections ID containing all arguments.
- "collectionID" will make an exact search for the specified collection.
Examples:
- Searching for pixelkarte will return all collections which have pixelkarte as a part of their collection ID
- Searching for pixelkarte 2016 4 will return all collection which have pixelkarte, 2016 AND 4 as part of their collection ID
- Searching for ch.swisstopo.pixelkarte.example will yield only this collection, if this collection exists. Please note that it would not return a collection named ch.swisstopo.pixelkarte.example.2.
', on_delete=django.db.models.deletion.CASCADE, to='stac_api.collection'), + field=models.ForeignKey( + help_text= + '\n
Search Usage:
- arg will make a non exact search checking if arg is part of the collection ID
- Multiple arg can be used, separated by spaces. This will search for all collections ID containing all arguments.
- "collectionID" will make an exact search for the specified collection.
Examples:
- Searching for pixelkarte will return all collections which have pixelkarte as a part of their collection ID
- Searching for pixelkarte 2016 4 will return all collection which have pixelkarte, 2016 AND 4 as part of their collection ID
- Searching for ch.swisstopo.pixelkarte.example will yield only this collection, if this collection exists. Please note that it would not return a collection named ch.swisstopo.pixelkarte.example.2.
', + on_delete=django.db.models.deletion.CASCADE, + to='stac_api.collection' + ), ), ] diff --git a/app/stac_api/views_test.py b/app/stac_api/views_test.py index d3f9f1a4..3cde1a43 100644 --- a/app/stac_api/views_test.py +++ b/app/stac_api/views_test.py @@ -3,7 +3,9 @@ from rest_framework import generics from stac_api.models import LandingPage +from stac_api.views import AssetDetail from stac_api.views import CollectionDetail +from stac_api.views import ItemDetail logger = logging.getLogger(__name__) @@ -22,3 +24,17 @@ class TestCollectionUpsertHttp500(CollectionDetail): def perform_upsert(self, serializer, lookup): serializer.upsert(**lookup) raise AttributeError('test exception') + + +class TestItemUpsertHttp500(ItemDetail): + + def perform_upsert(self, serializer, lookup): + serializer.upsert(**lookup) + raise AttributeError('test exception') + + +class TestAssetUpsertHttp500(AssetDetail): + + def perform_upsert(self, serializer, lookup): + serializer.upsert(**lookup) + raise AttributeError('test exception') diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index 7270189d..6de7a9b1 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -5,7 +5,9 @@ from pprint import pformat from django.conf import settings +from django.contrib.auth import get_user_model from django.test import Client +from django.urls import reverse from stac_api.models import Asset from stac_api.utils import get_asset_path @@ -13,9 +15,14 @@ from stac_api.utils import utc_aware from tests.base_test import StacBaseTestCase +from tests.base_test import StacBaseTransactionTestCase +from tests.data_factory import AssetFactory +from tests.data_factory import CollectionFactory from tests.data_factory import Factory +from tests.data_factory import ItemFactory from tests.utils import client_login from tests.utils import S3TestMixin +from tests.utils import disableLogger from tests.utils import mock_s3_asset_file from tests.utils import upload_file_on_s3 @@ -445,12 +452,11 @@ def setUp(self): # pylint: disable=invalid-name client_login(self.client) self.maxDiff = None # pylint: disable=invalid-name - def test_put_non_existing_asset(self): + def test_asset_upsert_create(self): collection = self.collection.model item = self.item.model asset = self.factory.create_asset_sample(item=item, create_asset_file=True) asset_name = asset['name'] - print(asset) path = f'/{STAC_BASE_V}/collections/{collection.name}/items/{item.name}/assets/{asset_name}' @@ -667,6 +673,155 @@ def test_asset_endpoint_patch_read_only_in_payload(self): response.json()['description'], msg='Unexpected error message') + def test_asset_atomic_upsert_create_500(self): + sample = self.factory.create_asset_sample(self.item.model, create_asset_file=True) + + # the dataset to update does not exist yet + with self.settings(DEBUG_PROPAGATE_API_EXCEPTIONS=True), disableLogger('stac_api.apps'): + response = self.client.put( + reverse( + 'test-asset-detail-http-500', + args=[self.collection['name'], self.item['name'], sample['name']] + ), + data=sample.get_json('put'), + content_type='application/json' + ) + self.assertStatusCode(500, response) + self.assertEqual(response.json()['description'], "AttributeError('test exception')") + + # Make sure that the ressource has not been created + response = self.client.get( + reverse( + 'asset-detail', args=[self.collection['name'], self.item['name'], sample['name']] + ) + ) + self.assertStatusCode(404, response) + + def test_asset_atomic_upsert_update_500(self): + 
sample = self.factory.create_asset_sample( + self.item.model, name=self.asset['name'], create_asset_file=True + ) + + # Make sure samples is different from actual data + self.assertNotEqual(sample.attributes, self.asset.attributes) + + # the dataset to update does not exist yet + with self.settings(DEBUG_PROPAGATE_API_EXCEPTIONS=True), disableLogger('stac_api.apps'): + # because we explicitely test a crash here we don't want to print a CRITICAL log on the + # console therefore disable it. + response = self.client.put( + reverse( + 'test-asset-detail-http-500', + args=[self.collection['name'], self.item['name'], sample['name']] + ), + data=sample.get_json('put'), + content_type='application/json' + ) + self.assertStatusCode(500, response) + self.assertEqual(response.json()['description'], "AttributeError('test exception')") + + # Make sure that the ressource has not been created + response = self.client.get( + reverse( + 'asset-detail', args=[self.collection['name'], self.item['name'], sample['name']] + ) + ) + self.assertStatusCode(200, response) + self.check_stac_asset( + self.asset.json, + response.json(), + self.collection['name'], + self.item['name'], + ignore=['item'] + ) + + +class AssetRaceConditionTest(StacBaseTransactionTestCase): + + def setUp(self): + self.username = 'user' + self.password = 'dummy-password' + get_user_model().objects.create_superuser(self.username, password=self.password) + + def test_asset_upsert_race_condition(self): + workers = 5 + status_201 = 0 + collection_sample = CollectionFactory().create_sample(sample='collection-2') + item_sample = ItemFactory().create_sample(collection_sample.model, sample='item-1') + asset_sample = AssetFactory().create_sample(item_sample.model, sample='asset-1') + + def asset_atomic_upsert_test(worker): + # This method run on separate thread therefore it requires to create a new client and + # to login it for each call. + client = Client() + client.login(username=self.username, password=self.password) + return client.put( + reverse( + 'asset-detail', + args=[collection_sample['name'], item_sample['name'], asset_sample['name']] + ), + data=asset_sample.get_json('put'), + content_type='application/json' + ) + + # We call the PUT asset several times in parallel with the same data to make sure + # that we don't have any race condition. + responses, errors = self.run_parallel(workers, asset_atomic_upsert_test) + + for worker, response in responses: + if response.status_code == 201: + status_201 += 1 + self.assertIn( + response.status_code, [200, 201], + msg=f'Unexpected response status code {response.status_code} for worker {worker}' + ) + self.check_stac_asset( + asset_sample.json, + response.json(), + collection_sample['name'], + item_sample['name'], + ignore=['item'] + ) + self.assertEqual(status_201, 1, msg="Not only one upsert did a create !") + + def test_asset_post_race_condition(self): + workers = 5 + status_201 = 0 + collection_sample = CollectionFactory().create_sample(sample='collection-2') + item_sample = ItemFactory().create_sample(collection_sample.model, sample='item-1') + asset_sample = AssetFactory().create_sample(item_sample.model, sample='asset-1') + + def asset_atomic_post_test(worker): + # This method run on separate thread therefore it requires to create a new client and + # to login it for each call. 
+ client = Client() + client.login(username=self.username, password=self.password) + return client.post( + reverse('assets-list', args=[collection_sample['name'], item_sample['name']]), + data=asset_sample.get_json('post'), + content_type='application/json' + ) + + # We call the PUT asset several times in parallel with the same data to make sure + # that we don't have any race condition. + responses, errors = self.run_parallel(workers, asset_atomic_post_test) + + for worker, response in responses: + self.assertIn(response.status_code, [201, 400]) + if response.status_code == 201: + self.check_stac_asset( + asset_sample.json, + response.json(), + collection_sample['name'], + item_sample['name'], + ignore=['item'] + ) + status_201 += 1 + else: + self.assertIn('id', response.json()['description'].keys()) + self.assertIn('This field must be unique.', response.json()['description']['id']) + self.assertEqual(status_201, 1, msg="Not only one POST was successfull") + class AssetsDeleteEndpointTestCase(StacBaseTestCase, S3TestMixin): diff --git a/app/tests/test_items_endpoint.py b/app/tests/test_items_endpoint.py index 9943cb05..595a9414 100644 --- a/app/tests/test_items_endpoint.py +++ b/app/tests/test_items_endpoint.py @@ -17,7 +17,9 @@ from tests.base_test import StacBaseTestCase from tests.base_test import StacBaseTransactionTestCase +from tests.data_factory import CollectionFactory from tests.data_factory import Factory +from tests.data_factory import ItemFactory from tests.utils import client_login from tests.utils import disableLogger @@ -517,8 +519,10 @@ class ItemsUpdateEndpointTestCase(StacBaseTestCase): @classmethod def setUpTestData(cls): cls.factory = Factory() - cls.collection = cls.factory.create_collection_sample().model - cls.item = cls.factory.create_item_sample(cls.collection, sample='item-1').model + cls.collection = cls.factory.create_collection_sample(db_create=True) + cls.item = cls.factory.create_item_sample( + cls.collection.model, sample='item-1', db_create=True + ) def setUp(self): self.client = Client() @@ -526,27 +530,27 @@ def setUp(self): def test_item_endpoint_put(self): sample = self.factory.create_item_sample( - self.collection, sample='item-2', name=self.item.name + self.collection.model, sample='item-2', name=self.item['name'] ) - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.put( path, data=sample.get_json('put'), content_type="application/json" ) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_item(sample.json, json_data, self.collection.name) + self.check_stac_item(sample.json, json_data, self.collection["name"]) # Check the data by reading it back response = self.client.get(path) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_item(sample.json, json_data, self.collection.name) + self.check_stac_item(sample.json, json_data, self.collection["name"]) def test_item_endpoint_put_extra_payload(self): sample = self.factory.create_item_sample( - self.collection, sample='item-2', name=self.item.name, extra_payload='invalid' + self.collection.model, sample='item-2', name=self.item['name'], extra_payload='invalid' ) - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.put( path, data=sample.get_json('put'), 
content_type="application/json" ) @@ -554,95 +558,95 @@ def test_item_endpoint_put_extra_payload(self): def test_item_endpoint_put_read_only_in_payload(self): data = self.factory.create_item_sample( - self.collection, sample='item-2', name=self.item.name, created=datetime.now() + self.collection.model, sample='item-2', name=self.item['name'], created=datetime.now() ).get_json('put') - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.put(path, data=data, content_type="application/json") self.assertStatusCode(400, response) def test_item_endpoint_put_update_to_datetime_range(self): sample = self.factory.create_item_sample( - self.collection, + self.collection.model, sample='item-2', - name=self.item.name, + name=self.item['name'], properties={ "start_datetime": "2020-10-18T00:00:00Z", "end_datetime": "2020-10-19T00:00:00Z", } ) - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.put( path, data=sample.get_json('put'), content_type="application/json" ) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_item(sample.json, json_data, self.collection.name) + self.check_stac_item(sample.json, json_data, self.collection["name"]) # Check the data by reading it back response = self.client.get(path) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_item(sample.json, json_data, self.collection.name) + self.check_stac_item(sample.json, json_data, self.collection["name"]) self.assertNotIn('datetime', json_data['properties'].keys()) self.assertNotIn('title', json_data['properties'].keys()) def test_item_endpoint_put_rename_item(self): sample = self.factory.create_item_sample( - self.collection, + self.collection.model, sample='item-2', - name=f'new-{self.item.name}', + name=f'new-{self.item["name"]}', ) - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.put( path, data=sample.get_json('put'), content_type="application/json" ) json_data = response.json() self.assertStatusCode(200, response) self.assertEqual(sample.json['id'], json_data['id']) - self.check_stac_item(sample.json, json_data, self.collection.name) + self.check_stac_item(sample.json, json_data, self.collection["name"]) response = self.client.get(path) self.assertStatusCode(404, response, msg="Renamed item still available on old name") # Check the data by reading it back - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{sample.json["id"]}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{sample.json["id"]}' response = self.client.get(path) json_data = response.json() self.assertStatusCode(200, response) self.assertEqual(sample.json['id'], json_data['id']) - self.check_stac_item(sample.json, json_data, self.collection.name) + self.check_stac_item(sample.json, json_data, self.collection["name"]) def test_item_endpoint_patch(self): data = {"properties": {"title": "patched title"}} - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.patch(path, data=data, 
content_type="application/json") json_data = response.json() self.assertStatusCode(200, response) - self.assertEqual(self.item.name, json_data['id']) - self.check_stac_item(data, json_data, self.collection.name) + self.assertEqual(self.item['name'], json_data['id']) + self.check_stac_item(data, json_data, self.collection["name"]) # Check the data by reading it back response = self.client.get(path) json_data = response.json() self.assertStatusCode(200, response) - self.assertEqual(self.item.name, json_data['id']) - self.check_stac_item(data, json_data, self.collection.name) + self.assertEqual(self.item['name'], json_data['id']) + self.check_stac_item(data, json_data, self.collection["name"]) def test_item_endpoint_patch_extra_payload(self): data = {"crazy:stuff": "not allowed"} - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.patch(path, data=data, content_type="application/json") self.assertStatusCode(400, response) def test_item_endpoint_patch_read_only_in_payload(self): data = {"created": utc_aware(datetime.utcnow())} - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.patch(path, data=data, content_type="application/json") self.assertStatusCode(400, response) def test_item_endpoint_patch_invalid_datetimes(self): data = {"properties": {"datetime": "patched title",}} - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.patch(path, data=data, content_type="application/json") self.assertStatusCode(400, response) @@ -652,20 +656,20 @@ def test_item_endpoint_patch_invalid_datetimes(self): def test_item_endpoint_patch_rename_item(self): data = { - "id": f'new-{self.item.name}', + "id": f'new-{self.item["name"]}', } - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.patch(path, data=data, content_type="application/json") json_data = response.json() self.assertStatusCode(200, response) self.assertEqual(data['id'], json_data['id']) - self.check_stac_item(data, json_data, self.collection.name) + self.check_stac_item(data, json_data, self.collection["name"]) response = self.client.get(path) self.assertStatusCode(404, response, msg="Renamed item still available on old name") # Check the data by reading it back - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{data["id"]}' + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{data["id"]}' response = self.client.get(path) json_data = response.json() self.assertStatusCode(200, response) @@ -673,17 +677,133 @@ def test_item_endpoint_patch_rename_item(self): def test_item_upsert_create(self): - sample = self.factory.create_item_sample(self.collection, required_only=True) - print(sample.json["id"]) - path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{sample.json["id"]}' - print(sample.get_json('post')) + sample = self.factory.create_item_sample(self.collection.model, required_only=True) + path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{sample.json["id"]}' response = self.client.put( path, 
data=sample.get_json('post'), content_type="application/json" ) json_data = response.json() - print(json_data) self.assertStatusCode(201, response) - self.check_stac_item(sample.json, json_data, self.collection.name) + self.check_stac_item(sample.json, json_data, self.collection["name"]) + + def test_item_atomic_upsert_create_500(self): + sample = self.factory.create_item_sample(self.collection.model, sample='item-2') + + # the dataset to update does not exist yet + with self.settings(DEBUG_PROPAGATE_API_EXCEPTIONS=True), disableLogger('stac_api.apps'): + response = self.client.put( + reverse( + 'test-item-detail-http-500', args=[self.collection['name'], sample['name']] + ), + data=sample.get_json('put'), + content_type='application/json' + ) + self.assertStatusCode(500, response) + self.assertEqual(response.json()['description'], "AttributeError('test exception')") + + # Make sure that the ressource has not been created + response = self.client.get( + reverse('item-detail', args=[self.collection['name'], sample['name']]) + ) + self.assertStatusCode(404, response) + + def test_item_atomic_upsert_update_500(self): + sample = self.factory.create_item_sample( + self.collection.model, sample='item-2', name=self.item['name'] + ) + # Make sure samples is different from actual data + self.assertNotEqual(sample.attributes, self.item.attributes) + + # the dataset to update does not exist yet + with self.settings(DEBUG_PROPAGATE_API_EXCEPTIONS=True), disableLogger('stac_api.apps'): + # because we explicitely test a crash here we don't want to print a CRITICAL log on the + # console therefore disable it. + response = self.client.put( + reverse( + 'test-item-detail-http-500', args=[self.collection['name'], sample['name']] + ), + data=sample.get_json('put'), + content_type='application/json' + ) + self.assertStatusCode(500, response) + self.assertEqual(response.json()['description'], "AttributeError('test exception')") + + # Make sure that the ressource has not been created + response = self.client.get( + reverse('item-detail', args=[self.collection['name'], sample['name']]) + ) + self.assertStatusCode(200, response) + self.check_stac_item(self.item.json, response.json(), self.collection['name']) + + +class ItemRaceConditionTest(StacBaseTransactionTestCase): + + def setUp(self): + self.username = 'user' + self.password = 'dummy-password' + get_user_model().objects.create_superuser(self.username, password=self.password) + + def test_item_upsert_race_condition(self): + workers = 5 + status_201 = 0 + collection_sample = CollectionFactory().create_sample(sample='collection-2') + item_sample = ItemFactory().create_sample(collection_sample.model, sample='item-1') + + def item_atomic_upsert_test(worker): + # This method run on separate thread therefore it requires to create a new client and + # to login it for each call. + client = Client() + client.login(username=self.username, password=self.password) + return client.put( + reverse('item-detail', args=[collection_sample['name'], item_sample['name']]), + data=item_sample.get_json('put'), + content_type='application/json' + ) + + # We call the PUT item several times in parallel with the same data to make sure + # that we don't have any race condition. 
+ responses, errors = self.run_parallel(workers, item_atomic_upsert_test) + + for worker, response in responses: + if response.status_code == 201: + status_201 += 1 + self.assertIn( + response.status_code, [200, 201], + msg=f'Unexpected response status code {response.status_code} for worker {worker}' + ) + self.check_stac_item(item_sample.json, response.json(), collection_sample['name']) + self.assertEqual(status_201, 1, msg="Not only one upsert did a create !") + + def test_item_post_race_condition(self): + workers = 5 + status_201 = 0 + collection_sample = CollectionFactory().create_sample(sample='collection-2') + item_sample = ItemFactory().create_sample(collection_sample.model, sample='item-1') + + def item_atomic_post_test(worker): + # This method run on separate thread therefore it requires to create a new client and + # to login it for each call. + client = Client() + client.login(username=self.username, password=self.password) + return client.post( + reverse('items-list', args=[collection_sample['name']]), + data=item_sample.get_json('post'), + content_type='application/json' + ) + + # We call the PUT item several times in parallel with the same data to make sure + # that we don't have any race condition. + responses, errors = self.run_parallel(workers, item_atomic_post_test) + + for worker, response in responses: + self.assertIn(response.status_code, [201, 400]) + if response.status_code == 201: + self.check_stac_item(item_sample.json, response.json(), collection_sample['name']) + status_201 += 1 + else: + self.assertIn('id', response.json()['description'].keys()) + self.assertIn('This field must be unique.', response.json()['description']['id']) + self.assertEqual(status_201, 1, msg="Not only one POST was successfull") class ItemsDeleteEndpointTestCase(StacBaseTestCase): From a97457201c5253d00cd41cb3d28c63f581738daa Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Fri, 26 Mar 2021 11:55:41 +0100 Subject: [PATCH 043/105] BGDIINF_SB-1724 fixed minor unit test issue --- app/tests/test_assets_endpoint.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index 6de7a9b1..19cc3329 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -747,8 +747,8 @@ def test_asset_upsert_race_condition(self): workers = 5 status_201 = 0 collection_sample = CollectionFactory().create_sample(sample='collection-2') - item_sample = ItemFactory().create_sample(collection_sample.model, sample='item-1') - asset_sample = AssetFactory().create_sample(item_sample.model, sample='asset-1') + item_sample = ItemFactory().create_sample(collection_sample.model, sample='item-2') + asset_sample = AssetFactory().create_sample(item_sample.model, sample='asset-2') def asset_atomic_upsert_test(worker): # This method run on separate thread therefore it requires to create a new client and @@ -788,8 +788,8 @@ def test_asset_post_race_condition(self): workers = 5 status_201 = 0 collection_sample = CollectionFactory().create_sample(sample='collection-2') - item_sample = ItemFactory().create_sample(collection_sample.model, sample='item-1') - asset_sample = AssetFactory().create_sample(item_sample.model, sample='asset-1') + item_sample = ItemFactory().create_sample(collection_sample.model, sample='item-2') + asset_sample = AssetFactory().create_sample(item_sample.model, sample='asset-2') def asset_atomic_post_test(worker): # This method run on separate thread therefore it requires to create a new 
client and From bee7c0c0921700ece8e28d3908ac4332363238e2 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Fri, 26 Mar 2021 14:43:03 +0100 Subject: [PATCH 044/105] BGDIINF_SB-1724 added check_existence() and adapted UpdateInsertModelMixin The unit tests for race conditions sometimes passed and sometimes failed. Avoiding the get_object() and catching a 404 probably helps to fix this. Hence a check_existence() was added to the collections, items and assets view and the UpdateInsertModelMixin was adapted to use this. If the instance already exists, it is passed to the serializer together with the serializer_kwargs. If it does not exist yet, the serializer is passed the data only. --- app/stac_api/views.py | 9 +++++++++ app/stac_api/views_mixins.py | 7 +------ 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 18a666d5..e1ba02dc 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -258,6 +258,9 @@ class CollectionDetail( lookup_field = "name" queryset = Collection.objects.all().prefetch_related('providers', 'links') + def check_existence(self): + return Collection.objects.filter(name=self.kwargs['collection_name']) + @etag(get_collection_etag) def get(self, request, *args, **kwargs): return self.retrieve(request, *args, **kwargs) @@ -367,6 +370,9 @@ def get_queryset(self): return queryset + def check_existence(self): + return Item.objects.filter(name=self.kwargs['item_name']) + def get_write_request_data(self, request, *args, partial=False, **kwargs): data = request.data.copy() data['collection'] = kwargs['collection_name'] @@ -476,6 +482,9 @@ def get_queryset(self): item__name=self.kwargs['item_name'] ) + def check_existence(self): + return Asset.objects.filter(name=self.kwargs['asset_name']) + def get_serializer(self, *args, **kwargs): serializer_class = self.get_serializer_class() kwargs.setdefault('context', self.get_serializer_context()) diff --git a/app/stac_api/views_mixins.py b/app/stac_api/views_mixins.py index d9f2b5fc..ac8df749 100644 --- a/app/stac_api/views_mixins.py +++ b/app/stac_api/views_mixins.py @@ -1,7 +1,6 @@ import logging from django.db import transaction -from django.http import Http404 from rest_framework import status from rest_framework.response import Response @@ -87,12 +86,8 @@ def update(self, request, *args, **kwargs): @transaction.atomic def upsert(self, request, *args, **kwargs): data = self.get_write_request_data(request, *args, **kwargs) - try: + if self.check_existence(): instance = self.get_object() - except Http404: - instance = None - - if instance: partial = kwargs.pop('partial', False) serializer_kwargs = {'partial': partial} serializer = self.get_serializer(instance, data=data, **serializer_kwargs) From 2960f586e71bb1bc12786d0cdcd85445bd63f9d3 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Mon, 29 Mar 2021 14:01:05 +0200 Subject: [PATCH 045/105] BGDIINF_SB-1724 removed check_existence | override default validators * removed the check_existence() from views.py and removed the passing of the instance to the serializer in UPDATE cases * overriding the default validators that were generated by the DRF ModelSerializer due to unique_together constraints in models.py, as this causes problems when updating an existing instance (validation would complain, that instance already exists). This is done by adding validators = [] in the Meta classes of collections, items and models. 
see: https://www.django-rest-framework.org/api-guide/validators/\#limitations-of-validators --- app/stac_api/serializers.py | 9 +++++++++ app/stac_api/views.py | 9 --------- app/stac_api/views_mixins.py | 9 +-------- 3 files changed, 10 insertions(+), 17 deletions(-) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index 29eb05f5..10bf4082 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -398,6 +398,9 @@ class Meta: 'itemType' ] # crs not in sample data, but in specs.. + validators = [] # Remove a default "unique together" constraint. + # (see: + # https://www.django-rest-framework.org/api-guide/validators/#limitations-of-validators) # NOTE: when explicitely declaring fields, we need to add the validation as for the field # in model ! @@ -637,6 +640,9 @@ class Meta: 'created', 'updated' ] + validators = [] # Remove a default "unique together" constraint. + # (see: + # https://www.django-rest-framework.org/api-guide/validators/#limitations-of-validators) # NOTE: when explicitely declaring fields, we need to add the validation as for the field # in model ! @@ -807,6 +813,9 @@ class Meta: 'links', 'assets' ] + validators = [] # Remove a default "unique together" constraint. + # (see: + # https://www.django-rest-framework.org/api-guide/validators/#limitations-of-validators) # NOTE: when explicitely declaring fields, we need to add the validation as for the field # in model ! diff --git a/app/stac_api/views.py b/app/stac_api/views.py index e1ba02dc..18a666d5 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -258,9 +258,6 @@ class CollectionDetail( lookup_field = "name" queryset = Collection.objects.all().prefetch_related('providers', 'links') - def check_existence(self): - return Collection.objects.filter(name=self.kwargs['collection_name']) - @etag(get_collection_etag) def get(self, request, *args, **kwargs): return self.retrieve(request, *args, **kwargs) @@ -370,9 +367,6 @@ def get_queryset(self): return queryset - def check_existence(self): - return Item.objects.filter(name=self.kwargs['item_name']) - def get_write_request_data(self, request, *args, partial=False, **kwargs): data = request.data.copy() data['collection'] = kwargs['collection_name'] @@ -482,9 +476,6 @@ def get_queryset(self): item__name=self.kwargs['item_name'] ) - def check_existence(self): - return Asset.objects.filter(name=self.kwargs['asset_name']) - def get_serializer(self, *args, **kwargs): serializer_class = self.get_serializer_class() kwargs.setdefault('context', self.get_serializer_context()) diff --git a/app/stac_api/views_mixins.py b/app/stac_api/views_mixins.py index ac8df749..efea2882 100644 --- a/app/stac_api/views_mixins.py +++ b/app/stac_api/views_mixins.py @@ -86,14 +86,7 @@ def update(self, request, *args, **kwargs): @transaction.atomic def upsert(self, request, *args, **kwargs): data = self.get_write_request_data(request, *args, **kwargs) - if self.check_existence(): - instance = self.get_object() - partial = kwargs.pop('partial', False) - serializer_kwargs = {'partial': partial} - serializer = self.get_serializer(instance, data=data, **serializer_kwargs) - else: - serializer = self.get_serializer(data=data) - + serializer = self.get_serializer(data=data) serializer.is_valid(raise_exception=True) lookup = {} if self.lookup_url_kwarg: From e9096abce9b8095db5f03f0e78d6de908a7ce964 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Mon, 29 Mar 2021 14:36:51 +0200 Subject: [PATCH 046/105] BGDIINF_SB-1724 updated specs * updated specs for PUT 
(upsert) for items and assets * adapted ordering of 200 and 201 for collections upsert in specs --- .../spec/v0.9/openapitransactional.yaml | 54 +++++++++++++----- spec/transaction/transaction.yml | 56 +++++++++++++------ 2 files changed, 79 insertions(+), 31 deletions(-) diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index d7f3b25a..4e9d1cc4 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -2132,6 +2132,12 @@ paths: url: https://www.swisstopo.admin.ch title: National Map 1:200'000 responses: + "200": + description: Returns the updated Collection + content: + application/json: + schema: + $ref: "#/components/schemas/collection" "201": description: Returns the created Collection headers: @@ -2144,12 +2150,6 @@ paths: application/json: schema: $ref: "#/components/schemas/collection" - "200": - description: Returns the updated Collection - content: - application/json: - schema: - $ref: "#/components/schemas/collection" "403": $ref: "#/components/responses/PermissionDenied" "404": @@ -2337,15 +2337,15 @@ paths: tags: - Data put: - summary: Update an existing feature by Id with a complete item definition + summary: Update or create a feature description: >- - Use this method to update an existing feature. Requires the entire JSON description - be submitted. + Update or create a feature with Id `featureId` with a complete feature definition. + If the feature doesn't exists it is then created. *NOTE: Optional fields that are not part of the PUT payload, will be erased - in the resource. For example if the resource as a properties.title and the - PUT payload doesn't, then the resource properties.title will be removed.* + in the resource. For example if the resource has a properties.title and the + PUT payload doesn't, then the resource's properties.title will be removed.* operationId: putFeature tags: - Data Management @@ -2440,6 +2440,18 @@ paths: application/json: schema: $ref: "#/components/schemas/item" + "201": + description: Returns the created Item + headers: + Location: + description: A link to the item + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/item" "400": $ref: "#/components/responses/BadRequest" "403": @@ -2618,10 +2630,10 @@ paths: tags: - Data put: - summary: Update an existing asset by Id with a complete asset definition + summary: Update or create an asset description: >- - Use this method to update an existing asset. Requires the entire JSON description - be submitted. + Update or create an asset with Id `assetId` with a complete asset definition. + If the asset doesn't exists it is then created. operationId: putAsset tags: - Data Management @@ -2670,7 +2682,19 @@ paths: $ref: "#/components/schemas/itemAssetPartialUpdate" responses: "200": - description: Status of the update request. + description: Returns the updated Asset. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/itemAsset" + "201": + description: Returns the created Asset + headers: + Location: + description: A link to the asset + schema: + type: string + format: url content: application/json: schema: diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index f0f5ae08..373ff898 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -84,6 +84,12 @@ paths: url: https://www.swisstopo.admin.ch title: National Map 1:200'000 responses: + "200": + description: Returns the updated Collection + content: + application/json: + schema: + $ref: "#/components/schemas/collection" "201": description: Returns the created Collection headers: @@ -96,12 +102,6 @@ paths: application/json: schema: $ref: "#/components/schemas/collection" - "200": - description: Returns the updated Collection - content: - application/json: - schema: - $ref: "#/components/schemas/collection" "403": $ref: "#/components/responses/PermissionDenied" "404": @@ -231,15 +231,15 @@ paths: $ref: "#/components/responses/ServerError" "/collections/{collectionId}/items/{featureId}": put: - summary: Update an existing feature by Id with a complete item definition + summary: Update or create a feature description: >- - Use this method to update an existing feature. Requires the entire - JSON description be submitted. + Update or create a feature with Id `featureId` with a complete feature + definition. If the feature doesn't exists it is then created. - *NOTE: Optional fields that are not part of the PUT payload, will be erased in the - resource. For example if the resource as a properties.title and the PUT payload doesn't, then - the resource properties.title will be removed.* + *NOTE: Optional fields that are not part of the PUT payload, will be erased + in the resource. For example if the resource has a properties.title and the + PUT payload doesn't, then the resource's properties.title will be removed.* operationId: putFeature tags: - Data Management @@ -333,6 +333,18 @@ paths: application/json: schema: $ref: "#/components/schemas/item" + "201": + description: Returns the created Item + headers: + Location: + description: A link to the item + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/item" "400": $ref: "#/components/responses/BadRequest" "403": @@ -477,10 +489,10 @@ paths: tags: - Data put: - summary: Update an existing asset by Id with a complete asset definition + summary: Update or create an asset description: >- - Use this method to update an existing asset. Requires the entire - JSON description be submitted. + Update or create an asset with Id `assetId` with a complete asset definition. + If the asset doesn't exists it is then created. operationId: putAsset tags: - Data Management @@ -529,7 +541,19 @@ paths: $ref: "#/components/schemas/itemAssetPartialUpdate" responses: "200": - description: Status of the update request. + description: Returns the updated Asset. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/itemAsset" + "201": + description: Returns the created Asset + headers: + Location: + description: A link to the asset + schema: + type: string + format: url content: application/json: schema: From 6e1917d383170a867d7b74351edac0d7835e880f Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Tue, 6 Apr 2021 08:35:43 +0200 Subject: [PATCH 047/105] BGDIINF_SB-1724 check parent objects | SlugRelatedFields | adaptions in data_factory.py Current way the SlugRelatedFields collection and item in item's and asset's serializer allow creation of asset's with a wrong collection in the path. Also problems can occur, when two assets of the same name exist in different parent items, or two items of the same name in two different parent collections respectively. Hence a few changes were implemented: * adding parent collections and/or items to the serializer's validated data so tupdate_or_create() would notice that they don't exist, when the validated_data is passed as the defaults argument * removed the serializers.SlugRelatedField() item in asset serializer * made SlugRelatedField collection in item serializer read_only * made use of the get_object_or_404() for checking the existence of parent collections or items. * adapted data_factory.py to not include collection in ItemSample's get_json()'s data and not include item in AssetSample()'s get_json()'s data, in case of method = deserialize * small according adaptions in test_serializer.py --- app/stac_api/serializers.py | 48 ++++++++++++++++------------- app/stac_api/views.py | 60 ++++++++++++++++++++++-------------- app/stac_api/views_mixins.py | 2 +- app/stac_api/views_test.py | 7 +++-- app/tests/data_factory.py | 7 +++-- app/tests/test_serializer.py | 20 +++++++----- 6 files changed, 85 insertions(+), 59 deletions(-) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index 10bf4082..39ca5a5a 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -20,7 +20,6 @@ from stac_api.models import LandingPage from stac_api.models import LandingPageLink from stac_api.models import Provider -from stac_api.models import get_asset_path from stac_api.utils import build_asset_href from stac_api.utils import isoformat from stac_api.validators import MEDIA_TYPES_MIMES @@ -133,22 +132,33 @@ class UpsertModelSerializerMixin: """Add support for Upsert in serializer """ - def upsert(self, **kwargs): + def upsert(self, look_up, **kwargs): """ Update or insert an instance and return it. + + Args: + look_up: dict + Must be a unique query to be used in the objects.update_or_create(**look_up) method. + **kwargs: + Extra key=value pairs to pass as validated_data to update_or_create(). For example + relationships that are not serialized but part of the request path can be given + as kwargs. """ - self.instance, created = self.update_or_create(self.validated_data.copy(), **kwargs) + validated_data = {**self.validated_data, **kwargs} + self.instance, created = self.update_or_create(look_up, validated_data) return self.instance, created - def update_or_create(self, validated_data, **kwargs): + def update_or_create(self, look_up, validated_data): """This method must be implemented by the serializer and must make use of the DB objects.update_or_create() method. Args: + look_up: dict + Must be a unique query to be used in the objects.update_or_create(**look_up) + method. validated_data: dict - Copy of the validated_data to be used in the objects.update_or_create() method. 
- **kwargs: - Must be a unique query to be used in the objects.update_or_create() method. + Copy of the validated_data to be used as defaults in the + objects.update_or_create(defaults=validated_data) method. """ raise NotImplementedError("update_or_create() not implemented") @@ -501,7 +511,7 @@ def update(self, instance, validated_data): ) return super().update(instance, validated_data) - def update_or_create(self, validated_data, **kwargs): + def update_or_create(self, look_up, validated_data): """ Update or create the collection object selected by kwargs and return the instance. @@ -519,7 +529,9 @@ def update_or_create(self, validated_data, **kwargs): """ providers_data = validated_data.pop('providers', []) links_data = validated_data.pop('links', []) - collection, created = Collection.objects.update_or_create(**kwargs, defaults=validated_data) + collection, created = Collection.objects.update_or_create( + **look_up, defaults=validated_data + ) self._update_or_create_providers(collection=collection, providers_data=providers_data) update_or_create_links( instance_type="collection", @@ -627,7 +639,6 @@ class Meta: model = Asset fields = [ 'id', - 'item', 'title', 'type', 'href', @@ -646,9 +657,6 @@ class Meta: # NOTE: when explicitely declaring fields, we need to add the validation as for the field # in model ! - item = serializers.SlugRelatedField( - slug_field='name', write_only=True, queryset=Item.objects.all() - ) id = serializers.CharField(source='name', max_length=255, validators=[validate_asset_name]) title = serializers.CharField( required=False, max_length=255, allow_null=True, allow_blank=False @@ -696,7 +704,7 @@ def create(self, validated_data): asset = validate_uniqueness_and_create(Asset, validated_data) return asset - def update_or_create(self, validated_data, **kwargs): + def update_or_create(self, look_up, validated_data): """ Update or create the asset object selected by kwargs and return the instance. When no asset object matching the kwargs selection, a new asset is created. @@ -709,7 +717,7 @@ def update_or_create(self, validated_data, **kwargs): Returns: tuple Asset instance and True if created otherwise false """ - asset, created = Asset.objects.update_or_create(**kwargs, defaults=validated_data) + asset, created = Asset.objects.update_or_create(**look_up, defaults=validated_data) return asset, created def validate(self, attrs): @@ -723,9 +731,6 @@ def validate(self, attrs): validate_json_payload(self) - if not self.partial: - attrs['file'] = get_asset_path(attrs['item'], attrs['name']) - return attrs def get_fields(self): @@ -782,7 +787,6 @@ class Meta: list_serializer_class = AssetsDictSerializer fields = [ 'id', - 'item', 'title', 'type', 'href', @@ -819,7 +823,6 @@ class Meta: # NOTE: when explicitely declaring fields, we need to add the validation as for the field # in model ! 
- collection = serializers.SlugRelatedField(slug_field='name', queryset=Collection.objects.all()) id = serializers.CharField( source='name', required=True, max_length=255, validators=[validate_name] ) @@ -828,6 +831,7 @@ class Meta: links = ItemLinkSerializer(required=False, many=True) # read only fields type = serializers.SerializerMethodField() + collection = serializers.SlugRelatedField(slug_field='name', read_only=True) bbox = BboxSerializer(source='*', read_only=True) assets = AssetsForItemSerializer(many=True, read_only=True) stac_extensions = serializers.SerializerMethodField() @@ -876,7 +880,7 @@ def update(self, instance, validated_data): ) return super().update(instance, validated_data) - def update_or_create(self, validated_data, **kwargs): + def update_or_create(self, look_up, validated_data): """ Update or create the item object selected by kwargs and return the instance. When no item object matching the kwargs selection, a new item is created. @@ -890,7 +894,7 @@ def update_or_create(self, validated_data, **kwargs): Item instance and True if created otherwise false """ links_data = validated_data.pop('links', []) - item, created = Item.objects.update_or_create(**kwargs, defaults=validated_data) + item, created = Item.objects.update_or_create(**look_up, defaults=validated_data) update_or_create_links( instance_type="item", model=ItemLink, instance=item, links_data=links_data ) diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 18a666d5..46f02982 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -4,6 +4,8 @@ from datetime import datetime from django.conf import settings +from django.http import Http404 +from django.shortcuts import get_object_or_404 from rest_framework import generics from rest_framework import mixins @@ -17,6 +19,7 @@ from stac_api.models import ConformancePage from stac_api.models import Item from stac_api.models import LandingPage +from stac_api.models import get_asset_path from stac_api.pagination import GetPostCursorPagination from stac_api.serializers import AssetSerializer from stac_api.serializers import CollectionSerializer @@ -276,10 +279,11 @@ def patch(self, request, *args, **kwargs): class ItemsList(generics.GenericAPIView, views_mixins.CreateModelMixin): serializer_class = ItemSerializer - def get_write_request_data(self, request, *args, **kwargs): - data = request.data.copy() - data['collection'] = kwargs['collection_name'] - return data + def perform_create(self, serializer): + # this DB hit used to be done by the serializer due to the SlugRelatedField during + # deserialization + collection = get_object_or_404(Collection, name=self.kwargs['collection_name']) + serializer.save(collection=collection) def get_queryset(self): # filter based on the url @@ -367,10 +371,13 @@ def get_queryset(self): return queryset - def get_write_request_data(self, request, *args, partial=False, **kwargs): - data = request.data.copy() - data['collection'] = kwargs['collection_name'] - return data + def perform_update(self, serializer): + collection = get_object_or_404(Collection, name=self.kwargs['collection_name']) + serializer.save(collection=collection) + + def perform_upsert(self, serializer, lookup): + collection = get_object_or_404(Collection, name=self.kwargs['collection_name']) + return serializer.upsert(lookup, collection=collection) @etag(get_item_etag) def get(self, request, *args, **kwargs): @@ -396,11 +403,6 @@ class AssetsList(generics.GenericAPIView, views_mixins.CreateModelMixin): serializer_class = AssetSerializer 
pagination_class = None - def get_write_request_data(self, request, *args, **kwargs): - data = request.data.copy() - data['item'] = kwargs['item_name'] - return data - def get_success_headers(self, data): # pylint: disable=arguments-differ asset_link_self = self.request.build_absolute_uri() + "/" + self.request.data["id"] return {'Location': asset_link_self} @@ -415,6 +417,14 @@ def get_queryset(self): item__name=self.kwargs['item_name'] ) + def perform_create(self, serializer): + # this DB hit used to done by the serializer due to the SlugRelatedField during + # deserialization + item = get_object_or_404( + Item, collection__name=self.kwargs['collection_name'], name=self.kwargs['item_name'] + ) + serializer.save(item=item, file=get_asset_path(item, serializer.validated_data['name'])) + def get(self, request, *args, **kwargs): validate_item(self.kwargs) @@ -459,16 +469,6 @@ class AssetDetail( lookup_url_kwarg = "asset_name" lookup_field = "name" - def get_write_request_data(self, request, *args, partial=False, **kwargs): - data = request.data.copy() - data['item'] = kwargs['item_name'] - if partial and not 'id' in data: - # Partial update for checksum:multihash requires the asset id in order to verify the - # file with the checksum, therefore if the id is missing in payload we take it from - # the request path. - data['id'] = kwargs['asset_name'] - return data - def get_queryset(self): # filter based on the url return Asset.objects.filter( @@ -481,6 +481,20 @@ def get_serializer(self, *args, **kwargs): kwargs.setdefault('context', self.get_serializer_context()) return serializer_class(*args, **kwargs) + def perform_update(self, serializer): + item = get_object_or_404( + Item, collection__name=self.kwargs['collection_name'], name=self.kwargs['item_name'] + ) + serializer.save(item=item, file=get_asset_path(item, self.kwargs['asset_name'])) + + def perform_upsert(self, serializer, lookup): + item = get_object_or_404( + Item, collection__name=self.kwargs['collection_name'], name=self.kwargs['item_name'] + ) + return serializer.upsert( + lookup, item=item, file=get_asset_path(item, self.kwargs['asset_name']) + ) + @etag(get_asset_etag) def get(self, request, *args, **kwargs): return self.retrieve(request, *args, **kwargs) diff --git a/app/stac_api/views_mixins.py b/app/stac_api/views_mixins.py index efea2882..bdcf0de3 100644 --- a/app/stac_api/views_mixins.py +++ b/app/stac_api/views_mixins.py @@ -108,7 +108,7 @@ def perform_update(self, serializer): serializer.save() def perform_upsert(self, serializer, lookup): - return serializer.upsert(**lookup) + return serializer.upsert(lookup) def partial_update(self, request, *args, **kwargs): kwargs['partial'] = True diff --git a/app/stac_api/views_test.py b/app/stac_api/views_test.py index 3cde1a43..a23f9e3e 100644 --- a/app/stac_api/views_test.py +++ b/app/stac_api/views_test.py @@ -22,19 +22,20 @@ def get(self, request, *args, **kwargs): class TestCollectionUpsertHttp500(CollectionDetail): def perform_upsert(self, serializer, lookup): - serializer.upsert(**lookup) + super().perform_upsert(serializer, lookup) raise AttributeError('test exception') class TestItemUpsertHttp500(ItemDetail): def perform_upsert(self, serializer, lookup): - serializer.upsert(**lookup) + super().perform_upsert(serializer, lookup) + raise AttributeError('test exception') class TestAssetUpsertHttp500(AssetDetail): def perform_upsert(self, serializer, lookup): - serializer.upsert(**lookup) + super().perform_upsert(serializer, lookup) raise AttributeError('test exception') 
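For reference, the upsert serializers in this commit ultimately rely on Django's Model.objects.update_or_create(); a simplified sketch of that pattern, with item_name, collection and validated_data as assumed, illustrative names:

    # Illustration only, not part of the patch: the pattern behind
    # ItemSerializer.update_or_create(look_up, validated_data).
    item, created = Item.objects.update_or_create(
        # look-up coming from the URL; it must select at most one row
        name=item_name,
        # validated payload plus the parent collection that the view resolved
        # with get_object_or_404(Collection, name=...)
        defaults={**validated_data, 'collection': collection},
    )
    # created is True when the row was inserted (the view answers 201 Created),
    # False when an existing row was updated (the view answers 200 OK)
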
diff --git a/app/tests/data_factory.py b/app/tests/data_factory.py index b6d4b144..e0be19e7 100644 --- a/app/tests/data_factory.py +++ b/app/tests/data_factory.py @@ -546,6 +546,7 @@ class ItemSample(SampleData): read_only_fields = [ 'type', 'bbox', + 'collection', 'assets', 'stac_extensions', 'stac_version', @@ -620,8 +621,8 @@ def get_json(self, method='get', keep_read_only=False): value in super().get_json(method, keep_read_only).items() if not key.startswith('properties_') } - collection = json_data.pop('collection') - if method in ['get', 'serialize', 'deserialize']: + if method in ['get', 'serialize']: + collection = self.get('collection') json_data['collection'] = collection.name if 'geometry' in json_data and isinstance(json_data['geometry'], GEOSGeometry): json_data['geometry'] = json_data['geometry'].json @@ -764,7 +765,7 @@ def get_json(self, method='get', keep_read_only=False): ''' data = super().get_json(method, keep_read_only) item = data.pop('item') - if method in ['get', 'serialize', 'deserialize']: + if method in ['get', 'serialize']: data['item'] = item.name if 'href' in data and isinstance(data['href'], File): data['href'] = \ diff --git a/app/tests/test_serializer.py b/app/tests/test_serializer.py index 64252878..7d4f682b 100644 --- a/app/tests/test_serializer.py +++ b/app/tests/test_serializer.py @@ -12,6 +12,7 @@ from rest_framework.renderers import JSONRenderer from rest_framework.test import APIRequestFactory +from stac_api.models import get_asset_path from stac_api.serializers import AssetSerializer from stac_api.serializers import CollectionSerializer from stac_api.serializers import ItemSerializer @@ -272,7 +273,6 @@ def test_item_serialization(self): ) collection_name = self.collection.model.name - item_name = self.item.model.name expected_asset = self.asset.json expected_asset.pop('id') expected_asset.pop('item') @@ -339,7 +339,7 @@ def test_item_deserialization_create_only_required(self): # translate to Python native: serializer = ItemSerializer(data=sample.get_json('deserialize')) serializer.is_valid(raise_exception=True) - item = serializer.save() + item = serializer.save(collection=self.collection.model) # serialize the object and test it against the one above # mock a request needed for the serialization of links @@ -360,7 +360,7 @@ def test_item_deserialization_create_only_required_2(self): # translate to Python native: serializer = ItemSerializer(data=sample.get_json('deserialize')) serializer.is_valid(raise_exception=True) - item = serializer.save() + item = serializer.save(collection=self.collection.model) # serialize the object and test it against the one above # mock a request needed for the serialization of links @@ -381,7 +381,7 @@ def test_item_deserialization_create_full(self): # translate to Python native: serializer = ItemSerializer(data=sample.get_json('deserialize')) serializer.is_valid(raise_exception=True) - item = serializer.save() + item = serializer.save(collection=self.collection.model) # serialize the object and test it against the one above # mock a request needed for the serialization of links @@ -568,8 +568,10 @@ def test_asset_deserialization_create(self): data=sample.get_json('deserialize'), context={'request': request_mocker} ) serializer.is_valid(raise_exception=True) - asset = serializer.save() - + asset = serializer.save( + item=self.item.model, + file=get_asset_path(self.item.model, serializer.validated_data['name']) + ) serializer = AssetSerializer(asset, context={'request': request_mocker}) python_native = serializer.data 
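The adapted serializer tests above rely on a Django REST Framework behaviour that is worth spelling out: keyword arguments passed to serializer.save() are merged into validated_data right before create() (or update()) is called, which is how collection, item and file can be injected by the view or the test instead of travelling in the request payload. A small self-contained sketch of that mechanism; NoteSerializer and the owner keyword are made up for illustration, and the settings.configure() call is only there so the snippet can run outside a Django project:

    import django
    from django.conf import settings

    # minimal setup so that DRF serializers can be used outside a real project
    settings.configure(USE_TZ=True)
    django.setup()

    from rest_framework import serializers


    class NoteSerializer(serializers.Serializer):
        title = serializers.CharField(max_length=50)

        def create(self, validated_data):
            # 'owner' is not a declared field: it is only present here because it
            # was passed to save() and merged into validated_data by DRF
            return dict(validated_data)


    serializer = NoteSerializer(data={'title': 'hello'})
    serializer.is_valid(raise_exception=True)
    note = serializer.save(owner='injected-by-the-view')
    print(note)  # {'title': 'hello', 'owner': 'injected-by-the-view'}

This is also why the data factory no longer needs a 'deserialize' variant carrying the collection or item name: those relations are no longer part of the deserialized payload.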
@@ -596,7 +598,10 @@ def test_asset_deserialization_create_required_fields_only(self): data=sample.get_json('deserialize'), context={'request': request_mocker} ) serializer.is_valid(raise_exception=True) - asset = serializer.save() + asset = serializer.save( + item=self.item.model, + file=get_asset_path(self.item.model, serializer.validated_data['name']) + ) # serialize the object and test it against the one above # mock a request needed for the serialization of links @@ -612,6 +617,7 @@ def test_asset_deserialization_create_required_fields_only(self): # ignoring item below, as it is a "write_only" field in the asset's serializer. # it will not be present in the mocked request's data. + print("*" * 80, sample.json, "*" * 40, python_native) self.check_stac_asset( sample.json, python_native, collection_name, item_name, ignore=['item'] ) From 2f5d7fc649a416e0c02beb360e8b85d904390527 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Thu, 8 Apr 2021 13:31:46 +0200 Subject: [PATCH 048/105] BGDIINF_SB-1724 added unit tests * added unit tests for upsert creating assets and items with a non-existing parent item or collection, respectively, in the path --- app/tests/test_assets_endpoint.py | 52 +++++++++++++++++++++++++++++++ app/tests/test_items_endpoint.py | 9 ++++++ 2 files changed, 61 insertions(+) diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index 19cc3329..0a91035d 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -484,6 +484,58 @@ def test_asset_upsert_create(self): self.assertStatusCode(200, response) self.check_stac_asset(asset.json, json_data, collection.name, item.name, ignore=['item']) + def test_asset_upsert_create_non_existing_parent_item_in_path(self): + collection = self.collection.model + item = self.item.model + asset = self.factory.create_asset_sample(item=item, create_asset_file=True) + asset_name = asset['name'] + + path = ( + f'/{STAC_BASE_V}/collections/{collection.name}/items/non-existing-item/assets/' + f'{asset_name}' + ) + + # Check that asset does not exist already + response = self.client.get(path) + self.assertStatusCode(404, response) + + # Check also, that the asset does not exist in the DB already + self.assertFalse( + Asset.objects.filter(name=asset_name).exists(), msg="Deleted asset still found in DB" + ) + + # Now use upsert to create the new asset + response = self.client.put( + path, data=asset.get_json('post'), content_type="application/json" + ) + self.assertStatusCode(404, response) + + def test_asset_upsert_create_non_existing_parent_collection_in_path(self): + collection = self.collection.model + item = self.item.model + asset = self.factory.create_asset_sample(item=item, create_asset_file=True) + asset_name = asset['name'] + + path = ( + f'/{STAC_BASE_V}/collections/non-existing-collection/items/{item.name}/assets/' + f'{asset_name}' + ) + + # Check that asset does not exist already + response = self.client.get(path) + self.assertStatusCode(404, response) + + # Check also, that the asset does not exist in the DB already + self.assertFalse( + Asset.objects.filter(name=asset_name).exists(), msg="Deleted asset still found in DB" + ) + + # Now use upsert to create the new asset + response = self.client.put( + path, data=asset.get_json('post'), content_type="application/json" + ) + self.assertStatusCode(404, response) + def test_asset_endpoint_put(self): collection_name = self.collection['name'] item_name = self.item['name'] diff --git a/app/tests/test_items_endpoint.py
b/app/tests/test_items_endpoint.py index 595a9414..c8fc8d79 100644 --- a/app/tests/test_items_endpoint.py +++ b/app/tests/test_items_endpoint.py @@ -686,6 +686,15 @@ def test_item_upsert_create(self): self.assertStatusCode(201, response) self.check_stac_item(sample.json, json_data, self.collection["name"]) + def test_item_upsert_create_non_existing_parent_collection_in_path(self): + + sample = self.factory.create_item_sample(self.collection.model, required_only=True) + path = f'/{STAC_BASE_V}/collections/non-existing-collection/items/{sample.json["id"]}' + response = self.client.put( + path, data=sample.get_json('post'), content_type="application/json" + ) + self.assertStatusCode(404, response) + def test_item_atomic_upsert_create_500(self): sample = self.factory.create_item_sample(self.collection.model, sample='item-2') From efd07914144081b74ec4176d2fc9ffd55aacd329 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Thu, 8 Apr 2021 13:36:26 +0200 Subject: [PATCH 049/105] BGDIINF_SB-1724 updated specs * removed collection field from examples in features' POST and PUT BGDIINF_SB-1724 WIP --- app/stac_api/views.py | 1 - app/tests/test_assets_endpoint.py | 2 +- app/tests/test_serializer.py | 1 - spec/static/spec/v0.9/openapitransactional.yaml | 2 -- spec/transaction/transaction.yml | 2 -- 5 files changed, 1 insertion(+), 7 deletions(-) diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 46f02982..83d369d8 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -4,7 +4,6 @@ from datetime import datetime from django.conf import settings -from django.http import Http404 from django.shortcuts import get_object_or_404 from rest_framework import generics diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index 0a91035d..26a8844b 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -20,8 +20,8 @@ from tests.data_factory import CollectionFactory from tests.data_factory import Factory from tests.data_factory import ItemFactory -from tests.utils import client_login from tests.utils import S3TestMixin +from tests.utils import client_login from tests.utils import disableLogger from tests.utils import mock_s3_asset_file from tests.utils import upload_file_on_s3 diff --git a/app/tests/test_serializer.py b/app/tests/test_serializer.py index 7d4f682b..0c5d7613 100644 --- a/app/tests/test_serializer.py +++ b/app/tests/test_serializer.py @@ -617,7 +617,6 @@ def test_asset_deserialization_create_required_fields_only(self): # ignoring item below, as it is a "write_only" field in the asset's serializer. # it will not be present in the mocked request's data.
- print("*" * 80, sample.json, "*" * 40, python_native) self.check_stac_asset( sample.json, python_native, collection_name, item_name, ignore=['item'] ) diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index 4e9d1cc4..d77f9393 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -2282,7 +2282,6 @@ paths: instruments: - cool_sensor_v1 view:sun_elevation: 33.4 - collection: CS3 links: - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html rel: license @@ -2391,7 +2390,6 @@ paths: instruments: - cool_sensor_v1 view:sun_elevation: 33.4 - collection: CS3 links: - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html rel: license diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index 373ff898..70362d63 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -203,7 +203,6 @@ paths: instruments: - cool_sensor_v1 view:sun_elevation: 33.4 - collection: CS3 links: - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html rel: license @@ -285,7 +284,6 @@ paths: instruments: - cool_sensor_v1 view:sun_elevation: 33.4 - collection: CS3 links: - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html rel: license From 5a3946a6c38904ab85c514bed81e842f3f31339c Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Thu, 8 Apr 2021 16:03:17 +0200 Subject: [PATCH 050/105] BGDIINF_SB-1724 small improvements in assets_endpoint unit test --- app/tests/test_assets_endpoint.py | 50 ++++++++++++++++++------------- 1 file changed, 30 insertions(+), 20 deletions(-) diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index 26a8844b..80891817 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -16,10 +16,7 @@ from tests.base_test import StacBaseTestCase from tests.base_test import StacBaseTransactionTestCase -from tests.data_factory import AssetFactory -from tests.data_factory import CollectionFactory from tests.data_factory import Factory -from tests.data_factory import ItemFactory from tests.utils import S3TestMixin from tests.utils import client_login from tests.utils import disableLogger @@ -458,10 +455,10 @@ def test_asset_upsert_create(self): asset = self.factory.create_asset_sample(item=item, create_asset_file=True) asset_name = asset['name'] - path = f'/{STAC_BASE_V}/collections/{collection.name}/items/{item.name}/assets/{asset_name}' - + response = self.client.get( + reverse('asset-detail', args=[collection.name, item.name, asset_name]) + ) # Check that assert does not exist already - response = self.client.get(path) self.assertStatusCode(404, response) # Check also, that the asset does not exist in the DB already @@ -471,11 +468,15 @@ def test_asset_upsert_create(self): # Now use upsert to create the new assert response = self.client.put( - path, data=asset.get_json('post'), content_type="application/json" + reverse('asset-detail', args=[collection.name, item.name, asset_name]), + data=asset.get_json('post'), + content_type="application/json" ) json_data = response.json() self.assertStatusCode(201, response) - self.check_header_location(f"{path}", response) + self.check_header_location( + reverse('asset-detail', args=[collection.name, item.name, asset_name]), response + ) self.check_stac_asset(asset.json, json_data, 
collection.name, item.name, ignore=['item']) # Check the data by reading it back @@ -794,13 +795,18 @@ def setUp(self): self.username = 'user' self.password = 'dummy-password' get_user_model().objects.create_superuser(self.username, password=self.password) + self.factory = Factory() + self.collection_sample = self.factory.create_collection_sample( + sample='collection-2', db_create=True + ) + self.item_sample = self.factory.create_item_sample( + self.collection_sample.model, sample='item-2', db_create=True + ) def test_asset_upsert_race_condition(self): workers = 5 status_201 = 0 - collection_sample = CollectionFactory().create_sample(sample='collection-2') - item_sample = ItemFactory().create_sample(collection_sample.model, sample='item-2') - asset_sample = AssetFactory().create_sample(item_sample.model, sample='asset-2') + asset_sample = self.factory.create_asset_sample(self.item_sample.model, sample='asset-2') def asset_atomic_upsert_test(worker): # This method run on separate thread therefore it requires to create a new client and @@ -810,7 +816,11 @@ def asset_atomic_upsert_test(worker): return client.put( reverse( 'asset-detail', - args=[collection_sample['name'], item_sample['name'], asset_sample['name']] + args=[ + self.collection_sample['name'], + self.item_sample['name'], + asset_sample['name'] + ] ), data=asset_sample.get_json('put'), content_type='application/json' @@ -830,8 +840,8 @@ def asset_atomic_upsert_test(worker): self.check_stac_asset( asset_sample.json, response.json(), - collection_sample['name'], - item_sample['name'], + self.collection_sample['name'], + self.item_sample['name'], ignore=['item'] ) self.assertEqual(status_201, 1, msg="Not only one upsert did a create !") @@ -839,9 +849,7 @@ def asset_atomic_upsert_test(worker): def test_asset_post_race_condition(self): workers = 5 status_201 = 0 - collection_sample = CollectionFactory().create_sample(sample='collection-2') - item_sample = ItemFactory().create_sample(collection_sample.model, sample='item-2') - asset_sample = AssetFactory().create_sample(item_sample.model, sample='asset-2') + asset_sample = self.factory.create_asset_sample(self.item_sample.model, sample='asset-2') def asset_atomic_post_test(worker): # This method run on separate thread therefore it requires to create a new client and @@ -849,7 +857,9 @@ def asset_atomic_post_test(worker): client = Client() client.login(username=self.username, password=self.password) return client.post( - reverse('assets-list', args=[collection_sample['name'], item_sample['name']]), + reverse( + 'assets-list', args=[self.collection_sample['name'], self.item_sample['name']] + ), data=asset_sample.get_json('post'), content_type='application/json' ) @@ -864,8 +874,8 @@ def asset_atomic_post_test(worker): self.check_stac_asset( asset_sample.json, response.json(), - collection_sample['name'], - item_sample['name'], + self.collection_sample['name'], + self.item_sample['name'], ignore=['item'] ) status_201 += 1 From faaa6c36fc6c4033b8d53c993ef9be14dea9664a Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 8 Apr 2021 08:12:04 +0200 Subject: [PATCH 051/105] BGDIINF_SB-1739: Updated readme and pylintrc * Updated the README.md table of content. * Removed unused variable in Makefile * Allowed module names with numbers in pylintrc: e.g. s3_multipart_upload. Before, only the migrations modules were allowed to have numbers in their names. However a valid name must always start with a letter (except for migrations).
* Also improved a unittest error message * Fixed a pagination bug in case the links would only contain the pagination links. In this case `links` would have been a list of lists of links instead of a list of links. This use case is currently not used but will be with the asset uploads view. --- .pylintrc | 2 +- Makefile | 1 - README.md | 14 ++++++++++++-- app/stac_api/pagination.py | 2 +- app/tests/utils.py | 4 +++- 5 files changed, 17 insertions(+), 6 deletions(-) diff --git a/.pylintrc b/.pylintrc index 20af64ed..8a802228 100644 --- a/.pylintrc +++ b/.pylintrc @@ -322,7 +322,7 @@ module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- # style. -module-rgx=^(?:(?P[a-z]+[a-z_]*)|(?P\d+_[_a-z0-9]+))$ +module-rgx=^(?:(?P[a-z]+[a-z_\d]*)|(?P\d+_[_a-z0-9]+))$ # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. diff --git a/Makefile b/Makefile index 80b5df41..47848e71 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,6 @@ SHELL = /bin/bash SERVICE_NAME := service-stac CURRENT_DIR := $(shell pwd) -VENV := $(CURRENT_DIR)/.venv # Django specific APP_SRC_DIR := app diff --git a/README.md b/README.md index af9a860b..79d887d8 100644 --- a/README.md +++ b/README.md @@ -7,20 +7,30 @@ ## Table of Content +- [Table of Content](#table-of-content) - [Summary of the project](#summary-of-the-project) -- [Specs](spec/README.md) - [Local development](#local-development) - [Dependencies](#dependencies) + - [Python3.7](#python37) + - [pipenv](#pipenv) + - [Using Postgres on local host](#using-postgres-on-local-host) - [Creating the local environment](#creating-the-local-environment) - [Setting up the local database](#setting-up-the-local-database) - [Using a local PostGres database instead of a container](#using-a-local-postgres-database-instead-of-a-container) - [Starting dev server](#starting-dev-server) - [Running tests](#running-tests) + - [Unit test logging](#unit-test-logging) - [Using Django shell](#using-django-shell) + - [Migrate DB with Django](#migrate-db-with-django) - [Linting and formatting your work](#linting-and-formatting-your-work) +- [Initial Setup up the RDS database and the user](#initial-setup-up-the-rds-database-and-the-user) - [Deploying the project and continuous integration](#deploying-the-project-and-continuous-integration) - [Docker](#docker) -- [Configuration](#configuration) + - [Configuration](#configuration) + - [**General settings**](#general-settings) + - [**Database settings**](#database-settings) + - [**Asset Storage settings (AWS S3)**](#asset-storage-settings-aws-s3) + - [**Development settings (only for local environment and DEV staging)**](#development-settings-only-for-local-environment-and-dev-staging) ## Summary of the project diff --git a/app/stac_api/pagination.py b/app/stac_api/pagination.py index ee8339dd..a58b71ed 100644 --- a/app/stac_api/pagination.py +++ b/app/stac_api/pagination.py @@ -45,7 +45,7 @@ def get_paginated_response(self, data, request=None): # pylint: disable=argumen if 'links' not in data and not links: data.update({'links': []}) elif 'links' not in data and links: - data.update({'links': [links]}) + data.update({'links': links}) elif links: data['links'] += links return Response(data) diff --git a/app/tests/utils.py b/app/tests/utils.py index ec7200d8..22e1fa77 100644 --- a/app/tests/utils.py +++ b/app/tests/utils.py @@ -51,7 +51,9 @@ def assertS3ObjectExists(self, path): # pylint: disable=invalid-name def assertS3ObjectNotExists(self,
path): # pylint: disable=invalid-name s3 = get_s3_resource() - with self.assertRaises(botocore.exceptions.ClientError) as exception_context: + with self.assertRaises( + botocore.exceptions.ClientError, msg=f'Object {path} found on S3' + ) as exception_context: s3.Object(settings.AWS_STORAGE_BUCKET_NAME, path).load() error = exception_context.exception self.assertEqual(error.response['Error']['Code'], "404") From ce1bdbecbb5dca81d48b58bd47b820786e564d7e Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 8 Apr 2021 08:54:40 +0200 Subject: [PATCH 052/105] BGDIINF_SB-1625: Removed the checksum:multihash from asset Write endpoint With the new asset management the checksum:multihash is set when creating a new asset upload and not anymore when creating/updating the asset metadata. --- .../migrations/0004_auto_20210408_0659.py | 18 ++++ app/stac_api/models.py | 4 +- app/stac_api/serializers.py | 9 +- app/tests/base_test.py | 2 +- app/tests/data_factory.py | 8 +- app/tests/sample_data/asset_samples.py | 25 +++++- app/tests/test_assets_endpoint.py | 85 +++++++------------ app/tests/test_serializer.py | 4 +- .../spec/v0.9/openapitransactional.yaml | 3 - spec/transaction/transaction.yml | 29 ------- 10 files changed, 81 insertions(+), 106 deletions(-) create mode 100644 app/stac_api/migrations/0004_auto_20210408_0659.py diff --git a/app/stac_api/migrations/0004_auto_20210408_0659.py b/app/stac_api/migrations/0004_auto_20210408_0659.py new file mode 100644 index 00000000..bb2196c2 --- /dev/null +++ b/app/stac_api/migrations/0004_auto_20210408_0659.py @@ -0,0 +1,18 @@ +# Generated by Django 3.1.7 on 2021-04-08 06:59 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('stac_api', '0003_auto_20210325_1001'), + ] + + operations = [ + migrations.AlterField( + model_name='asset', + name='checksum_multihash', + field=models.CharField(blank=True, default=None, editable=False, max_length=255, null=True), + ), + ] diff --git a/app/stac_api/models.py b/app/stac_api/models.py index c9f53420..c408932f 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -557,7 +557,9 @@ class Meta: def filename(self): return os.path.basename(self.file.name) - checksum_multihash = models.CharField(editable=False, max_length=255, blank=True, default='') + checksum_multihash = models.CharField( + editable=False, max_length=255, blank=True, null=True, default=None + ) # here we need to set blank=True otherwise the field is as required in the admin interface description = models.TextField(blank=True, null=True, default=None) eo_gsd = models.FloatField(null=True, blank=True) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index 39ca5a5a..396ae87c 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -25,7 +25,6 @@ from stac_api.validators import MEDIA_TYPES_MIMES from stac_api.validators import validate_asset_name from stac_api.validators import validate_asset_name_with_media_type -from stac_api.validators import validate_checksum_multihash_sha256 from stac_api.validators import validate_geoadmin_variant from stac_api.validators import validate_item_properties_datetimes from stac_api.validators import validate_name @@ -688,14 +687,8 @@ class Meta: validators=[validate_geoadmin_variant] ) proj_epsg = serializers.IntegerField(source='proj_epsg', allow_null=True, required=False) - checksum_multihash = serializers.CharField( - source='checksum_multihash', - max_length=255, - required=False, - allow_blank=False,
validators=[validate_checksum_multihash_sha256] - ) # read only fields + checksum_multihash = serializers.CharField(source='checksum_multihash', read_only=True) href = HrefField(source='file', read_only=True) created = serializers.DateTimeField(read_only=True) updated = serializers.DateTimeField(read_only=True) diff --git a/app/tests/base_test.py b/app/tests/base_test.py index 84634257..2d4ff29e 100644 --- a/app/tests/base_test.py +++ b/app/tests/base_test.py @@ -228,7 +228,7 @@ def check_stac_asset(self, expected, current, collection, item, ignore=None): self._check_stac_dictsubset('asset', expected, current, ignore=ignore) # check required fields - for key in ['links', 'id', 'type', 'checksum:multihash', 'href']: + for key in ['links', 'id', 'type', 'href']: self.assertIn(key, current, msg=f'Asset {key} is missing') for date_field in ['created', 'updated']: self.assertIn(date_field, current, msg=f'Asset {date_field} is missing') diff --git a/app/tests/data_factory.py b/app/tests/data_factory.py index e0be19e7..24b1c47d 100644 --- a/app/tests/data_factory.py +++ b/app/tests/data_factory.py @@ -104,7 +104,6 @@ from stac_api.models import ItemLink from stac_api.models import Provider from stac_api.utils import get_s3_resource -from stac_api.utils import get_sha256_multihash from stac_api.utils import isoformat from stac_api.validators import MEDIA_TYPES_BY_TYPE @@ -715,11 +714,7 @@ class AssetSample(SampleData): 'proj_epsg', 'checksum_multihash' ] - read_only_fields = [ - 'created', - 'updated', - 'href', - ] + read_only_fields = ['created', 'updated', 'href', 'checksum:multihash'] def __init__(self, item, sample='asset-1', name=None, required_only=False, **kwargs): '''Create a item sample data @@ -742,7 +737,6 @@ def __init__(self, item, sample='asset-1', name=None, required_only=False, **kwa file = getattr(self, 'attr_file', None) file_path = f'{item.collection.name}/{item.name}/{self.attr_name}' if isinstance(file, bytes): - self.attr_checksum_multihash = get_sha256_multihash(file) self.attr_file = SimpleUploadedFile(file_path, file) def get_json(self, method='get', keep_read_only=False): diff --git a/app/tests/sample_data/asset_samples.py b/app/tests/sample_data/asset_samples.py index 81778817..f487f7d2 100644 --- a/app/tests/sample_data/asset_samples.py +++ b/app/tests/sample_data/asset_samples.py @@ -17,6 +17,17 @@ 'checksum_multihash': get_sha256_multihash(FILE_CONTENT_1), 'file': FILE_CONTENT_1 }, + 'asset-no-checksum': { + 'name': 'asset-1.tiff', + 'title': 'Asset 1 Title', + 'description': 'This is a full description of asset 1', + 'eo_gsd': 3.4, + 'geoadmin_lang': 'fr', + 'geoadmin_variant': 'kgrs', + 'proj_epsg': 2056, + 'media_type': "image/tiff; application=geotiff; profile=cloud-optimized", + 'file': FILE_CONTENT_1 + }, 'asset-1-updated': { 'name': 'asset-2.txt', 'title': 'Asset 2 Title', @@ -86,5 +97,17 @@ 'proj_epsg': 2056, 'media_type': "text/plain", 'file': b'Asset with invalid geoadmin:variant' - } + }, + 'asset-no-file': { + 'name': 'asset-1.tiff', + 'title': 'Asset 1 Title', + 'description': 'This is a full description of asset 1', + 'eo_gsd': 3.4, + 'geoadmin_lang': 'fr', + 'geoadmin_variant': 'kgrs', + 'proj_epsg': 2056, + 'media_type': "image/tiff; application=geotiff; profile=cloud-optimized", + # use a path instead of a bytes object to avoid creating a file + 'file': 'collection-1/item-1/asset-1.tiff' + }, } diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index 80891817..b13c5b80 100644 --- a/app/tests/test_assets_endpoint.py +++ 
b/app/tests/test_assets_endpoint.py @@ -11,17 +11,15 @@ from stac_api.models import Asset from stac_api.utils import get_asset_path -from stac_api.utils import get_sha256_multihash from stac_api.utils import utc_aware from tests.base_test import StacBaseTestCase from tests.base_test import StacBaseTransactionTestCase from tests.data_factory import Factory from tests.utils import S3TestMixin -from tests.utils import client_login from tests.utils import disableLogger +from tests.utils import client_login from tests.utils import mock_s3_asset_file -from tests.utils import upload_file_on_s3 logger = logging.getLogger(__name__) @@ -57,7 +55,7 @@ def test_assets_endpoint(self): self.assertIn('assets', json_data, msg='assets is missing in response') self.assertEqual( - 3, len(json_data['assets']), msg='Number of assets doen\'t match the expected' + 3, len(json_data['assets']), msg='Number of assets doesn\'t match the expected' ) for i, asset in enumerate([self.asset_1, asset_2, asset_3]): self.check_stac_asset( @@ -114,9 +112,7 @@ def setUp(self): # pylint: disable=invalid-name def test_asset_endpoint_post_only_required(self): collection_name = self.collection.name item_name = self.item.name - asset = self.factory.create_asset_sample( - item=self.item, required_only=True, create_asset_file=True, file=b'Dummy file content' - ) + asset = self.factory.create_asset_sample(item=self.item, required_only=True) path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' response = self.client.post( @@ -140,11 +136,12 @@ def test_asset_endpoint_post_only_required(self): self.assertNotIn('eo:gsd', json_data) self.assertNotIn('description', json_data) self.assertNotIn('title', json_data) + self.assertNotIn('checksum:multihash', json_data) def test_asset_endpoint_post_full(self): collection_name = self.collection.name item_name = self.item.name - asset = self.factory.create_asset_sample(item=self.item, create_asset_file=True) + asset = self.factory.create_asset_sample(item=self.item, sample='asset-no-checksum') path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' response = self.client.post( @@ -155,6 +152,17 @@ def test_asset_endpoint_post_full(self): self.check_header_location(f"{path}/{asset['name']}", response) self.check_stac_asset(asset.json, json_data, collection_name, item_name, ignore=['item']) + # make sure that all optional fields are present + self.assertIn('geoadmin:lang', json_data) + self.assertIn('geoadmin:variant', json_data) + self.assertIn('proj:epsg', json_data) + self.assertIn('eo:gsd', json_data) + self.assertIn('description', json_data) + self.assertIn('title', json_data) + + # Checksum multihash is set by the AssetUpload later on + self.assertNotIn('checksum:multihash', json_data) + # Check the data by reading it back response = self.client.get(response['Location']) json_data = response.json() @@ -170,8 +178,7 @@ def test_asset_endpoint_post_empty_string(self): description='', geoadmin_variant='', geoadmin_lang='', - title='', - create_asset_file=True + title='' ) path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' @@ -186,9 +193,7 @@ def test_asset_endpoint_post_empty_string(self): def test_asset_endpoint_post_extra_payload(self): collection_name = self.collection.name item_name = self.item.name - asset = self.factory.create_asset_sample( - item=self.item, extra_attribute='not allowed', create_asset_file=True - ) + asset = self.factory.create_asset_sample(item=self.item, extra_attribute='not allowed') path = 
f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' response = self.client.post( @@ -209,7 +214,7 @@ def test_asset_endpoint_post_read_only_in_payload(self): collection_name = self.collection.name item_name = self.item.name asset = self.factory.create_asset_sample( - item=self.item, created=utc_aware(datetime.utcnow()), create_asset_file=True + item=self.item, created=utc_aware(datetime.utcnow()) ) path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' @@ -219,6 +224,7 @@ def test_asset_endpoint_post_read_only_in_payload(self): self.assertStatusCode(400, response) self.assertEqual( { + 'checksum:multihash': ['Found read-only property in payload'], 'created': ['Found read-only property in payload'], 'href': ['Found read-only property in payload'] }, @@ -235,9 +241,7 @@ def test_asset_endpoint_post_read_only_in_payload(self): def test_asset_endpoint_post_read_only_href_in_payload(self): collection_name = self.collection.name item_name = self.item.name - asset = self.factory.create_asset_sample( - item=self.item, href='https://testserver/test.txt', create_asset_file=True - ) + asset = self.factory.create_asset_sample(item=self.item, href='https://testserver/test.txt') path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' response = self.client.post( @@ -261,9 +265,7 @@ def test_asset_endpoint_post_read_only_href_in_payload(self): def test_asset_endpoint_post_invalid_data(self): collection_name = self.collection.name item_name = self.item.name - asset = self.factory.create_asset_sample( - item=self.item, sample='asset-invalid', create_asset_file=True - ) + asset = self.factory.create_asset_sample(item=self.item, sample='asset-invalid') path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' response = self.client.post( @@ -292,7 +294,7 @@ def test_asset_endpoint_post_characters_geoadmin_variant(self): collection_name = self.collection.name item_name = self.item.name asset = self.factory.create_asset_sample( - item=self.item, sample='asset-valid-geoadmin-variant', create_asset_file=True + item=self.item, sample='asset-valid-geoadmin-variant' ) path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' @@ -303,7 +305,7 @@ def test_asset_endpoint_post_characters_geoadmin_variant(self): # invalid geoadmin:variant asset = self.factory.create_asset_sample( - item=self.item, sample='asset-invalid-geoadmin-variant', create_asset_file=True + item=self.item, sample='asset-invalid-geoadmin-variant' ) path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets' @@ -373,36 +375,6 @@ def setUp(self): # pylint: disable=invalid-name client_login(self.client) self.maxDiff = None # pylint: disable=invalid-name - def test_asset_endpoint_patch_checksum(self): - new_file_content = b'New file content' - new_multihash = get_sha256_multihash(new_file_content) - collection_name = self.collection['name'] - item_name = self.item['name'] - asset_name = self.asset['name'] - - # upload first a new file on S3 - upload_file_on_s3(f'{collection_name}/{item_name}/{asset_name}', new_file_content) - - patch_payload = {'checksum:multihash': new_multihash} - patch_asset = self.asset.copy() - patch_asset['checksum_multihash'] = new_multihash - - path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets/{asset_name}' - response = self.client.patch(path, data=patch_payload, content_type="application/json") - self.assertStatusCode(200, response) - json_data = response.json() - 
self.check_stac_asset( - patch_asset.json, json_data, collection_name, item_name, ignore=['item'] - ) - - # Check the data by reading it back - response = self.client.get(path) - json_data = response.json() - self.assertStatusCode(200, response) - self.check_stac_asset( - patch_asset.json, json_data, collection_name, item_name, ignore=['item'] - ) - def test_asset_endpoint_patch_put_href(self): collection_name = self.collection['name'] item_name = self.item['name'] @@ -598,11 +570,11 @@ def test_asset_endpoint_put_read_only_in_payload(self): changed_asset = self.factory.create_asset_sample( item=self.item.model, name=asset_name, - checksum_multihash=self.asset['checksum_multihash'], sample='asset-1-updated', media_type=self.asset['media_type'], created=utc_aware(datetime.utcnow()), - create_asset_file=False + create_asset_file=False, + checksum_multihash=self.asset['checksum_multihash'], ) path = f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}/assets/{asset_name}' @@ -612,7 +584,10 @@ def test_asset_endpoint_put_read_only_in_payload(self): content_type="application/json" ) self.assertStatusCode(400, response) - self.assertEqual({'created': ['Found read-only property in payload']}, + self.assertEqual({ + 'created': ['Found read-only property in payload'], + 'checksum:multihash': ['Found read-only property in payload'] + }, response.json()['description'], msg='Unexpected error message') diff --git a/app/tests/test_serializer.py b/app/tests/test_serializer.py index 0c5d7613..df644659 100644 --- a/app/tests/test_serializer.py +++ b/app/tests/test_serializer.py @@ -553,7 +553,9 @@ def setUp(self): # pylint: disable=invalid-name self.maxDiff = None # pylint: disable=invalid-name def test_asset_deserialization_create(self): - sample = self.data_factory.create_asset_sample(item=self.item.model, create_asset_file=True) + sample = self.data_factory.create_asset_sample( + sample='asset-no-checksum', item=self.item.model, create_asset_file=True + ) # serialize the object and test it against the one above # mock a request needed for the serialization of links diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index d77f9393..e6d33357 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -1823,9 +1823,6 @@ components: required: - id - type - properties: - checksum:multihash: - $ref: "#/components/schemas/writeChecksumMultihash" itemAssetUpdate: allOf: - $ref: "#/components/schemas/assetBase" diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index 70362d63..d7fa80a8 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -407,15 +407,6 @@ paths: available at the following URL: https://data.geo.admin.ch/{collectionId}/{featureId}/{assetId} Optionally the Asset object multihash can be given for sanity check. - # description: >- - # Create a new asset for a specific feature. - - # When creating a new asset for a feature, the metadata of - # the asset along with a temporary url of the asset itself is - # posted to the API. - - # The service is taking care of copying the asset from the temporary - # location to correct one. 
operationId: postAsset tags: - Data Management @@ -440,21 +431,6 @@ paths: application/json: schema: $ref: "#/components/schemas/itemAsset" - # "202": - # description: Accepted create request - # headers: - # Location: - # description: A link to the item - # schema: - # type: string - # format: url - # content: - # application/json: - # schema: - # type: string - # text/html: - # schema: - # type: string "400": $ref: "#/components/responses/BadRequest" "5XX": @@ -789,11 +765,6 @@ components: required: - id - type - properties: - "checksum:multihash": - $ref: "#/components/schemas/writeChecksumMultihash" - # href: - # $ref: "#/components/schemas/writeHref" itemAssetUpdate: allOf: - $ref: "#/components/schemas/assetBase" From f096f226214288d0299f02fc281e5da9e552c149 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 8 Apr 2021 16:53:39 +0200 Subject: [PATCH 053/105] BGDIINF_SB-1625: Fixed small issue due to rebasing with upsert of asset/item --- .../migrations/0004_auto_20210408_0659.py | 7 +- app/tests/data_factory.py | 2 - app/tests/test_assets_endpoint.py | 73 ++++++++----------- app/tests/test_serializer.py | 9 +-- 4 files changed, 38 insertions(+), 53 deletions(-) diff --git a/app/stac_api/migrations/0004_auto_20210408_0659.py b/app/stac_api/migrations/0004_auto_20210408_0659.py index bb2196c2..9f2400cc 100644 --- a/app/stac_api/migrations/0004_auto_20210408_0659.py +++ b/app/stac_api/migrations/0004_auto_20210408_0659.py @@ -1,6 +1,7 @@ # Generated by Django 3.1.7 on 2021-04-08 06:59 -from django.db import migrations, models +from django.db import migrations +from django.db import models class Migration(migrations.Migration): @@ -13,6 +14,8 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='asset', name='checksum_multihash', - field=models.CharField(blank=True, default=None, editable=False, max_length=255, null=True), + field=models.CharField( + blank=True, default=None, editable=False, max_length=255, null=True + ), ), ] diff --git a/app/tests/data_factory.py b/app/tests/data_factory.py index 24b1c47d..10994774 100644 --- a/app/tests/data_factory.py +++ b/app/tests/data_factory.py @@ -759,8 +759,6 @@ def get_json(self, method='get', keep_read_only=False): ''' data = super().get_json(method, keep_read_only) item = data.pop('item') - if method in ['get', 'serialize']: - data['item'] = item.name if 'href' in data and isinstance(data['href'], File): data['href'] = \ f'http://{settings.AWS_S3_CUSTOM_DOMAIN}/{item.collection.name}/{item.name}/{data["href"].name}' diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index b13c5b80..ac6fa5c6 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -17,8 +17,8 @@ from tests.base_test import StacBaseTransactionTestCase from tests.data_factory import Factory from tests.utils import S3TestMixin -from tests.utils import disableLogger from tests.utils import client_login +from tests.utils import disableLogger from tests.utils import mock_s3_asset_file logger = logging.getLogger(__name__) @@ -58,9 +58,7 @@ def test_assets_endpoint(self): 3, len(json_data['assets']), msg='Number of assets doesn\'t match the expected' ) for i, asset in enumerate([self.asset_1, asset_2, asset_3]): - self.check_stac_asset( - asset.json, json_data['assets'][i], collection_name, item_name, ignore=['item'] - ) + self.check_stac_asset(asset.json, json_data['assets'][i], collection_name, item_name) def test_assets_endpoint_collection_does_not_exist(self): collection_name = 
"non-existent" @@ -89,9 +87,7 @@ def test_single_asset_endpoint(self): self.assertStatusCode(200, response) logger.debug('Response (%s):\n%s', type(json_data), pformat(json_data)) - self.check_stac_asset( - self.asset_1.json, json_data, collection_name, item_name, ignore=['item'] - ) + self.check_stac_asset(self.asset_1.json, json_data, collection_name, item_name) # The ETag change between each test call due to the created, updated time that are in the # hash computation of the ETag @@ -121,13 +117,13 @@ def test_asset_endpoint_post_only_required(self): json_data = response.json() self.assertStatusCode(201, response) self.check_header_location(f"{path}/{asset['name']}", response) - self.check_stac_asset(asset.json, json_data, collection_name, item_name, ignore=['item']) + self.check_stac_asset(asset.json, json_data, collection_name, item_name) # Check the data by reading it back response = self.client.get(response['Location']) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_asset(asset.json, json_data, collection_name, item_name, ignore=['item']) + self.check_stac_asset(asset.json, json_data, collection_name, item_name) # make sure that the optional fields are not present self.assertNotIn('geoadmin:lang', json_data) @@ -150,7 +146,7 @@ def test_asset_endpoint_post_full(self): json_data = response.json() self.assertStatusCode(201, response) self.check_header_location(f"{path}/{asset['name']}", response) - self.check_stac_asset(asset.json, json_data, collection_name, item_name, ignore=['item']) + self.check_stac_asset(asset.json, json_data, collection_name, item_name) # make sure that all optional fields are present self.assertIn('geoadmin:lang', json_data) @@ -167,7 +163,7 @@ def test_asset_endpoint_post_full(self): response = self.client.get(response['Location']) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_asset(asset.json, json_data, collection_name, item_name, ignore=['item']) + self.check_stac_asset(asset.json, json_data, collection_name, item_name) def test_asset_endpoint_post_empty_string(self): collection_name = self.collection.name @@ -424,7 +420,9 @@ def setUp(self): # pylint: disable=invalid-name def test_asset_upsert_create(self): collection = self.collection.model item = self.item.model - asset = self.factory.create_asset_sample(item=item, create_asset_file=True) + asset = self.factory.create_asset_sample( + item=item, sample='asset-no-checksum', create_asset_file=False + ) asset_name = asset['name'] response = self.client.get( @@ -434,14 +432,12 @@ def test_asset_upsert_create(self): self.assertStatusCode(404, response) # Check also, that the asset does not exist in the DB already - self.assertFalse( - Asset.objects.filter(name=asset_name).exists(), msg="Deleted asset still found in DB" - ) + self.assertFalse(Asset.objects.filter(name=asset_name).exists(), msg="Asset already exists") - # Now use upsert to create the new assert + # Now use upsert to create the new asset response = self.client.put( reverse('asset-detail', args=[collection.name, item.name, asset_name]), - data=asset.get_json('post'), + data=asset.get_json('put'), content_type="application/json" ) json_data = response.json() @@ -449,18 +445,18 @@ def test_asset_upsert_create(self): self.check_header_location( reverse('asset-detail', args=[collection.name, item.name, asset_name]), response ) - self.check_stac_asset(asset.json, json_data, collection.name, item.name, ignore=['item']) + self.check_stac_asset(asset.json, json_data, 
collection.name, item.name) # Check the data by reading it back response = self.client.get(response['Location']) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_asset(asset.json, json_data, collection.name, item.name, ignore=['item']) + self.check_stac_asset(asset.json, json_data, collection.name, item.name) def test_asset_upsert_create_non_existing_parent_item_in_path(self): collection = self.collection.model item = self.item.model - asset = self.factory.create_asset_sample(item=item, create_asset_file=True) + asset = self.factory.create_asset_sample(item=item, create_asset_file=False) asset_name = asset['name'] path = ( @@ -479,14 +475,14 @@ def test_asset_upsert_create_non_existing_parent_item_in_path(self): # Now use upsert to create the new asset response = self.client.put( - path, data=asset.get_json('post'), content_type="application/json" + path, data=asset.get_json('put'), content_type="application/json" ) self.assertStatusCode(404, response) def test_asset_upsert_create_non_existing_parent_collection_in_path(self): collection = self.collection.model item = self.item.model - asset = self.factory.create_asset_sample(item=item, create_asset_file=True) + asset = self.factory.create_asset_sample(item=item, create_asset_file=False) asset_name = asset['name'] path = ( @@ -528,17 +524,13 @@ def test_asset_endpoint_put(self): ) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_asset( - changed_asset.json, json_data, collection_name, item_name, ignore=['item'] - ) + self.check_stac_asset(changed_asset.json, json_data, collection_name, item_name) # Check the data by reading it back response = self.client.get(path) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_asset( - changed_asset.json, json_data, collection_name, item_name, ignore=['item'] - ) + self.check_stac_asset(changed_asset.json, json_data, collection_name, item_name) def test_asset_endpoint_put_extra_payload(self): collection_name = self.collection['name'] @@ -610,9 +602,7 @@ def test_asset_endpoint_put_rename_asset(self): self.assertStatusCode(200, response) json_data = response.json() self.assertEqual(changed_asset.json['id'], json_data['id']) - self.check_stac_asset( - changed_asset.json, json_data, collection_name, item_name, ignore=['item'] - ) + self.check_stac_asset(changed_asset.json, json_data, collection_name, item_name) # Check the data by reading it back response = self.client.get( @@ -621,9 +611,7 @@ def test_asset_endpoint_put_rename_asset(self): ) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_asset( - changed_asset.json, json_data, collection_name, item_name, ignore=['item'] - ) + self.check_stac_asset(changed_asset.json, json_data, collection_name, item_name) def test_asset_endpoint_patch_rename_asset(self): collection_name = self.collection['name'] @@ -641,9 +629,7 @@ def test_asset_endpoint_patch_rename_asset(self): json_data = response.json() self.assertStatusCode(200, response) self.assertEqual(changed_asset.json['id'], json_data['id']) - self.check_stac_asset( - changed_asset.json, json_data, collection_name, item_name, ignore=['item'] - ) + self.check_stac_asset(changed_asset.json, json_data, collection_name, item_name) # Check the data by reading it back response = self.client.get( @@ -653,9 +639,7 @@ def test_asset_endpoint_patch_rename_asset(self): json_data = response.json() self.assertStatusCode(200, response) self.assertEqual(changed_asset.json['id'], 
json_data['id']) - self.check_stac_asset( - changed_asset.json, json_data, collection_name, item_name, ignore=['item'] - ) + self.check_stac_asset(changed_asset.json, json_data, collection_name, item_name) def test_asset_endpoint_patch_extra_payload(self): collection_name = self.collection['name'] @@ -781,7 +765,10 @@ def setUp(self): def test_asset_upsert_race_condition(self): workers = 5 status_201 = 0 - asset_sample = self.factory.create_asset_sample(self.item_sample.model, sample='asset-2') + asset_sample = self.factory.create_asset_sample( + self.item_sample.model, + sample='asset-no-checksum', + ) def asset_atomic_upsert_test(worker): # This method run on separate thread therefore it requires to create a new client and @@ -824,7 +811,9 @@ def asset_atomic_upsert_test(worker): def test_asset_post_race_condition(self): workers = 5 status_201 = 0 - asset_sample = self.factory.create_asset_sample(self.item_sample.model, sample='asset-2') + asset_sample = self.factory.create_asset_sample( + self.item_sample.model, sample='asset-no-checksum' + ) def asset_atomic_post_test(worker): # This method run on separate thread therefore it requires to create a new client and diff --git a/app/tests/test_serializer.py b/app/tests/test_serializer.py index df644659..c3351fc2 100644 --- a/app/tests/test_serializer.py +++ b/app/tests/test_serializer.py @@ -275,7 +275,6 @@ def test_item_serialization(self): collection_name = self.collection.model.name expected_asset = self.asset.json expected_asset.pop('id') - expected_asset.pop('item') expected = self.item.json expected.update({ 'assets': { @@ -579,9 +578,7 @@ def test_asset_deserialization_create(self): # ignoring item below, as it is a "write_only" field in the asset's serializer. # it will not be present in the mocked request's data. - self.check_stac_asset( - sample.json, python_native, collection_name, item_name, ignore=['item'] - ) + self.check_stac_asset(sample.json, python_native, collection_name, item_name) def test_asset_deserialization_create_required_fields_only(self): sample = self.data_factory.create_asset_sample( @@ -619,9 +616,7 @@ def test_asset_deserialization_create_required_fields_only(self): # ignoring item below, as it is a "write_only" field in the asset's serializer. # it will not be present in the mocked request's data. 
- self.check_stac_asset( - sample.json, python_native, collection_name, item_name, ignore=['item'] - ) + self.check_stac_asset(sample.json, python_native, collection_name, item_name) def test_asset_deserialization_create_invalid_data(self): sample = self.data_factory.create_asset_sample(item=self.item.model, sample='asset-invalid') From 48aaaea744056e3e32e5da1456e6798869c8eb79 Mon Sep 17 00:00:00 2001 From: Isabelle Buholzer Date: Tue, 6 Apr 2021 15:51:32 +0200 Subject: [PATCH 054/105] BGDIINF_SB-1670: Correct transactional examples in STAC spec --- spec/components/schemas.yml | 2 +- spec/static/spec/v0.9/openapi.yaml | 2 +- .../spec/v0.9/openapitransactional.yaml | 1352 ++++++++--------- spec/transaction/transaction.yml | 36 +- 4 files changed, 666 insertions(+), 726 deletions(-) diff --git a/spec/components/schemas.yml b/spec/components/schemas.yml index ae49cb67..b33eb68d 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yml @@ -708,7 +708,7 @@ components: type: string type: description: Media type of the asset - example: image/png + example: image/tiff; application=geotiff type: string proj:epsg: $ref: "#/components/schemas/proj:epsg" diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index 08c81d00..885490eb 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -922,7 +922,7 @@ components: type: string type: description: Media type of the asset - example: image/png + example: image/tiff; application=geotiff type: string proj:epsg: $ref: "#/components/schemas/proj:epsg" diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index e6d33357..ded2d85c 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -11,72 +11,72 @@ info: title: The SpatioTemporal Asset Catalog API for data.geo.admin.ch version: 0.9.0 servers: -- description: Data.geo.admin.ch - url: http://data.geo.admin.ch/api/stac/v0.9 + - description: Data.geo.admin.ch + url: http://data.geo.admin.ch/api/stac/v0.9 tags: -- description: Essential characteristics of this API - name: Capabilities -- description: Access to data (features) - name: Data -- description: Extension to OGC API - Features to support STAC metadata model and - search API - name: STAC -- description: | - All write requests require authentication. The currently available options for a user to - authenticate himself are described below. - - # Session authentication - When using the browsable API the user can simply use the admin interface for logging in. - Once logged in, the browsable API can be used to perform write requests. - - # Basic authentication - The username and password for authentication can be added to every write request the user wants to perform. 
- Here is an example of posting an asset using curl (_username_="MickeyMouse", _password_="I_love_Minnie_Mouse"): - ``` - curl --request POST \ - --user MickeyMouse:I_love_Minnie_Mouse \ - --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ - --header 'Content-Type: application/json' \ - --data '{ - "id": "fancy_unique_id", - "item": "swisstlmregio-2020", - "title": "My title", - "type": "application/x.filegdb+zip", - "description": "My description", - "proj:epsg": 2056, - "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" - }' - ``` - - # Token authentication - A user specific token for authentication can be added to every write request the user wants to perform. - Here is an example of posting an asset using curl: - ``` - curl --request POST \ - --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ - --header 'Authorization: Token ccecf40693bfc52ba090cd46eb7f19e723fe831f' \ - --header 'Content-Type: application/json' \ - --data '{ - "id": "fancy_unique_id", - "item": "swisstlmregio-2020", - "title": "My title", - "type": "application/x.filegdb+zip", - "description": "My description", - "proj:epsg": 2056, - "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" - }' - ``` - Tokens can either be generated in the admin interface or existing users can perform a POST request - on the get-token endpoint to request a token (also see description of the get-token POST endpoint - at the bottom). - Here is an example using curl: - ``` - curl --request POST \ - --url https://service-stac.dev.bgdi.ch/api/stac/get-token \ - --header 'Content-Type: application/json' \ - --data '{"username": "MickeyMouse", "password": "I_love_Minnie_Mouse"}' - ``` - name: Data Management + - description: Essential characteristics of this API + name: Capabilities + - description: Access to data (features) + name: Data + - description: Extension to OGC API - Features to support STAC metadata model and + search API + name: STAC + - description: | + All write requests require authentication. The currently available options for a user to + authenticate himself are described below. + + # Session authentication + When using the browsable API the user can simply use the admin interface for logging in. + Once logged in, the browsable API can be used to perform write requests. + + # Basic authentication + The username and password for authentication can be added to every write request the user wants to perform. + Here is an example of posting an asset using curl (_username_="MickeyMouse", _password_="I_love_Minnie_Mouse"): + ``` + curl --request POST \ + --user MickeyMouse:I_love_Minnie_Mouse \ + --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ + --header 'Content-Type: application/json' \ + --data '{ + "id": "fancy_unique_id", + "item": "swisstlmregio-2020", + "title": "My title", + "type": "application/x.filegdb+zip", + "description": "My description", + "proj:epsg": 2056, + "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" + }' + ``` + + # Token authentication + A user specific token for authentication can be added to every write request the user wants to perform. 
+ Here is an example of posting an asset using curl: + ``` + curl --request POST \ + --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ + --header 'Authorization: Token ccecf40693bfc52ba090cd46eb7f19e723fe831f' \ + --header 'Content-Type: application/json' \ + --data '{ + "id": "fancy_unique_id", + "item": "swisstlmregio-2020", + "title": "My title", + "type": "application/x.filegdb+zip", + "description": "My description", + "proj:epsg": 2056, + "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" + }' + ``` + Tokens can either be generated in the admin interface or existing users can perform a POST request + on the get-token endpoint to request a token (also see description of the get-token POST endpoint + at the bottom). + Here is an example using curl: + ``` + curl --request POST \ + --url https://service-stac.dev.bgdi.ch/api/stac/get-token \ + --header 'Content-Type: application/json' \ + --data '{"username": "MickeyMouse", "password": "I_love_Minnie_Mouse"}' + ``` + name: Data Management components: parameters: bbox: @@ -263,9 +263,9 @@ components: application/json: example: conformsTo: - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30 - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30 + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson schema: $ref: "#/components/schemas/confClasses" description: >- @@ -327,26 +327,26 @@ components: description: Catalog of Swiss Geodata Downloads id: ch links: - - href: http://data.geo.admin.ch/api/stac/v0.9/ - rel: self - type: application/json - title: this document - - href: http://data.geo.admin.ch/api/stac/v0.9/static/api.html - rel: service-doc - type: text/html - title: the API documentation - - href: http://data.geo.admin.ch/api/stac/v0.9/conformance - rel: conformance - type: application/json - title: OGC API conformance classes implemented by this server - - href: http://data.geo.admin.ch/api/stac/v0.9/collections - rel: data - type: application/json - title: Information about the feature collections - - href: http://data.geo.admin.ch/api/stac/v0.9/search - rel: search - type: application/json - title: Search across feature collections + - href: http://data.geo.admin.ch/api/stac/v0.9/ + rel: self + type: application/json + title: this document + - href: http://data.geo.admin.ch/api/stac/v0.9/static/api.html + rel: service-doc + type: text/html + title: the API documentation + - href: http://data.geo.admin.ch/api/stac/v0.9/conformance + rel: conformance + type: application/json + title: OGC API conformance classes implemented by this server + - href: http://data.geo.admin.ch/api/stac/v0.9/collections + rel: data + type: application/json + title: Information about the feature collections + - href: http://data.geo.admin.ch/api/stac/v0.9/search + rel: search + type: application/json + title: Search across feature collections stac_version: 0.9.0 title: data.geo.admin.ch schema: @@ -419,11 +419,11 @@ components: The array contain at least a link to the parent resource (`rel: parent`). 
example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: parent required: - - code - - links + - code + - links BadRequest: description: The request was malformed or semantically invalid content: @@ -472,10 +472,10 @@ components: and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 + - 5.96 + - 45.82 + - 10.49 + - 47.81 items: type: number maxItems: 4 @@ -511,10 +511,10 @@ components: and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 + - 5.96 + - 45.82 + - 10.49 + - 47.81 items: type: number maxItems: 4 @@ -543,10 +543,10 @@ components: properties: crs: default: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 description: The list of coordinate reference systems supported by the service example: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 items: type: string type: array @@ -584,20 +584,20 @@ components: $ref: "#/components/schemas/license" links: example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: items - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: items + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby items: $ref: "#/components/schemas/link" type: array @@ -610,31 +610,31 @@ components: summaries: additionalProperties: oneOf: - - items: - description: A value of any type. - title: Set of values - type: array - - description: >- - By default, only ranges with a minimum and a maximum value can be - specified. Ranges can be specified for ordinal values only, which - means they need to have a rank order. Therefore, ranges can only be - specified for numbers and some special types of strings. Examples: - grades (A to F), dates or times. Implementors are free to add other - derived statistical values to the object, for example `mean` or `stddev`. 
- properties: - max: - anyOf: - - type: string - - type: number - min: - anyOf: - - type: string - - type: number - required: - - min - - max - title: Statistics - type: object + - items: + description: A value of any type. + title: Set of values + type: array + - description: >- + By default, only ranges with a minimum and a maximum value can be + specified. Ranges can be specified for ordinal values only, which + means they need to have a rank order. Therefore, ranges can only be + specified for numbers and some special types of strings. Examples: + grades (A to F), dates or times. Implementors are free to add other + derived statistical values to the object, for example `mean` or `stddev`. + properties: + max: + anyOf: + - type: string + - type: number + min: + anyOf: + - type: string + - type: number + required: + - min + - max + title: Statistics + type: object description: >- Summaries are either a unique set of all available values *or* statistics. Statistics by default only specify the range (minimum and maximum values), @@ -650,14 +650,14 @@ components: readOnly: true example: eo:gsd: - - 10 - - 20 + - 10 + - 20 geoadmin:variant: - - kgrel - - komb - - krel + - kgrel + - komb + - krel proj:epsg: - - 2056 + - 2056 title: description: Human readable title of the collection example: National Map 1:200'000 @@ -667,14 +667,14 @@ components: updated: $ref: "#/components/schemas/updated" required: - - id - - links - - stac_version - - description - - license - - extent - - created - - updated + - id + - links + - stac_version + - description + - license + - extent + - created + - updated type: object collections: properties: @@ -686,17 +686,17 @@ components: items: $ref: "#/components/schemas/link" example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10ab - rel: next - - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10cd - rel: previous + - href: https://data.geo.admin.ch/api/stac/v0.9/collections + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10ab + rel: next + - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10cd + rel: previous required: - - links - - collections + - links + - collections type: object collectionsArray: description: >- @@ -713,8 +713,8 @@ components: type: object example: collections: - - ch.swisstopo.swisstlmregio - - ch.bfe.energieschweiz + - ch.swisstopo.swisstlmregio + - ch.bfe.energieschweiz confClasses: properties: conformsTo: @@ -722,7 +722,7 @@ components: type: string type: array required: - - conformsTo + - conformsTo type: object datetime: description: RFC 3339 compliant datetime string @@ -792,15 +792,15 @@ components: example: 500 description: anyOf: - - type: string - - type: array - items: - anyOf: - - type: string - - type: object - - type: object + - type: string + - type: array + items: + anyOf: + - type: string + - type: object + - type: object required: - - code + - code type: object extent: description: >- @@ -852,10 +852,10 @@ components: decision of the server whether only a single spatial geometry property is used to determine the extent or all relevant geometries. 
example: - - 5.685114 - - 45.534903 - - 10.747775 - - 47.982586 + - 5.685114 + - 45.534903 + - 10.747775 + - 47.982586 items: type: number maxItems: 6 @@ -864,7 +864,7 @@ components: minItems: 1 type: array required: - - bbox + - bbox type: object temporal: description: The temporal extent of the features in the collection. @@ -876,8 +876,8 @@ components: description: >- Begin and end times of the time interval. example: - - "2019-01-01T00:00:00Z" - - "2019-01-02T00:00:00Z" + - "2019-01-01T00:00:00Z" + - "2019-01-02T00:00:00Z" items: format: date-time nullable: false @@ -889,20 +889,20 @@ components: maxItems: 1 type: array required: - - interval + - interval type: object required: - - spatial - - temporal + - spatial + - temporal type: object readOnly: true geoadmin:lang: enum: - - de - - it - - fr - - rm - - en + - de + - it + - fr + - rm + - en title: Product language type: string geoadmin:variant: @@ -911,7 +911,7 @@ components: type: string geometryGeoJSON: oneOf: - - $ref: "#/components/schemas/polygonGeoJSON" + - $ref: "#/components/schemas/polygonGeoJSON" geometrycollectionGeoJSON: properties: geometries: @@ -920,11 +920,11 @@ components: type: array type: enum: - - GeometryCollection + - GeometryCollection type: string required: - - type - - geometries + - type + - geometries type: object ids: description: >- @@ -941,8 +941,8 @@ components: type: object example: ids: - - swisstlmregio-2019 - - swisstlmregio-2020 + - swisstlmregio-2019 + - swisstlmregio-2020 intersectsFilter: description: Only returns items that intersect with the provided polygon. properties: @@ -953,8 +953,8 @@ components: intersects: type: "Point" coordinates: - - 7 - - 46 + - 7 + - 46 itemBase: description: >- A GeoJSON Feature augmented with foreign members that contain values relevant @@ -977,36 +977,36 @@ components: type: $ref: "#/components/schemas/itemType" required: - - stac_version - - type - - geometry - - bbox - - properties - - assets + - stac_version + - type + - geometry + - bbox + - properties + - assets type: object item: allOf: - - type: object - required: - - id - - links - properties: - id: - $ref: "#/components/schemas/itemId" - links: - items: - $ref: "#/components/schemas/link" - type: array - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr50-263-2016 - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: collection - - $ref: "#/components/schemas/itemBase" + - type: object + required: + - id + - links + properties: + id: + $ref: "#/components/schemas/itemId" + links: + items: + $ref: "#/components/schemas/link" + type: array + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr50-263-2016 + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection + - $ref: "#/components/schemas/itemBase" items: description: >- A FeatureCollection augmented with foreign members that contain values relevant @@ -1021,23 +1021,23 @@ components: $ref: 
"#/components/schemas/link" type: array example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10ab - rel: next - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10acd - rel: previous + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10ab + rel: next + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10acd + rel: previous type: enum: - - FeatureCollection + - FeatureCollection type: string required: - - features - - type + - features + - type type: object itemAssets: additionalProperties: @@ -1064,7 +1064,7 @@ components: type: string type: description: Media type of the asset - example: image/png + example: image/tiff; application=geotiff type: string proj:epsg: $ref: "#/components/schemas/proj:epsg" @@ -1075,11 +1075,11 @@ components: updated: $ref: "#/components/schemas/created" required: - - "checksum:multihash" - - href - - type - - created - - updated + - "checksum:multihash" + - href + - type + - created + - updated type: object type: object readOnly: true @@ -1122,35 +1122,35 @@ components: type: array type: enum: - - FeatureCollection + - FeatureCollection type: string required: - - features - - type + - features + - type type: object itemsSearchGet: allOf: - - $ref: "#/components/schemas/itemsSearch" - - type: object - properties: - links: - $ref: "#/components/schemas/itemsSearchLinks" + - $ref: "#/components/schemas/itemsSearch" + - type: object + properties: + links: + $ref: "#/components/schemas/itemsSearchLinks" itemsSearchPost: allOf: - - $ref: "#/components/schemas/itemsSearch" - - type: object - properties: - links: - $ref: "#/components/schemas/itemsSearchPostLinks" + - $ref: "#/components/schemas/itemsSearch" + - type: object + properties: + links: + $ref: "#/components/schemas/itemsSearchPostLinks" itemsSearchLinks: description: >- An array of links. Can be used for pagination, e.g. by providing a link with the `next` relation type. example: - - href: https://data.geo.admin.ch/api/stac/v0.9/search - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab - rel: next + - href: https://data.geo.admin.ch/api/stac/v0.9/search + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab + rel: next items: $ref: "#/components/schemas/link" type: array @@ -1159,13 +1159,13 @@ components: An array of links. Can be used for pagination, e.g. by providing a link with the `next` relation type. 
example: - - href: https://data.geo.admin.ch/api/stac/v0.9/search - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab - rel: next - method: POST - body: {} - merge: true + - href: https://data.geo.admin.ch/api/stac/v0.9/search + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab + rel: next + method: POST + body: {} + merge: true items: $ref: "#/components/schemas/linkPostSearch" type: array @@ -1202,13 +1202,13 @@ components: example: Feature title type: string required: - - created - - updated + - created + - updated type: object itemType: description: The GeoJSON type enum: - - Feature + - Feature type: string readOnly: true landingPage: @@ -1232,10 +1232,10 @@ components: example: Buildings in Bonn type: string required: - - links - - stac_version - - id - - description + - links + - stac_version + - id + - description type: object license: description: >- @@ -1285,11 +1285,11 @@ components: type: array type: enum: - - LineString + - LineString type: string required: - - type - - coordinates + - type + - coordinates type: object link: properties: @@ -1317,35 +1317,35 @@ components: default: GET description: Specifies the HTTP method that the link expects enum: - - GET - - POST + - GET + - POST type: string required: - - href - - rel + - href + - rel title: Link type: object linkPostSearch: allOf: - - $ref: "#/components/schemas/link" - - type: object - properties: - body: - default: {} - description: For `POST /search` requests, the link can specify the HTTP - body as a JSON object. - type: object - merge: - default: false - description: >- - This is only valid when the server is responding to `POST /search `request. + - $ref: "#/components/schemas/link" + - type: object + properties: + body: + default: {} + description: For `POST /search` requests, the link can specify the HTTP + body as a JSON object. + type: object + merge: + default: false + description: >- + This is only valid when the server is responding to `POST /search `request. - If merge is true, the client is expected to merge the body value into - the current request body before following the link. This avoids passing - large post bodies back and forth when following links, particularly - for navigating pages through the `POST /search` endpoint. - type: boolean + If merge is true, the client is expected to merge the body value into + the current request body before following the link. This avoids passing + large post bodies back and forth when following links, particularly + for navigating pages through the `POST /search` endpoint. 
+ type: boolean multilinestringGeoJSON: properties: coordinates: @@ -1360,11 +1360,11 @@ components: type: array type: enum: - - MultiLineString + - MultiLineString type: string required: - - type - - coordinates + - type + - coordinates type: object multipointGeoJSON: properties: @@ -1377,11 +1377,11 @@ components: type: array type: enum: - - MultiPoint + - MultiPoint type: string required: - - type - - coordinates + - type + - coordinates type: object multipolygonGeoJSON: properties: @@ -1399,11 +1399,11 @@ components: type: array type: enum: - - MultiPolygon + - MultiPolygon type: string required: - - type - - coordinates + - type + - coordinates type: object numberMatched: description: >- @@ -1435,11 +1435,11 @@ components: type: array type: enum: - - Point + - Point type: string required: - - type - - coordinates + - type + - coordinates type: object polygonGeoJSON: properties: @@ -1455,24 +1455,24 @@ components: type: array type: enum: - - Polygon + - Polygon type: string required: - - type - - coordinates + - type + - coordinates type: object example: coordinates: - - - - 7.242974548172171 - - 46.57310580640624 - - - 7.243756483316452 - - 46.35721185723752 - - - 7.698490766144817 - - 46.357085154660915 - - - 7.699524647567326 - - 46.57297861624267 - - - 7.242974548172171 - - 46.57310580640624 + - - - 7.242974548172171 + - 46.57310580640624 + - - 7.243756483316452 + - 46.35721185723752 + - - 7.698490766144817 + - 46.357085154660915 + - - 7.699524647567326 + - 46.57297861624267 + - - 7.242974548172171 + - 46.57310580640624 type: Polygon proj:epsg: description: >- @@ -1485,8 +1485,8 @@ components: example: 2056 title: EPSG code. type: - - integer - - null + - integer + - null providers: description: >- A list of providers, which may include all organizations capturing or processing @@ -1537,10 +1537,10 @@ components: element of the list. items: enum: - - producer - - licensor - - processor - - host + - producer + - licensor + - processor + - host type: string type: array url: @@ -1550,16 +1550,16 @@ components: format: url type: string required: - - name + - name title: Provider type: object type: array example: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch query: additionalProperties: $ref: "#/components/schemas/queryProp" @@ -1581,113 +1581,113 @@ components: type: object queryProp: anyOf: - - description: >- - If the object doesn't contain any of the operators, it is equivalent to - using the equals operator - - description: Match using an operator - properties: - contains: - description: >- - Find items with a property that contains the specified literal string, - e.g., matches ".*.*". A case-insensitive comparison must be - performed. - type: string - endsWith: - description: >- - Find items with a property that ends with the specified string. A case-insensitive - comparison must be performed. - type: string - eq: - description: >- - Find items with a property that is equal to the specified value. For - strings, a case-insensitive comparison must be performed. - nullable: true - oneOf: - - type: string - - type: number - - type: boolean - gt: - description: Find items with a property value greater than the specified - value. 
- oneOf: - - format: date-time + - description: >- + If the object doesn't contain any of the operators, it is equivalent to + using the equals operator + - description: Match using an operator + properties: + contains: + description: >- + Find items with a property that contains the specified literal string, + e.g., matches ".*.*". A case-insensitive comparison must be + performed. type: string - - type: number - gte: - description: Find items with a property value greater than or equal the - specified value. - oneOf: - - format: date-time + endsWith: + description: >- + Find items with a property that ends with the specified string. A case-insensitive + comparison must be performed. type: string - - type: number - in: - description: >- - Find items with a property that equals at least one entry in the specified - array. A case-insensitive comparison must be performed. - items: + eq: + description: >- + Find items with a property that is equal to the specified value. For + strings, a case-insensitive comparison must be performed. + nullable: true oneOf: - - type: string - - type: number - type: array - lt: - description: Find items with a property value less than the specified - value. - oneOf: - - format: date-time - type: string - - type: number - lte: - description: Find items with a property value less than or equal the specified - value. - oneOf: - - format: date-time + - type: string + - type: number + - type: boolean + gt: + description: Find items with a property value greater than the specified + value. + oneOf: + - format: date-time + type: string + - type: number + gte: + description: Find items with a property value greater than or equal the + specified value. + oneOf: + - format: date-time + type: string + - type: number + in: + description: >- + Find items with a property that equals at least one entry in the specified + array. A case-insensitive comparison must be performed. + items: + oneOf: + - type: string + - type: number + type: array + lt: + description: Find items with a property value less than the specified + value. + oneOf: + - format: date-time + type: string + - type: number + lte: + description: Find items with a property value less than or equal the specified + value. + oneOf: + - format: date-time + type: string + - type: number + neq: + description: >- + Find items that *don't* contain the specified value. For strings, a + case-insensitive comparison must be performed. + nullable: true + oneOf: + - type: string + - type: number + - type: boolean + startsWith: + description: >- + Find items with a property that begins with the specified string. A + case-insensitive comparison must be performed. type: string - - type: number - neq: - description: >- - Find items that *don't* contain the specified value. For strings, a - case-insensitive comparison must be performed. - nullable: true - oneOf: - - type: string - - type: number - - type: boolean - startsWith: - description: >- - Find items with a property that begins with the specified string. A - case-insensitive comparison must be performed. - type: string - type: object + type: object description: >- Apply query operations to a specific property. The following properties are currently supported: `created`, `updated`, `title`. 
searchBody: allOf: - - $ref: "#/components/schemas/queryFilter" - - $ref: "#/components/schemas/bboxFilter" - - $ref: "#/components/schemas/datetimeFilter" - - $ref: "#/components/schemas/intersectsFilter" - - $ref: "#/components/schemas/collectionsFilter" - - $ref: "#/components/schemas/idsFilter" - - $ref: "#/components/schemas/limitFilter" + - $ref: "#/components/schemas/queryFilter" + - $ref: "#/components/schemas/bboxFilter" + - $ref: "#/components/schemas/datetimeFilter" + - $ref: "#/components/schemas/intersectsFilter" + - $ref: "#/components/schemas/collectionsFilter" + - $ref: "#/components/schemas/idsFilter" + - $ref: "#/components/schemas/limitFilter" description: The search criteria type: object stac_extensions: items: anyOf: - - format: uri - title: Reference to a JSON Schema - type: string - - title: Reference to a core extension - type: string + - format: uri + title: Reference to a JSON Schema + type: string + - title: Reference to a core extension + type: string title: STAC extensions type: array uniqueItems: true readOnly: true example: - - proj - - eo - - https://data.geo.admin.ch/stac/v0.9/geoadmin-extension/1.0/schema.json + - proj + - eo + - https://data.geo.admin.ch/stac/v0.9/geoadmin-extension/1.0/schema.json stac_version: example: 0.9.0 title: STAC version @@ -1750,21 +1750,21 @@ components: $ref: "#/components/schemas/link" type: array example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 - rel: item - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: collection + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: item + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection assetBase: type: object required: - - created - - updated + - created + - updated properties: id: $ref: "#/components/schemas/assetId" @@ -1788,58 +1788,58 @@ components: $ref: "#/components/schemas/updated" itemAsset: allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - required: - - id - - type - - href - - checksum:multihash - - links - properties: - checksum:multihash: - $ref: "#/components/schemas/checksum:multihash" - href: - $ref: "#/components/schemas/href" - links: - items: - $ref: "#/components/schemas/link" - type: array - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: 
https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 - rel: item - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: collection + - $ref: "#/components/schemas/assetBase" + - type: object + required: + - id + - type + - href + - checksum:multihash + - links + properties: + checksum:multihash: + $ref: "#/components/schemas/checksum:multihash" + href: + $ref: "#/components/schemas/href" + links: + items: + $ref: "#/components/schemas/link" + type: array + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: item + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection itemAssetWrite: allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - required: - - id - - type + - $ref: "#/components/schemas/assetBase" + - type: object + required: + - id + - type itemAssetUpdate: allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - required: - - id - - type - properties: - id: - $ref: "#/components/schemas/updateAssetId" + - $ref: "#/components/schemas/assetBase" + - type: object + required: + - id + - type + properties: + id: + $ref: "#/components/schemas/updateAssetId" itemAssetPartialUpdate: allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - properties: - id: - $ref: "#/components/schemas/updateAssetId" + - $ref: "#/components/schemas/assetBase" + - type: object + properties: + id: + $ref: "#/components/schemas/updateAssetId" writeChecksumMultihash: description: >- `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) @@ -1878,34 +1878,34 @@ components: http://data.geo.admin.ch/ch.swisstopo.swissimage/collections/cs/items/CS3-20160503_132130_04/thumb.png writeItem: allOf: - - $ref: "#/components/schemas/itemBase" - - type: object - properties: - links: - items: - $ref: "#/components/schemas/link" - type: array - description: >- - Add additional link to the generated ones (`self`, `root`, `parent`, - `items`, `collection`, `next`, `previous`) + - $ref: "#/components/schemas/itemBase" + - type: object + properties: + links: + items: + $ref: "#/components/schemas/link" + type: array + description: >- + Add additional link to the generated ones (`self`, `root`, `parent`, + `items`, `collection`, `next`, `previous`) createItem: allOf: - - type: object - required: - - id - properties: - id: - $ref: "#/components/schemas/itemId" - - $ref: "#/components/schemas/writeItem" + - type: object + required: + - id + properties: + id: + $ref: "#/components/schemas/itemId" + - $ref: "#/components/schemas/writeItem" updateItem: allOf: - - type: object - required: - - id - properties: - id: - $ref: "#/components/schemas/itemIdUpdate" - - $ref: "#/components/schemas/writeItem" + - type: object + required: + - id + properties: + id: 
+ $ref: "#/components/schemas/itemIdUpdate" + - $ref: "#/components/schemas/writeItem" partialItem: type: object properties: @@ -1947,11 +1947,11 @@ components: items: $ref: "#/components/schemas/link" example: - - href: http://data.example.com/buildings - rel: item - - href: http://example.com/concepts/buildings.html - rel: describedBy - type: text/html + - href: http://data.example.com/buildings + rel: item + - href: http://example.com/concepts/buildings.html + rel: describedBy + type: text/html extent: $ref: "#/components/schemas/extent" itemType: @@ -1966,10 +1966,10 @@ components: items: type: string default: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 example: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 - - http://www.opengis.net/def/crs/EPSG/0/4326 + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/EPSG/0/4326 example: title: The new title of the collection roles: @@ -1978,7 +1978,7 @@ components: type: string description: Purposes of the asset example: - - thumbnail + - thumbnail title: type: string description: Displayed title @@ -1986,7 +1986,7 @@ components: type: type: string description: Media type of the asset - example: image/png + example: image/tiff; application=geotiff itemIdUpdate: description: >- Item identifier (unique per collection. If it doesn't match the `featureId` @@ -2007,12 +2007,12 @@ paths: $ref: "#/components/responses/ServerError" summary: Landing page tags: - - Capabilities + - Capabilities /collections: get: operationId: getCollections parameters: - - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/limit" responses: "200": $ref: "#/components/responses/Collections" @@ -2021,10 +2021,10 @@ paths: summary: Fetch collections description: The feature collections in the dataset tags: - - Data + - Data post: tags: - - Data Management + - Data Management summary: >- Create a new collection operationId: createCollection @@ -2039,18 +2039,18 @@ paths: id: ch.swisstopo.pixelkarte-farbe-pk200.noscale license: proprietary links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch title: National Map 1:200'000 responses: "201": @@ -2075,9 +2075,9 @@ paths: get: operationId: describeCollection parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" responses: "200": $ref: "#/components/responses/Collection" @@ -2092,18 +2092,18 @@ paths: summary: Fetch a single collection 
description: Describe the feature collection with id `collectionId` tags: - - Data + - Data put: tags: - - Data Management + - Data Management summary: Update or create a collection description: >- Update or create a collection with Id `collectionId` with a complete collection definition. If the collection doesn't exists it is then created. operationId: updateCollection parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/IfMatchWrite" requestBody: content: application/json: @@ -2115,18 +2115,18 @@ paths: id: ch.swisstopo.pixelkarte-farbe-pk200.noscale license: proprietary links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch title: National Map 1:200'000 responses: "200": @@ -2157,15 +2157,15 @@ paths: $ref: "#/components/responses/ServerError" patch: tags: - - Data Management + - Data Management summary: Partial update of a collection description: >- Update an existing collection with Id `collectionId` with a partial collection definition operationId: partialUpdateCollection parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/IfMatch" requestBody: content: application/json: @@ -2175,18 +2175,18 @@ paths: id: ch.swisstopo.pixelkarte-farbe-pk200.noscale license: proprietary links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch title: National Map 1:200'000 responses: "200": @@ -2217,10 +2217,10 @@ paths: Use content negotiation to request HTML or GeoJSON. 
operationId: getFeatures parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/limit" - - $ref: "#/components/parameters/bbox" - - $ref: "#/components/parameters/datetime" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/bbox" + - $ref: "#/components/parameters/datetime" responses: "200": $ref: "#/components/responses/Features" @@ -2232,60 +2232,45 @@ paths: $ref: "#/components/responses/ServerError" summary: Fetch features tags: - - Data + - Data post: summary: Add a new feature to a collection description: Create a new feature/item in a specific collection operationId: postFeature tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/collectionId" requestBody: content: application/json: schema: $ref: "#/components/schemas/createItem" example: - type: Feature - id: CS3-20160503_132131_05 + id: cs3-20160503_132131_05 geometry: type: Polygon coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 + - - - -122.308150179 + - 37.488035566 + - - -122.597502109 + - 37.538869539 + - - -122.576687533 + - 37.613537207 + - - -122.2880486 + - 37.562818007 + - - -122.308150179 + - 37.488035566 properties: datetime: "2016-05-03T13:22:30.040Z" title: A CS3 item - license: PDDL-1.0 - providers: - - name: CoolSat - roles: - - producer - - licensor - url: https://cool-sat.com/ - view:sun_azimuth: 168.7 - eo:cloud_cover: 0.12 - view:off_nadir: 1.4 - platform: coolsat2 - instruments: - - cool_sensor_v1 - view:sun_elevation: 33.4 links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby responses: "201": description: Return the created Item. @@ -2314,10 +2299,10 @@ paths: Use content negotiation to request HTML or GeoJSON. 
operationId: getFeature parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" responses: "200": $ref: "#/components/responses/Feature" @@ -2331,7 +2316,7 @@ paths: $ref: "#/components/responses/ServerError" summary: Fetch a single feature tags: - - Data + - Data put: summary: Update or create a feature description: >- @@ -2344,56 +2329,41 @@ paths: PUT payload doesn't, then the resource's properties.title will be removed.* operationId: putFeature tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatchWrite" requestBody: content: application/json: schema: $ref: "#/components/schemas/updateItem" example: - type: Feature - id: CS3-20160503_132131_05 + id: cs3-20160503_132131_05 geometry: type: Polygon coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 + - - - -122.308150179 + - 37.488035566 + - - -122.597502109 + - 37.538869539 + - - -122.576687533 + - 37.613537207 + - - -122.2880486 + - 37.562818007 + - - -122.308150179 + - 37.488035566 properties: datetime: "2016-05-03T13:22:30.040Z" title: A CS3 item - license: PDDL-1.0 - providers: - - name: CoolSat - roles: - - producer - - licensor - url: https://cool-sat.com/ - view:sun_azimuth: 168.7 - eo:cloud_cover: 0.12 - view:off_nadir: 1.4 - platform: coolsat2 - instruments: - - cool_sensor_v1 - view:sun_elevation: 33.4 links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby responses: "200": description: Returns the updated Item @@ -2418,11 +2388,11 @@ paths: the fields to be updated) be submitted. operationId: patchFeature tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatchWrite" requestBody: content: application/json: @@ -2462,11 +2432,11 @@ paths: description: Use this method to delete an existing feature/item. 
operationId: deleteFeature tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatchWrite" responses: "200": $ref: "#/components/responses/DeletedResource" @@ -2491,18 +2461,18 @@ paths: $ref: "#/components/responses/ServerError" summary: Information about specifications that this API conforms to tags: - - Capabilities + - Capabilities /search: get: description: >- Retrieve Items matching filters. Intended as a shorthand API for simple queries. operationId: getSearchSTAC parameters: - - $ref: "#/components/parameters/bbox" - - $ref: "#/components/parameters/datetime" - - $ref: "#/components/parameters/limit" - - $ref: "#/components/parameters/ids" - - $ref: "#/components/parameters/collectionsArray" + - $ref: "#/components/parameters/bbox" + - $ref: "#/components/parameters/datetime" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/ids" + - $ref: "#/components/parameters/collectionsArray" responses: "200": content: @@ -2514,7 +2484,7 @@ paths: $ref: "#/components/responses/ServerError" summary: Search STAC items with simple filtering. tags: - - STAC + - STAC post: description: >- Retrieve items matching filters. Intended as the standard, full-featured query @@ -2536,7 +2506,7 @@ paths: $ref: "#/components/responses/ServerError" summary: Search STAC items with full-featured filtering. tags: - - STAC + - STAC /collections/{collectionId}/items/{featureId}/assets: get: description: >- @@ -2545,8 +2515,8 @@ paths: Every asset belongs to an item. operationId: getAssets parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" responses: "200": $ref: "#/components/responses/Assets" @@ -2558,7 +2528,7 @@ paths: $ref: "#/components/responses/ServerError" summary: Fetch all assets for a feature tags: - - Data + - Data post: summary: Add a new asset to a feature description: >- @@ -2572,10 +2542,10 @@ paths: Optionally the Asset object multihash can be given for sanity check. operationId: postAsset tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" requestBody: content: application/json: @@ -2605,11 +2575,11 @@ paths: feature collection with id `collectionId`. operationId: getAsset parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" responses: "200": $ref: "#/components/responses/Asset" @@ -2623,7 +2593,7 @@ paths: $ref: "#/components/responses/ServerError" summary: Fetch a single asset tags: - - Data + - Data put: summary: Update or create an asset description: >- @@ -2631,12 +2601,12 @@ paths: If the asset doesn't exists it is then created. 
operationId: putAsset tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatchWrite" requestBody: content: application/json: @@ -2664,12 +2634,12 @@ paths: the fields to be updated) be submitted. operationId: patchAsset tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatchWrite" requestBody: content: application/json: @@ -2711,12 +2681,12 @@ paths: **NOTE: Asset file on S3 will be also removed !** operationId: deleteAsset tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatchWrite" responses: "200": $ref: "#/components/responses/DeletedResource" @@ -2730,10 +2700,10 @@ paths: $ref: "#/components/responses/ServerError" /get-token: servers: - - url: http://data.geo.admin.ch/api/stac/ + - url: http://data.geo.admin.ch/api/stac/ post: tags: - - Data Management + - Data Management summary: >- Request token for token authentication. 
operationId: getToken @@ -2751,8 +2721,8 @@ paths: type: string description: password of user for whom token is requested required: - - username - - password + - username + - password example: username: "Mickey Mouse" password: "I_love_Minnie_Mouse" diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index d7fa80a8..a3696bcd 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -171,8 +171,7 @@ paths: schema: $ref: "#/components/schemas/createItem" example: - type: Feature - id: CS3-20160503_132131_05 + id: cs3-20160503_132131_05 geometry: type: Polygon coordinates: @@ -189,20 +188,6 @@ paths: properties: datetime: "2016-05-03T13:22:30.040Z" title: A CS3 item - license: PDDL-1.0 - providers: - - name: CoolSat - roles: - - producer - - licensor - url: https://cool-sat.com/ - view:sun_azimuth: 168.7 - eo:cloud_cover: 0.12 - view:off_nadir: 1.4 - platform: coolsat2 - instruments: - - cool_sensor_v1 - view:sun_elevation: 33.4 links: - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html rel: license @@ -252,8 +237,7 @@ paths: schema: $ref: "#/components/schemas/updateItem" example: - type: Feature - id: CS3-20160503_132131_05 + id: cs3-20160503_132131_05 geometry: type: Polygon coordinates: @@ -270,20 +254,6 @@ paths: properties: datetime: "2016-05-03T13:22:30.040Z" title: A CS3 item - license: PDDL-1.0 - providers: - - name: CoolSat - roles: - - producer - - licensor - url: https://cool-sat.com/ - view:sun_azimuth: 168.7 - eo:cloud_cover: 0.12 - view:off_nadir: 1.4 - platform: coolsat2 - instruments: - - cool_sensor_v1 - view:sun_elevation: 33.4 links: - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html rel: license @@ -930,7 +900,7 @@ components: type: type: string description: Media type of the asset - example: image/png + example: image/tiff; application=geotiff # Overwrite the collection links examples # collection: # properties: From 6a12416454d5a168454408f9570bdd8dda1d13e0 Mon Sep 17 00:00:00 2001 From: Isabelle Buholzer Date: Fri, 9 Apr 2021 13:41:08 +0200 Subject: [PATCH 055/105] BGDIINF_SB-1670: Remove stac_extensions from spec --- spec/components/schemas.yml | 22 ------------------- spec/static/spec/v0.9/openapi.yaml | 22 ------------------- .../spec/v0.9/openapitransactional.yaml | 22 ------------------- 3 files changed, 66 deletions(-) diff --git a/spec/components/schemas.yml b/spec/components/schemas.yml index b33eb68d..16c718f3 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yml @@ -231,8 +231,6 @@ components: type: array providers: $ref: "#/components/schemas/providers" - stac_extensions: - $ref: "#/components/schemas/stac_extensions" stac_version: $ref: "#/components/schemas/stac_version" summaries: @@ -605,8 +603,6 @@ components: $ref: "#/components/schemas/geometryGeoJSON" properties: $ref: "#/components/schemas/itemProperties" - stac_extensions: - $ref: "#/components/schemas/stac_extensions" readOnly: true stac_version: $ref: "#/components/schemas/stac_version" @@ -868,8 +864,6 @@ components: items: $ref: "#/components/schemas/link" type: array - stac_extensions: - $ref: "#/components/schemas/stac_extensions" stac_version: $ref: "#/components/schemas/stac_version" title: @@ -1307,22 +1301,6 @@ components: - $ref: "#/components/schemas/limitFilter" description: The search criteria type: object - stac_extensions: - items: - anyOf: - - format: uri - title: Reference to a JSON Schema - type: string - - title: Reference to a 
core extension - type: string - title: STAC extensions - type: array - uniqueItems: true - readOnly: true - example: - - proj - - eo - - https://data.geo.admin.ch/stac/v0.9/geoadmin-extension/1.0/schema.json stac_version: example: 0.9.0 title: STAC version diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index 885490eb..bec86b93 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -461,8 +461,6 @@ components: type: array providers: $ref: "#/components/schemas/providers" - stac_extensions: - $ref: "#/components/schemas/stac_extensions" stac_version: $ref: "#/components/schemas/stac_version" summaries: @@ -826,8 +824,6 @@ components: $ref: "#/components/schemas/geometryGeoJSON" properties: $ref: "#/components/schemas/itemProperties" - stac_extensions: - $ref: "#/components/schemas/stac_extensions" readOnly: true stac_version: $ref: "#/components/schemas/stac_version" @@ -1082,8 +1078,6 @@ components: items: $ref: "#/components/schemas/link" type: array - stac_extensions: - $ref: "#/components/schemas/stac_extensions" stac_version: $ref: "#/components/schemas/stac_version" title: @@ -1526,22 +1520,6 @@ components: - $ref: "#/components/schemas/limitFilter" description: The search criteria type: object - stac_extensions: - items: - anyOf: - - format: uri - title: Reference to a JSON Schema - type: string - - title: Reference to a core extension - type: string - title: STAC extensions - type: array - uniqueItems: true - readOnly: true - example: - - proj - - eo - - https://data.geo.admin.ch/stac/v0.9/geoadmin-extension/1.0/schema.json stac_version: example: 0.9.0 title: STAC version diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index ded2d85c..757ba1ad 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -603,8 +603,6 @@ components: type: array providers: $ref: "#/components/schemas/providers" - stac_extensions: - $ref: "#/components/schemas/stac_extensions" stac_version: $ref: "#/components/schemas/stac_version" summaries: @@ -968,8 +966,6 @@ components: $ref: "#/components/schemas/geometryGeoJSON" properties: $ref: "#/components/schemas/itemProperties" - stac_extensions: - $ref: "#/components/schemas/stac_extensions" readOnly: true stac_version: $ref: "#/components/schemas/stac_version" @@ -1224,8 +1220,6 @@ components: items: $ref: "#/components/schemas/link" type: array - stac_extensions: - $ref: "#/components/schemas/stac_extensions" stac_version: $ref: "#/components/schemas/stac_version" title: @@ -1672,22 +1666,6 @@ components: - $ref: "#/components/schemas/limitFilter" description: The search criteria type: object - stac_extensions: - items: - anyOf: - - format: uri - title: Reference to a JSON Schema - type: string - - title: Reference to a core extension - type: string - title: STAC extensions - type: array - uniqueItems: true - readOnly: true - example: - - proj - - eo - - https://data.geo.admin.ch/stac/v0.9/geoadmin-extension/1.0/schema.json stac_version: example: 0.9.0 title: STAC version From bcb6334c1e6cc56234d2e6cf913160181adcc298 Mon Sep 17 00:00:00 2001 From: Isabelle Buholzer Date: Fri, 9 Apr 2021 15:25:54 +0200 Subject: [PATCH 056/105] BGDIINF_SB-1670: Minor formatting issues --- .../spec/v0.9/openapitransactional.yaml | 1298 ++++++++--------- 1 file changed, 649 insertions(+), 649 deletions(-) diff --git a/spec/static/spec/v0.9/openapitransactional.yaml 
b/spec/static/spec/v0.9/openapitransactional.yaml index 757ba1ad..8f28da9b 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -11,72 +11,72 @@ info: title: The SpatioTemporal Asset Catalog API for data.geo.admin.ch version: 0.9.0 servers: - - description: Data.geo.admin.ch - url: http://data.geo.admin.ch/api/stac/v0.9 +- description: Data.geo.admin.ch + url: http://data.geo.admin.ch/api/stac/v0.9 tags: - - description: Essential characteristics of this API - name: Capabilities - - description: Access to data (features) - name: Data - - description: Extension to OGC API - Features to support STAC metadata model and - search API - name: STAC - - description: | - All write requests require authentication. The currently available options for a user to - authenticate himself are described below. - - # Session authentication - When using the browsable API the user can simply use the admin interface for logging in. - Once logged in, the browsable API can be used to perform write requests. - - # Basic authentication - The username and password for authentication can be added to every write request the user wants to perform. - Here is an example of posting an asset using curl (_username_="MickeyMouse", _password_="I_love_Minnie_Mouse"): - ``` - curl --request POST \ - --user MickeyMouse:I_love_Minnie_Mouse \ - --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ - --header 'Content-Type: application/json' \ - --data '{ - "id": "fancy_unique_id", - "item": "swisstlmregio-2020", - "title": "My title", - "type": "application/x.filegdb+zip", - "description": "My description", - "proj:epsg": 2056, - "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" - }' - ``` - - # Token authentication - A user specific token for authentication can be added to every write request the user wants to perform. - Here is an example of posting an asset using curl: - ``` - curl --request POST \ - --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ - --header 'Authorization: Token ccecf40693bfc52ba090cd46eb7f19e723fe831f' \ - --header 'Content-Type: application/json' \ - --data '{ - "id": "fancy_unique_id", - "item": "swisstlmregio-2020", - "title": "My title", - "type": "application/x.filegdb+zip", - "description": "My description", - "proj:epsg": 2056, - "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" - }' - ``` - Tokens can either be generated in the admin interface or existing users can perform a POST request - on the get-token endpoint to request a token (also see description of the get-token POST endpoint - at the bottom). - Here is an example using curl: - ``` - curl --request POST \ - --url https://service-stac.dev.bgdi.ch/api/stac/get-token \ - --header 'Content-Type: application/json' \ - --data '{"username": "MickeyMouse", "password": "I_love_Minnie_Mouse"}' - ``` - name: Data Management +- description: Essential characteristics of this API + name: Capabilities +- description: Access to data (features) + name: Data +- description: Extension to OGC API - Features to support STAC metadata model and + search API + name: STAC +- description: | + All write requests require authentication. The currently available options for a user to + authenticate himself are described below. + + # Session authentication + When using the browsable API the user can simply use the admin interface for logging in. 
+ Once logged in, the browsable API can be used to perform write requests. + + # Basic authentication + The username and password for authentication can be added to every write request the user wants to perform. + Here is an example of posting an asset using curl (_username_="MickeyMouse", _password_="I_love_Minnie_Mouse"): + ``` + curl --request POST \ + --user MickeyMouse:I_love_Minnie_Mouse \ + --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ + --header 'Content-Type: application/json' \ + --data '{ + "id": "fancy_unique_id", + "item": "swisstlmregio-2020", + "title": "My title", + "type": "application/x.filegdb+zip", + "description": "My description", + "proj:epsg": 2056, + "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" + }' + ``` + + # Token authentication + A user specific token for authentication can be added to every write request the user wants to perform. + Here is an example of posting an asset using curl: + ``` + curl --request POST \ + --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ + --header 'Authorization: Token ccecf40693bfc52ba090cd46eb7f19e723fe831f' \ + --header 'Content-Type: application/json' \ + --data '{ + "id": "fancy_unique_id", + "item": "swisstlmregio-2020", + "title": "My title", + "type": "application/x.filegdb+zip", + "description": "My description", + "proj:epsg": 2056, + "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" + }' + ``` + Tokens can either be generated in the admin interface or existing users can perform a POST request + on the get-token endpoint to request a token (also see description of the get-token POST endpoint + at the bottom). + Here is an example using curl: + ``` + curl --request POST \ + --url https://service-stac.dev.bgdi.ch/api/stac/get-token \ + --header 'Content-Type: application/json' \ + --data '{"username": "MickeyMouse", "password": "I_love_Minnie_Mouse"}' + ``` + name: Data Management components: parameters: bbox: @@ -263,9 +263,9 @@ components: application/json: example: conformsTo: - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30 - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30 + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson schema: $ref: "#/components/schemas/confClasses" description: >- @@ -327,26 +327,26 @@ components: description: Catalog of Swiss Geodata Downloads id: ch links: - - href: http://data.geo.admin.ch/api/stac/v0.9/ - rel: self - type: application/json - title: this document - - href: http://data.geo.admin.ch/api/stac/v0.9/static/api.html - rel: service-doc - type: text/html - title: the API documentation - - href: http://data.geo.admin.ch/api/stac/v0.9/conformance - rel: conformance - type: application/json - title: OGC API conformance classes implemented by this server - - href: http://data.geo.admin.ch/api/stac/v0.9/collections - rel: data - type: application/json - title: Information about the feature collections - - href: http://data.geo.admin.ch/api/stac/v0.9/search - rel: search - type: application/json - title: Search across feature collections + - href: http://data.geo.admin.ch/api/stac/v0.9/ + rel: self + type: application/json + title: this document + - href: 
http://data.geo.admin.ch/api/stac/v0.9/static/api.html + rel: service-doc + type: text/html + title: the API documentation + - href: http://data.geo.admin.ch/api/stac/v0.9/conformance + rel: conformance + type: application/json + title: OGC API conformance classes implemented by this server + - href: http://data.geo.admin.ch/api/stac/v0.9/collections + rel: data + type: application/json + title: Information about the feature collections + - href: http://data.geo.admin.ch/api/stac/v0.9/search + rel: search + type: application/json + title: Search across feature collections stac_version: 0.9.0 title: data.geo.admin.ch schema: @@ -419,11 +419,11 @@ components: The array contain at least a link to the parent resource (`rel: parent`). example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: parent required: - - code - - links + - code + - links BadRequest: description: The request was malformed or semantically invalid content: @@ -472,10 +472,10 @@ components: and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 + - 5.96 + - 45.82 + - 10.49 + - 47.81 items: type: number maxItems: 4 @@ -511,10 +511,10 @@ components: and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 + - 5.96 + - 45.82 + - 10.49 + - 47.81 items: type: number maxItems: 4 @@ -543,10 +543,10 @@ components: properties: crs: default: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 description: The list of coordinate reference systems supported by the service example: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 items: type: string type: array @@ -584,20 +584,20 @@ components: $ref: "#/components/schemas/license" links: example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: items - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: items + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: 
https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby items: $ref: "#/components/schemas/link" type: array @@ -608,31 +608,31 @@ components: summaries: additionalProperties: oneOf: - - items: - description: A value of any type. - title: Set of values - type: array - - description: >- - By default, only ranges with a minimum and a maximum value can be - specified. Ranges can be specified for ordinal values only, which - means they need to have a rank order. Therefore, ranges can only be - specified for numbers and some special types of strings. Examples: - grades (A to F), dates or times. Implementors are free to add other - derived statistical values to the object, for example `mean` or `stddev`. - properties: - max: - anyOf: - - type: string - - type: number - min: - anyOf: - - type: string - - type: number - required: - - min - - max - title: Statistics - type: object + - items: + description: A value of any type. + title: Set of values + type: array + - description: >- + By default, only ranges with a minimum and a maximum value can be + specified. Ranges can be specified for ordinal values only, which + means they need to have a rank order. Therefore, ranges can only be + specified for numbers and some special types of strings. Examples: + grades (A to F), dates or times. Implementors are free to add other + derived statistical values to the object, for example `mean` or `stddev`. + properties: + max: + anyOf: + - type: string + - type: number + min: + anyOf: + - type: string + - type: number + required: + - min + - max + title: Statistics + type: object description: >- Summaries are either a unique set of all available values *or* statistics. Statistics by default only specify the range (minimum and maximum values), @@ -648,14 +648,14 @@ components: readOnly: true example: eo:gsd: - - 10 - - 20 + - 10 + - 20 geoadmin:variant: - - kgrel - - komb - - krel + - kgrel + - komb + - krel proj:epsg: - - 2056 + - 2056 title: description: Human readable title of the collection example: National Map 1:200'000 @@ -665,14 +665,14 @@ components: updated: $ref: "#/components/schemas/updated" required: - - id - - links - - stac_version - - description - - license - - extent - - created - - updated + - id + - links + - stac_version + - description + - license + - extent + - created + - updated type: object collections: properties: @@ -684,17 +684,17 @@ components: items: $ref: "#/components/schemas/link" example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10ab - rel: next - - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10cd - rel: previous + - href: https://data.geo.admin.ch/api/stac/v0.9/collections + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10ab + rel: next + - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10cd + rel: previous required: - - links - - collections + - links + - collections type: object collectionsArray: description: >- @@ -711,8 +711,8 @@ components: type: object example: collections: - - ch.swisstopo.swisstlmregio - - ch.bfe.energieschweiz + - ch.swisstopo.swisstlmregio + - ch.bfe.energieschweiz confClasses: properties: conformsTo: @@ -720,7 +720,7 @@ components: type: string type: array required: - - conformsTo + - 
conformsTo type: object datetime: description: RFC 3339 compliant datetime string @@ -790,15 +790,15 @@ components: example: 500 description: anyOf: - - type: string - - type: array - items: - anyOf: - - type: string - - type: object - - type: object + - type: string + - type: array + items: + anyOf: + - type: string + - type: object + - type: object required: - - code + - code type: object extent: description: >- @@ -850,10 +850,10 @@ components: decision of the server whether only a single spatial geometry property is used to determine the extent or all relevant geometries. example: - - 5.685114 - - 45.534903 - - 10.747775 - - 47.982586 + - 5.685114 + - 45.534903 + - 10.747775 + - 47.982586 items: type: number maxItems: 6 @@ -862,7 +862,7 @@ components: minItems: 1 type: array required: - - bbox + - bbox type: object temporal: description: The temporal extent of the features in the collection. @@ -874,8 +874,8 @@ components: description: >- Begin and end times of the time interval. example: - - "2019-01-01T00:00:00Z" - - "2019-01-02T00:00:00Z" + - "2019-01-01T00:00:00Z" + - "2019-01-02T00:00:00Z" items: format: date-time nullable: false @@ -887,20 +887,20 @@ components: maxItems: 1 type: array required: - - interval + - interval type: object required: - - spatial - - temporal + - spatial + - temporal type: object readOnly: true geoadmin:lang: enum: - - de - - it - - fr - - rm - - en + - de + - it + - fr + - rm + - en title: Product language type: string geoadmin:variant: @@ -909,7 +909,7 @@ components: type: string geometryGeoJSON: oneOf: - - $ref: "#/components/schemas/polygonGeoJSON" + - $ref: "#/components/schemas/polygonGeoJSON" geometrycollectionGeoJSON: properties: geometries: @@ -918,11 +918,11 @@ components: type: array type: enum: - - GeometryCollection + - GeometryCollection type: string required: - - type - - geometries + - type + - geometries type: object ids: description: >- @@ -939,8 +939,8 @@ components: type: object example: ids: - - swisstlmregio-2019 - - swisstlmregio-2020 + - swisstlmregio-2019 + - swisstlmregio-2020 intersectsFilter: description: Only returns items that intersect with the provided polygon. 
properties: @@ -951,8 +951,8 @@ components: intersects: type: "Point" coordinates: - - 7 - - 46 + - 7 + - 46 itemBase: description: >- A GeoJSON Feature augmented with foreign members that contain values relevant @@ -973,36 +973,36 @@ components: type: $ref: "#/components/schemas/itemType" required: - - stac_version - - type - - geometry - - bbox - - properties - - assets + - stac_version + - type + - geometry + - bbox + - properties + - assets type: object item: allOf: - - type: object - required: - - id - - links - properties: - id: - $ref: "#/components/schemas/itemId" - links: - items: - $ref: "#/components/schemas/link" - type: array - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr50-263-2016 - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: collection - - $ref: "#/components/schemas/itemBase" + - type: object + required: + - id + - links + properties: + id: + $ref: "#/components/schemas/itemId" + links: + items: + $ref: "#/components/schemas/link" + type: array + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr50-263-2016 + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection + - $ref: "#/components/schemas/itemBase" items: description: >- A FeatureCollection augmented with foreign members that contain values relevant @@ -1017,23 +1017,23 @@ components: $ref: "#/components/schemas/link" type: array example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10ab - rel: next - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10acd - rel: previous + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10ab + rel: next + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10acd + rel: previous type: enum: - - FeatureCollection + - FeatureCollection type: string required: - - features - - type + - features + - type type: object itemAssets: additionalProperties: @@ -1071,11 +1071,11 @@ components: updated: $ref: "#/components/schemas/created" required: - - "checksum:multihash" - - href - - type - - created - - updated + - "checksum:multihash" + - href + - type + - 
created + - updated type: object type: object readOnly: true @@ -1118,35 +1118,35 @@ components: type: array type: enum: - - FeatureCollection + - FeatureCollection type: string required: - - features - - type + - features + - type type: object itemsSearchGet: allOf: - - $ref: "#/components/schemas/itemsSearch" - - type: object - properties: - links: - $ref: "#/components/schemas/itemsSearchLinks" + - $ref: "#/components/schemas/itemsSearch" + - type: object + properties: + links: + $ref: "#/components/schemas/itemsSearchLinks" itemsSearchPost: allOf: - - $ref: "#/components/schemas/itemsSearch" - - type: object - properties: - links: - $ref: "#/components/schemas/itemsSearchPostLinks" + - $ref: "#/components/schemas/itemsSearch" + - type: object + properties: + links: + $ref: "#/components/schemas/itemsSearchPostLinks" itemsSearchLinks: description: >- An array of links. Can be used for pagination, e.g. by providing a link with the `next` relation type. example: - - href: https://data.geo.admin.ch/api/stac/v0.9/search - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab - rel: next + - href: https://data.geo.admin.ch/api/stac/v0.9/search + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab + rel: next items: $ref: "#/components/schemas/link" type: array @@ -1155,13 +1155,13 @@ components: An array of links. Can be used for pagination, e.g. by providing a link with the `next` relation type. example: - - href: https://data.geo.admin.ch/api/stac/v0.9/search - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab - rel: next - method: POST - body: {} - merge: true + - href: https://data.geo.admin.ch/api/stac/v0.9/search + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab + rel: next + method: POST + body: {} + merge: true items: $ref: "#/components/schemas/linkPostSearch" type: array @@ -1198,13 +1198,13 @@ components: example: Feature title type: string required: - - created - - updated + - created + - updated type: object itemType: description: The GeoJSON type enum: - - Feature + - Feature type: string readOnly: true landingPage: @@ -1226,10 +1226,10 @@ components: example: Buildings in Bonn type: string required: - - links - - stac_version - - id - - description + - links + - stac_version + - id + - description type: object license: description: >- @@ -1279,11 +1279,11 @@ components: type: array type: enum: - - LineString + - LineString type: string required: - - type - - coordinates + - type + - coordinates type: object link: properties: @@ -1311,35 +1311,35 @@ components: default: GET description: Specifies the HTTP method that the link expects enum: - - GET - - POST + - GET + - POST type: string required: - - href - - rel + - href + - rel title: Link type: object linkPostSearch: allOf: - - $ref: "#/components/schemas/link" - - type: object - properties: - body: - default: {} - description: For `POST /search` requests, the link can specify the HTTP - body as a JSON object. - type: object - merge: - default: false - description: >- - This is only valid when the server is responding to `POST /search `request. + - $ref: "#/components/schemas/link" + - type: object + properties: + body: + default: {} + description: For `POST /search` requests, the link can specify the HTTP + body as a JSON object. + type: object + merge: + default: false + description: >- + This is only valid when the server is responding to `POST /search `request. 
- If merge is true, the client is expected to merge the body value into - the current request body before following the link. This avoids passing - large post bodies back and forth when following links, particularly - for navigating pages through the `POST /search` endpoint. - type: boolean + If merge is true, the client is expected to merge the body value into + the current request body before following the link. This avoids passing + large post bodies back and forth when following links, particularly + for navigating pages through the `POST /search` endpoint. + type: boolean multilinestringGeoJSON: properties: coordinates: @@ -1354,11 +1354,11 @@ components: type: array type: enum: - - MultiLineString + - MultiLineString type: string required: - - type - - coordinates + - type + - coordinates type: object multipointGeoJSON: properties: @@ -1371,11 +1371,11 @@ components: type: array type: enum: - - MultiPoint + - MultiPoint type: string required: - - type - - coordinates + - type + - coordinates type: object multipolygonGeoJSON: properties: @@ -1393,11 +1393,11 @@ components: type: array type: enum: - - MultiPolygon + - MultiPolygon type: string required: - - type - - coordinates + - type + - coordinates type: object numberMatched: description: >- @@ -1429,11 +1429,11 @@ components: type: array type: enum: - - Point + - Point type: string required: - - type - - coordinates + - type + - coordinates type: object polygonGeoJSON: properties: @@ -1449,24 +1449,24 @@ components: type: array type: enum: - - Polygon + - Polygon type: string required: - - type - - coordinates + - type + - coordinates type: object example: coordinates: - - - - 7.242974548172171 - - 46.57310580640624 - - - 7.243756483316452 - - 46.35721185723752 - - - 7.698490766144817 - - 46.357085154660915 - - - 7.699524647567326 - - 46.57297861624267 - - - 7.242974548172171 - - 46.57310580640624 + - - - 7.242974548172171 + - 46.57310580640624 + - - 7.243756483316452 + - 46.35721185723752 + - - 7.698490766144817 + - 46.357085154660915 + - - 7.699524647567326 + - 46.57297861624267 + - - 7.242974548172171 + - 46.57310580640624 type: Polygon proj:epsg: description: >- @@ -1479,8 +1479,8 @@ components: example: 2056 title: EPSG code. type: - - integer - - null + - integer + - null providers: description: >- A list of providers, which may include all organizations capturing or processing @@ -1531,10 +1531,10 @@ components: element of the list. items: enum: - - producer - - licensor - - processor - - host + - producer + - licensor + - processor + - host type: string type: array url: @@ -1544,16 +1544,16 @@ components: format: url type: string required: - - name + - name title: Provider type: object type: array example: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch query: additionalProperties: $ref: "#/components/schemas/queryProp" @@ -1575,95 +1575,95 @@ components: type: object queryProp: anyOf: - - description: >- - If the object doesn't contain any of the operators, it is equivalent to - using the equals operator - - description: Match using an operator - properties: - contains: - description: >- - Find items with a property that contains the specified literal string, - e.g., matches ".*.*". A case-insensitive comparison must be - performed. 
+ - description: >- + If the object doesn't contain any of the operators, it is equivalent to + using the equals operator + - description: Match using an operator + properties: + contains: + description: >- + Find items with a property that contains the specified literal string, + e.g., matches ".*.*". A case-insensitive comparison must be + performed. + type: string + endsWith: + description: >- + Find items with a property that ends with the specified string. A case-insensitive + comparison must be performed. + type: string + eq: + description: >- + Find items with a property that is equal to the specified value. For + strings, a case-insensitive comparison must be performed. + nullable: true + oneOf: + - type: string + - type: number + - type: boolean + gt: + description: Find items with a property value greater than the specified + value. + oneOf: + - format: date-time type: string - endsWith: - description: >- - Find items with a property that ends with the specified string. A case-insensitive - comparison must be performed. + - type: number + gte: + description: Find items with a property value greater than or equal the + specified value. + oneOf: + - format: date-time type: string - eq: - description: >- - Find items with a property that is equal to the specified value. For - strings, a case-insensitive comparison must be performed. - nullable: true - oneOf: - - type: string - - type: number - - type: boolean - gt: - description: Find items with a property value greater than the specified - value. - oneOf: - - format: date-time - type: string - - type: number - gte: - description: Find items with a property value greater than or equal the - specified value. - oneOf: - - format: date-time - type: string - - type: number - in: - description: >- - Find items with a property that equals at least one entry in the specified - array. A case-insensitive comparison must be performed. - items: - oneOf: - - type: string - - type: number - type: array - lt: - description: Find items with a property value less than the specified - value. - oneOf: - - format: date-time - type: string - - type: number - lte: - description: Find items with a property value less than or equal the specified - value. - oneOf: - - format: date-time - type: string - - type: number - neq: - description: >- - Find items that *don't* contain the specified value. For strings, a - case-insensitive comparison must be performed. - nullable: true + - type: number + in: + description: >- + Find items with a property that equals at least one entry in the specified + array. A case-insensitive comparison must be performed. + items: oneOf: - - type: string - - type: number - - type: boolean - startsWith: - description: >- - Find items with a property that begins with the specified string. A - case-insensitive comparison must be performed. + - type: string + - type: number + type: array + lt: + description: Find items with a property value less than the specified + value. + oneOf: + - format: date-time type: string - type: object + - type: number + lte: + description: Find items with a property value less than or equal the specified + value. + oneOf: + - format: date-time + type: string + - type: number + neq: + description: >- + Find items that *don't* contain the specified value. For strings, a + case-insensitive comparison must be performed. + nullable: true + oneOf: + - type: string + - type: number + - type: boolean + startsWith: + description: >- + Find items with a property that begins with the specified string. 
A + case-insensitive comparison must be performed. + type: string + type: object description: >- Apply query operations to a specific property. The following properties are currently supported: `created`, `updated`, `title`. searchBody: allOf: - - $ref: "#/components/schemas/queryFilter" - - $ref: "#/components/schemas/bboxFilter" - - $ref: "#/components/schemas/datetimeFilter" - - $ref: "#/components/schemas/intersectsFilter" - - $ref: "#/components/schemas/collectionsFilter" - - $ref: "#/components/schemas/idsFilter" - - $ref: "#/components/schemas/limitFilter" + - $ref: "#/components/schemas/queryFilter" + - $ref: "#/components/schemas/bboxFilter" + - $ref: "#/components/schemas/datetimeFilter" + - $ref: "#/components/schemas/intersectsFilter" + - $ref: "#/components/schemas/collectionsFilter" + - $ref: "#/components/schemas/idsFilter" + - $ref: "#/components/schemas/limitFilter" description: The search criteria type: object stac_version: @@ -1728,21 +1728,21 @@ components: $ref: "#/components/schemas/link" type: array example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 - rel: item - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: collection + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: item + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection assetBase: type: object required: - - created - - updated + - created + - updated properties: id: $ref: "#/components/schemas/assetId" @@ -1766,58 +1766,58 @@ components: $ref: "#/components/schemas/updated" itemAsset: allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - required: - - id - - type - - href - - checksum:multihash - - links - properties: - checksum:multihash: - $ref: "#/components/schemas/checksum:multihash" - href: - $ref: "#/components/schemas/href" - links: - items: - $ref: "#/components/schemas/link" - type: array - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 - rel: item - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: collection + - $ref: "#/components/schemas/assetBase" + - 
type: object + required: + - id + - type + - href + - checksum:multihash + - links + properties: + checksum:multihash: + $ref: "#/components/schemas/checksum:multihash" + href: + $ref: "#/components/schemas/href" + links: + items: + $ref: "#/components/schemas/link" + type: array + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: item + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection itemAssetWrite: allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - required: - - id - - type + - $ref: "#/components/schemas/assetBase" + - type: object + required: + - id + - type itemAssetUpdate: allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - required: - - id - - type - properties: - id: - $ref: "#/components/schemas/updateAssetId" + - $ref: "#/components/schemas/assetBase" + - type: object + required: + - id + - type + properties: + id: + $ref: "#/components/schemas/updateAssetId" itemAssetPartialUpdate: allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - properties: - id: - $ref: "#/components/schemas/updateAssetId" + - $ref: "#/components/schemas/assetBase" + - type: object + properties: + id: + $ref: "#/components/schemas/updateAssetId" writeChecksumMultihash: description: >- `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) @@ -1856,34 +1856,34 @@ components: http://data.geo.admin.ch/ch.swisstopo.swissimage/collections/cs/items/CS3-20160503_132130_04/thumb.png writeItem: allOf: - - $ref: "#/components/schemas/itemBase" - - type: object - properties: - links: - items: - $ref: "#/components/schemas/link" - type: array - description: >- - Add additional link to the generated ones (`self`, `root`, `parent`, - `items`, `collection`, `next`, `previous`) + - $ref: "#/components/schemas/itemBase" + - type: object + properties: + links: + items: + $ref: "#/components/schemas/link" + type: array + description: >- + Add additional link to the generated ones (`self`, `root`, `parent`, + `items`, `collection`, `next`, `previous`) createItem: allOf: - - type: object - required: - - id - properties: - id: - $ref: "#/components/schemas/itemId" - - $ref: "#/components/schemas/writeItem" + - type: object + required: + - id + properties: + id: + $ref: "#/components/schemas/itemId" + - $ref: "#/components/schemas/writeItem" updateItem: allOf: - - type: object - required: - - id - properties: - id: - $ref: "#/components/schemas/itemIdUpdate" - - $ref: "#/components/schemas/writeItem" + - type: object + required: + - id + properties: + id: + $ref: "#/components/schemas/itemIdUpdate" + - $ref: "#/components/schemas/writeItem" partialItem: type: object properties: @@ -1925,11 +1925,11 @@ components: items: $ref: "#/components/schemas/link" example: - - href: http://data.example.com/buildings - rel: item - - href: http://example.com/concepts/buildings.html - rel: describedBy - type: text/html + - href: http://data.example.com/buildings + rel: item + - href: 
http://example.com/concepts/buildings.html + rel: describedBy + type: text/html extent: $ref: "#/components/schemas/extent" itemType: @@ -1944,10 +1944,10 @@ components: items: type: string default: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 example: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 - - http://www.opengis.net/def/crs/EPSG/0/4326 + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/EPSG/0/4326 example: title: The new title of the collection roles: @@ -1956,7 +1956,7 @@ components: type: string description: Purposes of the asset example: - - thumbnail + - thumbnail title: type: string description: Displayed title @@ -1985,12 +1985,12 @@ paths: $ref: "#/components/responses/ServerError" summary: Landing page tags: - - Capabilities + - Capabilities /collections: get: operationId: getCollections parameters: - - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/limit" responses: "200": $ref: "#/components/responses/Collections" @@ -1999,10 +1999,10 @@ paths: summary: Fetch collections description: The feature collections in the dataset tags: - - Data + - Data post: tags: - - Data Management + - Data Management summary: >- Create a new collection operationId: createCollection @@ -2017,18 +2017,18 @@ paths: id: ch.swisstopo.pixelkarte-farbe-pk200.noscale license: proprietary links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch title: National Map 1:200'000 responses: "201": @@ -2053,9 +2053,9 @@ paths: get: operationId: describeCollection parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" responses: "200": $ref: "#/components/responses/Collection" @@ -2070,18 +2070,18 @@ paths: summary: Fetch a single collection description: Describe the feature collection with id `collectionId` tags: - - Data + - Data put: tags: - - Data Management + - Data Management summary: Update or create a collection description: >- Update or create a collection with Id `collectionId` with a complete collection definition. If the collection doesn't exists it is then created. 
operationId: updateCollection parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/IfMatchWrite" requestBody: content: application/json: @@ -2093,18 +2093,18 @@ paths: id: ch.swisstopo.pixelkarte-farbe-pk200.noscale license: proprietary links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch title: National Map 1:200'000 responses: "200": @@ -2135,15 +2135,15 @@ paths: $ref: "#/components/responses/ServerError" patch: tags: - - Data Management + - Data Management summary: Partial update of a collection description: >- Update an existing collection with Id `collectionId` with a partial collection definition operationId: partialUpdateCollection parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/IfMatch" requestBody: content: application/json: @@ -2153,18 +2153,18 @@ paths: id: ch.swisstopo.pixelkarte-farbe-pk200.noscale license: proprietary links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch title: National Map 1:200'000 responses: "200": @@ -2195,10 +2195,10 @@ paths: Use content negotiation to request HTML or GeoJSON. 
operationId: getFeatures parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/limit" - - $ref: "#/components/parameters/bbox" - - $ref: "#/components/parameters/datetime" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/bbox" + - $ref: "#/components/parameters/datetime" responses: "200": $ref: "#/components/responses/Features" @@ -2210,15 +2210,15 @@ paths: $ref: "#/components/responses/ServerError" summary: Fetch features tags: - - Data + - Data post: summary: Add a new feature to a collection description: Create a new feature/item in a specific collection operationId: postFeature tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/collectionId" requestBody: content: application/json: @@ -2229,26 +2229,26 @@ paths: geometry: type: Polygon coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 + - - - -122.308150179 + - 37.488035566 + - - -122.597502109 + - 37.538869539 + - - -122.576687533 + - 37.613537207 + - - -122.2880486 + - 37.562818007 + - - -122.308150179 + - 37.488035566 properties: datetime: "2016-05-03T13:22:30.040Z" title: A CS3 item links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby responses: "201": description: Return the created Item. @@ -2277,10 +2277,10 @@ paths: Use content negotiation to request HTML or GeoJSON. 
operationId: getFeature parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" responses: "200": $ref: "#/components/responses/Feature" @@ -2294,7 +2294,7 @@ paths: $ref: "#/components/responses/ServerError" summary: Fetch a single feature tags: - - Data + - Data put: summary: Update or create a feature description: >- @@ -2307,11 +2307,11 @@ paths: PUT payload doesn't, then the resource's properties.title will be removed.* operationId: putFeature tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatchWrite" requestBody: content: application/json: @@ -2322,26 +2322,26 @@ paths: geometry: type: Polygon coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 + - - - -122.308150179 + - 37.488035566 + - - -122.597502109 + - 37.538869539 + - - -122.576687533 + - 37.613537207 + - - -122.2880486 + - 37.562818007 + - - -122.308150179 + - 37.488035566 properties: datetime: "2016-05-03T13:22:30.040Z" title: A CS3 item links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby responses: "200": description: Returns the updated Item @@ -2366,11 +2366,11 @@ paths: the fields to be updated) be submitted. operationId: patchFeature tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatchWrite" requestBody: content: application/json: @@ -2410,11 +2410,11 @@ paths: description: Use this method to delete an existing feature/item. 
operationId: deleteFeature tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatchWrite" responses: "200": $ref: "#/components/responses/DeletedResource" @@ -2439,18 +2439,18 @@ paths: $ref: "#/components/responses/ServerError" summary: Information about specifications that this API conforms to tags: - - Capabilities + - Capabilities /search: get: description: >- Retrieve Items matching filters. Intended as a shorthand API for simple queries. operationId: getSearchSTAC parameters: - - $ref: "#/components/parameters/bbox" - - $ref: "#/components/parameters/datetime" - - $ref: "#/components/parameters/limit" - - $ref: "#/components/parameters/ids" - - $ref: "#/components/parameters/collectionsArray" + - $ref: "#/components/parameters/bbox" + - $ref: "#/components/parameters/datetime" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/ids" + - $ref: "#/components/parameters/collectionsArray" responses: "200": content: @@ -2462,7 +2462,7 @@ paths: $ref: "#/components/responses/ServerError" summary: Search STAC items with simple filtering. tags: - - STAC + - STAC post: description: >- Retrieve items matching filters. Intended as the standard, full-featured query @@ -2484,7 +2484,7 @@ paths: $ref: "#/components/responses/ServerError" summary: Search STAC items with full-featured filtering. tags: - - STAC + - STAC /collections/{collectionId}/items/{featureId}/assets: get: description: >- @@ -2493,8 +2493,8 @@ paths: Every asset belongs to an item. operationId: getAssets parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" responses: "200": $ref: "#/components/responses/Assets" @@ -2506,7 +2506,7 @@ paths: $ref: "#/components/responses/ServerError" summary: Fetch all assets for a feature tags: - - Data + - Data post: summary: Add a new asset to a feature description: >- @@ -2520,10 +2520,10 @@ paths: Optionally the Asset object multihash can be given for sanity check. operationId: postAsset tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" requestBody: content: application/json: @@ -2553,11 +2553,11 @@ paths: feature collection with id `collectionId`. operationId: getAsset parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" responses: "200": $ref: "#/components/responses/Asset" @@ -2571,7 +2571,7 @@ paths: $ref: "#/components/responses/ServerError" summary: Fetch a single asset tags: - - Data + - Data put: summary: Update or create an asset description: >- @@ -2579,12 +2579,12 @@ paths: If the asset doesn't exists it is then created. 
operationId: putAsset tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatchWrite" requestBody: content: application/json: @@ -2612,12 +2612,12 @@ paths: the fields to be updated) be submitted. operationId: patchAsset tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatchWrite" requestBody: content: application/json: @@ -2659,12 +2659,12 @@ paths: **NOTE: Asset file on S3 will be also removed !** operationId: deleteAsset tags: - - Data Management + - Data Management parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatchWrite" responses: "200": $ref: "#/components/responses/DeletedResource" @@ -2678,10 +2678,10 @@ paths: $ref: "#/components/responses/ServerError" /get-token: servers: - - url: http://data.geo.admin.ch/api/stac/ + - url: http://data.geo.admin.ch/api/stac/ post: tags: - - Data Management + - Data Management summary: >- Request token for token authentication. operationId: getToken @@ -2699,8 +2699,8 @@ paths: type: string description: password of user for whom token is requested required: - - username - - password + - username + - password example: username: "Mickey Mouse" password: "I_love_Minnie_Mouse" From fbc9d0ce42be37995999bb65fbb324283f0cb6e0 Mon Sep 17 00:00:00 2001 From: Tobias Reber Date: Mon, 12 Apr 2021 14:14:55 +0200 Subject: [PATCH 057/105] BGDIINF_SB-1747: No move/rename of assets No move of assets at the endpoint No rename of asset in the admin gui (change) Tests Implementation of feedback from @ltshb --- app/stac_api/admin.py | 10 +++++ app/stac_api/models.py | 75 +------------------------------ app/stac_api/validators_view.py | 25 +++++++++++ app/stac_api/views.py | 13 ++++++ app/tests/test_admin_page.py | 21 --------- app/tests/test_assets_endpoint.py | 47 +++++++++++++------ 6 files changed, 83 insertions(+), 108 deletions(-) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index 71f946ef..bb9ba371 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -226,6 +226,7 @@ def save_model(self, request, obj, form, change): # of None. We use None for empty value, None value are stripped # then in the output will empty string not. 
obj.description = None + super().save_model(request, obj, form, change) # Note: this is a bit hacky and only required to get access @@ -237,3 +238,12 @@ def get_form(self, request, obj=None, **kwargs): # pylint: disable=arguments-di def href(self, instance): path = instance.file.name return build_asset_href(self.request, path) + + # We don't want to move the assets on S3 + # That's why some fields like the name of the asset are set readonly here + # for update operations + def change_view(self, request, object_id, form_url='', extra_context=None): + self.readonly_fields = self.get_readonly_fields(request) + self.readonly_fields.extend(['name', 'item']) + + return super().change_view(request, object_id, form_url, extra_context) diff --git a/app/stac_api/models.py b/app/stac_api/models.py index c408932f..3d6038b9 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -625,77 +625,6 @@ def update_etag(self): ''' self.etag = compute_etag() - def move_asset(self, source, dest): - # Un-comment and remove the warning with BGDIINF_SB-1625 - logger.warning( - 'Asset %s has been renamed to %s, file needs to renamed on S3 as well !', - source, - dest, - extra={ - 'collection': self.item.collection.name, 'item': self.item.name, 'asset': self.name - } - ) - # logger.info( - # "Renaming asset on s3 from %s to %s", - # source, - # dest, - # extra={ - # 'collection': self.item.collection.name, - # 'item': self.item.name, - # 'asset': self.name - # } - # ) - # s3 = get_s3_resource() - - # try: - # s3.Object(settings.AWS_STORAGE_BUCKET_NAME, - # dest).copy_from(CopySource=f'{settings.AWS_STORAGE_BUCKET_NAME}/{source}') - # s3.Object(settings.AWS_STORAGE_BUCKET_NAME, source).delete() - # self.file.name = dest - # except botocore.exceptions.ClientError as error: - # logger.error( - # 'Failed to move asset %s from %s to %s: %s', - # self.name, - # source, - # dest, - # error, - # extra={ - # 'collection': self.item.collection.name, - # 'item': self.item.name, - # 'asset': self.name - # } - # ) - # raise error - - # def remove_asset(self, source): - # logger.info( - # "Remove asset on s3 from %s", - # source, - # extra={ - # 'collection': self.item.collection.name, - # 'item': self.item.name, - # 'asset': self.name - # } - # ) - # s3 = get_s3_resource() - - # try: - # s3.Object(settings.AWS_STORAGE_BUCKET_NAME, source).delete() - # except botocore.exceptions.ClientError as error: - # logger.error( - # 'Failed to remove asset %s from %s: %s', - # self.name, - # source, - # error, - # extra={ - # 'collection': self.item.collection.name, - # 'item': self.item.name, - # 'asset': self.name - # } - # ) - # self.file.name = '' - # self.checksum_multihash = '' - # alter save-function, so that the corresponding collection of the parent item of the asset # is saved, too. 
def save(self, *args, **kwargs): # pylint: disable=signature-differs @@ -709,10 +638,8 @@ def save(self, *args, **kwargs): # pylint: disable=signature-differs trigger = get_save_trigger(self) - if trigger == 'update' and self.name != self._original_values["name"]: - self.move_asset(self._original_values['path'], get_asset_path(self.item, self.name)) - old_values = [self._original_values.get(field, None) for field in UPDATE_SUMMARIES_FIELDS] + if self.item.collection.update_summaries(self, trigger, old_values=old_values): self.item.collection.save() diff --git a/app/stac_api/validators_view.py b/app/stac_api/validators_view.py index 89d03d2f..3539c5b7 100644 --- a/app/stac_api/validators_view.py +++ b/app/stac_api/validators_view.py @@ -92,3 +92,28 @@ def validate_upload_parts(request): message = f'Required "parts" must be a list, not a {type(request.data["parts"])}' logger.error(message, extra={'request': request}) raise ValidationError({'parts': _(message)}, code='invalid') + + +def validate_renaming(serializer, id_field='', original_id='', extra_log=None): + '''Validate that the asset name is not different from the one defined in + the data. + + Args: + serializer: serializer object + The serializer to derive the data from + id_field: string + The key to get the name/id in the data dict + original_id: string + The id/name derived from the request kwargs + extra: djangoHttpRequest object + The request object for logging purposes + + Raises: + Http400: when the asset will be renamed/moved + ''' + data = serializer.validated_data + if id_field in data.keys(): + if data[id_field] != original_id: + message = 'Renaming object is not allowed' + logger.error(message, extra={'request': extra_log}) + raise ValidationError(_(message), code='invalid') diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 83d369d8..a94816ad 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -30,6 +30,7 @@ from stac_api.validators_serializer import ValidateSearchRequest from stac_api.validators_view import validate_collection from stac_api.validators_view import validate_item +from stac_api.validators_view import validate_renaming logger = logging.getLogger(__name__) @@ -484,12 +485,24 @@ def perform_update(self, serializer): item = get_object_or_404( Item, collection__name=self.kwargs['collection_name'], name=self.kwargs['item_name'] ) + validate_renaming( + serializer, + id_field='name', + original_id=self.kwargs['asset_name'], + extra_log=self.request + ) serializer.save(item=item, file=get_asset_path(item, self.kwargs['asset_name'])) def perform_upsert(self, serializer, lookup): item = get_object_or_404( Item, collection__name=self.kwargs['collection_name'], name=self.kwargs['item_name'] ) + validate_renaming( + serializer, + id_field='name', + original_id=self.kwargs['asset_name'], + extra_log=self.request + ) return serializer.upsert( lookup, item=item, file=get_asset_path(item, self.kwargs['asset_name']) ) diff --git a/app/tests/test_admin_page.py b/app/tests/test_admin_page.py index 706db20a..e2356798 100644 --- a/app/tests/test_admin_page.py +++ b/app/tests/test_admin_page.py @@ -639,27 +639,6 @@ def test_add_update_asset(self): with asset.file.open() as fd: self.assertEqual(filecontent, fd.read()) - def test_rename_asset(self): - - asset, data = self._create_asset(self.item) - - data['name'] = 'new_asset_name.zip' - # We just update the name hence we have to remove the - # 'file' from the data since submitting an empty file - # is not allowed - data.pop('file') - - response = 
self.client.post(reverse('admin:stac_api_asset_change', args=[asset.id]), data) - self.assertEqual(response.status_code, 302) - - # Un-comment with BGDIINF_SB-1625 - # Assert that the location on s3 has been changed - # new_path = f"{asset.item.collection.name}/{asset.item.name}/{data['name']}" - # self.assertS3ObjectExists(new_path) - - # asset.refresh_from_db() - # self.assertEqual(asset.file.name, new_path) - def test_add_asset_with_invalid_data(self): data = { diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index ac6fa5c6..6693a9eb 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -584,6 +584,7 @@ def test_asset_endpoint_put_read_only_in_payload(self): msg='Unexpected error message') def test_asset_endpoint_put_rename_asset(self): + # rename should not be allowed collection_name = self.collection['name'] item_name = self.item['name'] asset_name = self.asset['name'] @@ -599,21 +600,32 @@ def test_asset_endpoint_put_rename_asset(self): response = self.client.put( path, data=changed_asset.get_json('put'), content_type="application/json" ) - self.assertStatusCode(200, response) - json_data = response.json() - self.assertEqual(changed_asset.json['id'], json_data['id']) - self.check_stac_asset(changed_asset.json, json_data, collection_name, item_name) + self.assertStatusCode(400, response) + self.assertEqual(['Renaming object is not allowed'], + response.json()['description'], + msg='Unexpected error message') # Check the data by reading it back response = self.client.get( f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}' - f'/assets/{new_asset_name}' + f'/assets/{asset_name}' ) json_data = response.json() self.assertStatusCode(200, response) - self.check_stac_asset(changed_asset.json, json_data, collection_name, item_name) + + self.assertEqual(asset_name, json_data['id']) + + # Check the data that no new entry exist + response = self.client.get( + f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}' + f'/assets/{new_asset_name}' + ) + + # 404 - not found + self.assertStatusCode(404, response) def test_asset_endpoint_patch_rename_asset(self): + # rename should not be allowed collection_name = self.collection['name'] item_name = self.item['name'] asset_name = self.asset['name'] @@ -626,20 +638,29 @@ def test_asset_endpoint_patch_rename_asset(self): response = self.client.patch( path, data=changed_asset.get_json('patch'), content_type="application/json" ) - json_data = response.json() - self.assertStatusCode(200, response) - self.assertEqual(changed_asset.json['id'], json_data['id']) - self.check_stac_asset(changed_asset.json, json_data, collection_name, item_name) + self.assertStatusCode(400, response) + self.assertEqual(['Renaming object is not allowed'], + response.json()['description'], + msg='Unexpected error message') # Check the data by reading it back response = self.client.get( f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}' - f'/assets/{new_asset_name}' + f'/assets/{asset_name}' ) json_data = response.json() self.assertStatusCode(200, response) - self.assertEqual(changed_asset.json['id'], json_data['id']) - self.check_stac_asset(changed_asset.json, json_data, collection_name, item_name) + + self.assertEqual(asset_name, json_data['id']) + + # Check the data that no new entry exist + response = self.client.get( + f'/{STAC_BASE_V}/collections/{collection_name}/items/{item_name}' + f'/assets/{new_asset_name}' + ) + + # 404 - not found + self.assertStatusCode(404, 
response) def test_asset_endpoint_patch_extra_payload(self): collection_name = self.collection['name'] From 579eeaa9a33fad69cfd1bffb4df673608e7641c7 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 8 Apr 2021 08:57:52 +0200 Subject: [PATCH 058/105] BGDIINF_SB-1739: Implemented asset upload with presigned url Implemented Asset upload endpoint for presigned url. --- README.md | 1 + app/config/settings_prod.py | 4 +- app/stac_api/managers.py | 25 + .../migrations/0005_auto_20210408_0821.py | 79 +++ app/stac_api/models.py | 97 ++- app/stac_api/s3_multipart_upload.py | 266 +++++++++ app/stac_api/serializers.py | 78 +++ app/stac_api/signals.py | 21 + app/stac_api/urls.py | 13 + app/stac_api/utils.py | 28 + app/stac_api/validators_view.py | 19 - app/stac_api/views.py | 189 +++++- app/tests/test_asset_upload_endpoint.py | 560 ++++++++++++++++++ app/tests/test_asset_upload_model.py | 197 ++++++ app/tests/test_generic_api.py | 20 +- app/tests/test_serializer_asset_upload.py | 119 ++++ 16 files changed, 1689 insertions(+), 27 deletions(-) create mode 100644 app/stac_api/migrations/0005_auto_20210408_0821.py create mode 100644 app/stac_api/s3_multipart_upload.py create mode 100644 app/tests/test_asset_upload_endpoint.py create mode 100644 app/tests/test_asset_upload_model.py create mode 100644 app/tests/test_serializer_asset_upload.py diff --git a/README.md b/README.md index 79d887d8..a3873c1f 100644 --- a/README.md +++ b/README.md @@ -442,6 +442,7 @@ The service is configured by Environment Variable: | AWS_S3_REGION_NAME | - | | | AWS_S3_ENDPOINT_URL | `None` | | | AWS_S3_CUSTOM_DOMAIN | `None` | | +| AWS_PRESIGNED_URL_EXPIRES | 3600 | AWS presigned url for asset upload expire time in seconds | #### **Development settings (only for local environment and DEV staging)** diff --git a/app/config/settings_prod.py b/app/config/settings_prod.py index 00344566..fec9a4f9 100644 --- a/app/config/settings_prod.py +++ b/app/config/settings_prod.py @@ -229,6 +229,8 @@ except KeyError as err: raise KeyError(f'AWS configuration {err} missing') from err +AWS_PRESIGNED_URL_EXPIRES = int(os.environ.get('AWS_PRESIGNED_URL_EXPIRES', '3600')) + # Configure the admin upload caching try: STORAGE_ASSETS_CACHE_SECONDS = int(os.environ.get('HTTP_ASSETS_CACHE_SECONDS', '7200')) @@ -292,6 +294,6 @@ def get_logging_config(): # data.geo.admin.ch/collection/item/asset to check if asset exists. 
EXTERNAL_SERVICE_TIMEOUT = 3 -# By default django_promtheus tracks the number of migrations +# By default django_prometheus tracks the number of migrations # This causes troubles in various places so we disable it PROMETHEUS_EXPORT_MIGRATIONS = False diff --git a/app/stac_api/managers.py b/app/stac_api/managers.py index fa61be97..9260ab84 100644 --- a/app/stac_api/managers.py +++ b/app/stac_api/managers.py @@ -238,3 +238,28 @@ def filter_by_item_name(self, item_name_array): def filter_by_query(self, query): return self.get_queryset().filter_by_query(query) + + +class AssetUploadQuerySet(models.QuerySet): + + def filter_by_status(self, status): + '''Filter a query with a given status + + Args: + status: + A string defining the status + + Returns: + The queryset with the added status filter + ''' + + return self.filter(status=status) + + +class AssetUploadManager(models.Manager): + + def get_queryset(self): + return AssetUploadQuerySet(self.model, using=self._db).select_related('asset') + + def filter_by_status(self, status): + return self.get_queryset().filter_by_status(status) diff --git a/app/stac_api/migrations/0005_auto_20210408_0821.py b/app/stac_api/migrations/0005_auto_20210408_0821.py new file mode 100644 index 00000000..aa0faf64 --- /dev/null +++ b/app/stac_api/migrations/0005_auto_20210408_0821.py @@ -0,0 +1,79 @@ +# Generated by Django 3.1.7 on 2021-04-08 08:21 + +import django.core.serializers.json +import django.core.validators +import django.db.models.deletion +from django.db import migrations +from django.db import models + +import stac_api.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('stac_api', '0004_auto_20210408_0659'), + ] + + operations = [ + migrations.CreateModel( + name='AssetUpload', + fields=[ + ('id', models.BigAutoField(primary_key=True, serialize=False)), + ('upload_id', models.CharField(max_length=255)), + ( + 'status', + models.CharField( + choices=[(None, ''), ('in-progress', 'In Progress'), + ('completed', 'Completed'), ('aborted', 'Aborted')], + default='in-progress', + max_length=32 + ) + ), + ( + 'number_parts', + models.IntegerField( + default=1, + validators=[ + django.core.validators.MinValueValidator(1), + django.core.validators.MaxValueValidator(10000) + ] + ) + ), + ( + 'urls', + models.JSONField( + blank=True, + default=list, + encoder=django.core.serializers.json.DjangoJSONEncoder + ) + ), + ('created', models.DateTimeField(auto_now_add=True)), + ('ended', models.DateTimeField(blank=True, default=None, null=True)), + ('checksum_multihash', models.CharField(max_length=255)), + ('etag', models.CharField(default=stac_api.models.compute_etag, max_length=56)), + ( + 'asset', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name='+', + to='stac_api.asset' + ) + ), + ], + ), + migrations.AddConstraint( + model_name='assetupload', + constraint=models.UniqueConstraint( + fields=('asset', 'upload_id'), name='unique_together' + ), + ), + migrations.AddConstraint( + model_name='assetupload', + constraint=models.UniqueConstraint( + condition=models.Q(status='in-progress'), + fields=('asset', 'status'), + name='unique_in_progress' + ), + ), + ] diff --git a/app/stac_api/models.py b/app/stac_api/models.py index 3d6038b9..9abfbc7d 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -6,6 +6,7 @@ # import botocore.exceptions # Un-comment with BGDIINF_SB-1625 import multihash +from multihash import to_hex_string from django.conf import settings from django.contrib.gis.db import models @@ 
-13,6 +14,10 @@ from django.contrib.postgres.fields import ArrayField from django.core.exceptions import ValidationError from django.core.serializers.json import DjangoJSONEncoder +from django.core.validators import MaxValueValidator +from django.core.validators import MinValueValidator +from django.db.models import Q +from django.db.models.deletion import ProtectedError from django.utils.translation import gettext_lazy as _ from solo.models import SingletonModel @@ -21,6 +26,7 @@ from stac_api.collection_summaries import UPDATE_SUMMARIES_FIELDS from stac_api.collection_summaries import CollectionSummariesMixin from stac_api.collection_temporal_extent import CollectionTemporalExtentMixin +from stac_api.managers import AssetUploadManager from stac_api.managers import ItemManager from stac_api.utils import get_asset_path # from stac_api.utils import get_s3_resource # Un-comment with BGDIINF_SB-1625 @@ -521,7 +527,7 @@ def upload_asset_to_path_hook(instance, filename=None): ctx = hashlib.sha256() for chunk in instance.file.chunks(settings.UPLOAD_FILE_CHUNK_SIZE): ctx.update(chunk) - mhash = multihash.to_hex_string(multihash.encode(ctx.digest(), 'sha2-256')) + mhash = to_hex_string(multihash.encode(ctx.digest(), 'sha2-256')) # set the hash to the storage to use it for upload signing, this temporary attribute is # then used by storages.S3Storage to set the MetaData.sha256 setattr(instance.file.storage, '_tmp_sha256', ctx.hexdigest()) @@ -663,7 +669,94 @@ def delete(self, *args, **kwargs): # pylint: disable=signature-differs if self.item.collection.update_summaries(self, 'delete', old_values=None): self.item.collection.save() self.item.save() # We save the item to update its ETag - super().delete(*args, **kwargs) + try: + super().delete(*args, **kwargs) + except ProtectedError as error: + logger.error( + 'Cannot delete asset %s: %s', + self.name, + error, + extra={ + 'collection': self.item.collection.name, + 'item': self.item.name, + 'asset': self.name + } + ) + raise ValidationError(error.args[0]) from None def clean(self): validate_asset_name_with_media_type(self.name, self.media_type) + + +class AssetUpload(models.Model): + + class Meta: + constraints = [ + models.UniqueConstraint(fields=['asset', 'upload_id'], name='unique_together'), + models.UniqueConstraint( + fields=['asset', 'status'], + condition=Q(status='in-progress'), + name='unique_in_progress' + ) + ] + + class Status(models.TextChoices): + # pylint: disable=invalid-name + IN_PROGRESS = 'in-progress' + COMPLETED = 'completed' + ABORTED = 'aborted' + __empty__ = '' + + # using BigIntegerField as primary_key to deal with the expected large number of assets. 
+ id = models.BigAutoField(primary_key=True) + asset = models.ForeignKey(Asset, related_name='+', on_delete=models.CASCADE) + upload_id = models.CharField(max_length=255, blank=False, null=False) + status = models.CharField( + choices=Status.choices, max_length=32, default=Status.IN_PROGRESS, blank=False, null=False + ) + number_parts = models.IntegerField( + default=1, + validators=[MinValueValidator(1), MaxValueValidator(10000)], + null=False, + blank=False + ) # S3 doesn't support more that 10'000 parts + urls = models.JSONField(default=list, encoder=DjangoJSONEncoder, blank=True) + created = models.DateTimeField(auto_now_add=True) + ended = models.DateTimeField(blank=True, null=True, default=None) + checksum_multihash = models.CharField(max_length=255, blank=False, null=False) + + # hidden ETag field + etag = models.CharField(blank=False, null=False, max_length=56, default=compute_etag) + + # Custom Manager that preselects the collection + objects = AssetUploadManager() + + def save(self, *args, **kwargs): # pylint: disable=signature-differs + self.update_etag() + super().save(*args, **kwargs) + + def update_etag(self): + '''Update the ETag with a new UUID + ''' + self.etag = compute_etag() + + def update_asset_checksum_multihash(self): + '''Updating the asset's checksum:multihash from the upload + + When the upload is completed, the new checksum:multihash from the upload + is set to its asset parent. + ''' + logger.debug( + 'Updating asset %s checksum:multihash from %s to %s due to upload complete', + self.asset.name, + self.asset.checksum_multihash, + self.checksum_multihash, + extra={ + 'upload_id': self.upload_id, + 'asset': self.asset.name, + 'item': self.asset.item.name, + 'collection': self.asset.item.collection.name + } + ) + self.asset.checksum_multihash = self.checksum_multihash + self.asset.save() diff --git a/app/stac_api/s3_multipart_upload.py b/app/stac_api/s3_multipart_upload.py new file mode 100644 index 00000000..45e0e210 --- /dev/null +++ b/app/stac_api/s3_multipart_upload.py @@ -0,0 +1,266 @@ +import logging +import time +from datetime import datetime +from datetime import timedelta + +from botocore.exceptions import ClientError +from botocore.exceptions import ParamValidationError +from multihash import to_hex_string + +from django.conf import settings + +from rest_framework.exceptions import ValidationError + +from stac_api.utils import get_s3_client +from stac_api.utils import isoformat +from stac_api.utils import parse_multihash +from stac_api.utils import utc_aware + +logger = logging.getLogger(__name__) + + +class MultipartUpload: + '''Multi part upload class + + Implement the Multipart upload with S3 backend. 
+ ''' + + def __init__(self): + self.s3 = get_s3_client() + + def create_multipart_upload(self, key, asset, checksum_multihash): + '''Create a multi part upload on the backend + + Args: + key: string + key on the S3 backend for which we want to create a multipart upload + asset: Asset + Asset metadata model associated with the S3 backend key + checksum_multihash: string + Checksum multihash (must be sha256) of the future file to be uploaded + + Returns: string + Upload Id of the created multipart upload + ''' + sha256 = to_hex_string(parse_multihash(checksum_multihash).digest) + try: + response = self.s3.create_multipart_upload( + Bucket=settings.AWS_STORAGE_BUCKET_NAME, + Key=key, + Metadata={'sha256': sha256}, + CacheControl=', '.join([ + 'public', f'max-age={settings.STORAGE_ASSETS_CACHE_SECONDS}' + ]), + ContentType=asset.media_type + ) + except ClientError as error: + logger.error( + 'Failed to create multipart upload: %s', + error, + extra={ + 'collection': asset.item.collection.name, + 'item': asset.item.name, + 'asset': asset.name, + 's3_error': error.response + } + ) + raise + logger.info( + 'S3 Multipart upload successfully created: upload_id=%s', + response['UploadId'], + extra={ + 's3_response': response, 'upload_id': response['UploadId'], 'asset': asset.name + } + ) + return response['UploadId'] + + def create_presigned_url(self, key, asset, part, upload_id): + '''Create a presigned url for an upload part on the backend + + Args: + key: string + key on the S3 backend for which we want to create a presigned url upload part + asset: Asset + Asset metadata model associated with the S3 backend key + part: int + Part number for which to create a presigned url for upload part + upload_id: string + Upload ID for which to create a presigned url + + Returns: [string, int, datetime] + List [url, part, expires] + ''' + expires = utc_aware( + datetime.utcnow() + timedelta(seconds=settings.AWS_PRESIGNED_URL_EXPIRES) + ) + try: + url = self.s3.generate_presigned_url( + 'upload_part', + Params={ + 'Bucket': settings.AWS_STORAGE_BUCKET_NAME, + 'Key': key, + 'UploadId': upload_id, + 'PartNumber': part + }, + ExpiresIn=settings.AWS_PRESIGNED_URL_EXPIRES, + HttpMethod='PUT' + ) + except ClientError as error: + logger.error( + 'Failed to create presigned url for upload part: %s', + error, + extra={ + 'collection': asset.item.collection.name, + 'item': asset.item.name, + 'asset': asset.name, + 'upload_id': upload_id, + 's3_error': error.response + } + ) + raise + logger.info( + 'Presigned url %s for %s part %s with expires %s created', + url, + key, + part, + isoformat(expires), + extra={ + 'upload_id': upload_id, 'asset': asset.name + } + ) + return [url, part, expires] + + def complete_multipart_upload(self, key, asset, parts, upload_id): + '''Complete a multipart upload on the backend + + Args: + key: string + key on the S3 backend for which we want to complete the multipart upload + asset: Asset + Asset metadata model associated with the S3 backend key + parts: [{'Etag': string, 'Part': int}] + List of Etag and part number to use for the completion + upload_id: string + Upload ID + + Raises: + ValidationError: when the parts are not valid + ''' + logger.debug( + 'Sending complete mutlipart upload for %s', + key, + extra={ + 'parts': parts, 'upload_id': upload_id, 'asset': asset.name + }, + ) + try: + started = time.time() + response = self.s3.complete_multipart_upload( + Bucket=settings.AWS_STORAGE_BUCKET_NAME, + Key=key, + MultipartUpload={'Parts': parts}, + UploadId=upload_id + ) + except 
ParamValidationError as error: + ended = time.time() - started + logger.error( + 'Failed to complete multipart upload: %s', + error, + extra={ + 'collection': asset.item.collection.name, + 'item': asset.item.name, + 'asset': asset.name, + 'upload_id': upload_id, + 's3_error': error, + 'duration': ended + } + ) + raise + except ClientError as error: + ended = time.time() - started + logger.error( + 'Failed to complete multipart upload: %s', + error, + extra={ + 'collection': asset.item.collection.name, + 'item': asset.item.name, + 'asset': asset.name, + 'upload_id': upload_id, + 's3_error': error.response, + 'duration': ended + } + ) + raise ValidationError(str(error), code='invalid') from None + ended = time.time() - started + if 'Location' in response: + logger.info( + 'Successfully complete a multipart asset upload: %s', + response['Location'], + extra={ + 's3_response': response, + 'duration': ended, + 'upload_id': upload_id, + 'asset': asset.name + }, + ) + return + logger.error( + 'Failed to complete a multipart asset upload', + extra={ + 's3_response': response, + 'duration': ended, + 'upload_id': upload_id, + 'asset': asset.name + }, + ) + raise ValueError(response) + + def abort_multipart_upload(self, key, asset, upload_id): + '''Abort a multipart upload on the backend + + Args: + key: string + key on the S3 backend for which we want to complete the multipart upload + asset: Asset + Asset metadata model associated with the S3 backend key + upload_id: string + Upload ID + ''' + logger.debug( + 'Aborting mutlipart upload for %s...', + key, + extra={ + 'upload_id': upload_id, 'asset': asset.name + }, + ) + try: + started = time.time() + response = self.s3.abort_multipart_upload( + Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=key, UploadId=upload_id + ) + except ClientError as error: + ended = time.time() - started + logger.error( + 'Failed to abort multipart upload: %s', + error, + extra={ + 'collection': asset.item.collection.name, + 'item': asset.item.name, + 'asset': asset.name, + 'upload_id': upload_id, + 's3_error': error.response, + 'duration': ended + } + ) + raise + ended = time.time() - started + logger.info( + 'Successfully aborted a multipart asset upload: %s', + key, + extra={ + 's3_response': response, + 'duration': ended, + 'upload_id': upload_id, + 'asset': asset.name + }, + ) diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index 396ae87c..adcabf07 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -12,6 +12,7 @@ from rest_framework_gis import serializers as gis_serializers from stac_api.models import Asset +from stac_api.models import AssetUpload from stac_api.models import Collection from stac_api.models import CollectionLink from stac_api.models import ConformancePage @@ -25,6 +26,7 @@ from stac_api.validators import MEDIA_TYPES_MIMES from stac_api.validators import validate_asset_name from stac_api.validators import validate_asset_name_with_media_type +from stac_api.validators import validate_checksum_multihash_sha256 from stac_api.validators import validate_geoadmin_variant from stac_api.validators import validate_item_properties_datetimes from stac_api.validators import validate_name @@ -913,3 +915,79 @@ def validate(self, attrs): validate_json_payload(self) return attrs + + +class AssetUploadListSerializer(serializers.ListSerializer): + # pylint: disable=abstract-method + + def to_representation(self, data): + return {'uploads': super().to_representation(data)} + + @property + def data(self): + ret = 
super(serializers.ListSerializer, self).data + return ReturnDict(ret, serializer=self) + + +class UploadPartSerializer(serializers.Serializer): + # pylint: disable=abstract-method + etag = serializers.CharField(source='ETag', allow_blank=False, required=True) + part_number = serializers.IntegerField( + source='PartNumber', min_value=1, max_value=10000, required=True, allow_null=False + ) + + +class AssetUploadSerializer(NonNullModelSerializer): + + class Meta: + model = AssetUpload + list_serializer_class = AssetUploadListSerializer + fields = [ + 'upload_id', + 'status', + 'created', + 'checksum_multihash', + 'completed', + 'aborted', + 'number_parts', + 'urls', + 'ended', + 'parts' + ] + + checksum_multihash = serializers.CharField( + source='checksum_multihash', + max_length=255, + required=True, + allow_blank=False, + validators=[validate_checksum_multihash_sha256] + ) + + # write only fields + ended = serializers.DateTimeField(write_only=True, required=False) + parts = serializers.ListField( + child=UploadPartSerializer(), write_only=True, allow_empty=False, required=False + ) + + # Read only fields + upload_id = serializers.CharField(read_only=True) + created = serializers.DateTimeField(read_only=True) + urls = serializers.JSONField(read_only=True) + completed = serializers.SerializerMethodField() + aborted = serializers.SerializerMethodField() + + def get_completed(self, obj): + if obj.status == AssetUpload.Status.COMPLETED: + return isoformat(obj.ended) + return None + + def get_aborted(self, obj): + if obj.status == AssetUpload.Status.ABORTED: + return isoformat(obj.ended) + return None + + def get_fields(self): + fields = super().get_fields() + # This is a hack to allow fields with special characters + fields['checksum:multihash'] = fields.pop('checksum_multihash') + return fields diff --git a/app/stac_api/signals.py b/app/stac_api/signals.py index 4ac74032..8c8063b5 100644 --- a/app/stac_api/signals.py +++ b/app/stac_api/signals.py @@ -1,13 +1,34 @@ import logging +from django.db.models import ProtectedError from django.db.models.signals import pre_delete from django.dispatch import receiver from stac_api.models import Asset +from stac_api.models import AssetUpload logger = logging.getLogger(__name__) +@receiver(pre_delete, sender=AssetUpload) +def check_on_going_upload(sender, instance, **kwargs): + if instance.status == AssetUpload.Status.IN_PROGRESS: + logger.error( + "Cannot delete asset %s due to upload %s which is still in progress", + instance.asset.name, + instance.upload_id, + extra={ + 'upload_id': instance.upload_id, + 'asset': instance.asset.name, + 'item': instance.asset.item.name, + 'collection': instance.asset.item.collection.name + } + ) + raise ProtectedError( + f"Asset {instance.asset.name} has still an upload in progress", [instance] + ) + + @receiver(pre_delete, sender=Asset) def delete_s3_asset(sender, instance, **kwargs): # The file is not automatically deleted by Django diff --git a/app/stac_api/urls.py b/app/stac_api/urls.py index 5a002024..da272498 100644 --- a/app/stac_api/urls.py +++ b/app/stac_api/urls.py @@ -6,6 +6,10 @@ from stac_api.views import AssetDetail from stac_api.views import AssetsList +from stac_api.views import AssetUploadAbort +from stac_api.views import AssetUploadComplete +from stac_api.views import AssetUploadDetail +from stac_api.views import AssetUploadsList from stac_api.views import CollectionDetail from stac_api.views import CollectionList from stac_api.views import ConformancePageDetail @@ -17,8 +21,17 @@ STAC_VERSION_SHORT = 
settings.STAC_VERSION_SHORT HEALTHCHECK_ENDPOINT = settings.HEALTHCHECK_ENDPOINT +asset_upload_urls = [ + path("", AssetUploadDetail.as_view(), name='asset-upload-detail'), + # path("/parts/", AssetUploadPart.as_view(), name='asset-upload-part'), + path("/complete", AssetUploadComplete.as_view(), name='asset-upload-complete'), + path("/abort", AssetUploadAbort.as_view(), name='asset-upload-abort') +] + asset_urls = [ path("", AssetDetail.as_view(), name='asset-detail'), + path("/uploads", AssetUploadsList.as_view(), name='asset-uploads-list'), + path("/uploads/", include(asset_upload_urls)) ] item_urls = [ diff --git a/app/stac_api/utils.py b/app/stac_api/utils.py index 29ebe648..0b5b8989 100644 --- a/app/stac_api/utils.py +++ b/app/stac_api/utils.py @@ -107,6 +107,17 @@ def get_s3_resource(): ) +def get_s3_client(): + '''Returns an AWS S3 client + + Returns: + AWS S3 client + ''' + return boto3.client( + 's3', endpoint_url=settings.AWS_S3_ENDPOINT_URL, config=Config(signature_version='s3v4') + ) + + def build_asset_href(request, path): '''Build asset href @@ -175,6 +186,23 @@ def create_multihash_string(digest, hash_code): return multihash.to_hex_string(multihash.encode(digest, hash_code)) +def parse_multihash(multihash_string): + '''Parse a multihash string + + Args: + multihash_string: string + multihash string to parse + + Returns. + Multihash object + + Raises: + TypeError: if incoming data is not a string + ValueError: if the incoming data is not a valid multihash + ''' + return multihash.decode(multihash.from_hex_string(multihash_string)) + + def harmonize_post_get_for_search(request): '''Harmonizes the request of GET and POST for the search endpoint diff --git a/app/stac_api/validators_view.py b/app/stac_api/validators_view.py index 3539c5b7..975156d8 100644 --- a/app/stac_api/validators_view.py +++ b/app/stac_api/validators_view.py @@ -1,9 +1,6 @@ import logging from django.http import Http404 -from django.utils.translation import gettext_lazy as _ - -from rest_framework.exceptions import ValidationError from stac_api.models import Asset from stac_api.models import Collection @@ -78,22 +75,6 @@ def validate_asset(kwargs): ) -def validate_upload_parts(request): - '''Validate the multiparts upload parts from request - Args: - request: HttpRequest - - ''' - if 'parts' not in request.data: - message = 'Required "parts" attribute is missing' - logger.error(message, extra={'request': request}) - raise ValidationError({'parts': _(message)}, code='missing') - if not isinstance(request.data['parts'], list): - message = f'Required "parts" must be a list, not a {type(request.data["parts"])}' - logger.error(message, extra={'request': request}) - raise ValidationError({'parts': _(message)}, code='invalid') - - def validate_renaming(serializer, id_field='', original_id='', extra_log=None): '''Validate that the asset name is not different from the one defined in the data. 
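
The new upload endpoints registered above are meant to be used in three steps: create the multipart upload, send each part to its presigned URL, then complete the upload. Below is a minimal client-side sketch of that flow, assuming the `requests` library and placeholder values for the base URL and the collection/item/asset names (authentication is omitted); it only uses the request and response fields defined by the serializer and URL routes in this patch.

import hashlib
import os

import multihash
import requests
from multihash import to_hex_string

# Placeholder base URL and asset path (adjust to the actual service and STAC objects)
ASSET_URL = ('https://data.geo.admin.ch/api/stac/v0.9'
             '/collections/my-collection/items/my-item/assets/my-asset.tiff')

data = os.urandom(10 * 1024 * 1024)  # file content to upload (2 x 5 MB parts)
checksum = to_hex_string(multihash.encode(hashlib.sha256(data).digest(), 'sha2-256'))

# 1) Create the multipart upload; the response contains the upload_id and one
#    presigned URL per part as [url, part_number, expires] entries.
resp = requests.post(
    ASSET_URL + '/uploads',
    json={'number_parts': 2, 'checksum:multihash': checksum},
)
upload = resp.json()

# 2) Upload every part directly to S3 through its presigned URL and keep the
#    returned ETag; non-last parts must be at least 5 MB (S3 limitation).
parts = []
part_size = len(data) // 2
for url, part_number, expires in upload['urls']:
    start = (part_number - 1) * part_size
    answer = requests.put(url, data=data[start:start + part_size])
    parts.append({'etag': answer.headers['ETag'], 'part_number': part_number})

# 3) Complete the upload; on success the asset's checksum:multihash is updated.
requests.post(
    ASSET_URL + f'/uploads/{upload["upload_id"]}/complete',
    json={'parts': parts},
)
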
diff --git a/app/stac_api/views.py b/app/stac_api/views.py index a94816ad..356ba8a9 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -4,30 +4,38 @@ from datetime import datetime from django.conf import settings -from django.shortcuts import get_object_or_404 +from django.db import IntegrityError +from django.db import transaction +from django.utils.translation import gettext_lazy as _ from rest_framework import generics from rest_framework import mixins +from rest_framework.exceptions import ValidationError +from rest_framework.generics import get_object_or_404 from rest_framework.permissions import AllowAny from rest_framework.response import Response from rest_framework_condition import etag from stac_api import views_mixins from stac_api.models import Asset +from stac_api.models import AssetUpload from stac_api.models import Collection from stac_api.models import ConformancePage from stac_api.models import Item from stac_api.models import LandingPage -from stac_api.models import get_asset_path from stac_api.pagination import GetPostCursorPagination +from stac_api.s3_multipart_upload import MultipartUpload from stac_api.serializers import AssetSerializer +from stac_api.serializers import AssetUploadSerializer from stac_api.serializers import CollectionSerializer from stac_api.serializers import ConformancePageSerializer from stac_api.serializers import ItemSerializer from stac_api.serializers import LandingPageSerializer +from stac_api.utils import get_asset_path from stac_api.utils import harmonize_post_get_for_search from stac_api.utils import utc_aware from stac_api.validators_serializer import ValidateSearchRequest +from stac_api.validators_view import validate_asset from stac_api.validators_view import validate_collection from stac_api.validators_view import validate_item from stac_api.validators_view import validate_renaming @@ -115,6 +123,21 @@ def get_asset_etag(request, *args, **kwargs): return tag +def get_asset_upload_etag(request, *args, **kwargs): + '''Get the ETag for an asset upload object + + The ETag is an UUID4 computed on each object changes + ''' + return get_etag( + AssetUpload.objects.filter( + asset__item__collection__name=kwargs['collection_name'], + asset__item__name=kwargs['item_name'], + asset__name=kwargs['asset_name'], + upload_id=kwargs['upload_id'] + ) + ) + + class LandingPageDetail(generics.RetrieveAPIView): serializer_class = LandingPageSerializer queryset = LandingPage.objects.all() @@ -254,7 +277,10 @@ def get(self, request, *args, **kwargs): class CollectionDetail( - generics.GenericAPIView, mixins.RetrieveModelMixin, views_mixins.UpdateInsertModelMixin + generics.GenericAPIView, + mixins.RetrieveModelMixin, + views_mixins.UpdateInsertModelMixin, + views_mixins.DestroyModelMixin ): serializer_class = CollectionSerializer lookup_url_kwarg = "collection_name" @@ -275,6 +301,11 @@ def put(self, request, *args, **kwargs): def patch(self, request, *args, **kwargs): return self.partial_update(request, *args, **kwargs) + # Here the etag is only added to support pre-conditional If-Match and If-Not-Match + # @etag(get_collection_etag) + # def delete(self, request, *args, **kwargs): + # return self.destroy(request, *args, **kwargs) + class ItemsList(generics.GenericAPIView, views_mixins.CreateModelMixin): serializer_class = ItemSerializer @@ -525,3 +556,155 @@ def patch(self, request, *args, **kwargs): @etag(get_asset_etag) def delete(self, request, *args, **kwargs): return self.destroy(request, *args, **kwargs) + + +class 
AssetUploadBase(generics.GenericAPIView): + serializer_class = AssetUploadSerializer + lookup_url_kwarg = "upload_id" + lookup_field = "upload_id" + + def get_queryset(self): + return AssetUpload.objects.filter( + asset__item__collection__name=self.kwargs['collection_name'], + asset__item__name=self.kwargs['item_name'], + asset__name=self.kwargs['asset_name'] + ).prefetch_related('asset') + + def get_in_progress_queryset(self): + return self.get_queryset().filter(status=AssetUpload.Status.IN_PROGRESS) + + def get_asset_or_404(self): + return get_object_or_404( + Asset.objects.all(), + name=self.kwargs['asset_name'], + item__name=self.kwargs['item_name'], + item__collection__name=self.kwargs['collection_name'] + ) + + def create_multipart_upload(self, executor, serializer, validated_data, asset): + key = get_asset_path(asset.item, asset.name) + upload_id = executor.create_multipart_upload( + key, asset, validated_data['checksum_multihash'] + ) + urls = [] + for part in range( + 1, (validated_data['number_parts'] if 'number_parts' in validated_data else 0) + 1 + ): + urls.append(executor.create_presigned_url(key, asset, part, upload_id)) + + clean_up_required = False + try: + with transaction.atomic(): + serializer.save(asset=asset, upload_id=upload_id, urls=urls) + except IntegrityError as error: + exception_handled = False + clean_up_required = True + logger.error( + 'Failed to create asset upload multipart: %s', + error, + extra={ + 'collection': asset.item.collection.name, + 'item': asset.item.name, + 'asset': asset.name + } + ) + in_progress = self.get_in_progress_queryset() + if bool(in_progress): + # Abort the last upload in progress and retry + self.abort_multipart_upload(executor, in_progress.get(), asset) + # And retry to save the new upload + serializer.save(asset=asset, upload_id=upload_id, urls=urls) + exception_handled = True + clean_up_required = False + if not exception_handled: + raise + finally: + if clean_up_required: + executor.abort_multipart_upload(key, asset, upload_id) + + def complete_multipart_upload(self, executor, validated_data, asset_upload, asset): + key = get_asset_path(asset.item, asset.name) + parts = validated_data.get('parts', None) + if parts is None: + raise ValidationError({'parts': _("Missing required field")}, code='missing') + if len(parts) > asset_upload.number_parts: + raise ValidationError({'parts': [_("Too many parts")]}, code='invalid') + if len(parts) < asset_upload.number_parts: + raise ValidationError({'parts': [_("Too few parts")]}, code='invalid') + executor.complete_multipart_upload(key, asset, parts, asset_upload.upload_id) + asset_upload.update_asset_checksum_multihash() + asset_upload.status = AssetUpload.Status.COMPLETED + asset_upload.ended = utc_aware(datetime.utcnow()) + asset_upload.save() + + def abort_multipart_upload(self, executor, asset_upload, asset): + key = get_asset_path(asset.item, asset.name) + executor.abort_multipart_upload(key, asset, asset_upload.upload_id) + asset_upload.status = AssetUpload.Status.ABORTED + asset_upload.ended = utc_aware(datetime.utcnow()) + asset_upload.save() + + +class AssetUploadsList(AssetUploadBase, mixins.ListModelMixin, views_mixins.CreateModelMixin): + + def post(self, request, *args, **kwargs): + return self.create(request, *args, **kwargs) + + def get(self, request, *args, **kwargs): + validate_asset(self.kwargs) + return self.list(request, *args, **kwargs) + + def get_success_headers(self, data): + return {'Location': '/'.join([self.request.build_absolute_uri(), data['upload_id']])} + + 
def perform_create(self, serializer): + executor = MultipartUpload() + data = serializer.validated_data + asset = self.get_asset_or_404() + self.create_multipart_upload(executor, serializer, data, asset) + + def get_queryset(self): + queryset = super().get_queryset() + + status = self.request.query_params.get('status', None) + if status: + queryset = queryset.filter_by_status(status) + + return queryset + + +class AssetUploadDetail(AssetUploadBase, mixins.RetrieveModelMixin, views_mixins.DestroyModelMixin): + + @etag(get_asset_upload_etag) + def get(self, request, *args, **kwargs): + return self.retrieve(request, *args, **kwargs) + + # @etag(get_asset_upload_etag) + # def delete(self, request, *args, **kwargs): + # return self.destroy(request, *args, **kwargs) + + +class AssetUploadComplete(AssetUploadBase, views_mixins.UpdateInsertModelMixin): + + def post(self, request, *args, **kwargs): + kwargs['partial'] = True + return self.update(request, *args, **kwargs) + + def perform_update(self, serializer): + executor = MultipartUpload() + asset = serializer.instance.asset + self.complete_multipart_upload( + executor, serializer.validated_data, serializer.instance, asset + ) + + +class AssetUploadAbort(AssetUploadBase, views_mixins.UpdateInsertModelMixin): + + def post(self, request, *args, **kwargs): + kwargs['partial'] = True + return self.update(request, *args, **kwargs) + + def perform_update(self, serializer): + executor = MultipartUpload() + asset = serializer.instance.asset + self.abort_multipart_upload(executor, serializer.instance, asset) diff --git a/app/tests/test_asset_upload_endpoint.py b/app/tests/test_asset_upload_endpoint.py new file mode 100644 index 00000000..5635061e --- /dev/null +++ b/app/tests/test_asset_upload_endpoint.py @@ -0,0 +1,560 @@ +# pylint: disable=too-many-ancestors +import logging +import os +from datetime import datetime +from urllib import parse + +from django.conf import settings +from django.test import Client +from django.urls import reverse + +from stac_api.models import Asset +from stac_api.models import AssetUpload +from stac_api.utils import fromisoformat +from stac_api.utils import get_asset_path +from stac_api.utils import get_s3_client +from stac_api.utils import get_sha256_multihash +from stac_api.utils import utc_aware + +from tests.base_test import StacBaseTestCase +from tests.data_factory import Factory +from tests.utils import S3TestMixin +from tests.utils import client_login +from tests.utils import mock_s3_asset_file + +logger = logging.getLogger(__name__) + + +class AssetUploadBaseTest(StacBaseTestCase, S3TestMixin): + + @mock_s3_asset_file + def setUp(self): # pylint: disable=invalid-name + self.client = Client() + client_login(self.client) + self.factory = Factory() + self.collection = self.factory.create_collection_sample().model + self.item = self.factory.create_item_sample(collection=self.collection).model + self.asset = self.factory.create_asset_sample(item=self.item, sample='asset-no-file').model + self.maxDiff = None # pylint: disable=invalid-name + + def get_asset_upload_queryset(self): + return AssetUpload.objects.all().filter( + asset__item__collection__name=self.collection.name, + asset__item__name=self.item.name, + asset__name=self.asset.name, + ) + + def get_delete_asset_path(self): + return reverse('asset-detail', args=[self.collection.name, self.item.name, self.asset.name]) + + def get_get_multipart_uploads_path(self): + return reverse( + 'asset-uploads-list', args=[self.collection.name, self.item.name, self.asset.name] + ) 
+ + def get_create_multipart_upload_path(self): + return reverse( + 'asset-uploads-list', args=[self.collection.name, self.item.name, self.asset.name] + ) + + def get_abort_multipart_upload_path(self, upload_id): + return reverse( + 'asset-upload-abort', + args=[self.collection.name, self.item.name, self.asset.name, upload_id] + ) + + def get_complete_multipart_upload_path(self, upload_id): + return reverse( + 'asset-upload-complete', + args=[self.collection.name, self.item.name, self.asset.name, upload_id] + ) + + def s3_upload_parts(self, upload_id, file_like, size, number_parts): + s3 = get_s3_client() + key = get_asset_path(self.item, self.asset.name) + parts = [] + # split the file into parts + start = 0 + offset = size // number_parts + for part in range(1, number_parts + 1): + # use the s3 client to upload the file instead of the presigned url due to the s3 + # mocking + response = s3.upload_part( + Body=file_like[start:start + offset], + Bucket=settings.AWS_STORAGE_BUCKET_NAME, + Key=key, + PartNumber=part, + UploadId=upload_id + ) + start += offset + parts.append({'etag': response['ETag'], 'part_number': part}) + return parts + + def get_file_like_object(self, size): + file_like = os.urandom(size) + checksum_multihash = get_sha256_multihash(file_like) + return file_like, checksum_multihash + + def check_urls_response(self, urls, number_parts): + now = utc_aware(datetime.utcnow()) + self.assertEqual(len(urls), number_parts) + for i, (url, part, expires) in enumerate(urls): + self.assertEqual(part, i + 1, msg=f'Part {part} does not match the url index {i}') + try: + url_parsed = parse.urlparse(url) + self.assertIn(url_parsed[0], ['http', 'https']) + except ValueError as error: + self.fail(msg=f"Invalid url {url} for part {part}: {error}") + try: + expires_dt = fromisoformat(expires) + self.assertGreater( + expires_dt, now, msg=f"expires {expires} for part {part} is not in future" + ) + except ValueError as error: + self.fail(msg=f"Invalid expires {expires} for part {part}: {error}") + + +class AssetUploadCreateEndpointTestCase(AssetUploadBaseTest): + + def test_asset_upload_create_abort_multipart(self): + key = get_asset_path(self.item, self.asset.name) + self.assertS3ObjectNotExists(key) + number_parts = 2 + file_like, checksum_multihash = self.get_file_like_object(1 * 1024) + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + json_data = response.json() + + self.check_urls_response(json_data['urls'], number_parts) + + response = self.client.post( + self.get_abort_multipart_upload_path(json_data['upload_id']), + data={}, + content_type="application/json" + ) + self.assertStatusCode(200, response) + json_data = response.json() + self.assertFalse( + self.get_asset_upload_queryset().filter(status=AssetUpload.Status.IN_PROGRESS).exists(), + msg='In progress upload found' + ) + self.assertTrue( + self.get_asset_upload_queryset().filter(status=AssetUpload.Status.ABORTED).exists(), + msg='Aborted upload not found' + ) + # check that there is only one multipart upload on S3 + s3 = get_s3_client() + response = s3.list_multipart_uploads(Bucket=settings.AWS_STORAGE_BUCKET_NAME, KeyMarker=key) + self.assertNotIn('Uploads', response, msg='uploads found on S3') + + def test_asset_upload_create_multipart_duplicate(self): + key = get_asset_path(self.item, self.asset.name) + self.assertS3ObjectNotExists(key) + 
number_parts = 2 + file_like, checksum_multihash = self.get_file_like_object(1 * 1024) + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + json_data = response.json() + self.check_urls_response(json_data['urls'], number_parts) + + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + + self.assertEqual( + self.get_asset_upload_queryset().filter(status=AssetUpload.Status.IN_PROGRESS).count(), + 1, + msg='More than one upload in progress' + ) + self.assertTrue( + self.get_asset_upload_queryset().filter(status=AssetUpload.Status.ABORTED).exists(), + msg='Aborted upload not found' + ) + # check that there is only one multipart upload on S3 + s3 = get_s3_client() + response = s3.list_multipart_uploads(Bucket=settings.AWS_STORAGE_BUCKET_NAME, KeyMarker=key) + self.assertIn('Uploads', response, msg='Failed to retrieve the upload list from s3') + self.assertEqual(len(response['Uploads']), 1, msg='More or less uploads found on S3') + + +class AssetUpload1PartEndpointTestCase(AssetUploadBaseTest): + + def test_asset_upload_1_part(self): + key = get_asset_path(self.item, self.asset.name) + self.assertS3ObjectNotExists(key) + number_parts = 1 + size = 1 * 1024 + file_like, checksum_multihash = self.get_file_like_object(size) + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + json_data = response.json() + self.check_urls_response(json_data['urls'], number_parts) + + parts = self.s3_upload_parts(json_data['upload_id'], file_like, size, number_parts) + + response = self.client.post( + self.get_complete_multipart_upload_path(json_data['upload_id']), + data={'parts': parts}, + content_type="application/json" + ) + self.assertStatusCode(200, response) + self.assertS3ObjectExists(key) + + +class AssetUpload2PartEndpointTestCase(AssetUploadBaseTest): + + def test_asset_upload_2_parts(self): + key = get_asset_path(self.item, self.asset.name) + self.assertS3ObjectNotExists(key) + number_parts = 2 + size = 10 * 1024 * 1024 # Minimum upload part on S3 is 5 MB + file_like, checksum_multihash = self.get_file_like_object(size) + + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + json_data = response.json() + self.check_urls_response(json_data['urls'], number_parts) + + parts = self.s3_upload_parts(json_data['upload_id'], file_like, size, number_parts) + + response = self.client.post( + self.get_complete_multipart_upload_path(json_data['upload_id']), + data={'parts': parts}, + content_type="application/json" + ) + self.assertStatusCode(200, response) + self.assertS3ObjectExists(key) + + +class AssetUploadInvalidEndpointTestCase(AssetUploadBaseTest): + + def test_asset_upload_2_parts_too_small(self): + key = get_asset_path(self.item, self.asset.name) + self.assertS3ObjectNotExists(key) + number_parts = 2 + size = 1 * 1024 * 1024 # Minimum upload part on S3 is 5 MB + file_like, 
checksum_multihash = self.get_file_like_object(size) + + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + json_data = response.json() + self.check_urls_response(json_data['urls'], number_parts) + + parts = self.s3_upload_parts(json_data['upload_id'], file_like, size, number_parts) + + response = self.client.post( + self.get_complete_multipart_upload_path(json_data['upload_id']), + data={'parts': parts}, + content_type="application/json" + ) + self.assertStatusCode(400, response) + self.assertEqual( + response.json()['description'], + [ + 'An error occurred (EntityTooSmall) when calling the CompleteMultipartUpload ' + 'operation: Your proposed upload is smaller than the minimum allowed object size.' + ] + ) + self.assertS3ObjectNotExists(key) + + def test_asset_upload_1_parts_invalid_etag(self): + key = get_asset_path(self.item, self.asset.name) + self.assertS3ObjectNotExists(key) + number_parts = 1 + size = 1 * 1024 * 1024 + file_like, checksum_multihash = self.get_file_like_object(size) + + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + json_data = response.json() + self.check_urls_response(json_data['urls'], number_parts) + + parts = self.s3_upload_parts(json_data['upload_id'], file_like, size, number_parts) + + response = self.client.post( + self.get_complete_multipart_upload_path(json_data['upload_id']), + data={'parts': [{ + 'etag': 'dummy', 'part_number': 1 + }]}, + content_type="application/json" + ) + self.assertStatusCode(400, response) + self.assertEqual( + response.json()['description'], + [ + 'An error occurred (InvalidPart) when calling the CompleteMultipartUpload ' + 'operation: One or more of the specified parts could not be found. The part ' + 'might not have been uploaded, or the specified entity tag might not have ' + "matched the part's entity tag." 
+ ] + ) + self.assertS3ObjectNotExists(key) + + def test_asset_upload_1_parts_too_many_parts_in_complete(self): + key = get_asset_path(self.item, self.asset.name) + self.assertS3ObjectNotExists(key) + number_parts = 1 + size = 1 * 1024 * 1024 + file_like, checksum_multihash = self.get_file_like_object(size) + + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + json_data = response.json() + self.check_urls_response(json_data['urls'], number_parts) + + parts = self.s3_upload_parts(json_data['upload_id'], file_like, size, number_parts) + parts.append({'etag': 'dummy', 'number_part': 2}) + + response = self.client.post( + self.get_complete_multipart_upload_path(json_data['upload_id']), + data={'parts': parts}, + content_type="application/json" + ) + self.assertStatusCode(400, response) + self.assertEqual(response.json()['description'], {'parts': ['Too many parts']}) + self.assertS3ObjectNotExists(key) + + def test_asset_upload_2_parts_incomplete_upload(self): + number_parts = 2 + size = 10 * 1024 * 1024 + file_like, checksum_multihash = self.get_file_like_object(size) + + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + json_data = response.json() + self.check_urls_response(json_data['urls'], number_parts) + + parts = self.s3_upload_parts(json_data['upload_id'], file_like, size // 2, 1) + response = self.client.post( + self.get_complete_multipart_upload_path(json_data['upload_id']), + data={'parts': parts}, + content_type="application/json" + ) + self.assertStatusCode(400, response) + self.assertEqual(response.json()['description'], {'parts': ['Too few parts']}) + + def test_asset_upload_1_parts_invalid_complete(self): + key = get_asset_path(self.item, self.asset.name) + self.assertS3ObjectNotExists(key) + number_parts = 1 + size = 1 * 1024 * 1024 + file_like, checksum_multihash = self.get_file_like_object(size) + + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + json_data = response.json() + self.check_urls_response(json_data['urls'], number_parts) + + parts = self.s3_upload_parts(json_data['upload_id'], file_like, size, number_parts) + + response = self.client.post( + self.get_complete_multipart_upload_path(json_data['upload_id']), + data={}, + content_type="application/json" + ) + self.assertStatusCode(400, response) + self.assertEqual(response.json()['description'], {'parts': 'Missing required field'}) + + response = self.client.post( + self.get_complete_multipart_upload_path(json_data['upload_id']), + data={'parts': []}, + content_type="application/json" + ) + self.assertStatusCode(400, response) + self.assertEqual(response.json()['description'], {'parts': ['This list may not be empty.']}) + + response = self.client.post( + self.get_complete_multipart_upload_path(json_data['upload_id']), + data={'parts': ["dummy-etag"]}, + content_type="application/json" + ) + self.assertStatusCode(400, response) + self.assertEqual( + response.json()['description'], + { + 'parts': { + '0': { + 'non_field_errors': + ['Invalid data. 
Expected a dictionary, ' + 'but got str.'] + } + } + } + ) + self.assertS3ObjectNotExists(key) + + +class AssetUploadDeleteInProgressEndpointTestCase(AssetUploadBaseTest): + + def test_delete_asset_upload_in_progress(self): + number_parts = 2 + size = 10 * 1024 * 1024 # Minimum upload part on S3 is 5 MB + file_like, checksum_multihash = self.get_file_like_object(size) + + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + upload_id = response.json()['upload_id'] + + response = self.client.delete(self.get_delete_asset_path()) + self.assertStatusCode(400, response) + self.assertEqual( + response.json()['description'], ['Asset asset-1.tiff has still an upload in progress'] + ) + + self.assertTrue( + Asset.objects.all().filter( + item__collection__name=self.collection.name, + item__name=self.item.name, + name=self.asset.name + ).exists(), + msg='Asset has been deleted' + ) + + response = self.client.post(self.get_abort_multipart_upload_path(upload_id)) + self.assertStatusCode(200, response) + + response = self.client.delete(self.get_delete_asset_path()) + self.assertStatusCode(200, response) + + self.assertFalse( + Asset.objects.all().filter( + item__collection__name=self.collection.name, + item__name=self.item.name, + name=self.asset.name + ).exists(), + msg='Asset has not been deleted' + ) + + +class GetAssetUploadsEndpointTestCase(AssetUploadBaseTest): + + def setUp(self): + super().setUp() + # Create some asset uploads + for i in range(1, 4): + AssetUpload.objects.create( + asset=self.asset, + upload_id=f'upload-{i}', + status=AssetUpload.Status.ABORTED, + checksum_multihash=get_sha256_multihash(b'upload-%d' % i), + number_parts=2, + ended=utc_aware(datetime.utcnow()) + ) + for i in range(4, 8): + AssetUpload.objects.create( + asset=self.asset, + upload_id=f'upload-{i}', + status=AssetUpload.Status.COMPLETED, + checksum_multihash=get_sha256_multihash(b'upload-%d' % i), + number_parts=2, + ended=utc_aware(datetime.utcnow()) + ) + AssetUpload.objects.create( + asset=self.asset, + upload_id='upload-8', + status=AssetUpload.Status.IN_PROGRESS, + checksum_multihash=get_sha256_multihash(b'upload-8'), + number_parts=2 + ) + self.maxDiff = None # pylint: disable=invalid-name + + def test_get_asset_uploads(self): + response = self.client.get(self.get_get_multipart_uploads_path()) + self.assertStatusCode(200, response) + json_data = response.json() + self.assertIn('links', json_data) + self.assertEqual(json_data['links'], []) + self.assertIn('uploads', json_data) + self.assertEqual(len(json_data['uploads']), self.get_asset_upload_queryset().count()) + self.assertEqual( + ['upload_id', 'status', 'created', 'aborted', 'number_parts', 'checksum:multihash'], + list(json_data['uploads'][0].keys()), + ) + self.assertEqual( + ['upload_id', 'status', 'created', 'completed', 'number_parts', 'checksum:multihash'], + list(json_data['uploads'][4].keys()), + ) + self.assertEqual( + ['upload_id', 'status', 'created', 'number_parts', 'checksum:multihash'], + list(json_data['uploads'][7].keys()), + ) + + def test_get_asset_uploads_status_query(self): + response = self.client.get( + self.get_get_multipart_uploads_path(), {'status': AssetUpload.Status.ABORTED} + ) + self.assertStatusCode(200, response) + json_data = response.json() + self.assertIn('uploads', json_data) + self.assertGreater(len(json_data), 1) + self.assertEqual( + 
len(json_data['uploads']), + self.get_asset_upload_queryset().filter(status=AssetUpload.Status.ABORTED).count(), + ) + for upload in json_data['uploads']: + self.assertEqual(upload['status'], AssetUpload.Status.ABORTED) diff --git a/app/tests/test_asset_upload_model.py b/app/tests/test_asset_upload_model.py new file mode 100644 index 00000000..d93157ac --- /dev/null +++ b/app/tests/test_asset_upload_model.py @@ -0,0 +1,197 @@ +import logging +from datetime import datetime + +from django.core.exceptions import ValidationError +from django.db import IntegrityError +from django.db.models import ProtectedError +from django.test import TestCase +from django.test import TransactionTestCase + +from stac_api.models import Asset +from stac_api.models import AssetUpload +from stac_api.utils import get_sha256_multihash +from stac_api.utils import utc_aware + +from tests.data_factory import Factory +from tests.utils import mock_s3_asset_file + +logger = logging.getLogger(__name__) + + +class AssetUploadTestCaseMixin: + + def create_asset_upload(self, asset, upload_id, **kwargs): + asset_upload = AssetUpload( + asset=asset, + upload_id=upload_id, + checksum_multihash=get_sha256_multihash(b'Test'), + **kwargs + ) + asset_upload.full_clean() + asset_upload.save() + self.assertEqual( + asset_upload, + AssetUpload.objects.get( + upload_id=upload_id, + asset__name=asset.name, + asset__item__name=asset.item.name, + asset__item__collection__name=asset.item.collection.name + ) + ) + return asset_upload + + def update_asset_upload(self, asset_upload, **kwargs): + for kwarg, value in kwargs.items(): + setattr(asset_upload, kwarg, value) + asset_upload.full_clean() + asset_upload.save() + self.assertEqual( + asset_upload, + AssetUpload.objects.get( + upload_id=asset_upload.upload_id, asset__name=asset_upload.asset.name + ) + ) + return asset_upload + + def check_etag(self, etag): + self.assertIsInstance(etag, str, msg="Etag must be a string") + self.assertNotEqual(etag, '', msg='Etag should not be empty') + + +class AssetUploadModelTestCase(TestCase, AssetUploadTestCaseMixin): + + @classmethod + @mock_s3_asset_file + def setUpTestData(cls): + cls.factory = Factory() + cls.collection = cls.factory.create_collection_sample().model + cls.item = cls.factory.create_item_sample(collection=cls.collection).model + cls.asset_1 = cls.factory.create_asset_sample(item=cls.item).model + cls.asset_2 = cls.factory.create_asset_sample(item=cls.item).model + + def test_create_asset_upload_default(self): + asset_upload = self.create_asset_upload(self.asset_1, 'default-upload') + self.assertEqual(asset_upload.urls, [], msg="Wrong default value") + self.assertEqual(asset_upload.ended, None, msg="Wrong default value") + self.assertEqual(asset_upload.number_parts, 1, msg="Wrong default value") + self.assertAlmostEqual( + utc_aware(datetime.utcnow()).timestamp(), + asset_upload.created.timestamp(), + delta=1, + msg="Wrong default value" + ) + + def test_unique_constraint(self): + # Check that asset upload is unique in collection/item/asset + # therefore the following asset upload should be ok + # collection-1/item-1/asset-1/default-upload + # collection-2/item-1/asset-1/default-upload + collection_2 = self.factory.create_collection_sample().model + item_2 = self.factory.create_item_sample(collection_2, name=self.item.name).model + asset_2 = self.factory.create_asset_sample(item_2, name=self.asset_1.name).model + asset_upload_1 = self.create_asset_upload(self.asset_1, 'default-upload') + asset_upload_2 = 
self.create_asset_upload(asset_2, 'default-upload') + self.assertEqual(asset_upload_1.upload_id, asset_upload_2.upload_id) + self.assertEqual(asset_upload_1.asset.name, asset_upload_2.asset.name) + self.assertEqual(asset_upload_1.asset.item.name, asset_upload_2.asset.item.name) + self.assertNotEqual( + asset_upload_1.asset.item.collection.name, asset_upload_2.asset.item.collection.name + ) + # But duplicate path are not allowed + with self.assertRaises(ValidationError, msg="Existing asset upload could be re-created."): + asset_upload_3 = self.create_asset_upload(self.asset_1, 'default-upload') + + def test_create_asset_upload_duplicate_in_progress(self): + # create a first upload on asset 1 + asset_upload_1 = self.create_asset_upload(self.asset_1, '1st-upload') + + # create a first upload on asset 2 + asset_upload_2 = self.create_asset_upload(self.asset_2, '1st-upload') + + # create a second upload on asset 1 should not be allowed. + with self.assertRaises( + IntegrityError, msg="Existing asset upload already in progress could be re-created." + ): + asset_upload_3 = self.create_asset_upload(self.asset_1, '2nd-upload') + + def test_asset_upload_etag(self): + asset_upload = self.create_asset_upload(self.asset_1, 'default-upload') + original_etag = asset_upload.etag + self.check_etag(original_etag) + asset_upload = self.update_asset_upload(asset_upload, status=AssetUpload.Status.ABORTED) + self.check_etag(asset_upload.etag) + self.assertNotEqual(asset_upload.etag, original_etag, msg='Etag was not updated') + + +class AssetUploadDeleteProtectModelTestCase(TransactionTestCase, AssetUploadTestCaseMixin): + + @mock_s3_asset_file + def setUp(self): + self.factory = Factory() + self.collection = self.factory.create_collection_sample().model + self.item = self.factory.create_item_sample(collection=self.collection,).model + self.asset = self.factory.create_asset_sample(item=self.item).model + + def test_delete_asset_upload(self): + upload_id = 'upload-in-progress' + asset_upload = self.create_asset_upload(self.asset, upload_id) + + with self.assertRaises(ProtectedError, msg="Deleting an upload in progress not allowed"): + asset_upload.delete() + + asset_upload = self.update_asset_upload( + asset_upload, status=AssetUpload.Status.COMPLETED, ended=utc_aware(datetime.utcnow()) + ) + + asset_upload.delete() + self.assertFalse( + AssetUpload.objects.all().filter(upload_id=upload_id, + asset__name=self.asset.name).exists() + ) + + def test_delete_asset_with_upload_in_progress(self): + asset_upload_1 = self.create_asset_upload(self.asset, 'upload-in-progress') + asset_upload_2 = self.create_asset_upload( + self.asset, + 'upload-completed', + status=AssetUpload.Status.COMPLETED, + ended=utc_aware(datetime.utcnow()) + ) + asset_upload_3 = self.create_asset_upload( + self.asset, + 'upload-aborted', + status=AssetUpload.Status.ABORTED, + ended=utc_aware(datetime.utcnow()) + ) + asset_upload_4 = self.create_asset_upload( + self.asset, + 'upload-aborted-2', + status=AssetUpload.Status.ABORTED, + ended=utc_aware(datetime.utcnow()) + ) + + # Try to delete parent asset + with self.assertRaises(ValidationError): + self.asset.delete() + self.assertEqual(4, len(list(AssetUpload.objects.all()))) + self.assertTrue( + Asset.objects.all().filter( + name=self.asset.name, + item__name=self.item.name, + item__collection__name=self.collection.name + ).exists() + ) + + self.update_asset_upload( + asset_upload_1, status=AssetUpload.Status.ABORTED, ended=utc_aware(datetime.utcnow()) + ) + + self.asset.delete() + 
self.assertEqual(0, len(list(AssetUpload.objects.all()))) + self.assertFalse( + Asset.objects.all().filter( + name=self.asset.name, + item__name=self.item.name, + item__collection__name=self.collection.name + ).exists() + ) diff --git a/app/tests/test_generic_api.py b/app/tests/test_generic_api.py index 66b11cd9..62ae577d 100644 --- a/app/tests/test_generic_api.py +++ b/app/tests/test_generic_api.py @@ -6,7 +6,10 @@ from django.test import Client from django.test import override_settings +from stac_api.models import AssetUpload from stac_api.utils import get_link +from stac_api.utils import get_sha256_multihash +from stac_api.utils import utc_aware from tests.base_test import StacBaseTestCase from tests.data_factory import Factory @@ -100,18 +103,31 @@ def test_invalid_limit_query(self): response.json()['description'], msg='Unexpected error message') + @mock_s3_asset_file def test_pagination(self): # pylint: disable=too-many-locals items = self.factory.create_item_samples(3, self.collections[0].model, db_create=True) + asset = self.factory.create_asset_sample(items[0].model, db_create=True) + for i in range(1, 4): + AssetUpload.objects.create( + asset=asset.model, + upload_id=f'upload-{i}', + status=AssetUpload.Status.ABORTED, + checksum_multihash=get_sha256_multihash(b'upload-%d' % i), + number_parts=2, + ended=utc_aware(datetime.utcnow()) + ) for endpoint, result_attribute in [ ('collections', 'collections'), - (f'collections/{self.collections[0]["name"]}/items', 'features') + (f'collections/{self.collections[0]["name"]}/items', 'features'), + (f'collections/{self.collections[0]["name"]}/items/{items[0]["name"]}/' + f'assets/{asset["name"]}/uploads', 'uploads') ]: with self.subTest(endpoint=endpoint): # Page 1: response = self.client.get(f"/{STAC_BASE_V}/{endpoint}?limit=1") + self.assertStatusCode(200, response) page_1 = response.json() - self.assertEqual(200, response.status_code, msg=get_http_error_description(page_1)) # Make sure previous link is not present self.assertIsNone( diff --git a/app/tests/test_serializer_asset_upload.py b/app/tests/test_serializer_asset_upload.py new file mode 100644 index 00000000..3fb48f0a --- /dev/null +++ b/app/tests/test_serializer_asset_upload.py @@ -0,0 +1,119 @@ +# pylint: disable=too-many-lines + +import logging +from datetime import datetime +from datetime import timedelta +from uuid import uuid4 + +from rest_framework.exceptions import ValidationError + +from stac_api.models import AssetUpload +from stac_api.serializers import AssetUploadSerializer +from stac_api.utils import get_sha256_multihash +from stac_api.utils import isoformat +from stac_api.utils import utc_aware + +from tests.base_test import StacBaseTestCase +from tests.data_factory import Factory +from tests.utils import mock_s3_asset_file + +logger = logging.getLogger(__name__) + + +class AssetUploadSerializationTestCase(StacBaseTestCase): + + @classmethod + @mock_s3_asset_file + def setUpTestData(cls): + cls.data_factory = Factory() + cls.collection = cls.data_factory.create_collection_sample().model + cls.item = cls.data_factory.create_item_sample(collection=cls.collection).model + cls.asset = cls.data_factory.create_asset_sample(item=cls.item).model + + def setUp(self): # pylint: disable=invalid-name + self.maxDiff = None # pylint: disable=invalid-name + + def test_asset_upload_serialization(self): + upload_id = str(uuid4()) + checksum = get_sha256_multihash(b'Test') + asset_upload = AssetUpload( + asset=self.asset, upload_id=upload_id, checksum_multihash=checksum + ) + + 
serializer = AssetUploadSerializer(asset_upload) + data = serializer.data + self.assertEqual(data['upload_id'], upload_id) + self.assertEqual(data['checksum:multihash'], checksum) + self.assertEqual(data['status'], 'in-progress') + self.assertEqual(data['number_parts'], 1) + self.assertNotIn('urls', data) + self.assertNotIn('started', data) + self.assertNotIn('completed', data) + self.assertNotIn('aborted', data) + + urls = [['http://example.com', 3600]] + started = utc_aware(datetime.utcnow()) + ended = utc_aware(datetime.utcnow() + timedelta(seconds=5)) + asset_upload.started = started + asset_upload.number_parts = 1 + asset_upload.urls = urls + asset_upload.ended = ended + asset_upload.status = AssetUpload.Status.COMPLETED + asset_upload.full_clean() + asset_upload.save() + + serializer = AssetUploadSerializer(asset_upload) + data = serializer.data + self.assertEqual(data['status'], 'completed') + self.assertEqual(data['urls'], urls) + self.assertEqual(data['completed'], isoformat(ended)) + self.assertNotIn('aborted', data) + self.assertEqual(data['number_parts'], 1) + + asset_upload.status = AssetUpload.Status.ABORTED + asset_upload.full_clean() + asset_upload.save() + serializer = AssetUploadSerializer(asset_upload) + data = serializer.data + self.assertEqual(data['status'], 'aborted') + self.assertEqual(data['aborted'], isoformat(ended)) + + def test_asset_upload_deserialization(self): + checksum = get_sha256_multihash(b'Test') + serializer = AssetUploadSerializer(data={'checksum:multihash': checksum}) + serializer.is_valid(raise_exception=True) + asset_upload = serializer.save(asset=self.asset) + self.assertEqual(asset_upload.checksum_multihash, checksum) + self.assertEqual(asset_upload.status, AssetUpload.Status.IN_PROGRESS) + self.assertEqual(asset_upload.ended, None) + + ended = utc_aware(datetime.utcnow()) + serializer = AssetUploadSerializer( + instance=asset_upload, + data={ + 'status': 'completed', + 'checksum:multihash': asset_upload.checksum_multihash, + 'ended': isoformat(ended) + } + ) + serializer.is_valid(raise_exception=True) + asset_upload = serializer.save(asset=asset_upload.asset) + self.assertEqual(asset_upload.ended, ended) + self.assertEqual(asset_upload.status, AssetUpload.Status.COMPLETED) + + def test_asset_upload_deserialization_invalid(self): + serializer = AssetUploadSerializer(data={}) + with self.assertRaises(ValidationError): + serializer.is_valid(raise_exception=True) + + serializer = AssetUploadSerializer(data={'checksum:multihash': ''}) + with self.assertRaises(ValidationError): + serializer.is_valid(raise_exception=True) + now = datetime.now().isoformat() + serializer = AssetUploadSerializer( + data={ + 'checksum:multihash': get_sha256_multihash(b'Test'), 'created': now + } + ) + serializer.is_valid(raise_exception=True) + self.assertNotIn('created', serializer.validated_data) From 43196f6eaf68a4139db97040d6de89203e3209ce Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Sat, 10 Apr 2021 06:46:49 +0200 Subject: [PATCH 059/105] BGDIINF_SB-1739: Fixed docstring copy paste mistake --- app/stac_api/s3_multipart_upload.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/stac_api/s3_multipart_upload.py b/app/stac_api/s3_multipart_upload.py index 45e0e210..5d504791 100644 --- a/app/stac_api/s3_multipart_upload.py +++ b/app/stac_api/s3_multipart_upload.py @@ -220,7 +220,7 @@ def abort_multipart_upload(self, key, asset, upload_id): Args: key: string - key on the S3 backend for which we want to complete the multipart upload + key on the 
S3 backend for which we want to abort the multipart upload
         asset: Asset
             Asset metadata model associated with the S3 backend key
         upload_id: string

From 344874c3dde8909494a0a643421414233cb6f27b Mon Sep 17 00:00:00 2001
From: Brice Schaffner
Date: Thu, 15 Apr 2021 11:52:57 +0200
Subject: [PATCH 060/105] BGDIINF_SB-1739: Limit the maximal number of parts to 100

This gives us the possibility to upload a file of up to 500 GB and simplifies
the API, as we don't need pagination. Also, creating 10'000 parts would require
generating 10'000 presigned URLs by calling an S3 endpoint 10'000 times, which
could really slow down the request.
---
 app/stac_api/migrations/0005_auto_20210408_0821.py | 2 +-
 app/stac_api/models.py | 2 +-
 app/stac_api/serializers.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/app/stac_api/migrations/0005_auto_20210408_0821.py b/app/stac_api/migrations/0005_auto_20210408_0821.py
index aa0faf64..41620f1b 100644
--- a/app/stac_api/migrations/0005_auto_20210408_0821.py
+++ b/app/stac_api/migrations/0005_auto_20210408_0821.py
@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
                     default=1,
                     validators=[
                         django.core.validators.MinValueValidator(1),
-                        django.core.validators.MaxValueValidator(10000)
+                        django.core.validators.MaxValueValidator(100)
                     ]
                 )
             ),
diff --git a/app/stac_api/models.py b/app/stac_api/models.py
index 9abfbc7d..9a2f5a9a 100644
--- a/app/stac_api/models.py
+++ b/app/stac_api/models.py
@@ -716,7 +716,7 @@ class Status(models.TextChoices):
     )
     number_parts = models.IntegerField(
         default=1,
-        validators=[MinValueValidator(1), MaxValueValidator(10000)],
+        validators=[MinValueValidator(1), MaxValueValidator(100)],
         null=False,
         blank=False
     )  # S3 doesn't support more that 10'000 parts
diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py
index adcabf07..9a7f98ee 100644
--- a/app/stac_api/serializers.py
+++ b/app/stac_api/serializers.py
@@ -933,7 +933,7 @@ class UploadPartSerializer(serializers.Serializer):
     # pylint: disable=abstract-method
     etag = serializers.CharField(source='ETag', allow_blank=False, required=True)
     part_number = serializers.IntegerField(
-        source='PartNumber', min_value=1, max_value=10000, required=True, allow_null=False
+        source='PartNumber', min_value=1, max_value=100, required=True, allow_null=False
     )

From 12769c69e802456a0d462ddc5266fe10e77cb6de Mon Sep 17 00:00:00 2001
From: Brice Schaffner
Date: Wed, 14 Apr 2021 20:38:02 +0200
Subject: [PATCH 061/105] BGDIINF_SB-1739: Fixed missing number_parts bug in create asset upload

When creating an asset upload without specifying the number_parts, no
presigned URLs were created and the number_parts was set in the DB to its
default of 1. Now the number_parts is required and no default value is set in
the DB. This would allow us in the future to change the SPEC to allow a create
without number_parts and to create the parts separately with a new endpoint,
leaving the number of parts open.
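For illustration only (not part of the patch itself): a minimal sketch of the create
request under the new rules, based on the endpoint layout and payloads exercised by the
tests below. The base URL, collection/item/asset names and the checksum value are
placeholder assumptions.

    # Sketch: creating a multipart asset upload now requires an explicit number_parts
    # in the range 1..100 (at 5 GB per S3 part this caps a file at ~500 GB); omitting
    # it is rejected with HTTP 400 "This field is required."
    import requests  # any HTTP client works; requests is just an assumption here

    STAC_BASE = 'https://data.example.org/api/stac/v0.9'  # placeholder base URL
    url = (f'{STAC_BASE}/collections/collection-1/items/item-1'
           f'/assets/asset-1.tiff/uploads')
    payload = {
        'number_parts': 2,  # required, 1 <= number_parts <= 100
        'checksum:multihash': '1220deadbeef',  # sha2-256 multihash of the file (placeholder)
    }
    response = requests.post(url, json=payload)
    assert response.status_code == 201  # the answer contains one presigned URL per part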
--- .../migrations/0005_auto_20210408_0821.py | 1 - app/stac_api/models.py | 1 - app/tests/test_asset_upload_endpoint.py | 45 +++++++++++++++++++ app/tests/test_asset_upload_model.py | 13 +++++- app/tests/test_serializer_asset_upload.py | 25 ++++++++--- 5 files changed, 75 insertions(+), 10 deletions(-) diff --git a/app/stac_api/migrations/0005_auto_20210408_0821.py b/app/stac_api/migrations/0005_auto_20210408_0821.py index 41620f1b..7ef461a7 100644 --- a/app/stac_api/migrations/0005_auto_20210408_0821.py +++ b/app/stac_api/migrations/0005_auto_20210408_0821.py @@ -33,7 +33,6 @@ class Migration(migrations.Migration): ( 'number_parts', models.IntegerField( - default=1, validators=[ django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100) diff --git a/app/stac_api/models.py b/app/stac_api/models.py index 9a2f5a9a..794f57d3 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -715,7 +715,6 @@ class Status(models.TextChoices): choices=Status.choices, max_length=32, default=Status.IN_PROGRESS, blank=False, null=False ) number_parts = models.IntegerField( - default=1, validators=[MinValueValidator(1), MaxValueValidator(100)], null=False, blank=False diff --git a/app/tests/test_asset_upload_endpoint.py b/app/tests/test_asset_upload_endpoint.py index 5635061e..bcceff42 100644 --- a/app/tests/test_asset_upload_endpoint.py +++ b/app/tests/test_asset_upload_endpoint.py @@ -257,6 +257,51 @@ def test_asset_upload_2_parts(self): class AssetUploadInvalidEndpointTestCase(AssetUploadBaseTest): + def test_asset_upload_create_invalid(self): + response = self.client.post( + self.get_create_multipart_upload_path(), data={}, content_type="application/json" + ) + self.assertStatusCode(400, response) + self.assertEqual( + response.json()['description'], + { + 'checksum:multihash': ['This field is required.'], + 'number_parts': ['This field is required.'] + } + ) + + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': 0, "checksum:multihash": 'abcdef' + }, + content_type="application/json" + ) + self.assertStatusCode(400, response) + self.assertEqual( + response.json()['description'], + { + 'checksum:multihash': ['Invalid multihash value; Invalid varint provided'], + 'number_parts': ['Ensure this value is greater than or equal to 1.'] + } + ) + + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': 101, "checksum:multihash": 'abcdef' + }, + content_type="application/json" + ) + self.assertStatusCode(400, response) + self.assertEqual( + response.json()['description'], + { + 'checksum:multihash': ['Invalid multihash value; Invalid varint provided'], + 'number_parts': ['Ensure this value is less than or equal to 100.'] + } + ) + def test_asset_upload_2_parts_too_small(self): key = get_asset_path(self.item, self.asset.name) self.assertS3ObjectNotExists(key) diff --git a/app/tests/test_asset_upload_model.py b/app/tests/test_asset_upload_model.py index d93157ac..e36406a3 100644 --- a/app/tests/test_asset_upload_model.py +++ b/app/tests/test_asset_upload_model.py @@ -25,6 +25,7 @@ def create_asset_upload(self, asset, upload_id, **kwargs): asset=asset, upload_id=upload_id, checksum_multihash=get_sha256_multihash(b'Test'), + number_parts=1, **kwargs ) asset_upload.full_clean() @@ -73,7 +74,6 @@ def test_create_asset_upload_default(self): asset_upload = self.create_asset_upload(self.asset_1, 'default-upload') self.assertEqual(asset_upload.urls, [], msg="Wrong default value") 
self.assertEqual(asset_upload.ended, None, msg="Wrong default value")
-        self.assertEqual(asset_upload.number_parts, 1, msg="Wrong default value")
         self.assertAlmostEqual(
             utc_aware(datetime.utcnow()).timestamp(),
             asset_upload.created.timestamp(),
@@ -122,6 +122,17 @@ def test_asset_upload_etag(self):
         self.check_etag(asset_upload.etag)
         self.assertNotEqual(asset_upload.etag, original_etag, msg='Etag was not updated')
 
+    def test_asset_upload_invalid_number_parts(self):
+        with self.assertRaises(ValidationError):
+            asset_upload = AssetUpload(
+                asset=self.asset_1,
+                upload_id='my-upload-id',
+                checksum_multihash=get_sha256_multihash(b'Test'),
+                number_parts=-1
+            )
+            asset_upload.full_clean()
+            asset_upload.save()
+
 
 class AssetUploadDeleteProtectModelTestCase(TransactionTestCase, AssetUploadTestCaseMixin):
 
diff --git a/app/tests/test_serializer_asset_upload.py b/app/tests/test_serializer_asset_upload.py
index 3fb48f0a..30c26723 100644
--- a/app/tests/test_serializer_asset_upload.py
+++ b/app/tests/test_serializer_asset_upload.py
@@ -37,8 +37,10 @@ def test_asset_upload_serialization(self):
         upload_id = str(uuid4())
         checksum = get_sha256_multihash(b'Test')
         asset_upload = AssetUpload(
-            asset=self.asset, upload_id=upload_id, checksum_multihash=checksum
+            asset=self.asset, upload_id=upload_id, checksum_multihash=checksum, number_parts=1
         )
+        asset_upload.full_clean()
+        asset_upload.save()
 
         serializer = AssetUploadSerializer(asset_upload)
         data = serializer.data
@@ -80,7 +82,7 @@ def test_asset_upload_serialization(self):
 
     def test_asset_upload_deserialization(self):
         checksum = get_sha256_multihash(b'Test')
-        serializer = AssetUploadSerializer(data={'checksum:multihash': checksum})
+        serializer = AssetUploadSerializer(data={'checksum:multihash': checksum, "number_parts": 1})
         serializer.is_valid(raise_exception=True)
         asset_upload = serializer.save(asset=self.asset)
         self.assertEqual(asset_upload.checksum_multihash, checksum)
@@ -93,7 +95,8 @@ def test_asset_upload_deserialization(self):
             data={
                 'status': 'completed',
                 'checksum:multihash': asset_upload.checksum_multihash,
-                'ended': isoformat(ended)
+                'ended': isoformat(ended),
+                "number_parts": 1
             }
         )
         serializer.is_valid(raise_exception=True)
@@ -109,11 +112,19 @@ def test_asset_upload_deserialization_invalid(self):
         serializer = AssetUploadSerializer(data={'checksum:multihash': ''})
         with self.assertRaises(ValidationError):
             serializer.is_valid(raise_exception=True)
-        now = datetime.now().isoformat()
+
         serializer = AssetUploadSerializer(
             data={
-                'checksum:multihash': get_sha256_multihash(b'Test'), 'created': now
+                'checksum:multihash': get_sha256_multihash(b'Test'), 'number_parts': 0
             }
         )
-        serializer.is_valid(raise_exception=True)
-        self.assertNotIn('created', serializer.validated_data)
+        with self.assertRaises(ValidationError):
+            serializer.is_valid(raise_exception=True)
+
+        serializer = AssetUploadSerializer(
+            data={
+                'checksum:multihash': get_sha256_multihash(b'Test'), 'number_parts': 10001
+            }
+        )
+        with self.assertRaises(ValidationError):
+            serializer.is_valid(raise_exception=True)

From a20e4d840ea544521407ee69d0cb30e76bba152a Mon Sep 17 00:00:00 2001
From: Brice Schaffner
Date: Fri, 16 Apr 2021 09:03:46 +0200
Subject: [PATCH 062/105] BGDIINF_SB-1739: Cleared the `urls` when upload is completed or aborted

The urls (presigned URLs) are not needed anymore once the upload is done, and
they are no longer valid anyway. Therefore remove them from the answer.
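As a hedged illustration of the behaviour described above (the response shape is inferred
from the check_completed_response/check_aborted_response test helpers added below; all
field values are placeholders): once an upload is completed or aborted, the answer keeps
the upload metadata but no longer carries the presigned urls.

    # Sketch of a serialized upload after completion: the 'urls' list is gone,
    # only the metadata plus the 'completed' (or 'aborted') timestamp remains.
    completed_upload = {
        'upload_id': 'dummy-upload-id',          # placeholder
        'status': 'completed',
        'number_parts': 2,
        'checksum:multihash': '1220deadbeef',    # placeholder
        'created': '2021-04-16T09:00:00Z',       # placeholder timestamps
        'completed': '2021-04-16T09:01:00Z',
    }
    assert 'urls' not in completed_upload
    assert 'aborted' not in completed_upload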
--- app/stac_api/models.py | 4 +-- app/stac_api/views.py | 2 ++ app/tests/test_asset_upload_endpoint.py | 43 +++++++++++++++++++++++++ 3 files changed, 46 insertions(+), 3 deletions(-) diff --git a/app/stac_api/models.py b/app/stac_api/models.py index 794f57d3..7989b55e 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -715,9 +715,7 @@ class Status(models.TextChoices): choices=Status.choices, max_length=32, default=Status.IN_PROGRESS, blank=False, null=False ) number_parts = models.IntegerField( - validators=[MinValueValidator(1), MaxValueValidator(100)], - null=False, - blank=False + validators=[MinValueValidator(1), MaxValueValidator(100)], null=False, blank=False ) # S3 doesn't support more that 10'000 parts urls = models.JSONField(default=list, encoder=DjangoJSONEncoder, blank=True) created = models.DateTimeField(auto_now_add=True) diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 356ba8a9..2ffe4d03 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -635,6 +635,7 @@ def complete_multipart_upload(self, executor, validated_data, asset_upload, asse asset_upload.update_asset_checksum_multihash() asset_upload.status = AssetUpload.Status.COMPLETED asset_upload.ended = utc_aware(datetime.utcnow()) + asset_upload.urls = [] asset_upload.save() def abort_multipart_upload(self, executor, asset_upload, asset): @@ -642,6 +643,7 @@ def abort_multipart_upload(self, executor, asset_upload, asset): executor.abort_multipart_upload(key, asset, asset_upload.upload_id) asset_upload.status = AssetUpload.Status.ABORTED asset_upload.ended = utc_aware(datetime.utcnow()) + asset_upload.urls = [] asset_upload.save() diff --git a/app/tests/test_asset_upload_endpoint.py b/app/tests/test_asset_upload_endpoint.py index bcceff42..7fcd6789 100644 --- a/app/tests/test_asset_upload_endpoint.py +++ b/app/tests/test_asset_upload_endpoint.py @@ -113,6 +113,42 @@ def check_urls_response(self, urls, number_parts): except ValueError as error: self.fail(msg=f"Invalid expires {expires} for part {part}: {error}") + def check_created_response(self, json_response): + self.assertNotIn('completed', json_response) + self.assertNotIn('aborted', json_response) + self.assertIn('upload_id', json_response) + self.assertIn('status', json_response) + self.assertIn('number_parts', json_response) + self.assertIn('checksum:multihash', json_response) + self.assertIn('urls', json_response) + self.assertEqual(json_response['status'], 'in-progress') + + def check_completed_response(self, json_response): + self.assertNotIn('urls', json_response) + self.assertNotIn('aborted', json_response) + self.assertIn('upload_id', json_response) + self.assertIn('status', json_response) + self.assertIn('number_parts', json_response) + self.assertIn('checksum:multihash', json_response) + self.assertIn('completed', json_response) + self.assertEqual(json_response['status'], 'completed') + self.assertGreater( + fromisoformat(json_response['completed']), fromisoformat(json_response['created']) + ) + + def check_aborted_response(self, json_response): + self.assertNotIn('urls', json_response) + self.assertNotIn('completed', json_response) + self.assertIn('upload_id', json_response) + self.assertIn('status', json_response) + self.assertIn('number_parts', json_response) + self.assertIn('checksum:multihash', json_response) + self.assertIn('aborted', json_response) + self.assertEqual(json_response['status'], 'aborted') + self.assertGreater( + fromisoformat(json_response['aborted']), fromisoformat(json_response['created']) + ) 
+ class AssetUploadCreateEndpointTestCase(AssetUploadBaseTest): @@ -130,6 +166,7 @@ def test_asset_upload_create_abort_multipart(self): ) self.assertStatusCode(201, response) json_data = response.json() + self.check_created_response(json_data) self.check_urls_response(json_data['urls'], number_parts) @@ -140,6 +177,7 @@ def test_asset_upload_create_abort_multipart(self): ) self.assertStatusCode(200, response) json_data = response.json() + self.check_aborted_response(json_data) self.assertFalse( self.get_asset_upload_queryset().filter(status=AssetUpload.Status.IN_PROGRESS).exists(), msg='In progress upload found' @@ -167,6 +205,7 @@ def test_asset_upload_create_multipart_duplicate(self): ) self.assertStatusCode(201, response) json_data = response.json() + self.check_created_response(json_data) self.check_urls_response(json_data['urls'], number_parts) response = self.client.post( @@ -211,6 +250,7 @@ def test_asset_upload_1_part(self): ) self.assertStatusCode(201, response) json_data = response.json() + self.check_created_response(json_data) self.check_urls_response(json_data['urls'], number_parts) parts = self.s3_upload_parts(json_data['upload_id'], file_like, size, number_parts) @@ -221,6 +261,7 @@ def test_asset_upload_1_part(self): content_type="application/json" ) self.assertStatusCode(200, response) + self.check_completed_response(response.json()) self.assertS3ObjectExists(key) @@ -242,6 +283,7 @@ def test_asset_upload_2_parts(self): ) self.assertStatusCode(201, response) json_data = response.json() + self.check_created_response(json_data) self.check_urls_response(json_data['urls'], number_parts) parts = self.s3_upload_parts(json_data['upload_id'], file_like, size, number_parts) @@ -252,6 +294,7 @@ def test_asset_upload_2_parts(self): content_type="application/json" ) self.assertStatusCode(200, response) + self.check_completed_response(response.json()) self.assertS3ObjectExists(key) From 1e76141f2767b1563f6f1814e063d7484431dbe0 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Fri, 16 Apr 2021 10:26:44 +0200 Subject: [PATCH 063/105] BGDIINF_SB-1739: Added some comment based on code review --- app/stac_api/models.py | 5 +++-- app/stac_api/utils.py | 6 ++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/app/stac_api/models.py b/app/stac_api/models.py index 7989b55e..c24a0626 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -5,7 +5,7 @@ from uuid import uuid4 # import botocore.exceptions # Un-comment with BGDIINF_SB-1625 -import multihash +from multihash import encode as multihash_encode from multihash import to_hex_string from django.conf import settings @@ -527,7 +527,7 @@ def upload_asset_to_path_hook(instance, filename=None): ctx = hashlib.sha256() for chunk in instance.file.chunks(settings.UPLOAD_FILE_CHUNK_SIZE): ctx.update(chunk) - mhash = to_hex_string(multihash.encode(ctx.digest(), 'sha2-256')) + mhash = to_hex_string(multihash_encode(ctx.digest(), 'sha2-256')) # set the hash to the storage to use it for upload signing, this temporary attribute is # then used by storages.S3Storage to set the MetaData.sha256 setattr(instance.file.storage, '_tmp_sha256', ctx.hexdigest()) @@ -693,6 +693,7 @@ class AssetUpload(models.Model): class Meta: constraints = [ models.UniqueConstraint(fields=['asset', 'upload_id'], name='unique_together'), + # Make sure that there is only one asset upload in progress per asset models.UniqueConstraint( fields=['asset', 'status'], condition=Q(status='in-progress'), diff --git a/app/stac_api/utils.py b/app/stac_api/utils.py 
index 0b5b8989..28e36fdd 100644 --- a/app/stac_api/utils.py +++ b/app/stac_api/utils.py @@ -99,6 +99,9 @@ def get_asset_path(item, asset_name): def get_s3_resource(): '''Returns an AWS S3 resource + The authentication with the S3 server is configured via the AWS_ACCESS_KEY_ID and + AWS_SECRET_ACCESS_KEY environment variables. + Returns: AWS S3 resource ''' @@ -110,6 +113,9 @@ def get_s3_resource(): def get_s3_client(): '''Returns an AWS S3 client + The authentication with the S3 server is configured via the AWS_ACCESS_KEY_ID and + AWS_SECRET_ACCESS_KEY environment variables. + Returns: AWS S3 client ''' From 6f7ccce7fc730b5c318691f8bacc1e5b648d22b9 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Fri, 16 Apr 2021 11:31:27 +0200 Subject: [PATCH 064/105] BGDIINF_SB-1739: Changed the urls property schema to follow the spec The spec (from ticket) defined the urls as follow: [{'url': url, 'part': part, 'expires': expires}] therefore adapted the code to match the spec. --- app/stac_api/s3_multipart_upload.py | 6 +++--- app/tests/test_asset_upload_endpoint.py | 21 ++++++++++++++------- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/app/stac_api/s3_multipart_upload.py b/app/stac_api/s3_multipart_upload.py index 5d504791..edd566a6 100644 --- a/app/stac_api/s3_multipart_upload.py +++ b/app/stac_api/s3_multipart_upload.py @@ -87,8 +87,8 @@ def create_presigned_url(self, key, asset, part, upload_id): upload_id: string Upload ID for which to create a presigned url - Returns: [string, int, datetime] - List [url, part, expires] + Returns: dict(string, int, datetime) + Dict {'url': string, 'part': int, 'expires': datetime} ''' expires = utc_aware( datetime.utcnow() + timedelta(seconds=settings.AWS_PRESIGNED_URL_EXPIRES) @@ -128,7 +128,7 @@ def create_presigned_url(self, key, asset, part, upload_id): 'upload_id': upload_id, 'asset': asset.name } ) - return [url, part, expires] + return {'url': url, 'part': part, 'expires': expires} def complete_multipart_upload(self, key, asset, parts, upload_id): '''Complete a multipart upload on the backend diff --git a/app/tests/test_asset_upload_endpoint.py b/app/tests/test_asset_upload_endpoint.py index 7fcd6789..cd4a53ac 100644 --- a/app/tests/test_asset_upload_endpoint.py +++ b/app/tests/test_asset_upload_endpoint.py @@ -98,20 +98,27 @@ def get_file_like_object(self, size): def check_urls_response(self, urls, number_parts): now = utc_aware(datetime.utcnow()) self.assertEqual(len(urls), number_parts) - for i, (url, part, expires) in enumerate(urls): - self.assertEqual(part, i + 1, msg=f'Part {part} does not match the url index {i}') + for i, url in enumerate(urls): + self.assertListEqual( + list(url.keys()), ['url', 'part', 'expires'], msg='Url dictionary keys missing' + ) + self.assertEqual( + url['part'], i + 1, msg=f'Part {url["part"]} does not match the url index {i}' + ) try: - url_parsed = parse.urlparse(url) + url_parsed = parse.urlparse(url["url"]) self.assertIn(url_parsed[0], ['http', 'https']) except ValueError as error: - self.fail(msg=f"Invalid url {url} for part {part}: {error}") + self.fail(msg=f"Invalid url {url['url']} for part {url['part']}: {error}") try: - expires_dt = fromisoformat(expires) + expires_dt = fromisoformat(url['expires']) self.assertGreater( - expires_dt, now, msg=f"expires {expires} for part {part} is not in future" + expires_dt, + now, + msg=f"expires {url['expires']} for part {url['part']} is not in future" ) except ValueError as error: - self.fail(msg=f"Invalid expires {expires} for part {part}: {error}") + 
self.fail(msg=f"Invalid expires {url['expires']} for part {url['part']}: {error}") def check_created_response(self, json_response): self.assertNotIn('completed', json_response) From 52b7862bcead799d805e66f526d91a1f07d68e39 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Fri, 16 Apr 2021 13:05:11 +0200 Subject: [PATCH 065/105] BGDIINF_SB-1739: Fixed rebasing issues --- app/stac_api/validators_view.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/app/stac_api/validators_view.py b/app/stac_api/validators_view.py index 975156d8..b138bc70 100644 --- a/app/stac_api/validators_view.py +++ b/app/stac_api/validators_view.py @@ -1,6 +1,9 @@ import logging from django.http import Http404 +from django.utils.translation import gettext_lazy as _ + +from rest_framework.exceptions import ValidationError from stac_api.models import Asset from stac_api.models import Collection From cf0a8ec087cc1b43afbac36d8681e454646cdb10 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Mon, 12 Apr 2021 11:07:58 +0200 Subject: [PATCH 066/105] BGDIINF_SB-1739: Added list upload parts endpoint Aslo splitted the serializers module into two as it became quite big. --- app/stac_api/pagination.py | 294 +++++++++++++++++------- app/stac_api/s3_multipart_upload.py | 278 +++++++++++----------- app/stac_api/serializers.py | 223 ++---------------- app/stac_api/serializers_utils.py | 208 +++++++++++++++++ app/stac_api/urls.py | 3 +- app/stac_api/utils.py | 6 + app/stac_api/views.py | 31 +++ app/tests/test_asset_upload_endpoint.py | 99 +++++++- 8 files changed, 720 insertions(+), 422 deletions(-) create mode 100644 app/stac_api/serializers_utils.py diff --git a/app/stac_api/pagination.py b/app/stac_api/pagination.py index a58b71ed..14fa0f2b 100644 --- a/app/stac_api/pagination.py +++ b/app/stac_api/pagination.py @@ -9,6 +9,8 @@ from rest_framework.exceptions import ValidationError from rest_framework.request import Request from rest_framework.response import Response +from rest_framework.utils.urls import remove_query_param +from rest_framework.utils.urls import replace_query_param from stac_api.utils import get_query_params from stac_api.utils import remove_query_params @@ -16,96 +18,176 @@ logger = logging.getLogger(__name__) +def update_links_with_pagination(data, previous_url, next_url): + '''Update the links dictionary with the previous and next link if needed + + When no 'links' is present in data it is added even if there is no previous and/or next link + to add. 
+ + Args: + data: dict + data dictionary + previous_url: string + previous url + next_url: string + next url + + Returns: tuple(dict, None|dict, None|dict) + Tuple (data, previous_link, next_link) + ''' + links = [] + previous_link = None + next_link = None + if next_url is not None: + next_link = {'rel': 'next', 'href': next_url} + links.append(next_link) + if previous_url is not None: + previous_link = {'rel': 'previous', 'href': previous_url} + links.append(previous_link) + + if 'links' not in data: + data.update({'links': links}) + elif links: + data['links'] += links + return data, previous_link, next_link + + +def validate_page_size(size_string, max_page_size, log_extra=None): + '''Parse and validate page size + + Args: + size_string: string + page size in string + max_page_size: int + max page size allowed + log_extra: dict + extra to add to the log message in case of error + + Returns: int + Page size parsed + + Raises: + ValidationError: if page size is invalid + ''' + try: + page_size = int(size_string) + except ValueError as error: + logger.error( + 'Invalid query parameter limit=%s: must be an integer', size_string, extra=log_extra + ) + raise ValidationError( + _('invalid limit query parameter: must be an integer'), + code='limit' + ) from None + + if page_size <= 0: + logger.error( + 'Invalid query parameter limit=%d: negative number not allowed', + page_size, + extra=log_extra + ) + raise ValidationError( + _('limit query parameter too small, must be in range 1..%d') % (max_page_size), + code='limit' + ) + if max_page_size and page_size > max_page_size: + logger.error( + 'Invalid query parameter limit=%d: number bigger than the max size of %d', + page_size, + max_page_size, + extra=log_extra + ) + raise ValidationError( + _('limit query parameter too big, must be in range 1..%d') % (max_page_size), + code='limit' + ) + return page_size + + +def validate_offset(offset_string, log_extra=None): + '''Parse and validate offset + + Args: + offset_string: string + page size in string + log_extra: dict + extra to add to the log message in case of error + + Returns: int + Offset parsed + + Raises: + ValidationError: if offset is invalid + ''' + try: + offset = int(offset_string) + except ValueError as error: + logger.error( + 'Invalid query parameter offset=%s: must be an integer', offset_string, extra=log_extra + ) + raise ValidationError( + _('invalid offset query parameter: must be an integer'), + code='invalid' + ) from None + + if offset < 0: + logger.error( + 'Invalid query parameter offset=%d: negative number not allowed', + offset, + extra=log_extra + ) + raise ValidationError( + _('offset query parameter too small, must be positive'), + code='invalid' + ) + return offset + + class CursorPagination(pagination.CursorPagination): + '''Default pagination for all endpoints + ''' ordering = 'id' page_size_query_param = 'limit' max_page_size = settings.REST_FRAMEWORK['PAGE_SIZE_LIMIT'] - def get_next_link(self, request=None): # pylint: disable=arguments-differ - next_page = super().get_next_link() - if next_page: - return {'rel': 'next', 'href': next_page} - return None - - def get_previous_link(self, request=None): # pylint: disable=arguments-differ - previous_page = super().get_previous_link() - if previous_page: - return {'rel': 'previous', 'href': previous_page} - return None - - def get_paginated_response(self, data, request=None): # pylint: disable=arguments-differ - links = [] - next_link = self.get_next_link(request) - previous_link = self.get_previous_link(request) - if 
next_link is not None: - links.append(next_link) - if previous_link is not None: - links.append(previous_link) - - if 'links' not in data and not links: - data.update({'links': []}) - elif 'links' not in data and links: - data.update({'links': links}) - elif links: - data['links'] += links + def get_paginated_response(self, data): + update_links_with_pagination(data, self.get_previous_link(), self.get_next_link()) return Response(data) def get_page_size(self, request): # Overwrite the default implementation about the page size as this one # don't validate the query parameter, its simply correct it if it is not valid # here we want to return a 400 BAD REQUEST when the provided page size is invalid. - - integer_string = self.get_raw_page_size(request) - - try: - page_size = int(integer_string) - except ValueError as error: - logger.error( - 'Invalid query parameter limit=%s: must be an integer', - integer_string, - extra={'request': request} - ) - raise ValidationError( - _('invalid limit query parameter: must be an integer'), - code='limit' - ) from None - - if page_size <= 0: - logger.error( - 'Invalid query parameter limit=%d: negative number not allowed', - page_size, - extra={'request': request} - ) - raise ValidationError( - _('limit query parameter too small, must be in range 1..%d') % (self.max_page_size), - code='limit' - ) - if self.max_page_size and page_size > self.max_page_size: - logger.error( - 'Invalid query parameter limit=%d: number bigger than the max size of %d', - page_size, - self.max_page_size, - extra={'request': request} - ) - raise ValidationError( - _('limit query parameter too big, must be in range 1..%d') % (self.max_page_size), - code='limit' - ) - - return page_size - - def get_raw_page_size(self, request): - return request.query_params.get(self.page_size_query_param, str(self.page_size)) + return validate_page_size( + request.query_params.get(self.page_size_query_param, str(self.page_size)), + self.max_page_size, + log_extra={'request': request} + ) class GetPostCursorPagination(CursorPagination): + '''Pagination to be used for the GET/POST /search endpoint where the + pagination is either in query or in payload depending on the method. 
+ ''' - def get_raw_page_size(self, request): + def get_paginated_response(self, data, request=None): # pylint: disable=arguments-differ + data, previous_link, next_link = update_links_with_pagination( + data, self.get_previous_link(), self.get_next_link() + ) + self.patch_link(previous_link, request) + self.patch_link(next_link, request) + return Response(data) + + def get_page_size(self, request): if request.method == 'POST': # For POST method the page size, aka `limit` parameter is in the body and not in the # URL query - return request.data.get(self.page_size_query_param, str(self.page_size)) - return super().get_raw_page_size(request) + return validate_page_size( + request.data.get(self.page_size_query_param, str(self.page_size)), + self.max_page_size, + log_extra={'request': request} + ) + return super().get_page_size(request) def decode_cursor(self, request): if request.method == 'POST': @@ -125,14 +207,6 @@ def decode_cursor(self, request): request = Request(WSGIRequest(environ)) return super().decode_cursor(request) - def get_next_link(self, request=None): - next_link = super().get_next_link(request) - return self.patch_link(next_link, request) - - def get_previous_link(self, request=None): - previous_link = super().get_previous_link(request) - return self.patch_link(previous_link, request) - def patch_link(self, link, request): if link and request and request.method == 'POST': cursor, limit = get_query_params( @@ -153,3 +227,63 @@ def patch_link(self, link, request): body['cursor'] = cursor[0] link.update({'method': 'POST', 'merge': True, 'body': body}) return link + + +class ExtApiPagination: + """ + A limit/offset based style pagination for external API (e.g. S3) + + http://api.example.org/accounts/?limit=100 + http://api.example.org/accounts/?offset=400&limit=100 + """ + default_limit = settings.REST_FRAMEWORK['PAGE_SIZE'] + max_limit = settings.REST_FRAMEWORK['PAGE_SIZE_LIMIT'] + limit_query_param = 'limit' + offset_query_param = 'offset' + + def get_pagination_config(self, request): + # pylint: disable=attribute-defined-outside-init + self.request = request + self.limit = self.get_limit(request) + self.offset = self.get_offset(request) + return self.limit, self.offset + + def get_next_link(self): + next_url = self.request.build_absolute_uri() + next_url = replace_query_param(next_url, self.limit_query_param, self.limit) + + offset = self.offset + self.limit + return replace_query_param(next_url, self.offset_query_param, offset) + + def get_previous_link(self): + if self.offset <= 0: + return None + + previous_url = self.request.build_absolute_uri() + previous_url = replace_query_param(previous_url, self.limit_query_param, self.limit) + + if self.offset - self.limit <= 0: + previous_url = remove_query_param(previous_url, self.offset_query_param) + else: + offset = self.offset - self.limit + previous_url = replace_query_param(previous_url, self.offset_query_param, offset) + return previous_url + + def get_paginated_response(self, data, has_next): + update_links_with_pagination( + data, self.get_previous_link(), self.get_next_link() if has_next else None + ) + return Response(data) + + def get_limit(self, request): + return validate_page_size( + request.query_params.get(self.limit_query_param, str(self.default_limit)), + self.max_limit, + log_extra={'request': request} + ) + + def get_offset(self, request): + return validate_offset( + request.query_params.get(self.offset_query_param, str(0)), + log_extra={'request': request} + ) diff --git a/app/stac_api/s3_multipart_upload.py 
b/app/stac_api/s3_multipart_upload.py index edd566a6..98b814e0 100644 --- a/app/stac_api/s3_multipart_upload.py +++ b/app/stac_api/s3_multipart_upload.py @@ -43,28 +43,19 @@ def create_multipart_upload(self, key, asset, checksum_multihash): Upload Id of the created multipart upload ''' sha256 = to_hex_string(parse_multihash(checksum_multihash).digest) - try: - response = self.s3.create_multipart_upload( - Bucket=settings.AWS_STORAGE_BUCKET_NAME, - Key=key, - Metadata={'sha256': sha256}, - CacheControl=', '.join([ - 'public', f'max-age={settings.STORAGE_ASSETS_CACHE_SECONDS}' - ]), - ContentType=asset.media_type - ) - except ClientError as error: - logger.error( - 'Failed to create multipart upload: %s', - error, - extra={ - 'collection': asset.item.collection.name, - 'item': asset.item.name, - 'asset': asset.name, - 's3_error': error.response - } - ) - raise + response = self.call_s3_api( + self.s3.create_multipart_upload, + Bucket=settings.AWS_STORAGE_BUCKET_NAME, + Key=key, + Metadata={'sha256': sha256}, + CacheControl=', '.join(['public', f'max-age={settings.STORAGE_ASSETS_CACHE_SECONDS}']), + ContentType=asset.media_type, + log_extra={ + 'collection': asset.item.collection.name, + 'item': asset.item.name, + 'asset': asset.name + } + ) logger.info( 'S3 Multipart upload successfully created: upload_id=%s', response['UploadId'], @@ -93,31 +84,25 @@ def create_presigned_url(self, key, asset, part, upload_id): expires = utc_aware( datetime.utcnow() + timedelta(seconds=settings.AWS_PRESIGNED_URL_EXPIRES) ) - try: - url = self.s3.generate_presigned_url( - 'upload_part', - Params={ - 'Bucket': settings.AWS_STORAGE_BUCKET_NAME, - 'Key': key, - 'UploadId': upload_id, - 'PartNumber': part - }, - ExpiresIn=settings.AWS_PRESIGNED_URL_EXPIRES, - HttpMethod='PUT' - ) - except ClientError as error: - logger.error( - 'Failed to create presigned url for upload part: %s', - error, - extra={ - 'collection': asset.item.collection.name, - 'item': asset.item.name, - 'asset': asset.name, - 'upload_id': upload_id, - 's3_error': error.response - } - ) - raise + url = self.call_s3_api( + self.s3.generate_presigned_url, + 'upload_part', + Params={ + 'Bucket': settings.AWS_STORAGE_BUCKET_NAME, + 'Key': key, + 'UploadId': upload_id, + 'PartNumber': part + }, + ExpiresIn=settings.AWS_PRESIGNED_URL_EXPIRES, + HttpMethod='PUT', + log_extra={ + 'collection': asset.item.collection.name, + 'item': asset.item.name, + 'asset': asset.name, + 'upload_id': upload_id + } + ) + logger.info( 'Presigned url %s for %s part %s with expires %s created', url, @@ -146,74 +131,32 @@ def complete_multipart_upload(self, key, asset, parts, upload_id): Raises: ValidationError: when the parts are not valid ''' - logger.debug( - 'Sending complete mutlipart upload for %s', - key, - extra={ - 'parts': parts, 'upload_id': upload_id, 'asset': asset.name - }, - ) try: - started = time.time() - response = self.s3.complete_multipart_upload( + response = self.call_s3_api( + self.s3.complete_multipart_upload, Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=key, MultipartUpload={'Parts': parts}, - UploadId=upload_id - ) - except ParamValidationError as error: - ended = time.time() - started - logger.error( - 'Failed to complete multipart upload: %s', - error, - extra={ - 'collection': asset.item.collection.name, - 'item': asset.item.name, - 'asset': asset.name, + UploadId=upload_id, + log_extra={ + 'parts': parts, 'upload_id': upload_id, - 's3_error': error, - 'duration': ended - } - ) - raise - except ClientError as error: - ended = time.time() - 
started - logger.error( - 'Failed to complete multipart upload: %s', - error, - extra={ 'collection': asset.item.collection.name, 'item': asset.item.name, - 'asset': asset.name, - 'upload_id': upload_id, - 's3_error': error.response, - 'duration': ended + 'asset': asset.name } ) + except ClientError as error: raise ValidationError(str(error), code='invalid') from None - ended = time.time() - started - if 'Location' in response: - logger.info( - 'Successfully complete a multipart asset upload: %s', - response['Location'], + + if 'Location' not in response: + logger.error( + 'Failed to complete a multipart asset upload', extra={ - 's3_response': response, - 'duration': ended, - 'upload_id': upload_id, - 'asset': asset.name + 's3_response': response, 'upload_id': upload_id, 'asset': asset.name }, ) - return - logger.error( - 'Failed to complete a multipart asset upload', - extra={ - 's3_response': response, - 'duration': ended, - 'upload_id': upload_id, - 'asset': asset.name - }, - ) - raise ValueError(response) + raise ValueError(response) def abort_multipart_upload(self, key, asset, upload_id): '''Abort a multipart upload on the backend @@ -226,41 +169,110 @@ def abort_multipart_upload(self, key, asset, upload_id): upload_id: string Upload ID ''' - logger.debug( - 'Aborting mutlipart upload for %s...', - key, - extra={ + self.call_s3_api( + self.s3.abort_multipart_upload, + Bucket=settings.AWS_STORAGE_BUCKET_NAME, + Key=key, + UploadId=upload_id, + log_extra={ 'upload_id': upload_id, 'asset': asset.name - }, + } ) + + def list_upload_parts(self, key, asset, upload_id, limit, offset): + '''List all actual part uploaded for a multipart upload + + Args: + key: string + key on the S3 backend for which we want to complete the multipart upload + asset: Asset + Asset metadata model associated with the S3 backend key + upload_id: string + Upload ID + limit: int + Limit the number of result (for pagination) + offset: int + Start offset of the result list (for pagination) + Returns: dict + AWS S3 list parts answer + + Raises: + ValueError: if AWS S3 return an HTTP Error code + ClientError: any S3 client error + ''' + response = self.call_s3_api( + self.s3.list_parts, + Bucket=settings.AWS_STORAGE_BUCKET_NAME, + Key=key, + UploadId=upload_id, + MaxParts=limit, + PartNumberMarker=offset, + log_extra={ + 'collection': asset.item.collection.name, + 'item': asset.item.name, + 'asset': asset.name, + 'upload_id': upload_id + } + ) + return response, response.get('IsTruncated', False) + + def call_s3_api(self, func, *args, **kwargs): + '''Wrap a S3 API call with logging and generic error handling + + Args: + func: callable + S3 client method to call + log_extra: dict + dictionary to pass as extra to the logger + *args: + Argument to pass to the S3 method call + **kwargs: + Keyword arguments to pass to the S3 method call + + Response: dict + S3 client response + ''' + log_extra = kwargs.pop('log_extra', {}) + logger.debug('Calling S3 %s(%s, %s)', func.__name__, args, kwargs, extra=log_extra) + time_started = time.time() try: - started = time.time() - response = self.s3.abort_multipart_upload( - Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=key, UploadId=upload_id - ) - except ClientError as error: - ended = time.time() - started + response = func(*args, **kwargs) + except (ClientError, ParamValidationError) as error: + log_extra.update({'duration': time.time() - time_started}) + if isinstance(error, ClientError): + log_extra.update({'s3_response': error.response}) logger.error( - 'Failed to abort multipart 
upload: %s', + 'Failed to call %s(args=%s, kwargs=%s): %s', + func.__name__, + args, + kwargs, error, - extra={ - 'collection': asset.item.collection.name, - 'item': asset.item.name, - 'asset': asset.name, - 'upload_id': upload_id, - 's3_error': error.response, - 'duration': ended - } + extra=log_extra ) raise - ended = time.time() - started - logger.info( - 'Successfully aborted a multipart asset upload: %s', - key, - extra={ - 's3_response': response, - 'duration': ended, - 'upload_id': upload_id, - 'asset': asset.name - }, - ) + else: + log_extra.update({'duration': time.time() - time_started, 's3_response': response}) + logger.debug( + 'Successfully call %s(args=%s, kwargs=%s)', + func.__name__, + args, + kwargs, + extra=log_extra + ) + + if ( + 'ResponseMetadata' in response and 'HTTPStatusCode' in response['ResponseMetadata'] and + response['ResponseMetadata']['HTTPStatusCode'] not in [200, 201, 202, 204, 206] + ): + log_extra.update({'s3_response': response}) + logger.error( + 'S3 call %s(%s. %s) returned an error code: HTTP %d', + func.__name__, + args, + kwargs, + response['ResponseMetadata']['HTTPStatusCode'], + extra=log_extra + ) + raise ValueError(f"S3 HTTP {response['ResponseMetadata']['HTTPStatusCode']}") + + return response diff --git a/app/stac_api/serializers.py b/app/stac_api/serializers.py index 9a7f98ee..66278fd4 100644 --- a/app/stac_api/serializers.py +++ b/app/stac_api/serializers.py @@ -4,7 +4,6 @@ from django.conf import settings from django.contrib.gis.geos import GEOSGeometry -from django.urls import reverse from rest_framework import serializers from rest_framework.utils.serializer_helpers import ReturnDict @@ -21,7 +20,13 @@ from stac_api.models import LandingPage from stac_api.models import LandingPageLink from stac_api.models import Provider +from stac_api.serializers_utils import DictSerializer +from stac_api.serializers_utils import NonNullModelSerializer +from stac_api.serializers_utils import UpsertModelSerializerMixin +from stac_api.serializers_utils import get_relation_links +from stac_api.serializers_utils import update_or_create_links from stac_api.utils import build_asset_href +from stac_api.utils import get_url from stac_api.utils import isoformat from stac_api.validators import MEDIA_TYPES_MIMES from stac_api.validators import validate_asset_name @@ -36,202 +41,6 @@ logger = logging.getLogger(__name__) -def create_or_update_str(created): - if created: - return 'create' - return 'update' - - -def update_or_create_links(model, instance, instance_type, links_data): - '''Update or create links for a model - - Update the given links list within a model instance or create them when they don't exists yet. 
- Args: - model: model class on which to update/create links (Collection or Item) - instance: model instance on which to update/create links - instance_type: (str) instance type name string to use for filtering ('collection' or 'item') - links_data: list of links dictionary to add/update - ''' - links_ids = [] - for link_data in links_data: - link, created = model.objects.get_or_create( - **{instance_type: instance}, - rel=link_data["rel"], - defaults={ - 'href': link_data.get('href', None), - 'link_type': link_data.get('link_type', None), - 'title': link_data.get('title', None) - } - ) - logger.debug( - '%s link %s', - create_or_update_str(created), - link.href, - extra={ - instance_type: instance.name, "link": link_data - } - ) - links_ids.append(link.id) - # the duplicate here is necessary to update the values in - # case the object already exists - link.link_type = link_data.get('link_type', link.link_type) - link.title = link_data.get('title', link.title) - link.href = link_data.get('href', link.rel) - link.full_clean() - link.save() - - # Delete link that were not mentioned in the payload anymore - deleted = model.objects.filter(**{instance_type: instance},).exclude(id__in=links_ids).delete() - logger.info( - "deleted %d stale links for %s %s", - deleted[0], - instance_type, - instance.name, - extra={instance_type: instance} - ) - - -def get_relation_links(request, view, view_args): - '''Returns a list of auto generated relation links - - Returns the self, root and parent auto generated links. - - Args: - request: HttpRequest - request object - view: string - name of the view that originate the call - view_args: list - args to construct the view path - - Returns: list - List of auto generated links - ''' - self_url = request.build_absolute_uri(reverse(view, args=view_args)) - return [ - OrderedDict([ - ('rel', 'self'), - ('href', self_url), - ]), - OrderedDict([ - ('rel', 'root'), - ('href', request.build_absolute_uri(reverse('landing-page'))), - ]), - OrderedDict([ - ('rel', 'parent'), - ('href', self_url.rsplit('/', maxsplit=1)[0]), - ]), - ] - - -def get_url(request, view, args=None): - '''Get an full url based on a view name''' - return request.build_absolute_uri(reverse(view, args=args)) - - -class UpsertModelSerializerMixin: - """Add support for Upsert in serializer - """ - - def upsert(self, look_up, **kwargs): - """ - Update or insert an instance and return it. - - Args: - look_up: dict - Must be a unique query to be used in the objects.update_or_create(**look_up) method. - **kwargs: - Extra key=value pairs to pass as validated_data to update_or_create(). For example - relationships that are not serialized but part of the request path can be given - as kwargs. - """ - validated_data = {**self.validated_data, **kwargs} - self.instance, created = self.update_or_create(look_up, validated_data) - return self.instance, created - - def update_or_create(self, look_up, validated_data): - """This method must be implemented by the serializer and must make use of the DB - objects.update_or_create() method. - - Args: - look_up: dict - Must be a unique query to be used in the objects.update_or_create(**look_up) - method. - validated_data: dict - Copy of the validated_data to be used as defaults in the - objects.update_or_create(defaults=validated_data) method. 
- """ - raise NotImplementedError("update_or_create() not implemented") - - -class NonNullModelSerializer(serializers.ModelSerializer): - """Filter fields with null value - - Best practice is to not include (optional) fields whose - value is None. - """ - - def to_representation(self, instance): - - def filter_null(obj): - filtered_obj = {} - if isinstance(obj, OrderedDict): - filtered_obj = OrderedDict() - for key, value in obj.items(): - if isinstance(value, dict): - filtered_obj[key] = filter_null(value) - # then links array might be empty at this point, - # but that in the view the auto generated links are added anyway - elif isinstance(value, list) and key != 'links': - if len(value) > 0: - filtered_obj[key] = value - elif value is not None: - filtered_obj[key] = value - return filtered_obj - - obj = super().to_representation(instance) - return filter_null(obj) - - -class DictSerializer(serializers.ListSerializer): - '''Represent objects within a dictionary instead of a list - - By default the Serializer with `many=True` attribute represent all objects within a list. - Here we overwrite the ListSerializer to instead represent multiple objects using a dictionary - where the object identifier is used as key. - - For example the following list: - - [{ - 'name': 'object1', - 'description': 'This is object 1' - }, { - 'name': 'object2', - 'description': 'This is object 2' - }] - - Would be represented as follow: - - { - 'object1': {'description': 'This is object 1'}, - 'object2': {'description': 'This is object 2'} - } - ''' - - # pylint: disable=abstract-method - - key_identifier = 'id' - - def to_representation(self, data): - objects = super().to_representation(data) - return {obj.pop(self.key_identifier): obj for obj in objects} - - @property - def data(self): - ret = super(serializers.ListSerializer, self).data - return ReturnDict(ret, serializer=self) - - class LandingPageLinkSerializer(serializers.ModelSerializer): class Meta: @@ -458,7 +267,7 @@ def _update_or_create_providers(self, collection, providers_data): ) logger.debug( '%s provider %s', - create_or_update_str(created), + 'created' if created else 'updated', provider.name, extra={"provider": provider_data} ) @@ -930,11 +739,15 @@ def data(self): class UploadPartSerializer(serializers.Serializer): + '''This serializer is used to serialize the data from/to the S3 API. 
+ ''' # pylint: disable=abstract-method etag = serializers.CharField(source='ETag', allow_blank=False, required=True) part_number = serializers.IntegerField( source='PartNumber', min_value=1, max_value=100, required=True, allow_null=False ) + modified = serializers.DateTimeField(source='LastModified', required=False, allow_null=True) + size = serializers.IntegerField(source='Size', allow_null=True, required=False) class AssetUploadSerializer(NonNullModelSerializer): @@ -991,3 +804,17 @@ def get_fields(self): # This is a hack to allow fields with special characters fields['checksum:multihash'] = fields.pop('checksum_multihash') return fields + + +class AssetUploadPartsSerializer(serializers.Serializer): + '''S3 list_parts response serializer''' + + # pylint: disable=abstract-method + + class Meta: + list_serializer_class = AssetUploadListSerializer + + # Read only fields + parts = serializers.ListField( + source='Parts', child=UploadPartSerializer(), default=list, read_only=True + ) diff --git a/app/stac_api/serializers_utils.py b/app/stac_api/serializers_utils.py new file mode 100644 index 00000000..3d2f4a59 --- /dev/null +++ b/app/stac_api/serializers_utils.py @@ -0,0 +1,208 @@ +import logging +from collections import OrderedDict + +from django.urls import reverse + +from rest_framework import serializers +from rest_framework.utils.serializer_helpers import ReturnDict + +logger = logging.getLogger(__name__) + + +def update_or_create_links(model, instance, instance_type, links_data): + '''Update or create links for a model + + Update the given links list within a model instance or create them when they don't exists yet. + Args: + model: model class on which to update/create links (Collection or Item) + instance: model instance on which to update/create links + instance_type: (str) instance type name string to use for filtering ('collection' or 'item') + links_data: list of links dictionary to add/update + ''' + links_ids = [] + for link_data in links_data: + link, created = model.objects.get_or_create( + **{instance_type: instance}, + rel=link_data["rel"], + defaults={ + 'href': link_data.get('href', None), + 'link_type': link_data.get('link_type', None), + 'title': link_data.get('title', None) + } + ) + logger.debug( + '%s link %s', + 'created' if created else 'updated', + link.href, + extra={ + instance_type: instance.name, "link": link_data + } + ) + links_ids.append(link.id) + # the duplicate here is necessary to update the values in + # case the object already exists + link.link_type = link_data.get('link_type', link.link_type) + link.title = link_data.get('title', link.title) + link.href = link_data.get('href', link.rel) + link.full_clean() + link.save() + + # Delete link that were not mentioned in the payload anymore + deleted = model.objects.filter(**{instance_type: instance},).exclude(id__in=links_ids).delete() + logger.info( + "deleted %d stale links for %s %s", + deleted[0], + instance_type, + instance.name, + extra={instance_type: instance} + ) + + +def get_relation_links(request, view, view_args): + '''Returns a list of auto generated relation links + + Returns the self, root and parent auto generated links. 
+ + Args: + request: HttpRequest + request object + view: string + name of the view that originate the call + view_args: list + args to construct the view path + + Returns: list + List of auto generated links + ''' + self_url = request.build_absolute_uri(reverse(view, args=view_args)) + return [ + OrderedDict([ + ('rel', 'self'), + ('href', self_url), + ]), + OrderedDict([ + ('rel', 'root'), + ('href', request.build_absolute_uri(reverse('landing-page'))), + ]), + OrderedDict([ + ('rel', 'parent'), + ('href', self_url.rsplit('/', maxsplit=1)[0]), + ]), + ] + + +class UpsertModelSerializerMixin: + """Add support for Upsert in serializer + """ + + def upsert(self, look_up, **kwargs): + """ + Update or insert an instance and return it. + + Args: + look_up: dict + Must be a unique query to be used in the objects.update_or_create(**look_up) method. + **kwargs: + Extra key=value pairs to pass as validated_data to update_or_create(). For example + relationships that are not serialized but part of the request path can be given + as kwargs. + """ + validated_data = {**self.validated_data, **kwargs} + self.instance, created = self.update_or_create(look_up, validated_data) + return self.instance, created + + def update_or_create(self, look_up, validated_data): + """This method must be implemented by the serializer and must make use of the DB + objects.update_or_create() method. + + Args: + look_up: dict + Must be a unique query to be used in the objects.update_or_create(**look_up) + method. + validated_data: dict + Copy of the validated_data to be used as defaults in the + objects.update_or_create(defaults=validated_data) method. + """ + raise NotImplementedError("update_or_create() not implemented") + + +def filter_null(obj): + filtered_obj = {} + if isinstance(obj, OrderedDict): + filtered_obj = OrderedDict() + for key, value in obj.items(): + if isinstance(value, dict): + filtered_obj[key] = filter_null(value) + # then links array might be empty at this point, + # but that in the view the auto generated links are added anyway + elif isinstance(value, list) and key != 'links': + if len(value) > 0: + filtered_obj[key] = value + elif value is not None: + filtered_obj[key] = value + return filtered_obj + + +class NonNullSerializer(serializers.Serializer): + """Filter fields with null value + + Best practice is to not include (optional) fields whose + value is None. + """ + + # pylint: disable=abstract-method + + def to_representation(self, instance): + obj = super().to_representation(instance) + return filter_null(obj) + + +class NonNullModelSerializer(serializers.ModelSerializer): + """Filter fields with null value + + Best practice is to not include (optional) fields whose + value is None. + """ + + def to_representation(self, instance): + obj = super().to_representation(instance) + return filter_null(obj) + + +class DictSerializer(serializers.ListSerializer): + '''Represent objects within a dictionary instead of a list + + By default the Serializer with `many=True` attribute represent all objects within a list. + Here we overwrite the ListSerializer to instead represent multiple objects using a dictionary + where the object identifier is used as key. 
+ + For example the following list: + + [{ + 'name': 'object1', + 'description': 'This is object 1' + }, { + 'name': 'object2', + 'description': 'This is object 2' + }] + + Would be represented as follow: + + { + 'object1': {'description': 'This is object 1'}, + 'object2': {'description': 'This is object 2'} + } + ''' + + # pylint: disable=abstract-method + + key_identifier = 'id' + + def to_representation(self, data): + objects = super().to_representation(data) + return {obj.pop(self.key_identifier): obj for obj in objects} + + @property + def data(self): + ret = super(serializers.ListSerializer, self).data + return ReturnDict(ret, serializer=self) diff --git a/app/stac_api/urls.py b/app/stac_api/urls.py index da272498..482b14e5 100644 --- a/app/stac_api/urls.py +++ b/app/stac_api/urls.py @@ -9,6 +9,7 @@ from stac_api.views import AssetUploadAbort from stac_api.views import AssetUploadComplete from stac_api.views import AssetUploadDetail +from stac_api.views import AssetUploadPartsList from stac_api.views import AssetUploadsList from stac_api.views import CollectionDetail from stac_api.views import CollectionList @@ -23,7 +24,7 @@ asset_upload_urls = [ path("", AssetUploadDetail.as_view(), name='asset-upload-detail'), - # path("/parts/", AssetUploadPart.as_view(), name='asset-upload-part'), + path("/parts", AssetUploadPartsList.as_view(), name='asset-upload-parts-list'), path("/complete", AssetUploadComplete.as_view(), name='asset-upload-complete'), path("/abort", AssetUploadAbort.as_view(), name='asset-upload-abort') ] diff --git a/app/stac_api/utils.py b/app/stac_api/utils.py index 28e36fdd..3d4799d9 100644 --- a/app/stac_api/utils.py +++ b/app/stac_api/utils.py @@ -13,6 +13,7 @@ from django.conf import settings from django.contrib.gis.geos import Point from django.contrib.gis.geos import Polygon +from django.urls import reverse logger = logging.getLogger(__name__) @@ -328,3 +329,8 @@ def geometry_from_bbox(bbox): else: bbox_geometry = Polygon.from_bbox(list_bbox_values) return bbox_geometry + + +def get_url(request, view, args=None): + '''Get an full url based on a view name''' + return request.build_absolute_uri(reverse(view, args=args)) diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 2ffe4d03..3b472605 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -23,9 +23,11 @@ from stac_api.models import ConformancePage from stac_api.models import Item from stac_api.models import LandingPage +from stac_api.pagination import ExtApiPagination from stac_api.pagination import GetPostCursorPagination from stac_api.s3_multipart_upload import MultipartUpload from stac_api.serializers import AssetSerializer +from stac_api.serializers import AssetUploadPartsSerializer from stac_api.serializers import AssetUploadSerializer from stac_api.serializers import CollectionSerializer from stac_api.serializers import ConformancePageSerializer @@ -646,6 +648,10 @@ def abort_multipart_upload(self, executor, asset_upload, asset): asset_upload.urls = [] asset_upload.save() + def list_multipart_upload_parts(self, executor, asset_upload, asset, limit, offset): + key = get_asset_path(asset.item, asset.name) + return executor.list_upload_parts(key, asset, asset_upload.upload_id, limit, offset) + class AssetUploadsList(AssetUploadBase, mixins.ListModelMixin, views_mixins.CreateModelMixin): @@ -710,3 +716,28 @@ def perform_update(self, serializer): executor = MultipartUpload() asset = serializer.instance.asset self.abort_multipart_upload(executor, serializer.instance, asset) + + +class 
AssetUploadPartsList(AssetUploadBase): + serializer_class = AssetUploadPartsSerializer + pagination_class = ExtApiPagination + + def get(self, request, *args, **kwargs): + return self.list(request, *args, **kwargs) + + def list(self, request, *args, **kwargs): + executor = MultipartUpload() + asset_upload = self.get_object() + limit, offset = self.get_pagination_config(request) + data, has_next = self.list_multipart_upload_parts( + executor, asset_upload, asset_upload.asset, limit, offset + ) + serializer = self.get_serializer(data) + + return self.get_paginated_response(serializer.data, has_next) + + def get_pagination_config(self, request): + return self.paginator.get_pagination_config(request) + + def get_paginated_response(self, data, has_next): # pylint: disable=arguments-differ + return self.paginator.get_paginated_response(data, has_next) diff --git a/app/tests/test_asset_upload_endpoint.py b/app/tests/test_asset_upload_endpoint.py index cd4a53ac..94416827 100644 --- a/app/tests/test_asset_upload_endpoint.py +++ b/app/tests/test_asset_upload_endpoint.py @@ -24,6 +24,10 @@ logger = logging.getLogger(__name__) +KB = 1024 +MB = 1024 * KB +GB = 1024 * MB + class AssetUploadBaseTest(StacBaseTestCase, S3TestMixin): @@ -69,6 +73,12 @@ def get_complete_multipart_upload_path(self, upload_id): args=[self.collection.name, self.item.name, self.asset.name, upload_id] ) + def get_list_parts_path(self, upload_id): + return reverse( + 'asset-upload-parts-list', + args=[self.collection.name, self.item.name, self.asset.name, upload_id] + ) + def s3_upload_parts(self, upload_id, file_like, size, number_parts): s3 = get_s3_client() key = get_asset_path(self.item, self.asset.name) @@ -163,7 +173,7 @@ def test_asset_upload_create_abort_multipart(self): key = get_asset_path(self.item, self.asset.name) self.assertS3ObjectNotExists(key) number_parts = 2 - file_like, checksum_multihash = self.get_file_like_object(1 * 1024) + file_like, checksum_multihash = self.get_file_like_object(1 * KB) response = self.client.post( self.get_create_multipart_upload_path(), data={ @@ -202,7 +212,7 @@ def test_asset_upload_create_multipart_duplicate(self): key = get_asset_path(self.item, self.asset.name) self.assertS3ObjectNotExists(key) number_parts = 2 - file_like, checksum_multihash = self.get_file_like_object(1 * 1024) + file_like, checksum_multihash = self.get_file_like_object(1 * KB) response = self.client.post( self.get_create_multipart_upload_path(), data={ @@ -246,7 +256,7 @@ def test_asset_upload_1_part(self): key = get_asset_path(self.item, self.asset.name) self.assertS3ObjectNotExists(key) number_parts = 1 - size = 1 * 1024 + size = 1 * KB file_like, checksum_multihash = self.get_file_like_object(size) response = self.client.post( self.get_create_multipart_upload_path(), @@ -278,7 +288,7 @@ def test_asset_upload_2_parts(self): key = get_asset_path(self.item, self.asset.name) self.assertS3ObjectNotExists(key) number_parts = 2 - size = 10 * 1024 * 1024 # Minimum upload part on S3 is 5 MB + size = 10 * MB # Minimum upload part on S3 is 5 MB file_like, checksum_multihash = self.get_file_like_object(size) response = self.client.post( @@ -356,7 +366,7 @@ def test_asset_upload_2_parts_too_small(self): key = get_asset_path(self.item, self.asset.name) self.assertS3ObjectNotExists(key) number_parts = 2 - size = 1 * 1024 * 1024 # Minimum upload part on S3 is 5 MB + size = 1 * KB # Minimum upload part on S3 is 5 MB file_like, checksum_multihash = self.get_file_like_object(size) response = self.client.post( @@ -391,7 +401,7 
@@ def test_asset_upload_1_parts_invalid_etag(self): key = get_asset_path(self.item, self.asset.name) self.assertS3ObjectNotExists(key) number_parts = 1 - size = 1 * 1024 * 1024 + size = 1 * KB file_like, checksum_multihash = self.get_file_like_object(size) response = self.client.post( @@ -430,7 +440,7 @@ def test_asset_upload_1_parts_too_many_parts_in_complete(self): key = get_asset_path(self.item, self.asset.name) self.assertS3ObjectNotExists(key) number_parts = 1 - size = 1 * 1024 * 1024 + size = 1 * KB file_like, checksum_multihash = self.get_file_like_object(size) response = self.client.post( @@ -458,7 +468,7 @@ def test_asset_upload_1_parts_too_many_parts_in_complete(self): def test_asset_upload_2_parts_incomplete_upload(self): number_parts = 2 - size = 10 * 1024 * 1024 + size = 10 * MB file_like, checksum_multihash = self.get_file_like_object(size) response = self.client.post( @@ -485,7 +495,7 @@ def test_asset_upload_1_parts_invalid_complete(self): key = get_asset_path(self.item, self.asset.name) self.assertS3ObjectNotExists(key) number_parts = 1 - size = 1 * 1024 * 1024 + size = 1 * KB file_like, checksum_multihash = self.get_file_like_object(size) response = self.client.post( @@ -542,7 +552,7 @@ class AssetUploadDeleteInProgressEndpointTestCase(AssetUploadBaseTest): def test_delete_asset_upload_in_progress(self): number_parts = 2 - size = 10 * 1024 * 1024 # Minimum upload part on S3 is 5 MB + size = 10 * MB # Minimum upload part on S3 is 5 MB file_like, checksum_multihash = self.get_file_like_object(size) response = self.client.post( @@ -653,3 +663,72 @@ def test_get_asset_uploads_status_query(self): ) for upload in json_data['uploads']: self.assertEqual(upload['status'], AssetUpload.Status.ABORTED) + + +class AssetUploadListPartsEndpointTestCase(AssetUploadBaseTest): + + def test_asset_upload_list_parts(self): + key = get_asset_path(self.item, self.asset.name) + self.assertS3ObjectNotExists(key) + number_parts = 4 + size = 5 * MB * number_parts + file_like, checksum_multihash = self.get_file_like_object(size) + response = self.client.post( + self.get_create_multipart_upload_path(), + data={ + 'number_parts': number_parts, 'checksum:multihash': checksum_multihash + }, + content_type="application/json" + ) + self.assertStatusCode(201, response) + json_data = response.json() + upload_id = json_data['upload_id'] + self.check_urls_response(json_data['urls'], number_parts) + + # List the uploaded parts should be empty + response = self.client.get(self.get_list_parts_path(upload_id)) + self.assertStatusCode(200, response) + json_data = response.json() + self.assertIn('links', json_data, msg='missing required field in list parts response') + self.assertIn('parts', json_data, msg='missing required field in list parts response') + self.assertEqual(len(json_data['parts']), 0, msg='parts should be empty') + + # upload all the parts + parts = self.s3_upload_parts(upload_id, file_like, size, number_parts) + + # List the uploaded parts should have 4 parts + response = self.client.get(self.get_list_parts_path(upload_id)) + self.assertStatusCode(200, response) + json_data = response.json() + self.assertIn('links', json_data, msg='missing required field in list parts response') + self.assertIn('parts', json_data, msg='missing required field in list parts response') + self.assertEqual(len(json_data['parts']), number_parts) + for part in json_data['parts']: + self.assertIn('etag', part) + self.assertIn('modified', part) + self.assertIn('size', part) + self.assertIn('part_number', part) + + # 
Unfortunately moto doesn't support yet the MaxParts + # (see https://github.com/spulec/moto/issues/2680) + # Test the list parts pagination + # response = self.client.get(self.get_list_parts_path(upload_id), {'limit': 2}) + # self.assertStatusCode(200, response) + # json_data = response.json() + # self.assertIn('links', json_data, msg='missing required field in list parts response') + # self.assertIn('parts', json_data, msg='missing required field in list parts response') + # self.assertEqual(len(json_data['parts']), number_parts) + # for part in json_data['parts']: + # self.assertIn('etag', part) + # self.assertIn('modified', part) + # self.assertIn('size', part) + # self.assertIn('part_number', part) + + # Complete the upload + response = self.client.post( + self.get_complete_multipart_upload_path(upload_id), + data={'parts': parts}, + content_type="application/json" + ) + self.assertStatusCode(200, response) + self.assertS3ObjectExists(key) From 5f72e65c0e4422382b6d66815c93a9cf2c687517 Mon Sep 17 00:00:00 2001 From: Isabelle Buholzer Date: Thu, 8 Apr 2021 10:53:31 +0200 Subject: [PATCH 067/105] BGDIINF_SB-1670: remove collection renaming function --- app/stac_api/admin.py | 8 ++++++++ app/stac_api/views.py | 16 ++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index bb9ba371..e8fc8143 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -80,6 +80,14 @@ def get_search_results(self, request, queryset, search_term): queryset |= self.model.objects.filter(name__exact=search_term.strip('"')) return queryset, use_distinct + # We don't want to do a rename of the collection on S3 + # That's why the field is set readonly here + # for update operation + def change_view(self, request, object_id, form_url='', extra_content=None): + self.readonly_fields = self.get_readonly_fields(request) + self.readonly_fields.extend(['name']) + return super().change_view(request, object_id, form_url, extra_content) + class ItemLinkInline(admin.TabularInline): model = ItemLink diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 3b472605..4e2b9a08 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -308,6 +308,22 @@ def patch(self, request, *args, **kwargs): # def delete(self, request, *args, **kwargs): # return self.destroy(request, *args, **kwargs) + def perform_upsert(self, serializer, lookup): + validate_renaming( + serializer, + 'name', + self.kwargs['collection_name'], {'collection': self.kwargs['collection_name']} + ) + return super().perform_upsert(serializer, lookup) + + def perform_update(self, serializer, *args, **kwargs): + validate_renaming( + serializer, + 'name', + self.kwargs['collection_name'], {'collection': self.kwargs['collection_name']} + ) + return super().partial_update(serializer, *args, **kwargs) + class ItemsList(generics.GenericAPIView, views_mixins.CreateModelMixin): serializer_class = ItemSerializer From a493d31faefaa6fe9a707d7c9bdbbfed4d0e1ae7 Mon Sep 17 00:00:00 2001 From: Isabelle Buholzer Date: Fri, 16 Apr 2021 16:27:50 +0200 Subject: [PATCH 068/105] BGDIINF_SB-1745: Adapt the test due to renaming is not allowed --- app/tests/test_collections_endpoint.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/app/tests/test_collections_endpoint.py b/app/tests/test_collections_endpoint.py index 908ed1b5..7c108836 100644 --- a/app/tests/test_collections_endpoint.py +++ b/app/tests/test_collections_endpoint.py @@ -234,27 +234,31 @@ def 
test_collections_put_read_only_in_payload(self): msg='Unexpected error message') def test_collection_put_change_id(self): + # Renaming is no longer allowed, due to this the test has been adapted sample = self.collection_factory.create_sample( name='new-collection-name', sample='collection-2' ) - # for the start, the id have to be different + # test if renaming does not work self.assertNotEqual(self.collection['name'], sample['name']) response = self.client.put( f"/{STAC_BASE_V}/collections/{self.collection['name']}", data=sample.get_json('put'), content_type='application/json' ) - self.assertStatusCode(200, response) + self.assertStatusCode(400, response) + self.assertEqual(['Renaming object is not allowed'], + response.json()['description'], + msg='Unexpected error message') - # check if id changed + # check if id has not changed response = self.client.get(f"/{STAC_BASE_V}/collections/{sample['name']}") - self.assertStatusCode(200, response) - self.check_stac_collection(sample.json, response.json()) + self.assertStatusCode(404, response) + #self.check_stac_collection(sample.json, response.json()) - # the old collection shouldn't exist any more + # the old collection should still exist response = self.client.get(f"/{STAC_BASE_V}/collections/{self.collection['name']}") - self.assertStatusCode(404, response) + self.assertStatusCode(200, response) def test_collection_put_remove_optional_fields(self): collection_name = self.collection['name'] # get a name that is registered in the service From c00cde23ab768554ef8bed9ea9d5d02cc05440ef Mon Sep 17 00:00:00 2001 From: Isabelle Buholzer Date: Mon, 19 Apr 2021 13:14:38 +0200 Subject: [PATCH 069/105] BGDIINF_SB-1745: Adding read only fields --- app/stac_api/admin.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index e8fc8143..208e1f31 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -80,13 +80,10 @@ def get_search_results(self, request, queryset, search_term): queryset |= self.model.objects.filter(name__exact=search_term.strip('"')) return queryset, use_distinct - # We don't want to do a rename of the collection on S3 - # That's why the field is set readonly here - # for update operation - def change_view(self, request, object_id, form_url='', extra_content=None): - self.readonly_fields = self.get_readonly_fields(request) - self.readonly_fields.extend(['name']) - return super().change_view(request, object_id, form_url, extra_content) + def get_readonly_fields(self, request, obj=None): + if obj is not None: + return self.readonly_fields + ['name'] + return self.readonly_fields class ItemLinkInline(admin.TabularInline): From 0629b745adb5d1d60d8f258ef58a2d12b701b63b Mon Sep 17 00:00:00 2001 From: Isabelle Buholzer Date: Mon, 19 Apr 2021 13:43:02 +0200 Subject: [PATCH 070/105] BGDIINF_SB-1745: Tipp error --- app/stac_api/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 4e2b9a08..07610541 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -322,7 +322,7 @@ def perform_update(self, serializer, *args, **kwargs): 'name', self.kwargs['collection_name'], {'collection': self.kwargs['collection_name']} ) - return super().partial_update(serializer, *args, **kwargs) + return super().perform_update(serializer, *args, **kwargs) class ItemsList(generics.GenericAPIView, views_mixins.CreateModelMixin): From 5d549177a9fd8c4b782f214234b5a519e07bca8d Mon Sep 17 00:00:00 2001 From: Brice 
Schaffner <67745584+ltshb@users.noreply.github.com> Date: Mon, 19 Apr 2021 13:57:23 +0200 Subject: [PATCH 071/105] Forced the admin collection fields order Added the fields property to the admin collection class to force the order of the fields. This way the `Id` field is always at the top, otherwise it is at the bottom in the change view due to the read only flag. --- app/stac_api/admin.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index 208e1f31..17f194bc 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -66,7 +66,15 @@ class CollectionAdmin(admin.ModelAdmin): class Media: js = ('js/admin/collection_help_search.js',) css = {'all': ('style/hover.css',)} - + fields = [ + 'name', + 'title', + 'description', + 'extent_start_datetime', + 'extent_end_datetime', + 'summaries', + 'extent_geometry' + ] readonly_fields = [ 'extent_start_datetime', 'extent_end_datetime', 'summaries', 'extent_geometry' ] From cce04aa0cdee34453bb838206220248ea7a2b6ea Mon Sep 17 00:00:00 2001 From: Isabelle Buholzer Date: Mon, 19 Apr 2021 14:35:18 +0200 Subject: [PATCH 072/105] BGDIINF_SB-1745: Remove unused line --- app/tests/test_collections_endpoint.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/tests/test_collections_endpoint.py b/app/tests/test_collections_endpoint.py index 7c108836..4cfe6f34 100644 --- a/app/tests/test_collections_endpoint.py +++ b/app/tests/test_collections_endpoint.py @@ -254,7 +254,6 @@ def test_collection_put_change_id(self): # check if id has not changed response = self.client.get(f"/{STAC_BASE_V}/collections/{sample['name']}") self.assertStatusCode(404, response) - #self.check_stac_collection(sample.json, response.json()) # the old collection should still exist response = self.client.get(f"/{STAC_BASE_V}/collections/{self.collection['name']}") From 24dcd0aa5a52074d03e5399eec4b3386b365ce9c Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 15 Apr 2021 10:09:56 +0200 Subject: [PATCH 073/105] BGDIINF_SB-1739: Removed wrong openapi response in PATCH asset The PATCH Asset doesn't support upsert but only update therefore the response 201 is not valid and has been removed. --- spec/transaction/transaction.yml | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index a3696bcd..bb2e1254 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -433,7 +433,7 @@ paths: tags: - Data put: - summary: Update or create an asset + summary: Update or create an asset description: >- Update or create an asset with Id `assetId` with a complete asset definition. If the asset doesn't exists it is then created. 
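The collection views above now call `validate_renaming()` before upserts and updates so that the identifier in the payload cannot differ from the one in the URL. The helper itself is not part of these patches; a sketch of the kind of guard it performs (its real signature and error rendering may differ) could look like this:

```python
# Sketch only: the service imports validate_renaming() from elsewhere in the code base.
from rest_framework.exceptions import ValidationError


def validate_renaming(serializer, id_field, original_id, log_extra=None):
    '''Reject payloads that would rename the object identified by the URL path.'''
    new_id = serializer.validated_data.get(id_field, original_id)
    if new_id != original_id:
        # Surfaces as an HTTP 400; the adapted tests expect the message
        # "Renaming object is not allowed" in the error description.
        raise ValidationError('Renaming object is not allowed')
```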
@@ -489,19 +489,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/itemAsset" - "201": - description: Returns the created Asset - headers: - Location: - description: A link to the asset - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/itemAsset" + $ref: "#/components/schemas/readUpdateAsset" "400": $ref: "#/components/responses/BadRequest" "404": From 42fbadb66cf58a9d8a5a67b3d1ea7d4d3e7d0766 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 15 Apr 2021 10:19:45 +0200 Subject: [PATCH 074/105] BGDIINF_SB-1739: Moved SPEC generic response and schemas Those responses and schemas were generic and make more sense to have them in the final openapi.yaml final and not only in the openapitransactional.yaml. --- spec/components/responses.yml | 9 ++++++ spec/components/schemas.yml | 22 +++++++++++++ spec/transaction/transaction.yml | 55 -------------------------------- 3 files changed, 31 insertions(+), 55 deletions(-) diff --git a/spec/components/responses.yml b/spec/components/responses.yml index 2a659418..73aacd32 100644 --- a/spec/components/responses.yml +++ b/spec/components/responses.yml @@ -191,6 +191,15 @@ components: example: code: 404 description: "Resource not found" + BadRequest: + description: The request was malformed or semantically invalid + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 400 + description: "Invalid parameter" PreconditionFailed: description: Some condition specified by the request could not be met in the server # Currently the 412 Precondition not met is handled by django and we cannot give a body diff --git a/spec/components/schemas.yml b/spec/components/schemas.yml index 16c718f3..6f8fd96b 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yml @@ -557,6 +557,13 @@ components: - type - geometries type: object + href: + type: string + format: url + description: Link to the asset object + readOnly: true + example: | + http://data.geo.admin.ch/ch.swisstopo.swissimage/collections/cs/items/CS3-20160503_132130_04/thumb.png ids: description: >- Array of Item ids to return. All other filter parameters that further @@ -1289,6 +1296,13 @@ components: description: >- Apply query operations to a specific property. The following properties are currently supported: `created`, `updated`, `title`. + roles: + type: array + items: + type: string + description: Purposes of the asset + example: + - thumbnail searchBody: allOf: # - $ref: "#/components/schemas/assetQueryFilter" @@ -1311,6 +1325,14 @@ components: example: "2017-08-17T08:05:32Z" format: date-time type: string + title: + type: string + description: Displayed title + example: Thumbnail + type: + type: string + description: Media type of the asset + example: image/tiff; application=geotiff updated: description: RFC 3339 compliant datetime string, time when the object was updated example: 2018-02-12T23:20:50Z diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index bb2e1254..7483eebc 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -604,14 +604,6 @@ components: he would overwrite another changes of the resource. example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" schemas: - description: - type: string - description: >- - Detailed multi-line description to fully explain the object (collection, - item, asset, ...). 
- - [CommonMark 0.29](http://commonmark.org/) syntax MAY be used for rich - text representation. assetId: type: string pattern: ^[a-z0-9.-_]+$ @@ -770,12 +762,6 @@ components: default: https://data.geo.admin.ch/// example: >- http://data.geo.admin.ch/tmp/gdwh/ch.swisstopo.swissimage/CS3-20160503_132130_04.png - href: - type: string - format: url - description: Link to the asset object - example: >- - http://data.geo.admin.ch/ch.swisstopo.swissimage/collections/cs/items/CS3-20160503_132130_04/thumb.png writeItem: allOf: - $ref: "#/components/schemas/itemBase" @@ -825,10 +811,6 @@ components: example: properties: datetime: "2016-05-03T13:22:30.040Z" - # assets: - # analytic: - # title: 1-Band Analytic - # href: http://cool-sat.com/catalog/collections/cs/items/CS3-201605XX_132130_04/analytic-1.tif partialCollection: type: object description: Allows for a set of partial metadata fields for a collection @@ -874,34 +856,6 @@ components: - http://www.opengis.net/def/crs/EPSG/0/4326 example: title: The new title of the collection - roles: - type: array - items: - type: string - description: Purposes of the asset - example: - - thumbnail - title: - type: string - description: Displayed title - example: Thumbnail - type: - type: string - description: Media type of the asset - example: image/tiff; application=geotiff - # Overwrite the collection links examples - # collection: - # properties: - # links: - # example: - # - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - # rel: license - # title: Licence for the free geodata of the Federal Office of Topography swisstopo - # - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - # rel: describedby - # items: - # $ref: "#/components/schemas/link" - # type: array itemIdUpdate: description: >- Item identifier (unique per collection. If it doesn't match the `featureId` in path @@ -953,15 +907,6 @@ components: required: - code - links - BadRequest: - description: The request was malformed or semantically invalid - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 400 - description: "Invalid parameter" PermissionDenied: description: No Permission for this request content: From 1c1cca4fdc8de55fdbd0b6886c7406681709abe7 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 15 Apr 2021 11:28:23 +0200 Subject: [PATCH 075/105] BGDIINF_SB-1739: Removed the checksum:multihash from write Assets requests + clean ups Removed the checksum:multihash from Asset write requests. Now the checksum:multihash will be added to the asset upload requests. Did also some clean up regarding the asset schemas to allow better reusability of schemas. Also removed deprecated comment in the create asset description. Also corrected the Asset UPSERT response, where the 201 was missing. Some minor clean up and documentation improvement, especialy making use of schema `title` property. This make the read of the documentation easier. 
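With this change the client supplies the checksum when creating the multipart upload rather than when writing the asset metadata, where it is now read-only. A sketch of the resulting flow using `requests` — the host, credentials, asset path, `/uploads` sub-resource and checksum value are all placeholders:

```python
import requests

API = 'https://data.geoadmin.ch/api/stac/v0.9'
ASSET = f'{API}/collections/ch.example.collection/items/item-1/assets/data.tiff'
AUTH = ('username', 'password')  # placeholder credentials

# The checksum travels with the multipart upload creation request
# (the "/uploads" sub-resource of the asset is assumed here).
upload = requests.post(
    f'{ASSET}/uploads',
    auth=AUTH,
    json={
        'number_parts': 1,
        'checksum:multihash': '90e402107a7f2588a85362b9beea2a12d4514d45',  # placeholder value
    },
).json()
print(upload['upload_id'], upload['urls'])

# On the asset itself the field is read-only and is simply reported back on reads.
print(requests.get(ASSET).json().get('checksum:multihash'))
```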
--- spec/components/schemas.yml | 100 +++++++++---------- spec/transaction/transaction.yml | 159 +++++++++---------------------- 2 files changed, 97 insertions(+), 162 deletions(-) diff --git a/spec/components/schemas.yml b/spec/components/schemas.yml index 6f8fd96b..b7ba8073 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yml @@ -69,6 +69,39 @@ components: # type: string # type: object # description: Apply query operations to a specific property + assetBase: + title: Asset + description: The `property name` defines the ID of the Asset. + type: object + required: + - type + - created + - updated + properties: + title: + $ref: "#/components/schemas/title" + description: + $ref: "#/components/schemas/description" + type: + $ref: "#/components/schemas/type" + href: + $ref: "#/components/schemas/href" + checksum:multihash: + $ref: "#/components/schemas/checksumMultihashReadOnly" + # roles: + # $ref: '#/components/schemas/roles' + "geoadmin:variant": + $ref: "#/components/schemas/geoadmin:variant" + "geoadmin:lang": + $ref: "#/components/schemas/geoadmin:lang" + "proj:epsg": + $ref: "#/components/schemas/proj:epsg" + "eo:gsd": + $ref: "#/components/schemas/eo:gsd" + created: + $ref: "#/components/schemas/created" + updated: + $ref: "#/components/schemas/updated" bbox: description: >- Only features that have a geometry that intersects the bounding box are selected. @@ -163,7 +196,16 @@ components: example: 90e402107a7f2588a85362b9beea2a12d4514d45 pattern: ^[a-f0-9]+$ title: Multihash - type: string, + type: string + checksumMultihashReadOnly: + description: | + `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. + + example: 90e402107a7f2588a85362b9beea2a12d4514d45 + pattern: ^[a-f0-9]+$ + title: Multihash + type: string + readOnly: true created: description: RFC 3339 compliant datetime string, time when the object was created example: 2018-02-12T23:20:50Z @@ -356,6 +398,7 @@ components: description: RFC 3339 compliant datetime string example: 2018-02-12T23:20:50Z type: string + format: date-time datetimeQuery: description: >- Either a date-time or an interval, open or closed. Date and time expressions @@ -680,54 +723,10 @@ components: - type type: object itemAssets: + title: Assets + description: List of Assets attached to this feature. additionalProperties: - properties: - created: - $ref: "#/components/schemas/created" - description: - description: >- - Multi-line description to explain the asset. - - - [CommonMark 0.29](http://commonmark.org/) syntax MAY be used for - rich text representation. - example: Small 256x256px PNG thumbnail for a preview. 
- type: string - href: - description: Link to the asset object - example: http://cool-sat.com/catalog/collections/cs/items/CS3-20160503_132130_04/thumb.png - format: url - type: string - # roles: - # description: Purposes of the asset - # example: - # - thumbnail - # items: - # type: string - # type: array - title: - description: Displayed title - example: Thumbnail - type: string - type: - description: Media type of the asset - example: image/tiff; application=geotiff - type: string - proj:epsg: - $ref: "#/components/schemas/proj:epsg" - geoadmin:variant: - $ref: "#/components/schemas/geoadmin:variant" - eo:gsd: - $ref: "#/components/schemas/eo:gsd" - updated: - $ref: "#/components/schemas/created" - required: - - "checksum:multihash" - - href - - type - - created - - updated - type: object + $ref: "#/components/schemas/assetBase" type: object readOnly: true example: @@ -817,10 +816,12 @@ components: $ref: "#/components/schemas/linkPostSearch" type: array itemId: - description: Item identifier (unique per collection) + title: ID + description: Feature identifier (unique per collection) example: smr200-200-4-2019 type: string itemProperties: + title: Properties description: >- Provides the core metadata fields plus extensions @@ -853,6 +854,7 @@ components: - updated type: object itemType: + title: type description: The GeoJSON type enum: - Feature diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index 7483eebc..6bc519a1 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -368,15 +368,7 @@ paths: - Data post: summary: Add a new asset to a feature - description: >- - Create a new asset for a specific feature. - - - When creating a new asset for a feature, the metadata of - the asset is posted to the API. The Asset object itself must be already publicly - available at the following URL: https://data.geo.admin.ch/{collectionId}/{featureId}/{assetId} - - Optionally the Asset object multihash can be given for sanity check. + description: Create a new asset for a specific feature. operationId: postAsset tags: - Data Management @@ -387,7 +379,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/itemAssetWrite" + $ref: "#/components/schemas/createAsset" responses: "201": description: Return the created Asset @@ -400,7 +392,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/itemAsset" + $ref: "#/components/schemas/createAsset" "400": $ref: "#/components/responses/BadRequest" "5XX": @@ -449,14 +441,26 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/itemAssetUpdate" + $ref: "#/components/schemas/readUpdateAsset" responses: "200": - description: Status of the update request. + description: Asset has been successfully updated. + content: + application/json: + schema: + $ref: "#/components/schemas/readUpdateAsset" + "201": + description: Asset has been newly created. + headers: + Location: + description: A link to the asset + schema: + type: string + format: url content: application/json: schema: - $ref: "#/components/schemas/itemAsset" + $ref: "#/components/schemas/createAsset" "400": $ref: "#/components/responses/BadRequest" "404": @@ -482,7 +486,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/itemAssetPartialUpdate" + $ref: "#/components/schemas/readUpdateAsset" responses: "200": description: Returns the updated Asset. 
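Since the PUT asset request is an upsert, clients should be prepared for both documented outcomes. A minimal `requests` sketch — URL, credentials and payload are placeholders:

```python
import requests

url = ('https://data.geoadmin.ch/api/stac/v0.9/collections/ch.example.collection'
       '/items/item-1/assets/data.tiff')
response = requests.put(
    url,
    auth=('username', 'password'),
    json={'id': 'data.tiff', 'type': 'image/tiff; application=geotiff'},
)
if response.status_code == 201:
    # The upsert created a new asset; the Location header points to it.
    print('created:', response.headers.get('Location'))
elif response.status_code == 200:
    print('updated:', response.json().get('id'))
else:
    response.raise_for_status()
```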
@@ -607,7 +611,7 @@ components: assetId: type: string pattern: ^[a-z0-9.-_]+$ - title: asset id + title: ID description: >- The asset id uniquely identifies the asset for an item @@ -615,24 +619,13 @@ components: **Note**: `id` must be unique for the item and must be identical to the filename. example: smr50-263-2016-2056-kgrs-2.5.tiff - updateAssetId: - type: string - pattern: ^[a-z0-9.-_]+$ - title: asset id - description: >- - The asset id uniquely identifies the asset for an item - - - **Note**: `id` must be unique for the item and must be identical to the - filename. When the `id` doesn't match the parameter `assetId`, the asset is renamed, - renaming also the object itself on S3. - example: smr50-263-2016-2056-kgrs-2.5.tiff assets: + title: Assets type: object properties: assets: items: - $ref: "#/components/schemas/itemAsset" + $ref: "#/components/schemas/readUpdateAsset" type: array links: items: @@ -649,54 +642,21 @@ components: rel: item - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale rel: collection - assetBase: - type: object - required: - - created - - updated - properties: - id: - $ref: "#/components/schemas/assetId" - title: - $ref: "#/components/schemas/title" - description: - $ref: "#/components/schemas/description" - type: - $ref: "#/components/schemas/type" - # roles: - # $ref: '#/components/schemas/roles' - "geoadmin:variant": - $ref: "#/components/schemas/geoadmin:variant" - "geoadmin:lang": - $ref: "#/components/schemas/geoadmin:lang" - "proj:epsg": - $ref: "#/components/schemas/proj:epsg" - "eo:gsd": - $ref: "#/components/schemas/eo:gsd" - created: - $ref: "#/components/schemas/created" - updated: - $ref: "#/components/schemas/updated" - # overwrites the STAC definition of itemAsset - itemAsset: + createAsset: allOf: - $ref: "#/components/schemas/assetBase" - type: object required: - id - - type - - href - - checksum:multihash - links properties: - "checksum:multihash": - $ref: "#/components/schemas/checksum:multihash" - href: - $ref: "#/components/schemas/href" + id: + $ref: "#/components/schemas/assetId" links: items: $ref: "#/components/schemas/link" type: array + readOnly: true example: - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff rel: self @@ -708,60 +668,33 @@ components: rel: item - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale rel: collection - itemAssetWrite: + readUpdateAsset: allOf: - $ref: "#/components/schemas/assetBase" - type: object required: - id - - type - itemAssetUpdate: - allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - required: - - id - - type - properties: - id: - $ref: "#/components/schemas/updateAssetId" - itemAssetPartialUpdate: - allOf: - - $ref: "#/components/schemas/assetBase" - - type: object + - links properties: id: - $ref: "#/components/schemas/updateAssetId" - writeChecksumMultihash: - description: >- - `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) - format. - - - When provided the asset Object located at href will be checked against this checksum and the - request is rejected if the checksum don't match. - example: 90e402107a7f2588a85362b9beea2a12d4514d45 - pattern: ^[a-f0-9]+$ - title: Multihash - type: string, - writeHref: - type: string - format: url - description: >- - URL of the current location of the asset object. 
- - - The url must be publicly accessible. If the URL corresponds already to the correct location - of the asset object on s3, the checksum of the payload and the object on s3 are compared - and if matching the request will be accepted. - + $ref: "#/components/schemas/assetId" + links: + items: + $ref: "#/components/schemas/link" + type: array + readOnly: true + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: item + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection - If the URL is different from the location that the asset object should have according to the - the pattern `///`, the service will move the asset from this - temporary location to the correct one. - default: https://data.geo.admin.ch/// - example: >- - http://data.geo.admin.ch/tmp/gdwh/ch.swisstopo.swissimage/CS3-20160503_132130_04.png writeItem: allOf: - $ref: "#/components/schemas/itemBase" @@ -879,7 +812,7 @@ components: content: application/json: schema: - $ref: "#/components/schemas/itemAsset" + $ref: "#/components/schemas/readUpdateAsset" DeletedResource: description: Status of the delete resource content: From c959d69c7a10b0d5c4d9afc0cdb5660281e1f7c7 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 15 Apr 2021 11:38:42 +0200 Subject: [PATCH 076/105] BGDIINF_SB-1739: Reorganized the requests by adding two new tags Added a tag for Authentication which contains the authentication description and get-token endpoint. Added a new tag for the Asset Upload Management. --- spec/transaction/transaction.yml | 61 +++++++++++++++++++++----------- 1 file changed, 41 insertions(+), 20 deletions(-) diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index 6bc519a1..270f0196 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -533,7 +533,7 @@ paths: - url: http://data.geo.admin.ch/api/stac/ post: tags: - - Data Management + - Authentication summary: >- Request token for token authentication. operationId: getToken @@ -546,7 +546,7 @@ paths: properties: username: type: string - decscription: name of user for whom token is requested + description: name of user for whom token is requested password: type: string description: password of user for whom token is requested @@ -849,28 +849,47 @@ components: example: code: 403 description: "Permission denied" + tags: - - description: Essential characteristics of this API - name: Capabilities - - description: Access to data (features) - name: Data - - description: Extension to OGC API - Features to support STAC metadata model and search API - name: STAC - - description: | - All write requests require authentication. The currently available options for a user to - authenticate himself are described below. 
+ - name: Capabilities + description: Essential characteristics of this API + - name: Data + description: Access to data (features) + - name: STAC + description: Extension to OGC API - Features to support STAC metadata model and search API + - name: Data Management + description: | + Metadata management requests. Theses requests are used to create, update or delete the STAC + metadata. + + *NOTE: these requests requires authentication as described in [here](#tag/Authentication).* + - name: Asset Upload Management + description: | + Asset file can be uploaded via the STAC API using the following requests. + + *NOTE: the POST requests requires authentication as described in [here](#tag/Authentication).* + - name: Authentication + description: | + All write requests require authentication. There is currently three type of supported authentications: + + * [Session authentication](#section/Session-authentication) + * [Basic authentication](#section/Basic-authentication) + * [Token authentication](#section/Token-authentication) + + ## Session authentication - # Session authentication When using the browsable API the user can simply use the admin interface for logging in. Once logged in, the browsable API can be used to perform write requests. - # Basic authentication + ## Basic authentication + The username and password for authentication can be added to every write request the user wants to perform. Here is an example of posting an asset using curl (_username_="MickeyMouse", _password_="I_love_Minnie_Mouse"): + ``` curl --request POST \ --user MickeyMouse:I_love_Minnie_Mouse \ - --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ + --url https://data.geoadmin.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ --header 'Content-Type: application/json' \ --data '{ "id": "fancy_unique_id", @@ -883,12 +902,14 @@ tags: }' ``` - # Token authentication + ## Token authentication + A user specific token for authentication can be added to every write request the user wants to perform. Here is an example of posting an asset using curl: + ``` curl --request POST \ - --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ + --url https://data.geoadmin.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ --header 'Authorization: Token ccecf40693bfc52ba090cd46eb7f19e723fe831f' \ --header 'Content-Type: application/json' \ --data '{ @@ -901,14 +922,14 @@ tags: "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" }' ``` + Tokens can either be generated in the admin interface or existing users can perform a POST request - on the get-token endpoint to request a token (also see description of the get-token POST endpoint - at the bottom). + on the get-token endpoint to request a token (also see [Request token for token authentication](#operation/getToken)). 
Here is an example using curl: + ``` curl --request POST \ - --url https://service-stac.dev.bgdi.ch/api/stac/get-token \ + --url https://data.geoadmin.ch/api/stac/get-token \ --header 'Content-Type: application/json' \ --data '{"username": "MickeyMouse", "password": "I_love_Minnie_Mouse"}' ``` - name: Data Management From a458c7bb0de49a69e0b78d6dbb86171413c15d02 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Fri, 16 Apr 2021 07:00:29 +0200 Subject: [PATCH 077/105] BGDIINF_SB-1739: SPEC - Moved the ETag definition from schemas to headers The ETag header was defined in the /components/schemas, however regarding the openapi spec a header definition is not a schema but a Header object, therefore moved it in /components/headers. This may/could/might avoid spec validation issues. --- spec/Makefile | 1 + spec/components/headers.yml | 13 +++++++++++++ spec/components/responses.yml | 4 ++-- spec/components/schemas.yml | 11 ----------- spec/transaction/transaction.yml | 2 +- 5 files changed, 17 insertions(+), 14 deletions(-) create mode 100644 spec/components/headers.yml diff --git a/spec/Makefile b/spec/Makefile index 3e0eddf0..83a64f32 100644 --- a/spec/Makefile +++ b/spec/Makefile @@ -5,6 +5,7 @@ SPEC_HTTP_PORT ?= 8090 PARTS = base.yml \ + components/headers.yml \ components/parameters.yml \ components/responses.yml \ components/schemas.yml \ diff --git a/spec/components/headers.yml b/spec/components/headers.yml new file mode 100644 index 00000000..0300b163 --- /dev/null +++ b/spec/components/headers.yml @@ -0,0 +1,13 @@ +components: + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- + tag for the selected resource. An entity-tag is an opaque identifier for + different versions of a resource over time, regardless whether multiple + versions are valid at the same time. An entity-tag consists of an opaque + quoted string, possibly prefixed by a weakness indicator. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true diff --git a/spec/components/responses.yml b/spec/components/responses.yml index 73aacd32..f3620bf1 100644 --- a/spec/components/responses.yml +++ b/spec/components/responses.yml @@ -3,7 +3,7 @@ components: Collection: headers: ETag: - $ref: "#/components/schemas/ETag" + $ref: "#/components/headers/ETag" content: application/json: schema: @@ -89,7 +89,7 @@ components: Feature: headers: ETag: - $ref: "#/components/schemas/ETag" + $ref: "#/components/headers/ETag" content: application/json: schema: diff --git a/spec/components/schemas.yml b/spec/components/schemas.yml index b7ba8073..8580b2ee 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yml @@ -1341,14 +1341,3 @@ components: type: string format: date-time readOnly: true - ETag: - schema: - type: string - description: >- - The RFC7232 ETag header field in a response provides the current entity- - tag for the selected resource. An entity-tag is an opaque identifier for - different versions of a resource over time, regardless whether multiple - versions are valid at the same time. An entity-tag consists of an opaque - quoted string, possibly prefixed by a weakness indicator. 
- example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - required: true diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index 270f0196..d326ba31 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -808,7 +808,7 @@ components: The response is a document consisting of one asset of the feature. headers: ETag: - $ref: "#/components/schemas/ETag" + $ref: "#/components/headers/ETag" content: application/json: schema: From 12841d614520055df93e7b6ff6108eebacd8b87d Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Fri, 16 Apr 2021 08:09:36 +0200 Subject: [PATCH 078/105] BGDIINF_SB-1739: SPEC made the collection links optional in write request In collection write request the links property is optional, therefore corrected the spec. --- spec/components/schemas.yml | 62 +++++++++++++++++++++----------- spec/transaction/transaction.yml | 6 ++-- 2 files changed, 45 insertions(+), 23 deletions(-) diff --git a/spec/components/schemas.yml b/spec/components/schemas.yml index 8580b2ee..7cb8753d 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yml @@ -212,7 +212,7 @@ components: type: string format: date-time readOnly: true - collection: + collectionBase: properties: crs: default: @@ -253,24 +253,6 @@ components: readOnly: true license: $ref: "#/components/schemas/license" - links: - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: items - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - items: - $ref: "#/components/schemas/link" - type: array providers: $ref: "#/components/schemas/providers" stac_version: @@ -338,7 +320,6 @@ components: $ref: "#/components/schemas/updated" required: - id - - links - stac_version - description - license @@ -346,6 +327,47 @@ components: - created - updated type: object + collection: + allOf: + - $ref: "#/components/schemas/collectionBase" + - type: object + required: + - links + properties: + links: + type: array + items: + $ref: "#/components/schemas/link" + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: items + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + collectionWrite: + title: collection + allOf: + - $ref: "#/components/schemas/collectionBase" + - type: object + properties: + links: + type: array + items: + $ref: "#/components/schemas/link" + example: + - 
href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby collections: properties: collections: diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index d326ba31..8ea25b19 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -41,7 +41,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/collection" + $ref: "#/components/schemas/collectionWrite" "403": $ref: "#/components/responses/PermissionDenied" "404": @@ -65,7 +65,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/collection" + $ref: "#/components/schemas/collectionWrite" example: description: The National Map 1:200,000 is a topographic map giving an overview of Switzerland. id: ch.swisstopo.pixelkarte-farbe-pk200.noscale @@ -124,7 +124,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/collection" + $ref: "#/components/schemas/collectionWrite" example: id: ch.swisstopo.pixelkarte-farbe-pk200.noscale license: proprietary From 06c967aef62675bf52047f3ced22df3a6f8d3a10 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Mon, 19 Apr 2021 06:45:46 +0200 Subject: [PATCH 079/105] BGDIINF_SB-1739, BGDIINF_SB-1764: SPEC - fixed schemas name In the openapi spec the schemas names should not have special characters, but only [a-zA-Z0-9\.\-_]. --- spec/components/schemas.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/spec/components/schemas.yml b/spec/components/schemas.yml index 7cb8753d..74fe52be 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yml @@ -91,13 +91,13 @@ components: # roles: # $ref: '#/components/schemas/roles' "geoadmin:variant": - $ref: "#/components/schemas/geoadmin:variant" + $ref: "#/components/schemas/geoadminVariant" "geoadmin:lang": - $ref: "#/components/schemas/geoadmin:lang" + $ref: "#/components/schemas/geoadminLang" "proj:epsg": - $ref: "#/components/schemas/proj:epsg" + $ref: "#/components/schemas/projEpsg" "eo:gsd": - $ref: "#/components/schemas/eo:gsd" + $ref: "#/components/schemas/eoGsd" created: $ref: "#/components/schemas/created" updated: @@ -188,7 +188,7 @@ components: properties: bbox: $ref: "#/components/schemas/bboxfilter" - checksum:multihash: + checksumMultihash: description: >- `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. @@ -456,7 +456,7 @@ components: [CommonMark 0.29](http://commonmark.org/) syntax MAY be used for rich text representation. type: string - eo:gsd: + eoGsd: description: >- GSD is the nominal Ground Sample Distance for the data, as measured in meters on the ground. 
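To make the effect of the new `collectionWrite` schema concrete, here is a hypothetical minimal write payload that omits the now optional `links` member. This is a sketch only: the field values are reused from the examples in the spec, and which other fields are mandatory on write is governed by `collectionBase`.

```python
import requests

BASE = "https://data.geoadmin.ch/api/stac/v0.9"  # placeholder host

collection = {
    "id": "ch.swisstopo.pixelkarte-farbe-pk200.noscale",
    "description": "The National Map 1:200,000 is a topographic map giving an overview of Switzerland.",
    "license": "proprietary",
    # No "links" member: with collectionWrite this payload is still valid.
}

resp = requests.put(
    f"{BASE}/collections/{collection['id']}",
    json=collection,
    auth=("MickeyMouse", "I_love_Minnie_Mouse"),  # placeholder credentials
)
resp.raise_for_status()
```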
@@ -586,7 +586,7 @@ components: - temporal type: object readOnly: true - geoadmin:lang: + geoadminLang: enum: - de - it @@ -595,7 +595,7 @@ components: - en title: Product language type: string - geoadmin:variant: + geoadminVariant: example: komb title: Product variants type: string @@ -1145,7 +1145,7 @@ components: - - 7.242974548172171 - 46.57310580640624 type: Polygon - proj:epsg: + projEpsg: description: >- A Coordinate Reference System (CRS) is the data reference system (sometimes called a 'projection') used by the asset data, and can From f95d4af310f40a146993cd9e467d21552e561fff Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Mon, 19 Apr 2021 06:53:35 +0200 Subject: [PATCH 080/105] BGDIINF_SB-1764: SPEC - fixed wrong description schemas In the CollectionBase schemas the description properties used the `schema` attribute which is not allowed in the openapi spec. --- spec/components/schemas.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/spec/components/schemas.yml b/spec/components/schemas.yml index 74fe52be..15c8eec0 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yml @@ -225,8 +225,6 @@ components: type: array readOnly: true description: - schema: - $ref: "#/components/schemas/description" description: A description of the features in the collection example: >- Swiss Map Raster are a conversion of the map image into a digital form From 0571138fdac139d67634911822383266c0145f29 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Mon, 19 Apr 2021 06:55:47 +0200 Subject: [PATCH 081/105] BGDIINF_SB-1764: SPEC - fixed wrong types In openapi spec the `int` type should be marked as `integer`. Also the `type` attribute doesn't support list. --- spec/components/schemas.yml | 6 ++---- spec/transaction/transaction.yml | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/spec/components/schemas.yml b/spec/components/schemas.yml index 15c8eec0..88a7ca0c 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yml @@ -477,7 +477,7 @@ components: Information about the exception: an error code plus an optional description. properties: code: - type: int + type: integer example: 500 description: anyOf: @@ -1154,9 +1154,7 @@ components: example: 2056 title: EPSG code. 
- type: - - integer - - null + type: integer providers: description: >- A list of providers, which may include all organizations capturing or processing the data diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index 8ea25b19..14684887 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -823,7 +823,7 @@ components: type: object properties: code: - type: int + type: integer example: 200 description: type: string From 1c2fd88bb94b4e5e3bdfd516a650c4905127b960 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 15 Apr 2021 11:47:51 +0200 Subject: [PATCH 082/105] BGDIINF_SB-1739: Added asset upload spec --- spec/static/spec/v0.9/openapi.yaml | 231 ++-- .../spec/v0.9/openapitransactional.yaml | 1147 +++++++++++++---- spec/transaction/transaction.yml | 662 +++++++++- 3 files changed, 1689 insertions(+), 351 deletions(-) diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index bec86b93..70b42f84 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -22,6 +22,18 @@ tags: search API name: STAC components: + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- tag + for the selected resource. An entity-tag is an opaque identifier for different + versions of a resource over time, regardless whether multiple versions are + valid at the same time. An entity-tag consists of an opaque quoted string, + possibly prefixed by a weakness indicator. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true parameters: bbox: explode: false @@ -116,7 +128,7 @@ components: Collection: headers: ETag: - $ref: "#/components/schemas/ETag" + $ref: "#/components/headers/ETag" content: application/json: schema: @@ -199,7 +211,7 @@ components: Feature: headers: ETag: - $ref: "#/components/schemas/ETag" + $ref: "#/components/headers/ETag" content: application/json: schema: @@ -286,6 +298,15 @@ components: example: code: 404 description: "Resource not found" + BadRequest: + description: The request was malformed or semantically invalid + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 400 + description: "Invalid parameter" PreconditionFailed: description: Some condition specified by the request could not be met in the server @@ -301,6 +322,37 @@ components: code: 500 description: "Internal server error" schemas: + assetBase: + title: Asset + description: The `property name` defines the ID of the Asset. + type: object + required: + - type + - created + - updated + properties: + title: + $ref: "#/components/schemas/title" + description: + $ref: "#/components/schemas/description" + type: + $ref: "#/components/schemas/type" + href: + $ref: "#/components/schemas/href" + checksum:multihash: + $ref: "#/components/schemas/checksumMultihashReadOnly" + geoadmin:variant: + $ref: "#/components/schemas/geoadminVariant" + geoadmin:lang: + $ref: "#/components/schemas/geoadminLang" + proj:epsg: + $ref: "#/components/schemas/projEpsg" + eo:gsd: + $ref: "#/components/schemas/eoGsd" + created: + $ref: "#/components/schemas/created" + updated: + $ref: "#/components/schemas/updated" bbox: description: >- Only features that have a geometry that intersects the bounding box are selected. 
@@ -383,21 +435,29 @@ components: properties: bbox: $ref: "#/components/schemas/bboxfilter" - checksum:multihash: + checksumMultihash: description: >- `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. example: 90e402107a7f2588a85362b9beea2a12d4514d45 pattern: ^[a-f0-9]+$ title: Multihash - type: string, + type: string + checksumMultihashReadOnly: + description: | + `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. + example: 90e402107a7f2588a85362b9beea2a12d4514d45 + pattern: ^[a-f0-9]+$ + title: Multihash + type: string + readOnly: true created: description: RFC 3339 compliant datetime string, time when the object was created example: 2018-02-12T23:20:50Z type: string format: date-time readOnly: true - collection: + collectionBase: properties: crs: default: @@ -410,8 +470,6 @@ components: type: array readOnly: true description: - schema: - $ref: "#/components/schemas/description" description: A description of the features in the collection example: >- Swiss Map Raster are a conversion of the map image into a digital form @@ -440,25 +498,6 @@ components: readOnly: true license: $ref: "#/components/schemas/license" - links: - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: items - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - items: - $ref: "#/components/schemas/link" - type: array providers: $ref: "#/components/schemas/providers" stac_version: @@ -524,7 +563,6 @@ components: $ref: "#/components/schemas/updated" required: - id - - links - stac_version - description - license @@ -532,6 +570,49 @@ components: - created - updated type: object + collection: + allOf: + - $ref: "#/components/schemas/collectionBase" + - type: object + required: + - links + properties: + links: + type: array + items: + $ref: "#/components/schemas/link" + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: items + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + collectionWrite: + title: collection + allOf: + - $ref: "#/components/schemas/collectionBase" + - type: object + properties: + links: + type: array + items: + $ref: "#/components/schemas/link" + example: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography 
+ swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby collections: properties: collections: @@ -584,6 +665,7 @@ components: description: RFC 3339 compliant datetime string example: 2018-02-12T23:20:50Z type: string + format: date-time datetimeQuery: description: >- Either a date-time or an interval, open or closed. Date and time expressions @@ -618,7 +700,7 @@ components: [CommonMark 0.29](http://commonmark.org/) syntax MAY be used for rich text representation. type: string - eo:gsd: + eoGsd: description: >- GSD is the nominal Ground Sample Distance for the data, as measured in meters on the ground. @@ -644,7 +726,7 @@ components: Information about the exception: an error code plus an optional description. properties: code: - type: int + type: integer example: 500 description: anyOf: @@ -752,7 +834,7 @@ components: - temporal type: object readOnly: true - geoadmin:lang: + geoadminLang: enum: - de - it @@ -761,7 +843,7 @@ components: - en title: Product language type: string - geoadmin:variant: + geoadminVariant: example: komb title: Product variants type: string @@ -782,6 +864,13 @@ components: - type - geometries type: object + href: + type: string + format: url + description: Link to the asset object + readOnly: true + example: | + http://data.geo.admin.ch/ch.swisstopo.swissimage/collections/cs/items/CS3-20160503_132130_04/thumb.png ids: description: >- Array of Item ids to return. All other filter parameters that further restrict @@ -894,47 +983,10 @@ components: - type type: object itemAssets: + title: Assets + description: List of Assets attached to this feature. additionalProperties: - properties: - created: - $ref: "#/components/schemas/created" - description: - description: >- - Multi-line description to explain the asset. - - - [CommonMark 0.29](http://commonmark.org/) syntax MAY be used for rich - text representation. - example: Small 256x256px PNG thumbnail for a preview. 
- type: string - href: - description: Link to the asset object - example: http://cool-sat.com/catalog/collections/cs/items/CS3-20160503_132130_04/thumb.png - format: url - type: string - title: - description: Displayed title - example: Thumbnail - type: string - type: - description: Media type of the asset - example: image/tiff; application=geotiff - type: string - proj:epsg: - $ref: "#/components/schemas/proj:epsg" - geoadmin:variant: - $ref: "#/components/schemas/geoadmin:variant" - eo:gsd: - $ref: "#/components/schemas/eo:gsd" - updated: - $ref: "#/components/schemas/created" - required: - - "checksum:multihash" - - href - - type - - created - - updated - type: object + $ref: "#/components/schemas/assetBase" type: object readOnly: true example: @@ -1024,10 +1076,12 @@ components: $ref: "#/components/schemas/linkPostSearch" type: array itemId: - description: Item identifier (unique per collection) + title: ID + description: Feature identifier (unique per collection) example: smr200-200-4-2019 type: string itemProperties: + title: Properties description: >- Provides the core metadata fields plus extensions @@ -1060,6 +1114,7 @@ components: - updated type: object itemType: + title: type description: The GeoJSON type enum: - Feature @@ -1326,7 +1381,7 @@ components: - - 7.242974548172171 - 46.57310580640624 type: Polygon - proj:epsg: + projEpsg: description: >- A Coordinate Reference System (CRS) is the data reference system (sometimes called a 'projection') used by the asset data, and can usually be referenced @@ -1336,9 +1391,7 @@ components: there is no valid EPSG code. example: 2056 title: EPSG code. - type: - - integer - - null + type: integer providers: description: >- A list of providers, which may include all organizations capturing or processing @@ -1509,6 +1562,13 @@ components: description: >- Apply query operations to a specific property. The following properties are currently supported: `created`, `updated`, `title`. + roles: + type: array + items: + type: string + description: Purposes of the asset + example: + - thumbnail searchBody: allOf: - $ref: "#/components/schemas/queryFilter" @@ -1531,23 +1591,20 @@ components: example: "2017-08-17T08:05:32Z" format: date-time type: string + title: + type: string + description: Displayed title + example: Thumbnail + type: + type: string + description: Media type of the asset + example: image/tiff; application=geotiff updated: description: RFC 3339 compliant datetime string, time when the object was updated example: 2018-02-12T23:20:50Z type: string format: date-time readOnly: true - ETag: - schema: - type: string - description: >- - The RFC7232 ETag header field in a response provides the current entity- tag - for the selected resource. An entity-tag is an opaque identifier for different - versions of a resource over time, regardless whether multiple versions are - valid at the same time. An entity-tag consists of an opaque quoted string, - possibly prefixed by a weakness indicator. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - required: true paths: /: get: diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index 8f28da9b..819a9dc2 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -21,21 +21,86 @@ tags: - description: Extension to OGC API - Features to support STAC metadata model and search API name: STAC -- description: | - All write requests require authentication. 
The currently available options for a user to
-  authenticate himself are described below.
+- name: Data Management
+  description: |
+    Metadata management requests. These requests are used to create, update or delete the STAC
+    metadata.
+
+    *NOTE: these requests require authentication as described [here](#tag/Authentication).*
+- name: Asset Upload Management
+  description: |
+    Asset files can be uploaded via the STAC API using the following requests.
+
+    *NOTE: the POST requests require authentication as described [here](#tag/Authentication).*
+
+    ### Example
+
+    ```python
+    import os
+    import hashlib
+
+    import requests
+    import multihash
+
+    # variables
+    scheme = 'https'
+    hostname = 'data.geo.admin.ch'
+    collection = 'ch.swisstopo.pixelkarte-farbe-pk200.noscale'
+    item = 'smr200-200-4-2016'
+    asset = 'smr200-200-4-2016-2056-kgrs-10.tiff'
+    asset_path = f'collections/{collection}/items/{item}/assets/{asset}'
+    user = os.environ.get('STAC_USER', 'unknown-user')
+    password = os.environ.get('STAC_PASSWORD', 'unknown-password')
+
+    with open('smr200-200-4-2016-2056-kgrs-10.tiff', 'rb') as fd:
+        data = fd.read()
+
+    checksum_multihash = multihash.to_hex_string(multihash.encode(hashlib.sha256(data).digest(), 'sha2-256'))
+
+    # 1. Create a multipart upload
+    response = requests.post(
+        f"{scheme}://{hostname}/api/stac/v0.9/{asset_path}/uploads",
+        auth=(user, password),
+        json={
+            "number_parts": 1,
+            "checksum:multihash": checksum_multihash
+        }
+    )
+    upload_id = response.json()['upload_id']
+
+    # 2. Upload the part using the presigned url
+    response = requests.put(response.json()['urls'][0]['url'], data=data)
+    etag = response.headers['ETag']
+
+    # 3. Complete the upload
+    response = requests.post(
+        f"{scheme}://{hostname}/api/stac/v0.9/{asset_path}/uploads/{upload_id}/complete",
+        auth=(user, password),
+        json={'parts': [{'etag': etag, 'part_number': 1}]}
+    )
+    ```
+- name: Authentication
+  description: |
+    All write requests require authentication. There are currently three supported types of authentication:
+
+    * [Session authentication](#section/Session-authentication)
+    * [Basic authentication](#section/Basic-authentication)
+    * [Token authentication](#section/Token-authentication)
+
+    ## Session authentication
 
-  # Session authentication
    When using the browsable API the user can simply use the admin interface for logging in. Once
    logged in, the browsable API can be used to perform write requests.
 
-  # Basic authentication
+    ## Basic authentication
+
    The username and password for authentication can be added to every write request the user wants
    to perform. Here is an example of posting an asset using curl (_username_="MickeyMouse",
    _password_="I_love_Minnie_Mouse"):
+
    ```
    curl --request POST \
      --user MickeyMouse:I_love_Minnie_Mouse \
-     --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \
+     --url https://data.geoadmin.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \
      --header 'Content-Type: application/json' \
      --data '{
          "id": "fancy_unique_id",
          "item": "swisstlmregio-2020",
          "title": "My title",
          "type": "application/x.filegdb+zip",
          "description": "My description",
          "proj:epsg": 2056,
          "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04"
      }'
    ```
 
-  # Token authentication
+    ## Token authentication
+
    A user specific token for authentication can be added to every write request the user wants to
    perform.
Here is an example of posting an asset using curl: + ``` curl --request POST \ - --url https://service-stac.dev.bgdi.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ + --url https://data.geoadmin.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ --header 'Authorization: Token ccecf40693bfc52ba090cd46eb7f19e723fe831f' \ --header 'Content-Type: application/json' \ --data '{ @@ -66,18 +133,30 @@ tags: "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" }' ``` + Tokens can either be generated in the admin interface or existing users can perform a POST request - on the get-token endpoint to request a token (also see description of the get-token POST endpoint - at the bottom). + on the get-token endpoint to request a token (also see [Request token for token authentication](#operation/getToken)). Here is an example using curl: + ``` curl --request POST \ - --url https://service-stac.dev.bgdi.ch/api/stac/get-token \ + --url https://data.geoadmin.ch/api/stac/get-token \ --header 'Content-Type: application/json' \ --data '{"username": "MickeyMouse", "password": "I_love_Minnie_Mouse"}' ``` - name: Data Management components: + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- tag + for the selected resource. An entity-tag is an opaque identifier for different + versions of a resource over time, regardless whether multiple versions are + valid at the same time. An entity-tag consists of an opaque quoted string, + possibly prefixed by a weakness indicator. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true parameters: bbox: explode: false @@ -171,7 +250,21 @@ components: assetId: name: assetId in: path - description: Local identifier of a asset + description: Local identifier of an asset. + required: true + schema: + type: string + uploadId: + name: uploadId + in: path + description: Local identifier of an asset's upload. + required: true + schema: + type: string + presignedUrl: + name: presignedUrl + in: path + description: Presigned url returned by [Create a new Asset's multipart upload](#operation/createAssetUpload). required: true schema: type: string @@ -195,7 +288,7 @@ components: Collection: headers: ETag: - $ref: "#/components/schemas/ETag" + $ref: "#/components/headers/ETag" content: application/json: schema: @@ -278,7 +371,7 @@ components: Feature: headers: ETag: - $ref: "#/components/schemas/ETag" + $ref: "#/components/headers/ETag" content: application/json: schema: @@ -365,6 +458,15 @@ components: example: code: 404 description: "Resource not found" + BadRequest: + description: The request was malformed or semantically invalid + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 400 + description: "Invalid parameter" PreconditionFailed: description: Some condition specified by the request could not be met in the server @@ -391,11 +493,11 @@ components: The response is a document consisting of one asset of the feature. 
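A Python equivalent of the token-authentication curl examples above (fetching a token from `get-token`, then sending it in the `Authorization` header) might look like the sketch below. The `token` field name in the response body is an assumption made for illustration, and host and credentials are placeholders.

```python
import requests

BASE = "https://data.geoadmin.ch/api/stac"  # placeholder host

# Request a token with username/password (see the get-token curl example above).
resp = requests.post(
    f"{BASE}/get-token",
    json={"username": "MickeyMouse", "password": "I_love_Minnie_Mouse"},
)
resp.raise_for_status()
token = resp.json()["token"]  # assumed response field name

# Use the token for subsequent write requests.
requests.post(
    f"{BASE}/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets",
    headers={"Authorization": f"Token {token}"},
    json={
        "id": "fancy_unique_id",
        "item": "swisstlmregio-2020",
        "title": "My title",
        "type": "application/x.filegdb+zip",
        "description": "My description",
        "proj:epsg": 2056,
        "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04",
    },
)
```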
headers: ETag: - $ref: "#/components/schemas/ETag" + $ref: "#/components/headers/ETag" content: application/json: schema: - $ref: "#/components/schemas/itemAsset" + $ref: "#/components/schemas/readUpdateAsset" DeletedResource: description: Status of the delete resource content: @@ -406,7 +508,7 @@ components: type: object properties: code: - type: int + type: integer example: 200 description: type: string @@ -424,15 +526,6 @@ components: required: - code - links - BadRequest: - description: The request was malformed or semantically invalid - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 400 - description: "Invalid parameter" PermissionDenied: description: No Permission for this request content: @@ -443,6 +536,37 @@ components: code: 403 description: "Permission denied" schemas: + assetBase: + title: Asset + description: The `property name` defines the ID of the Asset. + type: object + required: + - type + - created + - updated + properties: + title: + $ref: "#/components/schemas/title" + description: + $ref: "#/components/schemas/description" + type: + $ref: "#/components/schemas/type" + href: + $ref: "#/components/schemas/href" + checksum:multihash: + $ref: "#/components/schemas/checksumMultihashReadOnly" + geoadmin:variant: + $ref: "#/components/schemas/geoadminVariant" + geoadmin:lang: + $ref: "#/components/schemas/geoadminLang" + proj:epsg: + $ref: "#/components/schemas/projEpsg" + eo:gsd: + $ref: "#/components/schemas/eoGsd" + created: + $ref: "#/components/schemas/created" + updated: + $ref: "#/components/schemas/updated" bbox: description: >- Only features that have a geometry that intersects the bounding box are selected. @@ -525,21 +649,29 @@ components: properties: bbox: $ref: "#/components/schemas/bboxfilter" - checksum:multihash: + checksumMultihash: description: >- `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. example: 90e402107a7f2588a85362b9beea2a12d4514d45 pattern: ^[a-f0-9]+$ title: Multihash - type: string, + type: string + checksumMultihashReadOnly: + description: | + `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. 
+ example: 90e402107a7f2588a85362b9beea2a12d4514d45 + pattern: ^[a-f0-9]+$ + title: Multihash + type: string + readOnly: true created: description: RFC 3339 compliant datetime string, time when the object was created example: 2018-02-12T23:20:50Z type: string format: date-time readOnly: true - collection: + collectionBase: properties: crs: default: @@ -552,8 +684,6 @@ components: type: array readOnly: true description: - schema: - $ref: "#/components/schemas/description" description: A description of the features in the collection example: >- Swiss Map Raster are a conversion of the map image into a digital form @@ -582,25 +712,6 @@ components: readOnly: true license: $ref: "#/components/schemas/license" - links: - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: items - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - items: - $ref: "#/components/schemas/link" - type: array providers: $ref: "#/components/schemas/providers" stac_version: @@ -666,7 +777,6 @@ components: $ref: "#/components/schemas/updated" required: - id - - links - stac_version - description - license @@ -674,6 +784,49 @@ components: - created - updated type: object + collection: + allOf: + - $ref: "#/components/schemas/collectionBase" + - type: object + required: + - links + properties: + links: + type: array + items: + $ref: "#/components/schemas/link" + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: items + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + collectionWrite: + title: collection + allOf: + - $ref: "#/components/schemas/collectionBase" + - type: object + properties: + links: + type: array + items: + $ref: "#/components/schemas/link" + example: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby collections: properties: collections: @@ -726,6 +879,7 @@ components: description: RFC 3339 compliant datetime string example: 2018-02-12T23:20:50Z type: string + format: date-time datetimeQuery: description: >- Either a date-time or an interval, open or closed. 
Date and time expressions @@ -754,13 +908,13 @@ components: $ref: "#/components/schemas/datetimeQuery" description: description: >- - Detailed multi-line description to fully explain the object (collection, item, - asset, ...). + Detailed multi-line description to fully explain the catalog or collection. + [CommonMark 0.29](http://commonmark.org/) syntax MAY be used for rich text representation. type: string - eo:gsd: + eoGsd: description: >- GSD is the nominal Ground Sample Distance for the data, as measured in meters on the ground. @@ -786,7 +940,7 @@ components: Information about the exception: an error code plus an optional description. properties: code: - type: int + type: integer example: 500 description: anyOf: @@ -894,7 +1048,7 @@ components: - temporal type: object readOnly: true - geoadmin:lang: + geoadminLang: enum: - de - it @@ -903,7 +1057,7 @@ components: - en title: Product language type: string - geoadmin:variant: + geoadminVariant: example: komb title: Product variants type: string @@ -924,6 +1078,13 @@ components: - type - geometries type: object + href: + type: string + format: url + description: Link to the asset object + readOnly: true + example: | + http://data.geo.admin.ch/ch.swisstopo.swissimage/collections/cs/items/CS3-20160503_132130_04/thumb.png ids: description: >- Array of Item ids to return. All other filter parameters that further restrict @@ -1036,47 +1197,10 @@ components: - type type: object itemAssets: + title: Assets + description: List of Assets attached to this feature. additionalProperties: - properties: - created: - $ref: "#/components/schemas/created" - description: - description: >- - Multi-line description to explain the asset. - - - [CommonMark 0.29](http://commonmark.org/) syntax MAY be used for rich - text representation. - example: Small 256x256px PNG thumbnail for a preview. - type: string - href: - description: Link to the asset object - example: http://cool-sat.com/catalog/collections/cs/items/CS3-20160503_132130_04/thumb.png - format: url - type: string - title: - description: Displayed title - example: Thumbnail - type: string - type: - description: Media type of the asset - example: image/tiff; application=geotiff - type: string - proj:epsg: - $ref: "#/components/schemas/proj:epsg" - geoadmin:variant: - $ref: "#/components/schemas/geoadmin:variant" - eo:gsd: - $ref: "#/components/schemas/eo:gsd" - updated: - $ref: "#/components/schemas/created" - required: - - "checksum:multihash" - - href - - type - - created - - updated - type: object + $ref: "#/components/schemas/assetBase" type: object readOnly: true example: @@ -1166,10 +1290,12 @@ components: $ref: "#/components/schemas/linkPostSearch" type: array itemId: - description: Item identifier (unique per collection) + title: ID + description: Feature identifier (unique per collection) example: smr200-200-4-2019 type: string itemProperties: + title: Properties description: >- Provides the core metadata fields plus extensions @@ -1202,6 +1328,7 @@ components: - updated type: object itemType: + title: type description: The GeoJSON type enum: - Feature @@ -1468,7 +1595,7 @@ components: - - 7.242974548172171 - 46.57310580640624 type: Polygon - proj:epsg: + projEpsg: description: >- A Coordinate Reference System (CRS) is the data reference system (sometimes called a 'projection') used by the asset data, and can usually be referenced @@ -1478,9 +1605,7 @@ components: there is no valid EPSG code. example: 2056 title: EPSG code. 
- type: - - integer - - null + type: integer providers: description: >- A list of providers, which may include all organizations capturing or processing @@ -1655,6 +1780,13 @@ components: description: >- Apply query operations to a specific property. The following properties are currently supported: `created`, `updated`, `title`. + roles: + type: array + items: + type: string + description: Purposes of the asset + example: + - thumbnail searchBody: allOf: - $ref: "#/components/schemas/queryFilter" @@ -1677,51 +1809,37 @@ components: example: "2017-08-17T08:05:32Z" format: date-time type: string + title: + type: string + description: Displayed title + example: Thumbnail + type: + type: string + description: Media type of the asset + example: image/tiff; application=geotiff updated: description: RFC 3339 compliant datetime string, time when the object was updated example: 2018-02-12T23:20:50Z type: string format: date-time readOnly: true - ETag: - schema: - type: string - description: >- - The RFC7232 ETag header field in a response provides the current entity- tag - for the selected resource. An entity-tag is an opaque identifier for different - versions of a resource over time, regardless whether multiple versions are - valid at the same time. An entity-tag consists of an opaque quoted string, - possibly prefixed by a weakness indicator. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - required: true assetId: type: string pattern: ^[a-z0-9.-_]+$ - title: asset id - description: >- - The asset id uniquely identifies the asset for an item - - - **Note**: `id` must be unique for the item and must be identical to the filename. - example: smr50-263-2016-2056-kgrs-2.5.tiff - updateAssetId: - type: string - pattern: ^[a-z0-9.-_]+$ - title: asset id + title: ID description: >- The asset id uniquely identifies the asset for an item **Note**: `id` must be unique for the item and must be identical to the filename. - When the `id` doesn't match the parameter `assetId`, the asset is renamed, - renaming also the object itself on S3. 
example: smr50-263-2016-2056-kgrs-2.5.tiff assets: + title: Assets type: object properties: assets: items: - $ref: "#/components/schemas/itemAsset" + $ref: "#/components/schemas/readUpdateAsset" type: array links: items: @@ -1738,51 +1856,21 @@ components: rel: item - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale rel: collection - assetBase: - type: object - required: - - created - - updated - properties: - id: - $ref: "#/components/schemas/assetId" - title: - $ref: "#/components/schemas/title" - description: - $ref: "#/components/schemas/description" - type: - $ref: "#/components/schemas/type" - geoadmin:variant: - $ref: "#/components/schemas/geoadmin:variant" - geoadmin:lang: - $ref: "#/components/schemas/geoadmin:lang" - proj:epsg: - $ref: "#/components/schemas/proj:epsg" - eo:gsd: - $ref: "#/components/schemas/eo:gsd" - created: - $ref: "#/components/schemas/created" - updated: - $ref: "#/components/schemas/updated" - itemAsset: + createAsset: allOf: - $ref: "#/components/schemas/assetBase" - type: object required: - id - - type - - href - - checksum:multihash - links properties: - checksum:multihash: - $ref: "#/components/schemas/checksum:multihash" - href: - $ref: "#/components/schemas/href" + id: + $ref: "#/components/schemas/assetId" links: items: $ref: "#/components/schemas/link" type: array + readOnly: true example: - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff rel: self @@ -1794,66 +1882,32 @@ components: rel: item - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale rel: collection - itemAssetWrite: + readUpdateAsset: allOf: - $ref: "#/components/schemas/assetBase" - type: object required: - id - - type - itemAssetUpdate: - allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - required: - - id - - type - properties: - id: - $ref: "#/components/schemas/updateAssetId" - itemAssetPartialUpdate: - allOf: - - $ref: "#/components/schemas/assetBase" - - type: object + - links properties: id: - $ref: "#/components/schemas/updateAssetId" - writeChecksumMultihash: - description: >- - `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) - format. - - - When provided the asset Object located at href will be checked against this - checksum and the request is rejected if the checksum don't match. - example: 90e402107a7f2588a85362b9beea2a12d4514d45 - pattern: ^[a-f0-9]+$ - title: Multihash - type: string, - writeHref: - type: string - format: url - description: >- - URL of the current location of the asset object. - - - The url must be publicly accessible. If the URL corresponds already to the - correct location of the asset object on s3, the checksum of the payload and - the object on s3 are compared and if matching the request will be accepted. - - - If the URL is different from the location that the asset object should have - according to the the pattern `///`, the - service will move the asset from this temporary location to the correct one. 
- default: https://data.geo.admin.ch/// - example: >- - http://data.geo.admin.ch/tmp/gdwh/ch.swisstopo.swissimage/CS3-20160503_132130_04.png - href: - type: string - format: url - description: Link to the asset object - example: >- - http://data.geo.admin.ch/ch.swisstopo.swissimage/collections/cs/items/CS3-20160503_132130_04/thumb.png + $ref: "#/components/schemas/assetId" + links: + items: + $ref: "#/components/schemas/link" + type: array + readOnly: true + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: item + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection writeItem: allOf: - $ref: "#/components/schemas/itemBase" @@ -1950,27 +2004,318 @@ components: - http://www.opengis.net/def/crs/EPSG/0/4326 example: title: The new title of the collection - roles: - type: array - items: - type: string - description: Purposes of the asset - example: - - thumbnail - title: - type: string - description: Displayed title - example: Thumbnail - type: - type: string - description: Media type of the asset - example: image/tiff; application=geotiff itemIdUpdate: description: >- Item identifier (unique per collection. If it doesn't match the `featureId` in path parameters, then the Item is renamed. example: smr200-200-4-2019 type: string + uploadId: + title: ID + type: string + description: Unique Asset upload identifier + example: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + readOnly: true + dtUploadCreated: + title: created + description: Date time when the Asset's upload has been created/started. + type: string + format: date-time + readOnly: true + dtUploadCompleted: + title: completed + description: | + Date time when the Asset's upload has been completed. + + *Note: this property is mutually exclusive with `aborted`* + type: string + format: date-time + readOnly: true + dtUploadAborted: + title: aborted + description: | + Date time when the Asset's upload has been aborted. + + *Note: this property is mutually exclusive with `completed`* + type: string + format: date-time + readOnly: true + assetUploads: + title: AssetUploads + type: object + required: + - uploads + - links + properties: + uploads: + description: List of uploads that are within the asset. + type: array + items: + $ref: "#/components/schemas/assetUpload" + links: + description: Next and/or previous links for the pagination. 
+ type: array + items: + $ref: "#/components/schemas/link" + example: + - rel: next + href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 + assetUpload: + title: AssetUpload + type: object + required: + - upload_id + - status + - created + - "checksum:multihash" + - number_parts + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + $ref: "#/components/schemas/status" + number_parts: + $ref: "#/components/schemas/number_parts" + urls: + type: array + description: | + Note: As soon as the multipart upload is completed or aborted, the `urls` property is removed. + items: + $ref: "#/components/schemas/multipartUploadUrl" + readOnly: true + created: + $ref: "#/components/schemas/dtUploadCreated" + completed: + $ref: "#/components/schemas/dtUploadCompleted" + aborted: + $ref: "#/components/schemas/dtUploadAborted" + checksum:multihash: + $ref: "#/components/schemas/checksumMultihash" + assetUploadCreate: + title: AssetUpload + type: object + required: + - upload_id + - status + - created + - "checksum:multihash" + - number_parts + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + $ref: "#/components/schemas/status" + number_parts: + $ref: "#/components/schemas/number_parts" + urls: + type: array + description: | + Note: As soon as the multipart upload is completed or aborted, the `urls` property is removed. + items: + $ref: "#/components/schemas/multipartUploadUrl" + readOnly: true + created: + $ref: "#/components/schemas/dtUploadCreated" + checksum:multihash: + $ref: "#/components/schemas/checksumMultihash" + assetCompleteUpload: + title: CompleteUpload + type: object + required: + - parts + properties: + parts: + type: array + description: Parts that have been uploaded + items: + title: File part that have been uploaded + type: object + required: + - etag + - part_number + properties: + etag: + title: ETag + type: string + description: >- + ETag of the uploaded file part (returned in the header of the answer + of [Upload asset file part](#operation/uploadAssetFilePart)). + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + part_number: + $ref: "#/components/schemas/part_number" + assetUploadCompleted: + title: UploadCompleted + type: object + required: + - upload_id + - status + - number_parts + - created + - completed + - "checksum:multihash" + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + title: Status + description: Status of the Asset's multipart upload. + type: string + enum: + - completed + example: completed + number_parts: + $ref: "#/components/schemas/number_parts" + created: + $ref: "#/components/schemas/dtUploadCreated" + completed: + $ref: "#/components/schemas/dtUploadCompleted" + checksum:multihash: + $ref: "#/components/schemas/checksumMultihash" + assetUploadAborted: + title: UploadCompleted + type: object + required: + - upload_id + - status + - number_parts + - created + - aborted + - "checksum:multihash" + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + title: Status + description: Status of the Asset's multipart upload. 
+ type: string + enum: + - aborted + example: aborted + number_parts: + $ref: "#/components/schemas/number_parts" + created: + $ref: "#/components/schemas/dtUploadCreated" + aborted: + $ref: "#/components/schemas/dtUploadAborted" + checksum:multihash: + $ref: "#/components/schemas/checksumMultihash" + assetUploadParts: + title: Parts + type: object + required: + - parts + - links + properties: + parts: + type: object + description: List of uploaded parts + required: + - etag + - part_number + - modified + - size + properties: + etag: + $ref: "#/components/schemas/uploadEtag" + part_number: + $ref: "#/components/schemas/part_number" + modified: + type: string + format: date-time + description: Date time when the part was added/modified + size: + type: integer + description: Part size in bytes + minimum: 0 + example: 1024 + links: + description: Next and/or previous links for the pagination. + type: array + items: + $ref: "#/components/schemas/link" + example: + - rel: next + href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads/upload-id/parts?limit=50&offset=50 + status: + title: Status + description: Status of the Asset's multipart upload. + type: string + enum: + - in-progress + - aborted + - completed + readOnly: true + number_parts: + description: Number of parts for the Asset's multipart upload. + type: integer + minimum: 1 + maximum: 100 + part_number: + description: Number of the part. + type: integer + minimum: 1 + maximum: 100 + multipartUploadUrl: + title: MultipartUploadUrl + description: Multipart upload url. + type: object + required: + - url + - part + - expires + properties: + url: + description: Presigned URL to use to upload the Asset File part using the + PUT method. + type: string + format: url + example: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: + description: Part number assigned to this presigned URL. + type: integer + minimum: 1 + maximum: 100 + expires: + description: Date time when this presigned URL expires and is not valid + anymore. + type: string + format: date-time + uploadEtag: + title: ETag + type: string + description: The RFC7232 ETag for the specified uploaded part. 
+ example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + examples: + inprogress: + summary: In progress upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: in-progress + number_parts: 1 + urls: + - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: 1 + expires: '2019-08-24T14:15:22Z' + created: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + completed: + summary: Completed upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + completed: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + aborted: + summary: Aborted upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + aborted: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 paths: /: get: @@ -2042,7 +2387,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/collection" + $ref: "#/components/schemas/collectionWrite" "403": $ref: "#/components/responses/PermissionDenied" "404": @@ -2086,7 +2431,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/collection" + $ref: "#/components/schemas/collectionWrite" example: description: The National Map 1:200,000 is a topographic map giving an overview of Switzerland. @@ -2148,7 +2493,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/collection" + $ref: "#/components/schemas/collectionWrite" example: id: ch.swisstopo.pixelkarte-farbe-pk200.noscale license: proprietary @@ -2509,15 +2854,10 @@ paths: - Data post: summary: Add a new asset to a feature - description: >- + description: | Create a new asset for a specific feature. - - When creating a new asset for a feature, the metadata of the asset is posted - to the API. The Asset object itself must be already publicly available at - the following URL: https://data.geo.admin.ch/{collectionId}/{featureId}/{assetId} - - Optionally the Asset object multihash can be given for sanity check. 
+ *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* operationId: postAsset tags: - Data Management @@ -2528,7 +2868,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/itemAssetWrite" + $ref: "#/components/schemas/createAsset" responses: "201": description: Return the created Asset @@ -2541,7 +2881,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/itemAsset" + $ref: "#/components/schemas/createAsset" "400": $ref: "#/components/responses/BadRequest" 5XX: @@ -2573,10 +2913,13 @@ paths: tags: - Data put: - summary: Update or create an asset + summary: Update or create an asset description: >- Update or create an asset with Id `assetId` with a complete asset definition. If the asset doesn't exists it is then created. + + + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* operationId: putAsset tags: - Data Management @@ -2589,14 +2932,26 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/itemAssetUpdate" + $ref: "#/components/schemas/readUpdateAsset" responses: "200": - description: Status of the update request. + description: Asset has been successfully updated. + content: + application/json: + schema: + $ref: "#/components/schemas/readUpdateAsset" + "201": + description: Asset has been newly created. + headers: + Location: + description: A link to the asset + schema: + type: string + format: url content: application/json: schema: - $ref: "#/components/schemas/itemAsset" + $ref: "#/components/schemas/createAsset" "400": $ref: "#/components/responses/BadRequest" "404": @@ -2610,6 +2965,9 @@ paths: description: >- Use this method to update an existing asset. Requires a JSON fragment (containing the fields to be updated) be submitted. + + + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* operationId: patchAsset tags: - Data Management @@ -2622,26 +2980,14 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/itemAssetPartialUpdate" + $ref: "#/components/schemas/readUpdateAsset" responses: "200": description: Returns the updated Asset. content: application/json: schema: - $ref: "#/components/schemas/itemAsset" - "201": - description: Returns the created Asset - headers: - Location: - description: A link to the asset - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/itemAsset" + $ref: "#/components/schemas/readUpdateAsset" "400": $ref: "#/components/responses/BadRequest" "404": @@ -2676,12 +3022,289 @@ paths: $ref: "#/components/responses/PreconditionFailed" 5XX: $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads: + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + get: + tags: + - Asset Upload Management + summary: List all Asset's multipart uploads + description: >- + Return a list of all Asset's multipart uploads that are in progress and have + been completed or aborted. + operationId: getAssetUploads + parameters: + - name: status + in: query + description: Filter the list by status. 
+ schema: + $ref: "#/components/schemas/status" + responses: + "200": + description: List of Asset's uploads + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploads" + example: + uploads: + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnusebaJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: in-progress + number_parts: 1 + urls: + - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: 1 + expires: '2019-08-24T14:15:22Z' + created: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YaaegJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + completed: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: aborted + number_parts: 1 + created: '2019-08-24T14:15:22Z' + aborted: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + links: + - rel: next + href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + 5XX: + $ref: "#/components/responses/ServerError" + post: + tags: + - Asset Upload Management + summary: Create a new Asset's multipart upload + description: | + Create a new Asset's multipart upload. + operationId: createAssetUpload + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadCreate" + responses: + "201": + description: Created Asset's multipart upload + headers: + Location: + description: A link to the Asset's multipart upload object + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadCreate" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + 5XX: + $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}: + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + get: + tags: + - Asset Upload Management + summary: Get an Asset's multipart upload + description: | + Return an Asset's multipart upload. + operationId: getAssetUpload + parameters: + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" + responses: + "200": + description: Asset's multipart upload description. 
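As an illustration of how the upload listing above and the abort endpoint defined further below can be combined, a hypothetical cleanup script could look like this (host, identifiers and credentials are placeholders):

```python
import os
import requests

BASE = "https://data.geoadmin.ch/api/stac/v0.9"  # placeholder host
ASSET = ("collections/ch.swisstopo.pixelkarte-farbe-pk200.noscale"
         "/items/smr200-200-4-2016/assets/smr200-200-4-2016-2056-kgrs-10.tiff")
AUTH = (os.environ.get("STAC_USER", ""), os.environ.get("STAC_PASSWORD", ""))

# List only the uploads that are still in progress (status query parameter).
uploads = requests.get(
    f"{BASE}/{ASSET}/uploads", params={"status": "in-progress"}, auth=AUTH
).json()

# Abort each of them; already uploaded parts are deleted server-side.
for upload in uploads["uploads"]:
    requests.post(f"{BASE}/{ASSET}/uploads/{upload['upload_id']}/abort", auth=AUTH)
```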
+ headers: + ETag: + $ref: "#/components/headers/ETag" + content: + application/json: + schema: + $ref: "#/components/schemas/assetUpload" + examples: + inprogress: + $ref: "#/components/examples/inprogress" + completed: + $ref: "#/components/examples/completed" + aborted: + $ref: "#/components/examples/aborted" + "304": + $ref: "#/components/responses/NotModified" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + "500": + $ref: "#/components/responses/ServerError" + /{presignedUrl}: + servers: + - url: http://data.geo.admin.ch/ + put: + tags: + - Asset Upload Management + summary: Upload asset file part + description: >- + Upload an Asset file part using the presigned url(s) returned by [Create a + new Asset's multipart upload](#operation/createAssetUpload). + + + Parts that have been uploaded but not completed can be checked using [Get + an Asset's multipart upload](#operation/getAssetUpload) + + + A file part must be at least 5 MB except for the last one and at most 5 GB, + otherwise the complete operation will fail. + + + *Note: this endpoint doesn't require any authentication as it is already part + of the presigned url* + operationId: uploadAssetFilePart + parameters: + - $ref: "#/components/parameters/presignedUrl" + - name: Content-MD5 + in: header + description: Asset file part content MD5. + required: true + schema: + type: string + responses: + "200": + description: Asset file uploaded part successfully + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- + tag for the selected resource. + + + This ETag is required in the complete multipart upload payload. + + + An entity-tag is an opaque identifier for different versions of a + resource over time, regardless whether multiple versions are valid + at the same time. An entity-tag consists of an opaque quoted string. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true + /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/complete: + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + post: + tags: + - Asset Upload Management + summary: Complete multipart upload + operationId: completeMultipartUpload + description: >- + Complete the multipart upload process. After completion, the Asset metadata + are updated with the new `checksum:multihash` from the upload and the parts + are automatically deleted. The Asset `href` field is also set if it was the + first upload. + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/assetCompleteUpload" + responses: + "200": + description: Asset multipart upload completed successfully. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadCompleted" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + 5XX: + $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/abort: + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + post: + tags: + - Asset Upload Management + summary: Abort multipart upload + operationId: abortMultipartUpload + description: >- + Abort the multipart upload process. All already uploaded parts are automatically + deleted. + responses: + "200": + description: Asset multipart upload aborted successfully. + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadAborted" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + 5XX: + $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/parts: + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + get: + tags: + - Asset Upload Management + summary: Get upload parts + operationId: getUploadParts + description: >- + Return the list of already uploaded parts. + + + ### Pagination + + By default all parts are returned (maximum number of parts being 100). The + user can use pagination to reduce the number of returned parts. Pagination + is done via the `limit` query parameter (see below). + parameters: + - $ref: "#/components/parameters/limit" + responses: + "200": + description: List of parts already uploaded. + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadParts" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + 5XX: + $ref: "#/components/responses/ServerError" /get-token: servers: - url: http://data.geo.admin.ch/api/stac/ post: tags: - - Data Management + - Authentication summary: >- Request token for token authentication. operationId: getToken @@ -2694,7 +3317,7 @@ paths: properties: username: type: string - decscription: name of user for whom token is requested + description: name of user for whom token is requested password: type: string description: password of user for whom token is requested diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index 14684887..32971545 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -368,7 +368,10 @@ paths: - Data post: summary: Add a new asset to a feature - description: Create a new asset for a specific feature. + description: | + Create a new asset for a specific feature. + + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* operationId: postAsset tags: - Data Management @@ -429,6 +432,9 @@ paths: description: >- Update or create an asset with Id `assetId` with a complete asset definition. If the asset doesn't exists it is then created. + + + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* operationId: putAsset tags: - Data Management @@ -474,6 +480,9 @@ paths: description: >- Use this method to update an existing asset. 
Requires a JSON fragment (containing the fields to be updated) be submitted. + + + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* operationId: patchAsset tags: - Data Management @@ -528,6 +537,286 @@ paths: $ref: "#/components/responses/PreconditionFailed" "5XX": $ref: "#/components/responses/ServerError" + + + "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads": + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + get: + tags: + - Asset Upload Management + summary: List all Asset's multipart uploads + description: >- + Return a list of all Asset's multipart uploads that are in progress and have been completed + or aborted. + operationId: getAssetUploads + parameters: + - name: status + in: query + description: Filter the list by status. + schema: + $ref: "#/components/schemas/status" + responses: + 200: + description: List of Asset's uploads + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploads" + example: + uploads: + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnusebaJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: in-progress + number_parts: 1 + urls: + - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: 1 + expires: '2019-08-24T14:15:22Z' + created: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YaaegJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + completed: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: aborted + number_parts: 1 + created: '2019-08-24T14:15:22Z' + aborted: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + links: + - rel: next + href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + "5XX": + $ref: "#/components/responses/ServerError" + post: + tags: + - Asset Upload Management + summary: Create a new Asset's multipart upload + description: | + Create a new Asset's multipart upload. 
+ operationId: createAssetUpload + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadCreate" + responses: + 201: + description: Created Asset's multipart upload + headers: + Location: + description: A link to the Asset's multipart upload object + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadCreate" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + "5XX": + $ref: "#/components/responses/ServerError" + "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}": + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + get: + tags: + - Asset Upload Management + summary: Get an Asset's multipart upload + description: | + Return an Asset's multipart upload. + operationId: getAssetUpload + parameters: + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" + responses: + "200": + description: Asset's multipart upload description. + headers: + ETag: + $ref: "#/components/headers/ETag" + content: + application/json: + schema: + $ref: "#/components/schemas/assetUpload" + examples: + inprogress: + $ref: "#/components/examples/inprogress" + completed: + $ref: "#/components/examples/completed" + aborted: + $ref: "#/components/examples/aborted" + "304": + $ref: "#/components/responses/NotModified" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + "500": + $ref: "#/components/responses/ServerError" + "/{presignedUrl}": + servers: + - url: http://data.geo.admin.ch/ + put: + tags: + - Asset Upload Management + summary: Upload asset file part + description: >- + Upload an Asset file part using the presigned url(s) returned by + [Create a new Asset's multipart upload](#operation/createAssetUpload). + + + Parts that have been uploaded but not completed can be checked using + [Get an Asset's multipart upload](#operation/getAssetUpload) + + + A file part must be at least 5 MB except for the last one and at most 5 GB, otherwise the + complete operation will fail. + + + *Note: this endpoint doesn't require any authentication as it is already part of the + presigned url* + operationId: uploadAssetFilePart + parameters: + - $ref: "#/components/parameters/presignedUrl" + - name: Content-MD5 + in: header + description: Asset file part content MD5. + required: true + schema: + type: string + responses: + "200": + description: Asset file uploaded part successfully + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- + tag for the selected resource. + + + This ETag is required in the complete multipart upload payload. + + + An entity-tag is an opaque identifier for + different versions of a resource over time, regardless whether multiple + versions are valid at the same time. An entity-tag consists of an opaque + quoted string. 
+ example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true + "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/complete": + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + post: + tags: + - Asset Upload Management + summary: Complete multipart upload + operationId: completeMultipartUpload + description: >- + Complete the multipart upload process. After completion, the Asset metadata are updated + with the new `checksum:multihash` from the upload and the parts are automatically deleted. + The Asset `href` field is also set if it was the first upload. + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/assetCompleteUpload" + responses: + "200": + description: Asset multipart upload completed successfully. + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadCompleted" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + "5XX": + $ref: "#/components/responses/ServerError" + "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/abort": + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + post: + tags: + - Asset Upload Management + summary: Abort multipart upload + operationId: abortMultipartUpload + description: >- + Abort the multipart upload process. All already uploaded parts are automatically deleted. + responses: + "200": + description: Asset multipart upload aborted successfully. + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadAborted" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + "5XX": + $ref: "#/components/responses/ServerError" + "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/parts": + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + get: + tags: + - Asset Upload Management + summary: Get upload parts + operationId: getUploadParts + description: >- + Return the list of already uploaded parts. + + + ### Pagination + + By default all parts are returned (maximum number of parts being 100). The user can + use pagination to reduce the number of returned parts. Pagination is done via the `limit` + query parameter (see below). + parameters: + - $ref: "#/components/parameters/limit" + responses: + "200": + description: List of parts already uploaded. + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadParts" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + "5XX": + $ref: "#/components/responses/ServerError" + + "/get-token": servers: - url: http://data.geo.admin.ch/api/stac/ @@ -583,12 +872,58 @@ paths: example: code: 400 description: "Unable to log in with provided credentials." 
+ components: + examples: + inprogress: + summary: In progress upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: in-progress + number_parts: 1 + urls: + - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: 1 + expires: '2019-08-24T14:15:22Z' + created: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + completed: + summary: Completed upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + completed: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + aborted: + summary: Aborted upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + aborted: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 parameters: assetId: name: assetId in: path - description: Local identifier of a asset + description: Local identifier of an asset. + required: true + schema: + type: string + uploadId: + name: uploadId + in: path + description: Local identifier of an asset's upload. + required: true + schema: + type: string + presignedUrl: + name: presignedUrl + in: path + description: Presigned url returned by [Create a new Asset's multipart upload](#operation/createAssetUpload). required: true schema: type: string @@ -795,6 +1130,282 @@ components: parameters, then the Item is renamed. example: smr200-200-4-2019 type: string + uploadId: + title: ID + type: string + description: Unique Asset upload identifier + example: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + readOnly: true + dtUploadCreated: + title: created + description: Date time when the Asset's upload has been created/started. + type: string + format: date-time + readOnly: true + dtUploadCompleted: + title: completed + description: | + Date time when the Asset's upload has been completed. + + *Note: this property is mutually exclusive with `aborted`* + type: string + format: date-time + readOnly: true + dtUploadAborted: + title: aborted + description: | + Date time when the Asset's upload has been aborted. + + *Note: this property is mutually exclusive with `completed`* + type: string + format: date-time + readOnly: true + assetUploads: + title: AssetUploads + type: object + required: + - uploads + - links + properties: + uploads: + description: List of uploads that are within the asset. + type: array + items: + $ref: "#/components/schemas/assetUpload" + links: + description: Next and/or previous links for the pagination. 
+ type: array + items: + $ref: "#/components/schemas/link" + example: + - rel: next + href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 + assetUpload: + title: AssetUpload + type: object + required: + - upload_id + - status + - created + - "checksum:multihash" + - number_parts + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + $ref: "#/components/schemas/status" + number_parts: + $ref: "#/components/schemas/number_parts" + urls: + type: array + description: | + Note: As soon as the multipart upload is completed or aborted, the `urls` property is removed. + items: + $ref: "#/components/schemas/multipartUploadUrl" + readOnly: true + created: + $ref: "#/components/schemas/dtUploadCreated" + completed: + $ref: "#/components/schemas/dtUploadCompleted" + aborted: + $ref: "#/components/schemas/dtUploadAborted" + "checksum:multihash": + $ref: "#/components/schemas/checksumMultihash" + assetUploadCreate: + title: AssetUpload + type: object + required: + - upload_id + - status + - created + - "checksum:multihash" + - number_parts + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + $ref: "#/components/schemas/status" + number_parts: + $ref: "#/components/schemas/number_parts" + urls: + type: array + description: | + Note: As soon as the multipart upload is completed or aborted, the `urls` property is removed. + items: + $ref: "#/components/schemas/multipartUploadUrl" + readOnly: true + created: + $ref: "#/components/schemas/dtUploadCreated" + "checksum:multihash": + $ref: "#/components/schemas/checksumMultihash" + assetCompleteUpload: + title: CompleteUpload + type: object + required: + - parts + properties: + parts: + type: array + description: Parts that have been uploaded + items: + title: File part that have been uploaded + type: object + required: + - etag + - part_number + properties: + etag: + title: ETag + type: string + description: >- + ETag of the uploaded file part (returned in the header of the answer of + [Upload asset file part](#operation/uploadAssetFilePart)). + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + part_number: + $ref: "#/components/schemas/part_number" + assetUploadCompleted: + title: UploadCompleted + type: object + required: + - upload_id + - status + - number_parts + - created + - completed + - "checksum:multihash" + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + title: Status + description: Status of the Asset's multipart upload. + type: string + enum: + - completed + example: + completed + number_parts: + $ref: "#/components/schemas/number_parts" + created: + $ref: "#/components/schemas/dtUploadCreated" + completed: + $ref: "#/components/schemas/dtUploadCompleted" + "checksum:multihash": + $ref: "#/components/schemas/checksumMultihash" + assetUploadAborted: + title: UploadCompleted + type: object + required: + - upload_id + - status + - number_parts + - created + - aborted + - "checksum:multihash" + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + title: Status + description: Status of the Asset's multipart upload. 
+ type: string + enum: + - aborted + example: + aborted + number_parts: + $ref: "#/components/schemas/number_parts" + created: + $ref: "#/components/schemas/dtUploadCreated" + aborted: + $ref: "#/components/schemas/dtUploadAborted" + "checksum:multihash": + $ref: "#/components/schemas/checksumMultihash" + assetUploadParts: + title: Parts + type: object + required: + - parts + - links + properties: + parts: + type: object + description: List of uploaded parts + required: + - etag + - part_number + - modified + - size + properties: + etag: + $ref: "#/components/schemas/uploadEtag" + part_number: + $ref: "#/components/schemas/part_number" + modified: + type: string + format: date-time + description: Date time when the part was added/modified + size: + type: integer + description: Part size in bytes + minimum: 0 + example: 1024 + links: + description: Next and/or previous links for the pagination. + type: array + items: + $ref: "#/components/schemas/link" + example: + - rel: next + href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads/upload-id/parts?limit=50&offset=50 + status: + title: Status + description: Status of the Asset's multipart upload. + type: string + enum: + - in-progress + - aborted + - completed + readOnly: true + number_parts: + description: Number of parts for the Asset's multipart upload. + type: integer + minimum: 1 + maximum: 100 + part_number: + description: Number of the part. + type: integer + minimum: 1 + maximum: 100 + multipartUploadUrl: + title: MultipartUploadUrl + description: Multipart upload url. + type: object + required: + - url + - part + - expires + properties: + url: + description: Presigned URL to use to upload the Asset File part using the PUT method. + type: string + format: url + example: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: + description: Part number assigned to this presigned URL. + type: integer + minimum: 1 + maximum: 100 + expires: + description: Date time when this presigned URL expires and is not valid anymore. + type: string + format: date-time + uploadEtag: + title: ETag + type: string + description: The RFC7232 ETag for the specified uploaded part. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + responses: Assets: description: >- @@ -868,6 +1479,53 @@ tags: Asset file can be uploaded via the STAC API using the following requests. 
*NOTE: the POST requests requires authentication as described in [here](#tag/Authentication).* + + ### Example + + ```python + import os + import hashlib + + import requests + import multihash + + # variables + scheme = 'https' + hostname = 'data.geo.admin.ch' + collection = 'ch.swisstopo.pixelkarte-farbe-pk200.noscale' + item = 'smr200-200-4-2016' + asset = 'smr200-200-4-2016-2056-kgrs-10.tiff' + asset_path = f'collections/{collection}/items/{item}/assets/{asset}' + user = os.environ.get('STAC_USER', 'unknown-user') + password = os.environ.get('STAC_PASSWORD', 'unknown-password') + + with open('smr200-200-4-2016-2056-kgrs-10.tiff', 'rb') as fd: + data = fd.read() + + checksum_multihash = multihash.to_hex_string(multihash.encode(hashlib.sha256(data).digest(), 'sha2-256')) + + # 1. Create a multipart upload + response = requests.post( + f"{scheme}://{hostname}/api/stac/v0.9/{asset_path}/uploads", + auth=(user, password), + json={ + "number_parts": 1, + "checksum:multihash": checksum_multihash + } + ) + upload_id = response.json()['upload_id'] + + # 2. Upload the part using the presigned url + response = requests.put(response.json()['urls'][0]['url'], data=data) + etag = response.headers['ETag'] + + # 3. Complete the upload + response = requests.post( + f"{scheme}://{hostname}/api/stac/v0.9/{asset_path}/uploads/{upload_id}/complete", + auth=(user, password), + json={'parts': [{'etag': etag, 'part_number': 1}]} + ) + ``` - name: Authentication description: | All write requests require authentication. There is currently three type of supported authentications: From 40ccb18a7c1b9bd822a07a37c2ab26d8ebda69f9 Mon Sep 17 00:00:00 2001 From: Brice Schaffner <67745584+ltshb@users.noreply.github.com> Date: Mon, 19 Apr 2021 14:01:58 +0200 Subject: [PATCH 083/105] Corrected grammatical --- spec/transaction/transaction.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml index 32971545..f4aa54fc 100644 --- a/spec/transaction/transaction.yml +++ b/spec/transaction/transaction.yml @@ -1138,14 +1138,14 @@ components: readOnly: true dtUploadCreated: title: created - description: Date time when the Asset's upload has been created/started. + description: Date and time when the Asset's upload has been created/started. type: string format: date-time readOnly: true dtUploadCompleted: title: completed description: | - Date time when the Asset's upload has been completed. + Date and time when the Asset's upload has been completed. *Note: this property is mutually exclusive with `aborted`* type: string @@ -1154,7 +1154,7 @@ components: dtUploadAborted: title: aborted description: | - Date time when the Asset's upload has been aborted. + Date and time when the Asset's upload has been aborted. *Note: this property is mutually exclusive with `completed`* type: string @@ -1248,7 +1248,7 @@ components: type: array description: Parts that have been uploaded items: - title: File part that have been uploaded + title: File parts that have been uploaded type: object required: - etag @@ -1473,12 +1473,12 @@ tags: Metadata management requests. Theses requests are used to create, update or delete the STAC metadata. - *NOTE: these requests requires authentication as described in [here](#tag/Authentication).* + *NOTE: these requests require authentication as described in [here](#tag/Authentication).* - name: Asset Upload Management description: | Asset file can be uploaded via the STAC API using the following requests. 
- *NOTE: the POST requests requires authentication as described in [here](#tag/Authentication).* + *NOTE: the POST requests require authentication as described in [here](#tag/Authentication).* ### Example From d865f2a89ddaa201a10dc36332f3f49d6b7fab8e Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Mon, 19 Apr 2021 10:59:52 +0200 Subject: [PATCH 084/105] BGDIINF_SB-1746: Removed the Item renaming functionality Also improved the Asset admin page. In this one the read only item field still had the help text for the field search functionality which don't make sense for read only field. To avoid this simply changed the logic, adding extra read only fields that are only displayed in the change view and the write item field is only display in the add view. Also improved the logging in case off renaming. --- app/stac_api/admin.py | 53 +++++++++++++++++++++++------- app/stac_api/validators_view.py | 20 ++++++------ app/stac_api/views.py | 34 ++++++++++++++++--- app/tests/test_assets_endpoint.py | 4 +-- app/tests/test_items_endpoint.py | 54 ++++++++++++++++++------------- 5 files changed, 115 insertions(+), 50 deletions(-) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index 17f194bc..683e1714 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -114,6 +114,7 @@ class Media: inlines = [ItemLinkInline] autocomplete_fields = ['collection'] search_fields = ['name', 'collection__name'] + readonly_fields = ['collection_name'] fieldsets = ( (None, { 'fields': ('name', 'collection', 'geometry') @@ -167,6 +168,30 @@ def get_search_results(self, request, queryset, search_term): queryset &= self.model.objects.filter(collection__name__exact=collection_name) return queryset, use_distinct + # Here we use a special field for read only to avoid adding the extra help text for search + # functionality + def collection_name(self, obj): + return obj.collection.name + + collection_name.admin_order_field = 'collection__name' + collection_name.short_description = 'Collection Id' + + # We don't want to move the assets on S3 + # That's why some fields like the name of the item and the collection name are set readonly here + # for update operations. Those fields value are used as key on S3 that's why renaming them + # would mean that the Asset on S3 should be moved. + def get_fieldsets(self, request, obj=None): + fields = super().get_fieldsets(request, obj) + if obj is None: + # In case a new Item is added use the normal field 'collection' from model that have + # a help text fort the search functionality. 
+ fields[0][1]['fields'] = ('name', 'collection') + return fields + # Otherwise if this is an update operation only display the read only field + # without help text + fields[0][1]['fields'] = ('name', 'collection_name') + return fields + @admin.register(Asset) class AssetAdmin(admin.ModelAdmin): @@ -177,11 +202,11 @@ class Media: autocomplete_fields = ['item'] search_fields = ['name', 'item__name', 'item__collection__name'] - readonly_fields = ['item_name', 'collection', 'href', 'checksum_multihash'] - list_display = ['name', 'item_name', 'collection'] + readonly_fields = ['item_name', 'collection_name', 'href', 'checksum_multihash'] + list_display = ['name', 'item_name', 'collection_name'] fieldsets = ( (None, { - 'fields': ('name', 'item', 'item_name', 'collection') + 'fields': ('name', 'item') }), ('File', { 'fields': ('file', 'media_type', 'href', 'checksum_multihash') @@ -221,11 +246,11 @@ def get_search_results(self, request, queryset, search_term): queryset &= self.model.objects.filter(item__collection__name__exact=collection_name) return queryset, use_distinct - def collection(self, instance): + def collection_name(self, instance): return instance.item.collection - collection.admin_order_field = 'item__collection' - collection.short_description = 'Collection Id' + collection_name.admin_order_field = 'item__collection' + collection_name.short_description = 'Collection Id' def item_name(self, instance): return instance.item.name @@ -255,8 +280,14 @@ def href(self, instance): # We don't want to move the assets on S3 # That's why some fields like the name of the asset are set readonly here # for update operations - def change_view(self, request, object_id, form_url='', extra_context=None): - self.readonly_fields = self.get_readonly_fields(request) - self.readonly_fields.extend(['name', 'item']) - - return super().change_view(request, object_id, form_url, extra_context) + def get_fieldsets(self, request, obj=None): + fields = super().get_fieldsets(request, obj) + if obj is None: + # In case a new Asset is added use the normal field 'item' from model that have + # a help text fort the search functionality. + fields[0][1]['fields'] = ('name', 'item') + return fields + # Otherwise if this is an update operation only display the read only fields + # without help text + fields[0][1]['fields'] = ('name', 'item_name', 'collection_name') + return fields diff --git a/app/stac_api/validators_view.py b/app/stac_api/validators_view.py index b138bc70..1f61748d 100644 --- a/app/stac_api/validators_view.py +++ b/app/stac_api/validators_view.py @@ -78,26 +78,26 @@ def validate_asset(kwargs): ) -def validate_renaming(serializer, id_field='', original_id='', extra_log=None): - '''Validate that the asset name is not different from the one defined in +def validate_renaming(serializer, original_id, id_field='name', extra_log=None): + '''Validate that the object name is not different from the one defined in the data. 
Args: serializer: serializer object The serializer to derive the data from - id_field: string - The key to get the name/id in the data dict original_id: string The id/name derived from the request kwargs - extra: djangoHttpRequest object - The request object for logging purposes + id_field: string + The key to get the name/id in the data dict (default 'name') + extra_log: dict + Dictionary to pass to the log extra in case of error Raises: - Http400: when the asset will be renamed/moved + Http400: when the object will be renamed/moved ''' data = serializer.validated_data if id_field in data.keys(): if data[id_field] != original_id: - message = 'Renaming object is not allowed' - logger.error(message, extra={'request': extra_log}) - raise ValidationError(_(message), code='invalid') + message = 'Renaming is not allowed' + logger.error(message, extra=extra_log) + raise ValidationError({'id': _(message)}, code='invalid') diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 07610541..6d730989 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -422,10 +422,28 @@ def get_queryset(self): def perform_update(self, serializer): collection = get_object_or_404(Collection, name=self.kwargs['collection_name']) + validate_renaming( + serializer, + self.kwargs['item_name'], + extra_log={ + 'request': self.request, + 'collection': self.kwargs['collection_name'], + 'item': self.kwargs['item_name'] + } + ) serializer.save(collection=collection) def perform_upsert(self, serializer, lookup): collection = get_object_or_404(Collection, name=self.kwargs['collection_name']) + validate_renaming( + serializer, + self.kwargs['item_name'], + extra_log={ + 'request': self.request, + 'collection': self.kwargs['collection_name'], + 'item': self.kwargs['item_name'] + } + ) return serializer.upsert(lookup, collection=collection) @etag(get_item_etag) @@ -536,9 +554,13 @@ def perform_update(self, serializer): ) validate_renaming( serializer, - id_field='name', original_id=self.kwargs['asset_name'], - extra_log=self.request + extra_log={ + 'request': self.request, + 'collection': self.kwargs['collection_name'], + 'item': self.kwargs['item_name'], + 'asset': self.kwargs['asset_name'] + } ) serializer.save(item=item, file=get_asset_path(item, self.kwargs['asset_name'])) @@ -548,9 +570,13 @@ def perform_upsert(self, serializer, lookup): ) validate_renaming( serializer, - id_field='name', original_id=self.kwargs['asset_name'], - extra_log=self.request + extra_log={ + 'request': self.request, + 'collection': self.kwargs['collection_name'], + 'item': self.kwargs['item_name'], + 'asset': self.kwargs['asset_name'] + } ) return serializer.upsert( lookup, item=item, file=get_asset_path(item, self.kwargs['asset_name']) diff --git a/app/tests/test_assets_endpoint.py b/app/tests/test_assets_endpoint.py index 6693a9eb..75e1f45d 100644 --- a/app/tests/test_assets_endpoint.py +++ b/app/tests/test_assets_endpoint.py @@ -601,7 +601,7 @@ def test_asset_endpoint_put_rename_asset(self): path, data=changed_asset.get_json('put'), content_type="application/json" ) self.assertStatusCode(400, response) - self.assertEqual(['Renaming object is not allowed'], + self.assertEqual({'id': 'Renaming is not allowed'}, response.json()['description'], msg='Unexpected error message') @@ -639,7 +639,7 @@ def test_asset_endpoint_patch_rename_asset(self): path, data=changed_asset.get_json('patch'), content_type="application/json" ) self.assertStatusCode(400, response) - self.assertEqual(['Renaming object is not allowed'], + 
self.assertEqual({'id': 'Renaming is not allowed'}, response.json()['description'], msg='Unexpected error message') diff --git a/app/tests/test_items_endpoint.py b/app/tests/test_items_endpoint.py index c8fc8d79..659dbdff 100644 --- a/app/tests/test_items_endpoint.py +++ b/app/tests/test_items_endpoint.py @@ -601,20 +601,24 @@ def test_item_endpoint_put_rename_item(self): path, data=sample.get_json('put'), content_type="application/json" ) json_data = response.json() - self.assertStatusCode(200, response) - self.assertEqual(sample.json['id'], json_data['id']) - self.check_stac_item(sample.json, json_data, self.collection["name"]) + self.assertStatusCode(400, response) + self.assertEqual(json_data['description'], {'id': 'Renaming is not allowed'}) - response = self.client.get(path) - self.assertStatusCode(404, response, msg="Renamed item still available on old name") + # Make sure the original item was not updated + self.assertTrue( + Item.objects.all().filter( + name=self.item["name"], collection__name=self.collection['name'] + ).exists(), + msg="Original item doesn't exists anymore after trying to rename it" + ) - # Check the data by reading it back - path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{sample.json["id"]}' - response = self.client.get(path) - json_data = response.json() - self.assertStatusCode(200, response) - self.assertEqual(sample.json['id'], json_data['id']) - self.check_stac_item(sample.json, json_data, self.collection["name"]) + # Make sure the rename item was done + self.assertFalse( + Item.objects.all().filter( + name=sample["name"], collection__name=self.collection['name'] + ).exists(), + msg="Original item doesn't exists anymore after trying to rename it" + ) def test_item_endpoint_patch(self): data = {"properties": {"title": "patched title"}} @@ -661,19 +665,23 @@ def test_item_endpoint_patch_rename_item(self): path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{self.item["name"]}' response = self.client.patch(path, data=data, content_type="application/json") json_data = response.json() - self.assertStatusCode(200, response) - self.assertEqual(data['id'], json_data['id']) - self.check_stac_item(data, json_data, self.collection["name"]) + self.assertStatusCode(400, response) + self.assertEqual(json_data['description'], {'id': 'Renaming is not allowed'}) - response = self.client.get(path) - self.assertStatusCode(404, response, msg="Renamed item still available on old name") + # Make sure the original item was not updated + self.assertTrue( + Item.objects.all().filter( + name=self.item["name"], collection__name=self.collection['name'] + ).exists(), + msg="Original item doesn't exists anymore after trying to rename it" + ) - # Check the data by reading it back - path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}/items/{data["id"]}' - response = self.client.get(path) - json_data = response.json() - self.assertStatusCode(200, response) - self.assertEqual(data['id'], json_data['id']) + # Make sure the rename item was done + self.assertFalse( + Item.objects.all().filter(name=data["id"], + collection__name=self.collection['name']).exists(), + msg="Original item doesn't exists anymore after trying to rename it" + ) def test_item_upsert_create(self): From ec6adbb306213e6158f1b0aa91a0fbd586e93613 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Mon, 19 Apr 2021 15:26:10 +0200 Subject: [PATCH 085/105] BGDIINF_SB-1746: Fixed minor issue from code review --- app/stac_api/admin.py | 4 ++-- app/tests/test_items_endpoint.py | 8 
++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index 683e1714..30ea6483 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -247,9 +247,9 @@ def get_search_results(self, request, queryset, search_term): return queryset, use_distinct def collection_name(self, instance): - return instance.item.collection + return instance.item.collection.name - collection_name.admin_order_field = 'item__collection' + collection_name.admin_order_field = 'item__collection__name' collection_name.short_description = 'Collection Id' def item_name(self, instance): diff --git a/app/tests/test_items_endpoint.py b/app/tests/test_items_endpoint.py index 659dbdff..13818a27 100644 --- a/app/tests/test_items_endpoint.py +++ b/app/tests/test_items_endpoint.py @@ -612,12 +612,12 @@ def test_item_endpoint_put_rename_item(self): msg="Original item doesn't exists anymore after trying to rename it" ) - # Make sure the rename item was done + # Make sure the rename item was not done self.assertFalse( Item.objects.all().filter( name=sample["name"], collection__name=self.collection['name'] ).exists(), - msg="Original item doesn't exists anymore after trying to rename it" + msg="Renamed item shouldn't exist" ) def test_item_endpoint_patch(self): @@ -676,11 +676,11 @@ def test_item_endpoint_patch_rename_item(self): msg="Original item doesn't exists anymore after trying to rename it" ) - # Make sure the rename item was done + # Make sure the rename item was not done self.assertFalse( Item.objects.all().filter(name=data["id"], collection__name=self.collection['name']).exists(), - msg="Original item doesn't exists anymore after trying to rename it" + msg="Renamed item shouldn't exist" ) def test_item_upsert_create(self): From c1556330c273177d73639a1ac9bc2ba7c2142524 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Mon, 19 Apr 2021 15:53:07 +0200 Subject: [PATCH 086/105] BGDIINF_SB-1746: Fixed issues due to rebasing Updated the validate_renaming for collection due to rebasing. Also use the `self.request._request` in logging, because in the view the request is not a Django HttpRequest but a Rest Framework Request object. The `._request` is the the Django HttpRequest object. 
--- app/stac_api/admin.py | 1 + app/stac_api/views.py | 24 ++++++++++++++++-------- app/tests/test_collections_endpoint.py | 2 +- 3 files changed, 18 insertions(+), 9 deletions(-) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index 30ea6483..23e9b43b 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -66,6 +66,7 @@ class CollectionAdmin(admin.ModelAdmin): class Media: js = ('js/admin/collection_help_search.js',) css = {'all': ('style/hover.css',)} + fields = [ 'name', 'title', diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 6d730989..3dfdbcc0 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -311,16 +311,24 @@ def patch(self, request, *args, **kwargs): def perform_upsert(self, serializer, lookup): validate_renaming( serializer, - 'name', - self.kwargs['collection_name'], {'collection': self.kwargs['collection_name']} + self.kwargs['collection_name'], + extra_log={ + # pylint: disable=protected-access + 'request': self.request._request, + 'collection': self.kwargs['collection_name'] + } ) return super().perform_upsert(serializer, lookup) def perform_update(self, serializer, *args, **kwargs): validate_renaming( serializer, - 'name', - self.kwargs['collection_name'], {'collection': self.kwargs['collection_name']} + self.kwargs['collection_name'], + extra_log={ + # pylint: disable=protected-access + 'request': self.request._request, + 'collection': self.kwargs['collection_name'] + } ) return super().perform_update(serializer, *args, **kwargs) @@ -426,7 +434,7 @@ def perform_update(self, serializer): serializer, self.kwargs['item_name'], extra_log={ - 'request': self.request, + 'request': self.request._request, # pylint: disable=protected-access 'collection': self.kwargs['collection_name'], 'item': self.kwargs['item_name'] } @@ -439,7 +447,7 @@ def perform_upsert(self, serializer, lookup): serializer, self.kwargs['item_name'], extra_log={ - 'request': self.request, + 'request': self.request._request, # pylint: disable=protected-access 'collection': self.kwargs['collection_name'], 'item': self.kwargs['item_name'] } @@ -556,7 +564,7 @@ def perform_update(self, serializer): serializer, original_id=self.kwargs['asset_name'], extra_log={ - 'request': self.request, + 'request': self.request._request, # pylint: disable=protected-access 'collection': self.kwargs['collection_name'], 'item': self.kwargs['item_name'], 'asset': self.kwargs['asset_name'] @@ -572,7 +580,7 @@ def perform_upsert(self, serializer, lookup): serializer, original_id=self.kwargs['asset_name'], extra_log={ - 'request': self.request, + 'request': self.request._request, # pylint: disable=protected-access 'collection': self.kwargs['collection_name'], 'item': self.kwargs['item_name'], 'asset': self.kwargs['asset_name'] diff --git a/app/tests/test_collections_endpoint.py b/app/tests/test_collections_endpoint.py index 4cfe6f34..2fed1f7c 100644 --- a/app/tests/test_collections_endpoint.py +++ b/app/tests/test_collections_endpoint.py @@ -247,7 +247,7 @@ def test_collection_put_change_id(self): content_type='application/json' ) self.assertStatusCode(400, response) - self.assertEqual(['Renaming object is not allowed'], + self.assertEqual({'id': 'Renaming is not allowed'}, response.json()['description'], msg='Unexpected error message') From c7d10260edbeb8ebbd37cb5ae482936c225783e1 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Mon, 19 Apr 2021 16:14:26 +0200 Subject: [PATCH 087/105] BGDIINF_SB-1742: Added collection delete endpoint Collection delete is only allowed if the 
collection doesn't have anymore items. --- .../migrations/0006_auto_20210419_1409.py | 25 +++++++++ app/stac_api/models.py | 2 +- app/stac_api/views.py | 6 +- app/stac_api/views_mixins.py | 21 ++++++- app/tests/test_admin_page.py | 22 +++++++- app/tests/test_collections_endpoint.py | 56 ++++++++++++++++--- 6 files changed, 117 insertions(+), 15 deletions(-) create mode 100644 app/stac_api/migrations/0006_auto_20210419_1409.py diff --git a/app/stac_api/migrations/0006_auto_20210419_1409.py b/app/stac_api/migrations/0006_auto_20210419_1409.py new file mode 100644 index 00000000..5ab39146 --- /dev/null +++ b/app/stac_api/migrations/0006_auto_20210419_1409.py @@ -0,0 +1,25 @@ +# Generated by Django 3.1.7 on 2021-04-19 14:09 + +import django.db.models.deletion +from django.db import migrations +from django.db import models + + +class Migration(migrations.Migration): + + dependencies = [ + ('stac_api', '0005_auto_20210408_0821'), + ] + + operations = [ + migrations.AlterField( + model_name='item', + name='collection', + field=models.ForeignKey( + help_text= + '\n
                Search Usage:
                  - arg will make a non exact search checking if arg is part of the collection ID
                  - Multiple arg can be used, separated by spaces. This will search for all collections ID containing all arguments.
                  - "collectionID" will make an exact search for the specified collection.
                Examples:
                  - Searching for pixelkarte will return all collections which have pixelkarte as a part of their collection ID
                  - Searching for pixelkarte 2016 4 will return all collection which have pixelkarte, 2016 AND 4 as part of their collection ID
                  - Searching for ch.swisstopo.pixelkarte.example will yield only this collection, if this collection exists. Please note that it would not return a collection named ch.swisstopo.pixelkarte.example.2.
', + on_delete=django.db.models.deletion.PROTECT, + to='stac_api.collection' + ), + ), + ] diff --git a/app/stac_api/models.py b/app/stac_api/models.py index c24a0626..bc96a744 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -376,7 +376,7 @@ class Meta: name = models.CharField('id', blank=False, max_length=255, validators=[validate_name]) collection = models.ForeignKey( - Collection, on_delete=models.CASCADE, help_text=_(SEARCH_TEXT_HELP_COLLECTION) + Collection, on_delete=models.PROTECT, help_text=_(SEARCH_TEXT_HELP_COLLECTION) ) geometry = models.PolygonField( null=False, blank=False, default=BBOX_CH, srid=4326, validators=[validate_geometry] diff --git a/app/stac_api/views.py b/app/stac_api/views.py index 3dfdbcc0..b34a1bb8 100644 --- a/app/stac_api/views.py +++ b/app/stac_api/views.py @@ -304,9 +304,9 @@ def patch(self, request, *args, **kwargs): return self.partial_update(request, *args, **kwargs) # Here the etag is only added to support pre-conditional If-Match and If-Not-Match - # @etag(get_collection_etag) - # def delete(self, request, *args, **kwargs): - # return self.destroy(request, *args, **kwargs) + @etag(get_collection_etag) + def delete(self, request, *args, **kwargs): + return self.destroy(request, *args, **kwargs) def perform_upsert(self, serializer, lookup): validate_renaming( diff --git a/app/stac_api/views_mixins.py b/app/stac_api/views_mixins.py index bdcf0de3..b5b6f629 100644 --- a/app/stac_api/views_mixins.py +++ b/app/stac_api/views_mixins.py @@ -1,8 +1,11 @@ import logging from django.db import transaction +from django.db.models.deletion import ProtectedError +from django.utils.translation import gettext_lazy as _ from rest_framework import status +from rest_framework.exceptions import ValidationError from rest_framework.response import Response from stac_api.utils import get_link @@ -143,4 +146,20 @@ def destroy(self, request, *args, **kwargs): ) def perform_destroy(self, instance): - instance.delete() + try: + instance.delete() + except ProtectedError as error: + logger.error( + 'Failed to delete object %s, object has children: %s', + instance, + error, + extra={'request': self.request._request} # pylint: disable=protected-access + ) + child_name = 'unknown' + if instance.__class__.__name__ == 'Collection': + child_name = 'items' + elif instance.__class__.__name__ == 'Item': + child_name = 'assets' + raise ValidationError( + _(f'Deleting {instance.__class__.__name__} with {child_name} not allowed') + ) from None diff --git a/app/tests/test_admin_page.py b/app/tests/test_admin_page.py index e2356798..e8741b8e 100644 --- a/app/tests/test_admin_page.py +++ b/app/tests/test_admin_page.py @@ -437,9 +437,27 @@ def test_add_remove_collection(self): reverse('admin:stac_api_collection_delete', args=[collection.id]), {"post": "yes"} ) - # Status code for successful creation is 302, since in the admin UI - # you're redirected to the list view after successful creation + # Removing a collection with items should not be allowed, note on failure a 200 OK is + # returned with error description as html. 
In case of success a 302 is returned + self.assertEqual(response.status_code, 200, msg="Admin page remove collection was allowed") + + # removes the assets and items first + response = self.client.post( + reverse('admin:stac_api_asset_delete', args=[asset.id]), {"post": "yes"} + ) + self.assertEqual(response.status_code, 302, msg="Admin page failed to remove asset") + response = self.client.post( + reverse('admin:stac_api_item_delete', args=[item.id]), {"post": "yes"} + ) + self.assertEqual(response.status_code, 302, msg="Admin page failed to remove item") + + # remove collection again with links and providers + response = self.client.post( + reverse('admin:stac_api_collection_delete', args=[collection.id]), {"post": "yes"} + ) self.assertEqual(response.status_code, 302, msg="Admin page failed to remove collection") + + # Check that asset, item, links, providers doesn't exists anymore self.assertFalse(Asset.objects.filter(item=item).exists(), msg="Deleted asset still in DB") self.assertFalse( Item.objects.filter(collection=collection).exists(), msg="Deleted item still in DB" diff --git a/app/tests/test_collections_endpoint.py b/app/tests/test_collections_endpoint.py index 2fed1f7c..135baa49 100644 --- a/app/tests/test_collections_endpoint.py +++ b/app/tests/test_collections_endpoint.py @@ -6,11 +6,15 @@ from django.test import Client from django.urls import reverse +from stac_api.models import Collection +from stac_api.models import CollectionLink +from stac_api.models import Provider from stac_api.utils import utc_aware from tests.base_test import StacBaseTestCase from tests.base_test import StacBaseTransactionTestCase from tests.data_factory import CollectionFactory +from tests.data_factory import Factory from tests.utils import client_login from tests.utils import disableLogger @@ -329,14 +333,6 @@ def test_collection_patch_read_only_in_payload(self): response.json()['description'], msg='Unexpected error message') - def test_authorized_collection_delete(self): - path = f'/{STAC_BASE_V}/collections/{self.collection["name"]}' - response = self.client.delete(path) - # Collection delete is not implemented (and currently not foreseen), hence - # the status code should be 405. If it should get implemented in future - # an unauthorized delete should get a status code of 401 (see test above). 
- self.assertStatusCode(405, response, msg="unimplemented collection delete was permitted.") - def test_collection_atomic_upsert_create_500(self): sample = self.collection_factory.create_sample(sample='collection-2') @@ -380,6 +376,50 @@ def test_collection_atomic_upsert_update_500(self): self.check_stac_collection(self.collection.json, response.json()) +class CollectionsDeleteEndpointTestCase(StacBaseTestCase): + + def setUp(self): # pylint: disable=invalid-name + self.client = Client() + client_login(self.client) + self.factory = Factory() + self.collection = self.factory.create_collection_sample(db_create=True) + self.item = self.factory.create_item_sample(self.collection.model, db_create=True) + self.maxDiff = None # pylint: disable=invalid-name + + def test_authorized_collection_delete(self): + + path = reverse('collection-detail', args=[self.collection["name"]]) + response = self.client.delete(path) + + self.assertStatusCode(400, response) + self.assertEqual( + response.json()['description'], ['Deleting Collection with items not allowed'] + ) + + # delete first the item + item_path = reverse('item-detail', args=[self.collection["name"], self.item['name']]) + response = self.client.delete(item_path) + self.assertStatusCode(200, response) + + # try the collection delete again + response = self.client.delete(path) + self.assertStatusCode(200, response) + + # Check that the object doesn't exists anymore + self.assertFalse( + CollectionLink.objects.filter(collection__name=self.collection["name"]).exists(), + msg="Deleted collection link still in DB" + ) + self.assertFalse( + Provider.objects.filter(collection__name=self.collection["name"]).exists(), + msg="Deleted provider still in DB" + ) + self.assertFalse( + Collection.objects.filter(name=self.collection["name"]).exists(), + msg="Deleted collection still in DB" + ) + + class CollectionRaceConditionTest(StacBaseTransactionTestCase): def setUp(self): From c44c0f7f58d0eddb2a5880656577ef24497a2c9f Mon Sep 17 00:00:00 2001 From: Isabelle Buholzer Date: Wed, 21 Apr 2021 16:40:00 +0200 Subject: [PATCH 088/105] BGDIINF_SB-1742: Removed item with assets delete --- .../migrations/0007_auto_20210421_0701.py | 27 +++++++++++++++++++ app/stac_api/models.py | 2 +- app/tests/test_admin_page.py | 18 +++++++++++-- app/tests/test_items_endpoint.py | 20 ++++++++++++-- 4 files changed, 62 insertions(+), 5 deletions(-) create mode 100644 app/stac_api/migrations/0007_auto_20210421_0701.py diff --git a/app/stac_api/migrations/0007_auto_20210421_0701.py b/app/stac_api/migrations/0007_auto_20210421_0701.py new file mode 100644 index 00000000..ee862dad --- /dev/null +++ b/app/stac_api/migrations/0007_auto_20210421_0701.py @@ -0,0 +1,27 @@ +# Generated by Django 3.1.7 on 2021-04-21 07:01 + +import django.db.models.deletion +from django.db import migrations +from django.db import models + + +class Migration(migrations.Migration): + + dependencies = [ + ('stac_api', '0006_auto_20210419_1409'), + ] + + operations = [ + migrations.AlterField( + model_name='asset', + name='item', + field=models.ForeignKey( + help_text= + '\n
\n Search Usage:\n
    \n
  • \n arg will make a non exact search checking if >arg\n is part of the Item path\n
  • \n
  • \n Multiple arg can be used, separated by spaces. This will search\n for all elements containing all arguments in their path\n
  • \n
  • \n "collectionID/itemID" will make an exact search for the specified item.\n
  • \n
\n Examples :\n
    \n
  • \n Searching for pixelkarte will return all items which have\n pixelkarte as a part of either their collection ID or their item ID\n
  • \n
  • \n Searching for pixelkarte 2016 4 will return all items\n which have pixelkarte, 2016 AND 4 as part of their collection ID or\n item ID\n
  • \n
  • \n Searching for "ch.swisstopo.pixelkarte.example/item2016-4-example"\n will yield only this item, if this item exists.\n
  • \n
\n
', + on_delete=django.db.models.deletion.PROTECT, + related_name='assets', + related_query_name='asset', + to='stac_api.item' + ), + ), + ] diff --git a/app/stac_api/models.py b/app/stac_api/models.py index bc96a744..5a81c116 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -552,7 +552,7 @@ class Meta: Item, related_name='assets', related_query_name='asset', - on_delete=models.CASCADE, + on_delete=models.PROTECT, help_text=_(SEARCH_TEXT_HELP_ITEM) ) # using "name" instead of "id", as "id" has a default meaning in django diff --git a/app/tests/test_admin_page.py b/app/tests/test_admin_page.py index e8741b8e..6150cf87 100644 --- a/app/tests/test_admin_page.py +++ b/app/tests/test_admin_page.py @@ -596,9 +596,23 @@ def test_add_remove_item(self): reverse('admin:stac_api_item_delete', args=[item.id]), {"post": "yes"} ) - # Status code for successful creation is 302, since in the admin UI - # you're redirected to the list view after successful creation + # Removing items with assets should not be allowed, note on failure a 200 OK is returned + # with error description as html. In case of success a 302 is returned + self.assertEqual(response.status_code, 200, msg="Admin page remove item was allowed") + + # remove assets first + response = self.client.post( + reverse('admin:stac_api_asset_delete', args=[asset.id]), {"post": "yes"} + ) + self.assertEqual(response.status_code, 302, msg="Admin page failed to remove asset") + + # remove item again with links and providers + response = self.client.post( + reverse('admin:stac_api_item_delete', args=[item.id]), {"post": "yes"} + ) self.assertEqual(response.status_code, 302, msg="Admin page failed to remove item") + + # Check that asset and links doesn't exist anymore self.assertFalse(Asset.objects.filter(item=item).exists(), msg="Deleted asset still in DB") self.assertFalse( ItemLink.objects.filter(item=item, rel=data["links-0-rel"]).exists(), diff --git a/app/tests/test_items_endpoint.py b/app/tests/test_items_endpoint.py index 13818a27..9a30c72f 100644 --- a/app/tests/test_items_endpoint.py +++ b/app/tests/test_items_endpoint.py @@ -22,6 +22,7 @@ from tests.data_factory import ItemFactory from tests.utils import client_login from tests.utils import disableLogger +from tests.utils import mock_s3_asset_file logger = logging.getLogger(__name__) @@ -828,14 +829,29 @@ class ItemsDeleteEndpointTestCase(StacBaseTestCase): @classmethod def setUpTestData(cls): cls.factory = Factory() - cls.collection = cls.factory.create_collection_sample().model - cls.item = cls.factory.create_item_sample(cls.collection, sample='item-1').model + @mock_s3_asset_file def setUp(self): self.client = Client() client_login(self.client) + self.collection = self.factory.create_collection_sample().model + self.item = self.factory.create_item_sample(self.collection, sample='item-1').model + self.asset = self.factory.create_asset_sample(self.item, sample='asset-1').model def test_item_endpoint_delete_item(self): + # Check that deleting, while assets are present, is not allowed + path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' + response = self.client.delete(path) + self.assertStatusCode(400, response) + self.assertEqual(response.json()['description'], ['Deleting Item with assets not allowed']) + + # delete asset first + asset_path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' \ + f'/assets/{self.asset.name}' + response = self.client.delete(asset_path) + self.assertStatusCode(200, response) + + # try 
item delete again path = f'/{STAC_BASE_V}/collections/{self.collection.name}/items/{self.item.name}' response = self.client.delete(path) self.assertStatusCode(200, response) From cfd94c115e88416752b431ad1cba545570b577d2 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Wed, 14 Apr 2021 14:25:33 +0200 Subject: [PATCH 089/105] BGDIINF_SB-1739: Added management command to list all asset uploads This command won't work correctly with S3 minio server. --- .../management/commands/list_asset_uploads.py | 166 ++++++++++++++++++ app/stac_api/s3_multipart_upload.py | 27 +++ app/stac_api/utils.py | 1 + 3 files changed, 194 insertions(+) create mode 100644 app/stac_api/management/commands/list_asset_uploads.py diff --git a/app/stac_api/management/commands/list_asset_uploads.py b/app/stac_api/management/commands/list_asset_uploads.py new file mode 100644 index 00000000..6e7dfbcc --- /dev/null +++ b/app/stac_api/management/commands/list_asset_uploads.py @@ -0,0 +1,166 @@ +import json +import logging + +from django.core.management.base import BaseCommand +from django.core.serializers.json import DjangoJSONEncoder + +from stac_api.models import AssetUpload +from stac_api.s3_multipart_upload import MultipartUpload +from stac_api.serializers import AssetUploadSerializer +from stac_api.utils import CommandHandler +from stac_api.utils import get_asset_path + +logger = logging.getLogger(__name__) + + +class ListAssetUploadsHandler(CommandHandler): + + def __init__(self, command, options): + super().__init__(command, options) + self.s3 = MultipartUpload() + + def list_asset_uploads(self): + # pylint: disable=too-many-locals + uploads = [] + only_s3_uploads = [] + only_db_uploads = [] + s3_has_next = False + db_has_next = False + limit = self.options['limit'] + start = self.options['start'] + s3_key_start = self.options['s3_key_start'] + s3_upload_id_start = self.options['s3_upload_id_start'] + + s3_uploads = [] + if not self.options['db_only']: + # get all s3 multipart uploads + ( + s3_uploads, + s3_has_next, + s3_next_key, + s3_next_upload_id, + ) = self.s3.list_multipart_uploads( + limit=limit, key=s3_key_start, start=s3_upload_id_start + ) + + db_uploads = [] + if not self.options['s3_only']: + queryset = AssetUpload.objects.filter_by_status(self.options['status']) + count = queryset.count() + if count > limit: + queryset = queryset[start:start + limit] + db_uploads_qs = queryset + if start + limit < count: + db_has_next = True + + if not self.options['db_only'] and not self.options['s3_only']: + + def are_uploads_equals(s3_upload, db_upload): + if ( + s3_upload['UploadId'] == db_upload.upload_id and + s3_upload['Key'] == get_asset_path(db_upload.asset.item, db_upload.asset.name) + ): + return True + return False + + # Add all db uploads + for db_upload in db_uploads_qs: + s3_upload = next( + ( + s3_upload for s3_upload in s3_uploads + if are_uploads_equals(s3_upload, db_upload) + ), + None, + ) + + if s3_upload is None: + only_db_uploads.append(AssetUploadSerializer(instance=db_upload).data) + else: + uploads.append({ + 'db': AssetUploadSerializer(instance=db_upload).data, 's3': s3_upload + }) + + # Add s3 uploads that are not found in db uploads + for s3_upload in s3_uploads: + db_upload = next( + ( + db_upload for db_upload in db_uploads_qs + if are_uploads_equals(s3_upload, db_upload) + ), + None, + ) + if db_upload is None: + only_s3_uploads.append(s3_upload) + elif self.options['db_only']: + only_db_uploads = AssetUploadSerializer(instance=list(db_uploads_qs), many=True).data + elif 
self.options['s3_only']: + only_s3_uploads = s3_uploads + + print( + json.dumps( + { + 'uploads': uploads, + 'db_uploads': only_db_uploads, + 's3_uploads': only_s3_uploads, + 'next': + ' '.join([ + f'./{self.command.prog}', + f'--limit={limit}', + f'--start={start}' if db_has_next else '', + f'--s3-key-start={s3_next_key}' if s3_has_next else '', + f'--s3-upload-id-start={s3_next_upload_id}' if s3_has_next else '' + ]) + }, + indent=2, + cls=DjangoJSONEncoder, + ) + ) + + +class Command(BaseCommand): + help = """List all asset uploads object (DB and/or S3) + + This checks for all asset uploads object in DB (by default only returning the `in-progress` + status objects) as well as the open S3 multipart uploads. This command is in addition to the + .../assets//uploads which only list the uploads of one asset, while the command list + all uploads for all assets. + + WARNING: The S3 minio server for local development doesn't supports the list_multipart_uploads + methods, therefore the output will only contains the DB entries. + """ + + def add_arguments(self, parser): + self.prog = parser.prog # pylint: disable=attribute-defined-outside-init + + parser.add_argument( + '--status', + type=str, + default=AssetUpload.Status.IN_PROGRESS, + help=f"Filter by status (default '{AssetUpload.Status.IN_PROGRESS}')" + ) + + default_limit = 50 + parser.add_argument( + '--limit', + type=int, + default=default_limit, + help=f"Limit the output (default {default_limit})" + ) + + parser.add_argument( + '--start', type=int, default=0, help="Start the list at the given index (default 0)" + ) + + parser.add_argument('--db-only', type=bool, default=False, help="List only DB objects") + + parser.add_argument('--s3-only', type=bool, default=False, help="List only S3 objects") + + parser.add_argument( + '--s3-key-start', type=str, default=None, help='Next S3 key for pagination' + ) + parser.add_argument( + '--s3-upload-id-start', type=str, default=None, help='Next S3 upload ID for pagination' + ) + + def handle(self, *args, **options): + ListAssetUploadsHandler(self, options).list_asset_uploads() diff --git a/app/stac_api/s3_multipart_upload.py b/app/stac_api/s3_multipart_upload.py index 98b814e0..5e3af109 100644 --- a/app/stac_api/s3_multipart_upload.py +++ b/app/stac_api/s3_multipart_upload.py @@ -28,6 +28,33 @@ class MultipartUpload: def __init__(self): self.s3 = get_s3_client() + def list_multipart_uploads(self, key=None, limit=100, start=None): + '''List all in progress multipart uploads + + Args: + key: string | None + Only list for a specific asset file + limit: int + Limit the output number of result + start: str + Upload ID start marker for retrieving the next results + + Returns: ([], bool, string, string) + Returns a tuple (uploads, has_next, next_key, next_upload_id) + ''' + kwargs = {'Bucket': settings.AWS_STORAGE_BUCKET_NAME, 'MaxUploads': limit} + if key is not None: + kwargs['KeyMarker'] = key + if start is not None: + kwargs['UploadIdMarker'] = start + response = self.call_s3_api(self.s3.list_multipart_uploads, **kwargs) + return ( + response.get('Uploads', []), + response.get('IsTruncated', False), + response.get('NextKeyMarker', None), + response.get('NextUploadIdMarker', None), + ) + def create_multipart_upload(self, key, asset, checksum_multihash): '''Create a multi part upload on the backend diff --git a/app/stac_api/utils.py b/app/stac_api/utils.py index 3d4799d9..9f598995 100644 --- a/app/stac_api/utils.py +++ b/app/stac_api/utils.py @@ -290,6 +290,7 @@ def __init__(self, command, options): 
self.stdout = command.stdout
         self.stderr = command.stderr
         self.style = command.style
+        self.command = command
 
     def print(self, message, *args, level=2):
         if self.verbosity >= level:

From bcb0d682fefaa5500c4db389fb0cd95c9f3d55aa Mon Sep 17 00:00:00 2001
From: Brice Schaffner
Date: Fri, 23 Apr 2021 07:34:55 +0200
Subject: [PATCH 090/105] BGDIINF_SB-1739: Minor improvements in list_asset_uploads command based on review

---
 .../management/commands/list_asset_uploads.py | 18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/app/stac_api/management/commands/list_asset_uploads.py b/app/stac_api/management/commands/list_asset_uploads.py
index 6e7dfbcc..f84ddbf9 100644
--- a/app/stac_api/management/commands/list_asset_uploads.py
+++ b/app/stac_api/management/commands/list_asset_uploads.py
@@ -43,7 +43,6 @@ def list_asset_uploads(self):
                 limit=limit, key=s3_key_start, start=s3_upload_id_start
             )
 
-        db_uploads = []
         if not self.options['s3_only']:
             queryset = AssetUpload.objects.filter_by_status(self.options['status'])
             count = queryset.count()
@@ -55,7 +54,7 @@ def list_asset_uploads(self):
 
         if not self.options['db_only'] and not self.options['s3_only']:
 
-            def are_uploads_equals(s3_upload, db_upload):
+            def are_uploads_equal(s3_upload, db_upload):
                 if (
                     s3_upload['UploadId'] == db_upload.upload_id and
                     s3_upload['Key'] == get_asset_path(db_upload.asset.item, db_upload.asset.name)
@@ -68,7 +67,7 @@ def are_uploads_equals(s3_upload, db_upload):
                 s3_upload = next(
                     (
                         s3_upload for s3_upload in s3_uploads
-                        if are_uploads_equals(s3_upload, db_upload)
+                        if are_uploads_equal(s3_upload, db_upload)
                     ),
                     None,
                 )
@@ -85,7 +84,7 @@ def are_uploads_equals(s3_upload, db_upload):
                 db_upload = next(
                     (
                         db_upload for db_upload in db_uploads_qs
-                        if are_uploads_equals(s3_upload, db_upload)
+                        if are_uploads_equal(s3_upload, db_upload)
                     ),
                     None,
                 )
@@ -121,12 +120,17 @@ class Command(BaseCommand):
     help = """List all asset uploads object (DB and/or S3)
 
     This checks for all asset uploads object in DB (by default only returning the `in-progress`
-    status objects) as well as the open S3 multipart uploads. This command is in addition to the
+    status objects) as well as the open S3 multipart uploads (S3 has only `in-progress` uploads,
+    once the upload is completed it is automatically deleted). This command is in addition to the
     .../assets//uploads which only list the uploads of one asset, while the command list
     all uploads for all assets.
 
-    WARNING: The S3 minio server for local development doesn't supports the list_multipart_uploads
-    methods, therefore the output will only contains the DB entries.
+    WARNINGS:
+      - Although pagination is implemented, if there are more uploads than the limit, the sync
+        algorithm will not work, because it only searches for common uploads within the page
+        context and the uploads are not sorted.
+      - The S3 minio server for local development doesn't support the list_multipart_uploads
+        method, therefore the output will only contain the DB entries.
     """
 
     def add_arguments(self, parser):

From c10a6e9214d769a25f6518fe0898cce9e01089ed Mon Sep 17 00:00:00 2001
From: Brice Schaffner
Date: Wed, 21 Apr 2021 07:03:40 +0200
Subject: [PATCH 091/105] BGDIINF_SB-1764: Reorganized the openapi files

Also makes use of local file references, which allows jumping to references with an IDE like
VSCode.
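For illustration only (this example is not part of the committed diff below): the same reference
in its two forms, the authored multi-file form that an IDE such as VSCode can follow on disk, and
the bundled form produced by the `yq merge` plus sed step in the spec/Makefile change below, which
strips the file prefix again:

    # authored form, e.g. in spec/openapi.yaml
    parameters:
      - $ref: "./components/parameters.yaml#/components/parameters/limit"

    # bundled form, e.g. in spec/static/spec/v0.9/openapi.yaml
    parameters:
      - $ref: "#/components/parameters/limit"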
--- spec/Makefile | 19 +- spec/base.yml | 21 - spec/components/{headers.yml => headers.yaml} | 1 + .../{parameters.yml => parameters.yaml} | 43 +- .../{responses.yml => responses.yaml} | 39 +- spec/components/{schemas.yml => schemas.yaml} | 306 +- spec/openapi.yaml | 193 + spec/paths.yml | 174 - spec/static/spec/v0.9/openapi.yaml | 1019 ++-- .../spec/v0.9/openapitransactional.yaml | 5107 +++++++++-------- spec/transaction/components/examples.yaml | 33 + spec/transaction/components/parameters.yaml | 39 + spec/transaction/components/responses.yaml | 47 + spec/transaction/components/schemas.yaml | 480 ++ spec/transaction/paths.yaml | 874 +++ spec/transaction/tags.yaml | 128 + spec/transaction/transaction.yml | 1593 ----- 17 files changed, 5132 insertions(+), 4984 deletions(-) delete mode 100644 spec/base.yml rename spec/components/{headers.yml => headers.yaml} (97%) rename spec/components/{parameters.yml => parameters.yaml} (77%) rename spec/components/{responses.yml => responses.yaml} (87%) rename spec/components/{schemas.yml => schemas.yaml} (84%) create mode 100644 spec/openapi.yaml delete mode 100644 spec/paths.yml create mode 100644 spec/transaction/components/examples.yaml create mode 100644 spec/transaction/components/parameters.yaml create mode 100644 spec/transaction/components/responses.yaml create mode 100644 spec/transaction/components/schemas.yaml create mode 100644 spec/transaction/paths.yaml create mode 100644 spec/transaction/tags.yaml delete mode 100644 spec/transaction/transaction.yml diff --git a/spec/Makefile b/spec/Makefile index 83a64f32..efedb230 100644 --- a/spec/Makefile +++ b/spec/Makefile @@ -4,27 +4,26 @@ SPEC_HTTP_PORT ?= 8090 -PARTS = base.yml \ - components/headers.yml \ - components/parameters.yml \ - components/responses.yml \ - components/schemas.yml \ - paths.yml +COMPONENTS_DIR = ./components +PARTS_COMPONENTS := $(shell find $(COMPONENTS_DIR) -type f -name "*.yaml" -print) +PARTS := openapi.yaml $(PARTS_COMPONENTS) -PARTS_TRANSACTIONAL = transaction/transaction.yml +TRANSACTIONAL_DIR = ./transaction +PARTS_TRANSACTIONAL = $(shell find $(TRANSACTIONAL_DIR) -type f -name "*.yaml" -print) STATIC_BASE_DIR = static/spec/v0.9 OPENAPI = $(STATIC_BASE_DIR)/openapi.yaml OPENAPI_TRANSACTIONAL = $(STATIC_BASE_DIR)/openapitransactional.yaml -# AWS_ACCOUNT = swisstopo-3d $(OPENAPI): $(PARTS) - docker run --rm -v ${PWD}:/workdir mikefarah/yq:3.3.0 yq merge -x $(PARTS) > $@ + docker run --rm -v ${PWD}:/workdir mikefarah/yq:3.3.0 yq merge -x $(PARTS) | \ + sed -E '/\$ref:/s/"\..*?(#.*?)"/"\1"/' > $@ $(OPENAPI_TRANSACTIONAL): $(OPENAPI) $(PARTS_TRANSACTIONAL) - docker run --rm -v ${PWD}:/workdir mikefarah/yq:3.3.0 yq merge -x $(OPENAPI) $(PARTS_TRANSACTIONAL) > $@ + docker run --rm -v ${PWD}:/workdir mikefarah/yq:3.3.0 yq merge -x $(OPENAPI) $(PARTS_TRANSACTIONAL) | \ + sed -E '/\$ref:/s/"\..*?(#.*?)"/"\1"/' > $@ .PHONY: build-specs diff --git a/spec/base.yml b/spec/base.yml deleted file mode 100644 index b9bc949c..00000000 --- a/spec/base.yml +++ /dev/null @@ -1,21 +0,0 @@ -openapi: 3.0.1 -info: - contact: - name: API Specification (based on STAC) - url: http://data.geo.admin.ch/api/stac/v0.9/ - description: >- - This is an OpenAPI definition of the API to query and access federal geodata on - data.geo.admin.ch. The API is based on the core SpatioTemporal Asset Catalog API specification - [STAC](http://stacspec.org) and adds two extensions for extended searching possibilities. 
- title: The SpatioTemporal Asset Catalog API for data.geo.admin.ch - version: 0.9.0 -servers: - - description: Data.geo.admin.ch - url: http://data.geo.admin.ch/api/stac/v0.9 -tags: - - description: Essential characteristics of this API - name: Capabilities - - description: Access to data (features) - name: Data - - description: Extension to OGC API - Features to support STAC metadata model and search API - name: STAC diff --git a/spec/components/headers.yml b/spec/components/headers.yaml similarity index 97% rename from spec/components/headers.yml rename to spec/components/headers.yaml index 0300b163..febb4e4b 100644 --- a/spec/components/headers.yml +++ b/spec/components/headers.yaml @@ -1,3 +1,4 @@ +openapi: 3.0.1 components: headers: ETag: diff --git a/spec/components/parameters.yml b/spec/components/parameters.yaml similarity index 77% rename from spec/components/parameters.yml rename to spec/components/parameters.yaml index 36ee07d1..69731e7c 100644 --- a/spec/components/parameters.yml +++ b/spec/components/parameters.yaml @@ -1,21 +1,22 @@ +openapi: 3.0.1 components: parameters: - #assetQuery: - # description: >- - # Query for properties in assets (e.g. mediatype). Use the JSON form of the assetQueryFilter - # used in POST. - # in: query - # name: assetQuery - # required: false - # schema: - # type: string + assetQuery: + description: >- + Query for properties in assets (e.g. mediatype). Use the JSON form of the assetQueryFilter + used in POST. + in: query + name: assetQuery + required: false + schema: + type: string bbox: explode: false in: query name: bbox required: false schema: - $ref: "#/components/schemas/bbox" + $ref: "./schemas.yaml#/components/schemas/bbox" style: form example: collectionId: @@ -31,14 +32,14 @@ components: name: collections required: false schema: - $ref: "#/components/schemas/collectionsArray" + $ref: "./schemas.yaml#/components/schemas/collectionsArray" datetime: explode: false in: query name: datetime required: false schema: - $ref: "#/components/schemas/datetimeQuery" + $ref: "./schemas.yaml#/components/schemas/datetimeQuery" example: 2018-02-12T00%3A00%3A00Z%2F2018-03-18T12%3A31%3A12Z style: form featureId: @@ -57,22 +58,22 @@ components: name: ids required: false schema: - $ref: "#/components/schemas/ids" + $ref: "./schemas.yaml#/components/schemas/ids" limit: explode: false in: query name: limit required: false schema: - $ref: "#/components/schemas/limit" + $ref: "./schemas.yaml#/components/schemas/limit" style: form - # query: - # description: Query for properties in items. Use the JSON form of the queryFilter used in POST. - # in: query - # name: query - # required: false - # schema: - # type: string + query: + description: Query for properties in items. Use the JSON form of the queryFilter used in POST. 
+ in: query + name: query + required: false + schema: + type: string IfNoneMatch: name: If-None-Match in: header diff --git a/spec/components/responses.yml b/spec/components/responses.yaml similarity index 87% rename from spec/components/responses.yml rename to spec/components/responses.yaml index f3620bf1..493bb890 100644 --- a/spec/components/responses.yml +++ b/spec/components/responses.yaml @@ -1,13 +1,14 @@ +openapi: 3.0.1 components: responses: Collection: headers: ETag: - $ref: "#/components/headers/ETag" + $ref: "./headers.yaml#/components/headers/ETag" content: application/json: schema: - $ref: "#/components/schemas/collection" + $ref: "./schemas.yaml#/components/schemas/collection" description: >- Information about the feature collection with id `collectionId`. @@ -35,7 +36,7 @@ components: content: application/json: schema: - $ref: "#/components/schemas/collections" + $ref: "./schemas.yaml#/components/schemas/collections" description: >- The feature collections shared by this API. @@ -77,7 +78,7 @@ components: - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30 - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson schema: - $ref: "#/components/schemas/confClasses" + $ref: "./schemas.yaml#/components/schemas/confClasses" description: >- The URIs of all conformance classes supported by the server. @@ -89,11 +90,11 @@ components: Feature: headers: ETag: - $ref: "#/components/headers/ETag" + $ref: "./headers.yaml#/components/headers/ETag" content: application/json: schema: - $ref: "#/components/schemas/item" + $ref: "./schemas.yaml#/components/schemas/item" description: >- Fetch the feature with id `featureId` in the feature collection with id `collectionId` @@ -101,8 +102,7 @@ components: content: application/json: schema: - # $ref: '#/components/schemas/featureCollectionGeoJSON' - $ref: "#/components/schemas/items" + $ref: "./schemas.yaml#/components/schemas/items" description: >- The response is a document consisting of features in the collection. 
The features included in the response are determined by the server @@ -130,7 +130,7 @@ components: # content: # application/json: # schema: - # $ref: "#/components/schemas/exception" + # $ref: "./schemas.yaml#/components/schemas/exception" # example: # code: 304 # description: "Not modified" @@ -139,7 +139,7 @@ components: content: application/json: schema: - $ref: "#/components/schemas/exception" + $ref: "./schemas.yaml#/components/schemas/exception" example: code: 400 description: "Invalid parameter" @@ -174,7 +174,7 @@ components: stac_version: 0.9.0 title: data.geo.admin.ch schema: - $ref: "#/components/schemas/landingPage" + $ref: "./schemas.yaml#/components/schemas/landingPage" description: >- The landing page provides links to the API definition (link relations `service-desc` and `service-doc`), @@ -187,7 +187,7 @@ components: content: application/json: schema: - $ref: "#/components/schemas/exception" + $ref: "./schemas.yaml#/components/schemas/exception" example: code: 404 description: "Resource not found" @@ -196,17 +196,26 @@ components: content: application/json: schema: - $ref: "#/components/schemas/exception" + $ref: "./schemas.yaml#/components/schemas/exception" example: code: 400 description: "Invalid parameter" + PermissionDenied: + description: No Permission for this request + content: + application/json: + schema: + $ref: "./schemas.yaml#/components/schemas/exception" + example: + code: 403 + description: "Permission denied" PreconditionFailed: description: Some condition specified by the request could not be met in the server # Currently the 412 Precondition not met is handled by django and we cannot give a body # content: # application/json: # schema: - # $ref: "#/components/schemas/exception" + # $ref: "./schemas.yaml#/components/schemas/exception" # example: # code: 412 # description: "Precondition not met" @@ -217,7 +226,7 @@ components: content: application/json: schema: - $ref: "#/components/schemas/exception" + $ref: "./schemas.yaml#/components/schemas/exception" example: code: 500 description: "Internal server error" diff --git a/spec/components/schemas.yml b/spec/components/schemas.yaml similarity index 84% rename from spec/components/schemas.yml rename to spec/components/schemas.yaml index 88a7ca0c..640ca414 100644 --- a/spec/components/schemas.yml +++ b/spec/components/schemas.yaml @@ -1,74 +1,75 @@ +openapi: 3.0.1 components: schemas: - #assetQuery: - # additionalProperties: - # $ref: "#/components/schemas/assetQueryProp" - # description: >- - # Define which properties of the asset to query and the operations to apply. - # - # - # The following properties can be queried: - # - # - `type`: query for assets with this specific media type - # - # - `proj:epsg`: query for assets with this specific epsg - # - # - `eo:gsd`: query for assets with this specific gsd - # - # - `geoadmin:variant`: query for assets with this specific variant - # example: - # type: - # eq: image/tiff - # type: object - #assetQueryFilter: - # description: Allows users to query asset properties for specific values - # properties: - # assetQuery: - # $ref: "#/components/schemas/assetQuery" - # type: object - #assetQueryProp: - # anyOf: - # - description: >- - # If the object doesn't contain any of the operators, it is equivalent to using the - # equals operator - # - description: Match using an operator - # properties: - # contains: - # description: >- - # Find items with a property that contains the specified - # literal string, e.g., matches ".*.*". 
- # A case-insensitive comparison must be performed. - # type: string - # endsWith: - # description: >- - # Find items with a property that ends with the specified string. A case-insensitive - # comparison must be performed. - # type: string - # eq: - # description: >- - # Find items with a property that is equal to the specified value. For strings, a - # case-insensitive comparison must be performed. - # nullable: true - # oneOf: - # - type: string - # - type: number - # - type: boolean - # in: - # description: >- - # Find items with a property that equals at least one entry in the specified array. - # A case-insensitive comparison must be performed. - # items: - # oneOf: - # - type: string - # - type: number - # type: array - # startsWith: - # description: >- - # Find items with a property that begins with the specified string. A case-insensitive - # comparison must be performed. - # type: string - # type: object - # description: Apply query operations to a specific property + assetQuery: + additionalProperties: + $ref: "./schemas.yaml#/components/schemas/assetQueryProp" + description: >- + Define which properties of the asset to query and the operations to apply. + + + The following properties can be queried: + + - `type`: query for assets with this specific media type + + - `proj:epsg`: query for assets with this specific epsg + + - `eo:gsd`: query for assets with this specific gsd + + - `geoadmin:variant`: query for assets with this specific variant + example: + type: + eq: image/tiff + type: object + assetQueryFilter: + description: Allows users to query asset properties for specific values + properties: + assetQuery: + $ref: "./schemas.yaml#/components/schemas/assetQuery" + type: object + assetQueryProp: + anyOf: + - description: >- + If the object doesn't contain any of the operators, it is equivalent to using the + equals operator + - description: Match using an operator + properties: + contains: + description: >- + Find items with a property that contains the specified + literal string, e.g., matches ".*.*". + A case-insensitive comparison must be performed. + type: string + endsWith: + description: >- + Find items with a property that ends with the specified string. A case-insensitive + comparison must be performed. + type: string + eq: + description: >- + Find items with a property that is equal to the specified value. For strings, a + case-insensitive comparison must be performed. + nullable: true + oneOf: + - type: string + - type: number + - type: boolean + in: + description: >- + Find items with a property that equals at least one entry in the specified array. + A case-insensitive comparison must be performed. + items: + oneOf: + - type: string + - type: number + type: array + startsWith: + description: >- + Find items with a property that begins with the specified string. A case-insensitive + comparison must be performed. + type: string + type: object + description: Apply query operations to a specific property assetBase: title: Asset description: The `property name` defines the ID of the Asset. 
@@ -79,55 +80,48 @@ components: - updated properties: title: - $ref: "#/components/schemas/title" + $ref: "./schemas.yaml#/components/schemas/title" description: - $ref: "#/components/schemas/description" + $ref: "./schemas.yaml#/components/schemas/description" type: - $ref: "#/components/schemas/type" + $ref: "./schemas.yaml#/components/schemas/type" href: - $ref: "#/components/schemas/href" + $ref: "./schemas.yaml#/components/schemas/href" checksum:multihash: - $ref: "#/components/schemas/checksumMultihashReadOnly" + $ref: "./schemas.yaml#/components/schemas/checksumMultihashReadOnly" # roles: # $ref: '#/components/schemas/roles' "geoadmin:variant": - $ref: "#/components/schemas/geoadminVariant" + $ref: "./schemas.yaml#/components/schemas/geoadminVariant" "geoadmin:lang": - $ref: "#/components/schemas/geoadminLang" + $ref: "./schemas.yaml#/components/schemas/geoadminLang" "proj:epsg": - $ref: "#/components/schemas/projEpsg" + $ref: "./schemas.yaml#/components/schemas/projEpsg" "eo:gsd": - $ref: "#/components/schemas/eoGsd" + $ref: "./schemas.yaml#/components/schemas/eoGsd" created: - $ref: "#/components/schemas/created" + $ref: "./schemas.yaml#/components/schemas/created" updated: - $ref: "#/components/schemas/updated" + $ref: "./schemas.yaml#/components/schemas/updated" bbox: - description: >- + description: | Only features that have a geometry that intersects the bounding box are selected. The bounding box is provided as four numbers: - * Lower left corner, coordinate axis 1 - * Lower left corner, coordinate axis 2 - * Upper right corner, coordinate axis 1 - * Upper right corner, coordinate axis 2 - The coordinate reference system of the values is WGS84 longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). - For WGS84 longitude/latitude the values are in most cases the sequence of minimum longitude, minimum latitude, maximum longitude and maximum latitude. However, in cases where the box spans the antimeridian the first value (west-most box edge) is larger than the third value (east-most box edge). - Example: The bounding box of Switzerland in WGS 84 (from 5.96°E to 10.49°E and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as @@ -187,7 +181,7 @@ components: bboxFilter: properties: bbox: - $ref: "#/components/schemas/bboxfilter" + $ref: "./schemas.yaml#/components/schemas/bboxfilter" checksumMultihash: description: >- `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) @@ -238,7 +232,7 @@ components: for a variety of purposes (digital printing, plots, offset printing, etc.). 
type: string extent: - $ref: "#/components/schemas/extent" + $ref: "./schemas.yaml#/components/schemas/extent" id: description: Identifier of the collection used, for example, in URIs example: ch.swisstopo.pixelkarte-farbe-pk200.noscale @@ -250,11 +244,11 @@ components: type: string readOnly: true license: - $ref: "#/components/schemas/license" + $ref: "./schemas.yaml#/components/schemas/license" providers: - $ref: "#/components/schemas/providers" + $ref: "./schemas.yaml#/components/schemas/providers" stac_version: - $ref: "#/components/schemas/stac_version" + $ref: "./schemas.yaml#/components/schemas/stac_version" summaries: additionalProperties: oneOf: @@ -313,9 +307,9 @@ components: example: National Map 1:200'000 type: string created: - $ref: "#/components/schemas/created" + $ref: "./schemas.yaml#/components/schemas/created" updated: - $ref: "#/components/schemas/updated" + $ref: "./schemas.yaml#/components/schemas/updated" required: - id - stac_version @@ -327,7 +321,7 @@ components: type: object collection: allOf: - - $ref: "#/components/schemas/collectionBase" + - $ref: "./schemas.yaml#/components/schemas/collectionBase" - type: object required: - links @@ -335,7 +329,7 @@ components: links: type: array items: - $ref: "#/components/schemas/link" + $ref: "./schemas.yaml#/components/schemas/link" example: - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale rel: self @@ -350,31 +344,15 @@ components: title: Licence for the free geodata of the Federal Office of Topography swisstopo - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df rel: describedby - collectionWrite: - title: collection - allOf: - - $ref: "#/components/schemas/collectionBase" - - type: object - properties: - links: - type: array - items: - $ref: "#/components/schemas/link" - example: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby collections: properties: collections: items: - $ref: "#/components/schemas/collection" + $ref: "./schemas.yaml#/components/schemas/collection" type: array links: items: - $ref: "#/components/schemas/link" + $ref: "./schemas.yaml#/components/schemas/link" example: - href: https://data.geo.admin.ch/api/stac/v0.9/collections rel: self @@ -399,7 +377,7 @@ components: description: Only returns the collections specified properties: collections: - $ref: "#/components/schemas/collectionsArray" + $ref: "./schemas.yaml#/components/schemas/collectionsArray" type: object example: collections: @@ -444,7 +422,7 @@ components: datetimeFilter: properties: datetime: - $ref: "#/components/schemas/datetimeQuery" + $ref: "./schemas.yaml#/components/schemas/datetimeQuery" description: description: >- Detailed multi-line description to fully explain the catalog or @@ -599,18 +577,18 @@ components: type: string geometryGeoJSON: oneOf: - # - $ref: "#/components/schemas/pointGeoJSON" - # - $ref: "#/components/schemas/multipointGeoJSON" - # - $ref: "#/components/schemas/linestringGeoJSON" - # - $ref: "#/components/schemas/multilinestringGeoJSON" - - $ref: "#/components/schemas/polygonGeoJSON" - # - $ref: "#/components/schemas/multipolygonGeoJSON" - # - $ref: "#/components/schemas/geometrycollectionGeoJSON" + # - $ref: 
"./schemas.yaml#/components/schemas/pointGeoJSON" + # - $ref: "./schemas.yaml#/components/schemas/multipointGeoJSON" + # - $ref: "./schemas.yaml#/components/schemas/linestringGeoJSON" + # - $ref: "./schemas.yaml#/components/schemas/multilinestringGeoJSON" + - $ref: "./schemas.yaml#/components/schemas/polygonGeoJSON" + # - $ref: "./schemas.yaml#/components/schemas/multipolygonGeoJSON" + # - $ref: "./schemas.yaml#/components/schemas/geometrycollectionGeoJSON" geometrycollectionGeoJSON: properties: geometries: items: - $ref: "#/components/schemas/geometryGeoJSON" + $ref: "./schemas.yaml#/components/schemas/geometryGeoJSON" type: array type: enum: @@ -638,7 +616,7 @@ components: description: Only returns items that match the array of given ids properties: ids: - $ref: "#/components/schemas/ids" + $ref: "./schemas.yaml#/components/schemas/ids" type: object example: ids: @@ -662,23 +640,23 @@ components: STAC entity properties: assets: - $ref: "#/components/schemas/itemAssets" + $ref: "./schemas.yaml#/components/schemas/itemAssets" bbox: - $ref: "#/components/schemas/bbox" + $ref: "./schemas.yaml#/components/schemas/bbox" geometry: # we could use the 'original' schema, but it doesn't # contain a useful example, hence we use our own # schema with appropriate example # $ref: https://geojson.org/schema/Geometry.json - $ref: "#/components/schemas/geometryGeoJSON" + $ref: "./schemas.yaml#/components/schemas/geometryGeoJSON" properties: - $ref: "#/components/schemas/itemProperties" + $ref: "./schemas.yaml#/components/schemas/itemProperties" readOnly: true stac_version: - $ref: "#/components/schemas/stac_version" + $ref: "./schemas.yaml#/components/schemas/stac_version" readOnly: true type: - $ref: "#/components/schemas/itemType" + $ref: "./schemas.yaml#/components/schemas/itemType" required: - stac_version - type @@ -695,10 +673,10 @@ components: - links properties: id: - $ref: "#/components/schemas/itemId" + $ref: "./schemas.yaml#/components/schemas/itemId" links: items: - $ref: "#/components/schemas/link" + $ref: "./schemas.yaml#/components/schemas/link" type: array example: - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr50-263-2016 @@ -709,7 +687,7 @@ components: rel: parent - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale rel: collection - - $ref: "#/components/schemas/itemBase" + - $ref: "./schemas.yaml#/components/schemas/itemBase" items: description: >- A FeatureCollection augmented with foreign members that contain values relevant @@ -717,11 +695,11 @@ components: properties: features: items: - $ref: "#/components/schemas/item" + $ref: "./schemas.yaml#/components/schemas/item" type: array links: items: - $ref: "#/components/schemas/link" + $ref: "./schemas.yaml#/components/schemas/link" type: array example: - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items @@ -746,7 +724,7 @@ components: title: Assets description: List of Assets attached to this feature. 
additionalProperties: - $ref: "#/components/schemas/assetBase" + $ref: "./schemas.yaml#/components/schemas/assetBase" type: object readOnly: true example: @@ -784,7 +762,7 @@ components: properties: features: items: - $ref: "#/components/schemas/item" + $ref: "./schemas.yaml#/components/schemas/item" type: array type: enum: @@ -796,18 +774,18 @@ components: type: object itemsSearchGet: allOf: - - $ref: "#/components/schemas/itemsSearch" + - $ref: "./schemas.yaml#/components/schemas/itemsSearch" - type: object properties: links: - $ref: "#/components/schemas/itemsSearchLinks" + $ref: "./schemas.yaml#/components/schemas/itemsSearchLinks" itemsSearchPost: allOf: - - $ref: "#/components/schemas/itemsSearch" + - $ref: "./schemas.yaml#/components/schemas/itemsSearch" - type: object properties: links: - $ref: "#/components/schemas/itemsSearchPostLinks" + $ref: "./schemas.yaml#/components/schemas/itemsSearchPostLinks" itemsSearchLinks: description: >- An array of links. Can be used for pagination, e.g. by providing a link with the `next` @@ -818,7 +796,7 @@ components: - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab rel: next items: - $ref: "#/components/schemas/link" + $ref: "./schemas.yaml#/components/schemas/link" type: array itemsSearchPostLinks: description: >- @@ -833,7 +811,7 @@ components: body: {} merge: true items: - $ref: "#/components/schemas/linkPostSearch" + $ref: "./schemas.yaml#/components/schemas/linkPostSearch" type: array itemId: title: ID @@ -856,15 +834,15 @@ components: One of the two is required properties: created: - $ref: "#/components/schemas/created" + $ref: "./schemas.yaml#/components/schemas/created" datetime: - $ref: "#/components/schemas/datetime" + $ref: "./schemas.yaml#/components/schemas/datetime" start_datetime: - $ref: "#/components/schemas/datetime" + $ref: "./schemas.yaml#/components/schemas/datetime" end_datetime: - $ref: "#/components/schemas/datetime" + $ref: "./schemas.yaml#/components/schemas/datetime" updated: - $ref: "#/components/schemas/updated" + $ref: "./schemas.yaml#/components/schemas/updated" title: description: Human readable title of the Feature example: Feature title @@ -891,10 +869,10 @@ components: type: string links: items: - $ref: "#/components/schemas/link" + $ref: "./schemas.yaml#/components/schemas/link" type: array stac_version: - $ref: "#/components/schemas/stac_version" + $ref: "./schemas.yaml#/components/schemas/stac_version" title: example: Buildings in Bonn type: string @@ -940,7 +918,7 @@ components: description: Only returns maximum number of results (page size) properties: limit: - $ref: "#/components/schemas/limit" + $ref: "./schemas.yaml#/components/schemas/limit" type: object linestringGeoJSON: properties: @@ -996,7 +974,7 @@ components: type: object linkPostSearch: allOf: - - $ref: "#/components/schemas/link" + - $ref: "./schemas.yaml#/components/schemas/link" - type: object properties: body: @@ -1221,7 +1199,7 @@ components: url: https://www.swisstopo.admin.ch query: additionalProperties: - $ref: "#/components/schemas/queryProp" + $ref: "./schemas.yaml#/components/schemas/queryProp" description: Define which properties to query and the operations to apply example: title: @@ -1237,7 +1215,7 @@ components: description: Allows users to query properties for specific values properties: query: - $ref: "#/components/schemas/query" + $ref: "./schemas.yaml#/components/schemas/query" type: object queryProp: anyOf: @@ -1325,14 +1303,14 @@ components: - thumbnail searchBody: allOf: - # - $ref: 
"#/components/schemas/assetQueryFilter" - - $ref: "#/components/schemas/queryFilter" - - $ref: "#/components/schemas/bboxFilter" - - $ref: "#/components/schemas/datetimeFilter" - - $ref: "#/components/schemas/intersectsFilter" - - $ref: "#/components/schemas/collectionsFilter" - - $ref: "#/components/schemas/idsFilter" - - $ref: "#/components/schemas/limitFilter" + # - $ref: "./schemas.yaml#/components/schemas/assetQueryFilter" + - $ref: "./schemas.yaml#/components/schemas/queryFilter" + - $ref: "./schemas.yaml#/components/schemas/bboxFilter" + - $ref: "./schemas.yaml#/components/schemas/datetimeFilter" + - $ref: "./schemas.yaml#/components/schemas/intersectsFilter" + - $ref: "./schemas.yaml#/components/schemas/collectionsFilter" + - $ref: "./schemas.yaml#/components/schemas/idsFilter" + - $ref: "./schemas.yaml#/components/schemas/limitFilter" description: The search criteria type: object stac_version: diff --git a/spec/openapi.yaml b/spec/openapi.yaml new file mode 100644 index 00000000..5038f1e1 --- /dev/null +++ b/spec/openapi.yaml @@ -0,0 +1,193 @@ +openapi: 3.0.1 +info: + contact: + name: API Specification (based on STAC) + url: http://data.geo.admin.ch/api/stac/v0.9/ + description: >- + This is an OpenAPI definition of the API to query and access federal geodata on + data.geo.admin.ch. The API is based on the core SpatioTemporal Asset Catalog API specification + [STAC](http://stacspec.org) and adds two extensions for extended searching possibilities. + title: The SpatioTemporal Asset Catalog API for data.geo.admin.ch + version: 0.9.0 +servers: + - description: Data.geo.admin.ch + url: http://data.geo.admin.ch/api/stac/v0.9 +tags: + - description: Essential characteristics of this API + name: Capabilities + - description: Access to data (features) + name: Data + - description: Extension to OGC API - Features to support STAC metadata model and search API + name: STAC + +paths: + /: + get: + description: >- + The landing page provides links to the API definition, the conformance + statements and to the feature collections in this dataset. 
+ operationId: getLandingPage + responses: + "200": + $ref: "./components/responses.yaml#/components/responses/LandingPage" + "500": + $ref: "./components/responses.yaml#/components/responses/ServerError" + summary: Landing page + tags: + - Capabilities + /collections: + get: + operationId: getCollections + parameters: + - $ref: "./components/parameters.yaml#/components/parameters/limit" + responses: + "200": + $ref: "./components/responses.yaml#/components/responses/Collections" + "500": + $ref: "./components/responses.yaml#/components/responses/ServerError" + summary: Fetch collections + description: The feature collections in the dataset + tags: + - Data + /collections/{collectionId}: + get: + operationId: describeCollection + parameters: + - $ref: "./components/parameters.yaml#/components/parameters/collectionId" + - $ref: "./components/parameters.yaml#/components/parameters/IfMatch" + - $ref: "./components/parameters.yaml#/components/parameters/IfNoneMatch" + responses: + "200": + $ref: "./components/responses.yaml#/components/responses/Collection" + "304": + $ref: "./components/responses.yaml#/components/responses/NotModified" + "404": + $ref: "./components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "./components/responses.yaml#/components/responses/PreconditionFailed" + "500": + $ref: "./components/responses.yaml#/components/responses/ServerError" + summary: Fetch a single collection + description: Describe the feature collection with id `collectionId` + tags: + - Data + /collections/{collectionId}/items: + get: + description: >- + Fetch features of the feature collection with id `collectionId`. + + + Every feature in a dataset belongs to a collection. A dataset may + consist of multiple feature collections. A feature collection is often a + collection of features of a similar type, based on a common schema. + + + Use content negotiation to request HTML or GeoJSON. + operationId: getFeatures + parameters: + - $ref: "./components/parameters.yaml#/components/parameters/collectionId" + - $ref: "./components/parameters.yaml#/components/parameters/limit" + - $ref: "./components/parameters.yaml#/components/parameters/bbox" + - $ref: "./components/parameters.yaml#/components/parameters/datetime" + responses: + "200": + $ref: "./components/responses.yaml#/components/responses/Features" + "400": + $ref: "./components/responses.yaml#/components/responses/InvalidParameter" + "404": + $ref: "./components/responses.yaml#/components/responses/NotFound" + "500": + $ref: "./components/responses.yaml#/components/responses/ServerError" + summary: Fetch features + tags: + - Data + /collections/{collectionId}/items/{featureId}: + get: + description: >- + Fetch the feature with id `featureId` in the feature collection + with id `collectionId`. + + + Use content negotiation to request HTML or GeoJSON. 
+ operationId: getFeature + parameters: + - $ref: "./components/parameters.yaml#/components/parameters/collectionId" + - $ref: "./components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/IfMatch" + - $ref: "./components/parameters.yaml#/components/parameters/IfNoneMatch" + responses: + "200": + $ref: "./components/responses.yaml#/components/responses/Feature" + "304": + $ref: "./components/responses.yaml#/components/responses/NotModified" + "404": + $ref: "./components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "./components/responses.yaml#/components/responses/PreconditionFailed" + "500": + $ref: "./components/responses.yaml#/components/responses/ServerError" + summary: Fetch a single feature + tags: + - Data + /conformance: + get: + description: >- + A list of all conformance classes specified in a standard that the + server conforms to. + operationId: getConformanceDeclaration + responses: + "200": + $ref: "./components/responses.yaml#/components/responses/ConformanceDeclaration" + "500": + $ref: "./components/responses.yaml#/components/responses/ServerError" + summary: Information about specifications that this API conforms to + tags: + - Capabilities + /search: + get: + description: >- + Retrieve Items matching filters. Intended as a shorthand API for simple + queries. + + operationId: getSearchSTAC + parameters: + - $ref: "./components/parameters.yaml#/components/parameters/bbox" + - $ref: "./components/parameters.yaml#/components/parameters/datetime" + - $ref: "./components/parameters.yaml#/components/parameters/limit" + - $ref: "./components/parameters.yaml#/components/parameters/ids" + - $ref: "./components/parameters.yaml#/components/parameters/collectionsArray" + responses: + "200": + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/itemsSearchGet" + description: A feature collection. + "500": + $ref: "./components/responses.yaml#/components/responses/ServerError" + summary: Search STAC items with simple filtering. + tags: + - STAC + post: + description: >- + Retrieve items matching filters. Intended as the standard, full-featured + query API. + + operationId: postSearchSTAC + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/searchBody" + responses: + "200": + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/itemsSearchPost" + description: A feature collection. + "500": + $ref: "./components/responses.yaml#/components/responses/ServerError" + summary: Search STAC items with full-featured filtering. + tags: + - STAC diff --git a/spec/paths.yml b/spec/paths.yml deleted file mode 100644 index 492aa106..00000000 --- a/spec/paths.yml +++ /dev/null @@ -1,174 +0,0 @@ -paths: - /: - get: - description: >- - The landing page provides links to the API definition, the conformance - statements and to the feature collections in this dataset. 
- operationId: getLandingPage - responses: - "200": - $ref: "#/components/responses/LandingPage" - "500": - $ref: "#/components/responses/ServerError" - summary: Landing page - tags: - - Capabilities - /collections: - get: - operationId: getCollections - parameters: - - $ref: "#/components/parameters/limit" - responses: - "200": - $ref: "#/components/responses/Collections" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch collections - description: The feature collections in the dataset - tags: - - Data - /collections/{collectionId}: - get: - operationId: describeCollection - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" - responses: - "200": - $ref: "#/components/responses/Collection" - "304": - $ref: "#/components/responses/NotModified" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch a single collection - description: Describe the feature collection with id `collectionId` - tags: - - Data - /collections/{collectionId}/items: - get: - description: >- - Fetch features of the feature collection with id `collectionId`. - - - Every feature in a dataset belongs to a collection. A dataset may - consist of multiple feature collections. A feature collection is often a - collection of features of a similar type, based on a common schema. - - - Use content negotiation to request HTML or GeoJSON. - operationId: getFeatures - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/limit" - - $ref: "#/components/parameters/bbox" - - $ref: "#/components/parameters/datetime" - responses: - "200": - $ref: "#/components/responses/Features" - "400": - $ref: "#/components/responses/InvalidParameter" - "404": - $ref: "#/components/responses/NotFound" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch features - tags: - - Data - /collections/{collectionId}/items/{featureId}: - get: - description: >- - Fetch the feature with id `featureId` in the feature collection - with id `collectionId`. - - - Use content negotiation to request HTML or GeoJSON. - operationId: getFeature - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" - responses: - "200": - $ref: "#/components/responses/Feature" - "304": - $ref: "#/components/responses/NotModified" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch a single feature - tags: - - Data - /conformance: - get: - description: >- - A list of all conformance classes specified in a standard that the - server conforms to. - operationId: getConformanceDeclaration - responses: - "200": - $ref: "#/components/responses/ConformanceDeclaration" - "500": - $ref: "#/components/responses/ServerError" - summary: Information about specifications that this API conforms to - tags: - - Capabilities - /search: - get: - description: >- - Retrieve Items matching filters. Intended as a shorthand API for simple - queries. 
- - operationId: getSearchSTAC - parameters: - # - $ref: "#/components/parameters/assetQuery" - # Note: this is commented here since not part of the official spec - # - $ref: "#/components/parameters/query" - - $ref: "#/components/parameters/bbox" - - $ref: "#/components/parameters/datetime" - - $ref: "#/components/parameters/limit" - - $ref: "#/components/parameters/ids" - - $ref: "#/components/parameters/collectionsArray" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/itemsSearchGet" - description: A feature collection. - "500": - $ref: "#/components/responses/ServerError" - summary: Search STAC items with simple filtering. - tags: - - STAC - post: - description: >- - Retrieve items matching filters. Intended as the standard, full-featured - query API. - - operationId: postSearchSTAC - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/searchBody" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/itemsSearchPost" - description: A feature collection. - "500": - $ref: "#/components/responses/ServerError" - summary: Search STAC items with full-featured filtering. - tags: - - STAC diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index 70b42f84..eceb10e6 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -21,307 +21,244 @@ tags: - description: Extension to OGC API - Features to support STAC metadata model and search API name: STAC -components: - headers: - ETag: - schema: - type: string - description: >- - The RFC7232 ETag header field in a response provides the current entity- tag - for the selected resource. An entity-tag is an opaque identifier for different - versions of a resource over time, regardless whether multiple versions are - valid at the same time. An entity-tag consists of an opaque quoted string, - possibly prefixed by a weakness indicator. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - required: true - parameters: - bbox: - explode: false - in: query - name: bbox - required: false - schema: - $ref: "#/components/schemas/bbox" - style: form - example: - collectionId: - description: Local identifier of a collection - in: path - name: collectionId - required: true - schema: - type: string - collectionsArray: - explode: false - in: query - name: collections - required: false - schema: - $ref: "#/components/schemas/collectionsArray" - datetime: - explode: false - in: query - name: datetime - required: false - schema: - $ref: "#/components/schemas/datetimeQuery" - example: 2018-02-12T00%3A00%3A00Z%2F2018-03-18T12%3A31%3A12Z - style: form - featureId: - description: Local identifier of a feature - in: path - name: featureId - required: true - schema: - type: string - ids: +paths: + /: + get: description: >- - Array of Item ids to return. All other filter parameters that further restrict - the number of search results are ignored - explode: false - in: query - name: ids - required: false - schema: - $ref: "#/components/schemas/ids" - limit: - explode: false - in: query - name: limit - required: false - schema: - $ref: "#/components/schemas/limit" - style: form - IfNoneMatch: - name: If-None-Match - in: header - schema: - type: string + The landing page provides links to the API definition, the conformance statements + and to the feature collections in this dataset. 
+ operationId: getLandingPage + responses: + "200": + $ref: "#/components/responses/LandingPage" + "500": + $ref: "#/components/responses/ServerError" + summary: Landing page + tags: + - Capabilities + /collections: + get: + operationId: getCollections + parameters: + - $ref: "#/components/parameters/limit" + responses: + "200": + $ref: "#/components/responses/Collections" + "500": + $ref: "#/components/responses/ServerError" + summary: Fetch collections + description: The feature collections in the dataset + tags: + - Data + /collections/{collectionId}: + get: + operationId: describeCollection + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" + responses: + "200": + $ref: "#/components/responses/Collection" + "304": + $ref: "#/components/responses/NotModified" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + "500": + $ref: "#/components/responses/ServerError" + summary: Fetch a single collection + description: Describe the feature collection with id `collectionId` + tags: + - Data + /collections/{collectionId}/items: + get: description: >- - The RFC7232 `If-None-Match` header field makes the GET request method conditional. - It is composed of a comma separated list of ETags or value "*". + Fetch features of the feature collection with id `collectionId`. - The server compares the client's ETags (sent with `If-None-Match`) with the - ETag for its current version of the resource, and if both values match (that - is, the resource has not changed), the server sends back a `304 Not Modified` - status, without a body, which tells the client that the cached version of - the response is still good to use (fresh). - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - IfMatch: - name: If-Match - in: header - schema: - type: string - description: >- - The RFC7232 `If-Match` header field makes the GET request method conditional. - It is composed of a comma separated list of ETags or value "*". + Every feature in a dataset belongs to a collection. A dataset may consist + of multiple feature collections. A feature collection is often a collection + of features of a similar type, based on a common schema. - The server compares the client's ETags (sent with `If-Match`) with the ETag - for its current version of the resource, and if both values don't match (that - is, the resource has changed), the server sends back a `412 Precondition Failed` - status, without a body, which tells the client that the cached version of - the response is not good to use anymore. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - responses: - Collection: - headers: - ETag: - $ref: "#/components/headers/ETag" - content: - application/json: - schema: - $ref: "#/components/schemas/collection" + Use content negotiation to request HTML or GeoJSON. 
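For illustration, the conditional-request mechanics defined above (`ETag`, `If-None-Match`, `304 Not Modified`) can be exercised with any HTTP client. A minimal sketch using Python's `requests` library, assuming the base URL from the landing-page example and a collection id taken from the collection examples in this spec (both purely illustrative):

```python
import requests

# Base URL as used in the landing-page example of this spec; the collection id
# below is only an illustration.
BASE = "http://data.geo.admin.ch/api/stac/v0.9"
COLLECTION = "ch.swisstopo.pixelkarte-farbe-pk200.noscale"

# First request: the response carries an ETag identifying the current version.
first = requests.get(f"{BASE}/collections/{COLLECTION}")
first.raise_for_status()
etag = first.headers["ETag"]

# Second request: send the ETag back via If-None-Match. If the collection has
# not changed, the server answers 304 Not Modified with an empty body.
second = requests.get(f"{BASE}/collections/{COLLECTION}",
                      headers={"If-None-Match": etag})
if second.status_code == 304:
    print("cached copy is still fresh")
else:
    print("collection changed, new ETag:", second.headers.get("ETag"))
```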
+ operationId: getFeatures + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/bbox" + - $ref: "#/components/parameters/datetime" + responses: + "200": + $ref: "#/components/responses/Features" + "400": + $ref: "#/components/responses/InvalidParameter" + "404": + $ref: "#/components/responses/NotFound" + "500": + $ref: "#/components/responses/ServerError" + summary: Fetch features + tags: + - Data + /collections/{collectionId}/items/{featureId}: + get: description: >- - Information about the feature collection with id `collectionId`. - - - The response contains a link to the items in the collection (path `/collections/{collectionId}/items`, - link relation `items`) as well as key information about the collection. This - information includes: - - - * A local identifier for the collection that is unique for the dataset - - * A list of coordinate reference systems (CRS) in which geometries may be - returned by the server. The first CRS is the default coordinate reference - system (the default is always WGS 84 with axis order longitude/latitude) - - * An optional title and description for the collection + Fetch the feature with id `featureId` in the feature collection with id `collectionId`. - * An optional extent that can be used to provide an indication of the spatial - and temporal extent of the collection - typically derived from the data - * An optional indicator about the type of the items in the collection (the - default value, if the indicator is not provided, is 'feature') - Collections: - content: - application/json: - schema: - $ref: "#/components/schemas/collections" + Use content negotiation to request HTML or GeoJSON. + operationId: getFeature + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" + responses: + "200": + $ref: "#/components/responses/Feature" + "304": + $ref: "#/components/responses/NotModified" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + "500": + $ref: "#/components/responses/ServerError" + summary: Fetch a single feature + tags: + - Data + /conformance: + get: description: >- - The feature collections shared by this API. - - - The dataset is organized as one or more feature collections. This resource - provides information about and access to the collections. - - - The response contains the list of collections. For each collection, a link - to the items in the collection (path `/collections/{collectionId}/items`, - link relation `items`) as well as key information about the collection. This - information includes: - - - * A local identifier for the collection that is unique for the dataset - - * A list of coordinate reference systems (CRS) in which geometries may be - returned by the server. The first CRS is the default coordinate reference - system (the default is always WGS 84 with axis order longitude/latitude) - - * An optional title and description for the collection - - * An optional extent that can be used to provide an indication of the spatial - and temporal extent of the collection - typically derived from the data - - * An optional indicator about the type of the items in the collection (the - default value, if the indicator is not provided, is 'feature'). 
- - The `limit` parameter may be used to control the subset of the selected collections - that should be returned in the response, the page size. Each page include - links to support paging (link relation `next` and/or `previous`). - ConformanceDeclaration: - content: - application/json: - example: - conformsTo: - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30 - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson - schema: - $ref: "#/components/schemas/confClasses" + A list of all conformance classes specified in a standard that the server + conforms to. + operationId: getConformanceDeclaration + responses: + "200": + $ref: "#/components/responses/ConformanceDeclaration" + "500": + $ref: "#/components/responses/ServerError" + summary: Information about specifications that this API conforms to + tags: + - Capabilities + /search: + get: description: >- - The URIs of all conformance classes supported by the server. - - - To support "generic" clients that want to access multiple OGC API Features - implementations - and not "just" a specific API / server, the server declares - the conformance classes it implements and conforms to. - Feature: - headers: - ETag: - $ref: "#/components/headers/ETag" - content: - application/json: - schema: - $ref: "#/components/schemas/item" + Retrieve Items matching filters. Intended as a shorthand API for simple queries. + operationId: getSearchSTAC + parameters: + - $ref: "#/components/parameters/bbox" + - $ref: "#/components/parameters/datetime" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/ids" + - $ref: "#/components/parameters/collectionsArray" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/itemsSearchGet" + description: A feature collection. + "500": + $ref: "#/components/responses/ServerError" + summary: Search STAC items with simple filtering. + tags: + - STAC + post: description: >- - Fetch the feature with id `featureId` in the feature collection with id `collectionId` - Features: - content: - application/json: - schema: - $ref: "#/components/schemas/items" + Retrieve items matching filters. Intended as the standard, full-featured query + API. + operationId: postSearchSTAC + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/searchBody" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/itemsSearchPost" + description: A feature collection. + "500": + $ref: "#/components/responses/ServerError" + summary: Search STAC items with full-featured filtering. + tags: + - STAC +components: + schemas: + assetQuery: + additionalProperties: + $ref: "#/components/schemas/assetQueryProp" description: >- - The response is a document consisting of features in the collection. The features - included in the response are determined by the server based on the query parameters - of the request. To support access to larger collections without overloading - the client, the API supports paged access with links to the next page, if - more features are selected that the page size. + Define which properties of the asset to query and the operations to apply. - The `bbox` and `datetime` parameter can be used to select only a subset of - the features in the collection (the features that are in the bounding box - or time interval). The `bbox` parameter matches all features in the collection - that are not associated with a location, too. 
The `datetime` parameter matches - all features in the collection that are not associated with a time stamp or - interval, too. + The following properties can be queried: + - `type`: query for assets with this specific media type - The `limit` parameter may be used to control the subset of the selected features - that should be returned in the response, the page size. Each page include - links to support paging (link relation `next` and/or `previous`). - NotModified: - description: The cached resource was not modified since last request. - InvalidParameter: - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 400 - description: "Invalid parameter" - description: A query parameter has an invalid value. - LandingPage: - content: - application/json: - example: - description: Catalog of Swiss Geodata Downloads - id: ch - links: - - href: http://data.geo.admin.ch/api/stac/v0.9/ - rel: self - type: application/json - title: this document - - href: http://data.geo.admin.ch/api/stac/v0.9/static/api.html - rel: service-doc - type: text/html - title: the API documentation - - href: http://data.geo.admin.ch/api/stac/v0.9/conformance - rel: conformance - type: application/json - title: OGC API conformance classes implemented by this server - - href: http://data.geo.admin.ch/api/stac/v0.9/collections - rel: data - type: application/json - title: Information about the feature collections - - href: http://data.geo.admin.ch/api/stac/v0.9/search - rel: search - type: application/json - title: Search across feature collections - stac_version: 0.9.0 - title: data.geo.admin.ch - schema: - $ref: "#/components/schemas/landingPage" - description: >- - The landing page provides links to the API definition (link relations `service-desc` - and `service-doc`), the Conformance declaration (path `/conformance`, link - relation `conformance`), and the Feature Collections (path `/collections`, - link relation `data`). - NotFound: - description: The specified resource/URI was not found - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 404 - description: "Resource not found" - BadRequest: - description: The request was malformed or semantically invalid - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 400 - description: "Invalid parameter" - PreconditionFailed: - description: Some condition specified by the request could not be met in the - server - ServerError: - description: >- - The request was syntactically and semantically valid, but an error occurred - while trying to act upon it - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 500 - description: "Internal server error" - schemas: + - `proj:epsg`: query for assets with this specific epsg + + - `eo:gsd`: query for assets with this specific gsd + + - `geoadmin:variant`: query for assets with this specific variant + example: + type: + eq: image/tiff + type: object + assetQueryFilter: + description: Allows users to query asset properties for specific values + properties: + assetQuery: + $ref: "#/components/schemas/assetQuery" + type: object + assetQueryProp: + anyOf: + - description: >- + If the object doesn't contain any of the operators, it is equivalent to + using the equals operator + - description: Match using an operator + properties: + contains: + description: >- + Find items with a property that contains the specified literal string, + e.g., matches ".*.*". 
A case-insensitive comparison must be + performed. + type: string + endsWith: + description: >- + Find items with a property that ends with the specified string. A case-insensitive + comparison must be performed. + type: string + eq: + description: >- + Find items with a property that is equal to the specified value. For + strings, a case-insensitive comparison must be performed. + nullable: true + oneOf: + - type: string + - type: number + - type: boolean + in: + description: >- + Find items with a property that equals at least one entry in the specified + array. A case-insensitive comparison must be performed. + items: + oneOf: + - type: string + - type: number + type: array + startsWith: + description: >- + Find items with a property that begins with the specified string. A + case-insensitive comparison must be performed. + type: string + type: object + description: Apply query operations to a specific property assetBase: title: Asset description: The `property name` defines the ID of the Asset. @@ -354,33 +291,28 @@ components: updated: $ref: "#/components/schemas/updated" bbox: - description: >- + description: | Only features that have a geometry that intersects the bounding box are selected. The bounding box is provided as four numbers: - * Lower left corner, coordinate axis 1 - * Lower left corner, coordinate axis 2 - * Upper right corner, coordinate axis 1 - * Upper right corner, coordinate axis 2 + The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). - The coordinate reference system of the values is WGS84 longitude/latitude - (http://www.opengis.net/def/crs/OGC/1.3/CRS84). - - - For WGS84 longitude/latitude the values are in most cases the sequence of - minimum longitude, minimum latitude, maximum longitude and maximum latitude. - However, in cases where the box spans the antimeridian the first value (west-most - box edge) is larger than the third value (east-most box edge). + For WGS84 longitude/latitude the values are in most cases the sequence + of minimum longitude, minimum latitude, maximum longitude and maximum + latitude. However, in cases where the box spans the antimeridian the + first value (west-most box edge) is larger than the third value + (east-most box edge). - - Example: The bounding box of Switzerland in WGS 84 (from 5.96°E to 10.49°E - and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, - 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." + Example: The bounding box of Switzerland in + WGS 84 (from 5.96°E to 10.49°E and from 45.82°N to 47.81°N) would be + represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as + `bbox=5.96,45.82,10.49,47.81`." 
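To make the `bbox` semantics above concrete, here is a small sketch of a features request restricted to the Swiss bounding box from the example, again with Python's `requests`; the base URL and collection id are illustrative values taken from the examples in this spec, and the query parameters are the ones defined for `getFeatures`:

```python
import requests

BASE = "http://data.geo.admin.ch/api/stac/v0.9"             # from the landing-page example
COLLECTION = "ch.swisstopo.pixelkarte-farbe-pk200.noscale"  # illustrative collection id

# bbox order: min lon, min lat, max lon, max lat (WGS84), as described above.
params = {
    "bbox": "5.96,45.82,10.49,47.81",                          # bounding box of Switzerland
    "datetime": "2018-02-12T00:00:00Z/2018-03-18T12:31:12Z",   # closed interval form
    "limit": 10,
}
resp = requests.get(f"{BASE}/collections/{COLLECTION}/items", params=params)
resp.raise_for_status()
for item in resp.json().get("features", []):
    print(item["id"])
```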
example: - 5.96 - 45.82 @@ -596,23 +528,6 @@ components: swisstopo - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df rel: describedby - collectionWrite: - title: collection - allOf: - - $ref: "#/components/schemas/collectionBase" - - type: object - properties: - links: - type: array - items: - $ref: "#/components/schemas/link" - example: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby collections: properties: collections: @@ -1605,170 +1520,328 @@ components: type: string format: date-time readOnly: true -paths: - /: - get: + parameters: + assetQuery: description: >- - The landing page provides links to the API definition, the conformance statements - and to the feature collections in this dataset. - operationId: getLandingPage - responses: - "200": - $ref: "#/components/responses/LandingPage" - "500": - $ref: "#/components/responses/ServerError" - summary: Landing page - tags: - - Capabilities - /collections: - get: - operationId: getCollections - parameters: - - $ref: "#/components/parameters/limit" - responses: - "200": - $ref: "#/components/responses/Collections" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch collections - description: The feature collections in the dataset - tags: - - Data - /collections/{collectionId}: - get: - operationId: describeCollection - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" - responses: - "200": - $ref: "#/components/responses/Collection" - "304": - $ref: "#/components/responses/NotModified" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch a single collection - description: Describe the feature collection with id `collectionId` - tags: - - Data - /collections/{collectionId}/items: - get: + Query for properties in assets (e.g. mediatype). Use the JSON form of the + assetQueryFilter used in POST. + in: query + name: assetQuery + required: false + schema: + type: string + bbox: + explode: false + in: query + name: bbox + required: false + schema: + $ref: "#/components/schemas/bbox" + style: form + example: + collectionId: + description: Local identifier of a collection + in: path + name: collectionId + required: true + schema: + type: string + collectionsArray: + explode: false + in: query + name: collections + required: false + schema: + $ref: "#/components/schemas/collectionsArray" + datetime: + explode: false + in: query + name: datetime + required: false + schema: + $ref: "#/components/schemas/datetimeQuery" + example: 2018-02-12T00%3A00%3A00Z%2F2018-03-18T12%3A31%3A12Z + style: form + featureId: + description: Local identifier of a feature + in: path + name: featureId + required: true + schema: + type: string + ids: description: >- - Fetch features of the feature collection with id `collectionId`. + Array of Item ids to return. 
All other filter parameters that further restrict + the number of search results are ignored + explode: false + in: query + name: ids + required: false + schema: + $ref: "#/components/schemas/ids" + limit: + explode: false + in: query + name: limit + required: false + schema: + $ref: "#/components/schemas/limit" + style: form + query: + description: Query for properties in items. Use the JSON form of the queryFilter + used in POST. + in: query + name: query + required: false + schema: + type: string + IfNoneMatch: + name: If-None-Match + in: header + schema: + type: string + description: >- + The RFC7232 `If-None-Match` header field makes the GET request method conditional. + It is composed of a comma separated list of ETags or value "*". + + + The server compares the client's ETags (sent with `If-None-Match`) with the + ETag for its current version of the resource, and if both values match (that + is, the resource has not changed), the server sends back a `304 Not Modified` + status, without a body, which tells the client that the cached version of + the response is still good to use (fresh). + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + IfMatch: + name: If-Match + in: header + schema: + type: string + description: >- + The RFC7232 `If-Match` header field makes the GET request method conditional. + It is composed of a comma separated list of ETags or value "*". + + + The server compares the client's ETags (sent with `If-Match`) with the ETag + for its current version of the resource, and if both values don't match (that + is, the resource has changed), the server sends back a `412 Precondition Failed` + status, without a body, which tells the client that the cached version of + the response is not good to use anymore. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + responses: + Collection: + headers: + ETag: + $ref: "#/components/headers/ETag" + content: + application/json: + schema: + $ref: "#/components/schemas/collection" + description: >- + Information about the feature collection with id `collectionId`. + + + The response contains a link to the items in the collection (path `/collections/{collectionId}/items`, + link relation `items`) as well as key information about the collection. This + information includes: + + + * A local identifier for the collection that is unique for the dataset + + * A list of coordinate reference systems (CRS) in which geometries may be + returned by the server. The first CRS is the default coordinate reference + system (the default is always WGS 84 with axis order longitude/latitude) + + * An optional title and description for the collection + + * An optional extent that can be used to provide an indication of the spatial + and temporal extent of the collection - typically derived from the data + + * An optional indicator about the type of the items in the collection (the + default value, if the indicator is not provided, is 'feature') + Collections: + content: + application/json: + schema: + $ref: "#/components/schemas/collections" + description: >- + The feature collections shared by this API. + + + The dataset is organized as one or more feature collections. This resource + provides information about and access to the collections. + + + The response contains the list of collections. For each collection, a link + to the items in the collection (path `/collections/{collectionId}/items`, + link relation `items`) as well as key information about the collection. 
This + information includes: + + + * A local identifier for the collection that is unique for the dataset + * A list of coordinate reference systems (CRS) in which geometries may be + returned by the server. The first CRS is the default coordinate reference + system (the default is always WGS 84 with axis order longitude/latitude) - Every feature in a dataset belongs to a collection. A dataset may consist - of multiple feature collections. A feature collection is often a collection - of features of a similar type, based on a common schema. + * An optional title and description for the collection + * An optional extent that can be used to provide an indication of the spatial + and temporal extent of the collection - typically derived from the data - Use content negotiation to request HTML or GeoJSON. - operationId: getFeatures - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/limit" - - $ref: "#/components/parameters/bbox" - - $ref: "#/components/parameters/datetime" - responses: - "200": - $ref: "#/components/responses/Features" - "400": - $ref: "#/components/responses/InvalidParameter" - "404": - $ref: "#/components/responses/NotFound" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch features - tags: - - Data - /collections/{collectionId}/items/{featureId}: - get: + * An optional indicator about the type of the items in the collection (the + default value, if the indicator is not provided, is 'feature'). + + The `limit` parameter may be used to control the subset of the selected collections + that should be returned in the response, the page size. Each page include + links to support paging (link relation `next` and/or `previous`). + ConformanceDeclaration: + content: + application/json: + example: + conformsTo: + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30 + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson + schema: + $ref: "#/components/schemas/confClasses" description: >- - Fetch the feature with id `featureId` in the feature collection with id `collectionId`. + The URIs of all conformance classes supported by the server. - Use content negotiation to request HTML or GeoJSON. - operationId: getFeature - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" - responses: - "200": - $ref: "#/components/responses/Feature" - "304": - $ref: "#/components/responses/NotModified" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch a single feature - tags: - - Data - /conformance: - get: + To support "generic" clients that want to access multiple OGC API Features + implementations - and not "just" a specific API / server, the server declares + the conformance classes it implements and conforms to. + Feature: + headers: + ETag: + $ref: "#/components/headers/ETag" + content: + application/json: + schema: + $ref: "#/components/schemas/item" description: >- - A list of all conformance classes specified in a standard that the server - conforms to. 
- operationId: getConformanceDeclaration - responses: - "200": - $ref: "#/components/responses/ConformanceDeclaration" - "500": - $ref: "#/components/responses/ServerError" - summary: Information about specifications that this API conforms to - tags: - - Capabilities - /search: - get: + Fetch the feature with id `featureId` in the feature collection with id `collectionId` + Features: + content: + application/json: + schema: + $ref: "#/components/schemas/items" description: >- - Retrieve Items matching filters. Intended as a shorthand API for simple queries. - operationId: getSearchSTAC - parameters: - - $ref: "#/components/parameters/bbox" - - $ref: "#/components/parameters/datetime" - - $ref: "#/components/parameters/limit" - - $ref: "#/components/parameters/ids" - - $ref: "#/components/parameters/collectionsArray" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/itemsSearchGet" - description: A feature collection. - "500": - $ref: "#/components/responses/ServerError" - summary: Search STAC items with simple filtering. - tags: - - STAC - post: + The response is a document consisting of features in the collection. The features + included in the response are determined by the server based on the query parameters + of the request. To support access to larger collections without overloading + the client, the API supports paged access with links to the next page, if + more features are selected that the page size. + + + The `bbox` and `datetime` parameter can be used to select only a subset of + the features in the collection (the features that are in the bounding box + or time interval). The `bbox` parameter matches all features in the collection + that are not associated with a location, too. The `datetime` parameter matches + all features in the collection that are not associated with a time stamp or + interval, too. + + + The `limit` parameter may be used to control the subset of the selected features + that should be returned in the response, the page size. Each page include + links to support paging (link relation `next` and/or `previous`). + NotModified: + description: The cached resource was not modified since last request. + InvalidParameter: + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 400 + description: "Invalid parameter" + description: A query parameter has an invalid value. + LandingPage: + content: + application/json: + example: + description: Catalog of Swiss Geodata Downloads + id: ch + links: + - href: http://data.geo.admin.ch/api/stac/v0.9/ + rel: self + type: application/json + title: this document + - href: http://data.geo.admin.ch/api/stac/v0.9/static/api.html + rel: service-doc + type: text/html + title: the API documentation + - href: http://data.geo.admin.ch/api/stac/v0.9/conformance + rel: conformance + type: application/json + title: OGC API conformance classes implemented by this server + - href: http://data.geo.admin.ch/api/stac/v0.9/collections + rel: data + type: application/json + title: Information about the feature collections + - href: http://data.geo.admin.ch/api/stac/v0.9/search + rel: search + type: application/json + title: Search across feature collections + stac_version: 0.9.0 + title: data.geo.admin.ch + schema: + $ref: "#/components/schemas/landingPage" description: >- - Retrieve items matching filters. Intended as the standard, full-featured query - API. 
- operationId: postSearchSTAC - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/searchBody" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/itemsSearchPost" - description: A feature collection. - "500": - $ref: "#/components/responses/ServerError" - summary: Search STAC items with full-featured filtering. - tags: - - STAC + The landing page provides links to the API definition (link relations `service-desc` + and `service-doc`), the Conformance declaration (path `/conformance`, link + relation `conformance`), and the Feature Collections (path `/collections`, + link relation `data`). + NotFound: + description: The specified resource/URI was not found + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 404 + description: "Resource not found" + BadRequest: + description: The request was malformed or semantically invalid + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 400 + description: "Invalid parameter" + PermissionDenied: + description: No Permission for this request + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 403 + description: "Permission denied" + PreconditionFailed: + description: Some condition specified by the request could not be met in the + server + ServerError: + description: >- + The request was syntactically and semantically valid, but an error occurred + while trying to act upon it + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 500 + description: "Internal server error" + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- tag + for the selected resource. An entity-tag is an opaque identifier for different + versions of a resource over time, regardless whether multiple versions are + valid at the same time. An entity-tag consists of an opaque quoted string, + possibly prefixed by a weakness indicator. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index 819a9dc2..cf848edb 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -26,12 +26,12 @@ tags: Metadata management requests. Theses requests are used to create, update or delete the STAC metadata. - *NOTE: these requests requires authentication as described in [here](#tag/Authentication).* + *NOTE: these requests require authentication as described in [here](#tag/Authentication).* - name: Asset Upload Management description: | Asset file can be uploaded via the STAC API using the following requests. - *NOTE: the POST requests requires authentication as described in [here](#tag/Authentication).* + *NOTE: the POST requests require authentication as described in [here](#tag/Authentication).* ### Example @@ -144,1641 +144,2336 @@ tags: --header 'Content-Type: application/json' \ --data '{"username": "MickeyMouse", "password": "I_love_Minnie_Mouse"}' ``` -components: - headers: - ETag: - schema: - type: string - description: >- - The RFC7232 ETag header field in a response provides the current entity- tag - for the selected resource. 
An entity-tag is an opaque identifier for different - versions of a resource over time, regardless whether multiple versions are - valid at the same time. An entity-tag consists of an opaque quoted string, - possibly prefixed by a weakness indicator. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - required: true - parameters: - bbox: - explode: false - in: query - name: bbox - required: false - schema: - $ref: "#/components/schemas/bbox" - style: form - example: - collectionId: - description: Local identifier of a collection - in: path - name: collectionId - required: true - schema: - type: string - collectionsArray: - explode: false - in: query - name: collections - required: false - schema: - $ref: "#/components/schemas/collectionsArray" - datetime: - explode: false - in: query - name: datetime - required: false - schema: - $ref: "#/components/schemas/datetimeQuery" - example: 2018-02-12T00%3A00%3A00Z%2F2018-03-18T12%3A31%3A12Z - style: form - featureId: - description: Local identifier of a feature - in: path - name: featureId - required: true - schema: - type: string - ids: +paths: + /: + get: description: >- - Array of Item ids to return. All other filter parameters that further restrict - the number of search results are ignored - explode: false - in: query - name: ids - required: false - schema: - $ref: "#/components/schemas/ids" - limit: - explode: false - in: query - name: limit - required: false - schema: - $ref: "#/components/schemas/limit" - style: form - IfNoneMatch: - name: If-None-Match - in: header - schema: - type: string + The landing page provides links to the API definition, the conformance statements + and to the feature collections in this dataset. + operationId: getLandingPage + responses: + "200": + $ref: "#/components/responses/LandingPage" + "500": + $ref: "#/components/responses/ServerError" + summary: Landing page + tags: + - Capabilities + /collections: + get: + operationId: getCollections + parameters: + - $ref: "#/components/parameters/limit" + responses: + "200": + $ref: "#/components/responses/Collections" + "500": + $ref: "#/components/responses/ServerError" + summary: Fetch collections + description: The feature collections in the dataset + tags: + - Data + post: + tags: + - Data Management + summary: >- + Create a new collection + operationId: createCollection + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/collection" + example: + description: The National Map 1:200,000 is a topographic map giving + an overview of Switzerland. 
+ id: ch.swisstopo.pixelkarte-farbe-pk200.noscale + license: proprietary + links: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + providers: + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch + title: National Map 1:200'000 + responses: + "201": + description: Returns the created Collection + headers: + Location: + description: A link to the collection + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/collectionWrite" + "403": + $ref: "#/components/responses/PermissionDenied" + "404": + $ref: "#/components/responses/NotFound" + "500": + $ref: "#/components/responses/ServerError" + /collections/{collectionId}: + get: + operationId: describeCollection + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" + responses: + "200": + $ref: "#/components/responses/Collection" + "304": + $ref: "#/components/responses/NotModified" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + "500": + $ref: "#/components/responses/ServerError" + summary: Fetch a single collection + description: Describe the feature collection with id `collectionId` + tags: + - Data + put: + tags: + - Data Management + summary: Update or create a collection description: >- - The RFC7232 `If-None-Match` header field makes the GET request method conditional. - It is composed of a comma separated list of ETags or value "*". - - - The server compares the client's ETags (sent with `If-None-Match`) with the - ETag for its current version of the resource, and if both values match (that - is, the resource has not changed), the server sends back a `304 Not Modified` - status, without a body, which tells the client that the cached version of - the response is still good to use (fresh). - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - IfMatch: - name: If-Match - in: header - schema: - type: string + Update or create a collection with Id `collectionId` with a complete collection + definition. If the collection doesn't exists it is then created. + operationId: updateCollection + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/IfMatchWrite" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/collectionWrite" + example: + description: The National Map 1:200,000 is a topographic map giving + an overview of Switzerland. 
+ id: ch.swisstopo.pixelkarte-farbe-pk200.noscale + license: proprietary + links: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + providers: + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch + title: National Map 1:200'000 + responses: + "200": + description: Returns the updated Collection + content: + application/json: + schema: + $ref: "#/components/schemas/collection" + "201": + description: Returns the created Collection + headers: + Location: + description: A link to the collection + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/collection" + "403": + $ref: "#/components/responses/PermissionDenied" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + "500": + $ref: "#/components/responses/ServerError" + patch: + tags: + - Data Management + summary: Partial update of a collection description: >- - The RFC7232 `If-Match` header field makes the GET request method conditional. - It is composed of a comma separated list of ETags or value "*". - - - The server compares the client's ETags (sent with `If-Match`) with the ETag - for its current version of the resource, and if both values don't match (that - is, the resource has changed), the server sends back a `412 Precondition Failed` - status, without a body, which tells the client that the cached version of - the response is not good to use anymore. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - assetId: - name: assetId - in: path - description: Local identifier of an asset. - required: true - schema: - type: string - uploadId: - name: uploadId - in: path - description: Local identifier of an asset's upload. - required: true - schema: - type: string - presignedUrl: - name: presignedUrl - in: path - description: Presigned url returned by [Create a new Asset's multipart upload](#operation/createAssetUpload). - required: true - schema: - type: string - IfMatchWrite: - name: If-Match - in: header - schema: - type: string - description: >- - The RFC7232 `If-Match` header field makes the PUT/PATCH/DEL request method - conditional. It is composed of a comma separated list of ETags or value "*". - - - The server compares the client's ETags (sent with `If-Match`) with the ETag - for its current version of the resource, and if both values don't match (that - is, the resource has changed), the server sends back a `412 Precondition Failed` - status, without a body, which tells the client that he would overwrite another - changes of the resource. 
- example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - responses: - Collection: - headers: - ETag: - $ref: "#/components/headers/ETag" - content: - application/json: - schema: - $ref: "#/components/schemas/collection" + Update an existing collection with Id `collectionId` with a partial collection + definition + operationId: partialUpdateCollection + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/IfMatch" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/collectionWrite" + example: + id: ch.swisstopo.pixelkarte-farbe-pk200.noscale + license: proprietary + links: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + providers: + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch + title: National Map 1:200'000 + responses: + "200": + description: Returns the updated Collection + content: + application/json: + schema: + $ref: "#/components/schemas/collection" + "403": + $ref: "#/components/responses/PermissionDenied" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + "500": + $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items: + get: description: >- - Information about the feature collection with id `collectionId`. - - - The response contains a link to the items in the collection (path `/collections/{collectionId}/items`, - link relation `items`) as well as key information about the collection. This - information includes: - - - * A local identifier for the collection that is unique for the dataset + Fetch features of the feature collection with id `collectionId`. - * A list of coordinate reference systems (CRS) in which geometries may be - returned by the server. The first CRS is the default coordinate reference - system (the default is always WGS 84 with axis order longitude/latitude) - * An optional title and description for the collection + Every feature in a dataset belongs to a collection. A dataset may consist + of multiple feature collections. A feature collection is often a collection + of features of a similar type, based on a common schema. - * An optional extent that can be used to provide an indication of the spatial - and temporal extent of the collection - typically derived from the data - * An optional indicator about the type of the items in the collection (the - default value, if the indicator is not provided, is 'feature') - Collections: - content: - application/json: - schema: - $ref: "#/components/schemas/collections" + Use content negotiation to request HTML or GeoJSON. 
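As a usage sketch for the optimistic-locking pattern used by the write operations (`If-Match` plus `412 Precondition Failed`), a partial update of a collection could look roughly like this in Python; the authorization header is only a placeholder, the actual scheme is the one described in the Authentication section:

```python
import requests

BASE = "http://data.geo.admin.ch/api/stac/v0.9"             # from the landing-page example
COLLECTION = "ch.swisstopo.pixelkarte-farbe-pk200.noscale"  # illustrative collection id
AUTH = {"Authorization": "<credentials>"}                   # placeholder, see Authentication

# Read the current version of the collection and remember its ETag.
current = requests.get(f"{BASE}/collections/{COLLECTION}")
current.raise_for_status()
etag = current.headers["ETag"]

# Partial update protected by If-Match: a concurrent change on the server
# yields 412 Precondition Failed instead of being silently overwritten.
resp = requests.patch(
    f"{BASE}/collections/{COLLECTION}",
    json={"title": "National Map 1:200'000"},
    headers={"If-Match": etag, **AUTH},
)
if resp.status_code == 412:
    print("collection changed in the meantime; re-read it before retrying")
else:
    resp.raise_for_status()
```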
+ operationId: getFeatures + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/bbox" + - $ref: "#/components/parameters/datetime" + responses: + "200": + $ref: "#/components/responses/Features" + "400": + $ref: "#/components/responses/InvalidParameter" + "404": + $ref: "#/components/responses/NotFound" + "500": + $ref: "#/components/responses/ServerError" + summary: Fetch features + tags: + - Data + post: + summary: Add a new feature to a collection + description: Create a new feature/item in a specific collection + operationId: postFeature + tags: + - Data Management + parameters: + - $ref: "#/components/parameters/collectionId" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/createItem" + example: + id: cs3-20160503_132131_05 + geometry: + type: Polygon + coordinates: + - - - -122.308150179 + - 37.488035566 + - - -122.597502109 + - 37.538869539 + - - -122.576687533 + - 37.613537207 + - - -122.2880486 + - 37.562818007 + - - -122.308150179 + - 37.488035566 + properties: + datetime: "2016-05-03T13:22:30.040Z" + title: A CS3 item + links: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + responses: + "201": + description: Return the created Item. + headers: + Location: + description: A link to the item + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/item" + "400": + $ref: "#/components/responses/BadRequest" + "403": + $ref: "#/components/responses/PermissionDenied" + 5XX: + $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items/{featureId}: + get: description: >- - The feature collections shared by this API. - - - The dataset is organized as one or more feature collections. This resource - provides information about and access to the collections. - - - The response contains the list of collections. For each collection, a link - to the items in the collection (path `/collections/{collectionId}/items`, - link relation `items`) as well as key information about the collection. This - information includes: - - - * A local identifier for the collection that is unique for the dataset - - * A list of coordinate reference systems (CRS) in which geometries may be - returned by the server. The first CRS is the default coordinate reference - system (the default is always WGS 84 with axis order longitude/latitude) + Fetch the feature with id `featureId` in the feature collection with id `collectionId`. - * An optional title and description for the collection - * An optional extent that can be used to provide an indication of the spatial - and temporal extent of the collection - typically derived from the data + Use content negotiation to request HTML or GeoJSON. 
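The item creation request defined above (`postFeature`) can be sketched as follows; the payload simply reuses the `createItem` example from this spec, and the authorization header is a placeholder for whatever the Authentication section prescribes:

```python
import requests

BASE = "http://data.geo.admin.ch/api/stac/v0.9"             # from the landing-page example
COLLECTION = "ch.swisstopo.pixelkarte-farbe-pk200.noscale"  # illustrative collection id
AUTH = {"Authorization": "<credentials>"}                   # placeholder, see Authentication

# Payload taken from the createItem example above: id, geometry and the
# datetime property are the essential pieces of information.
item = {
    "id": "cs3-20160503_132131_05",
    "geometry": {
        "type": "Polygon",
        "coordinates": [[
            [-122.308150179, 37.488035566],
            [-122.597502109, 37.538869539],
            [-122.576687533, 37.613537207],
            [-122.2880486, 37.562818007],
            [-122.308150179, 37.488035566],
        ]],
    },
    "properties": {"datetime": "2016-05-03T13:22:30.040Z", "title": "A CS3 item"},
}

resp = requests.post(f"{BASE}/collections/{COLLECTION}/items", json=item, headers=AUTH)
if resp.status_code == 201:
    print("item created at", resp.headers.get("Location"))
else:
    resp.raise_for_status()
```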
+ operationId: getFeature + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" + responses: + "200": + $ref: "#/components/responses/Feature" + "304": + $ref: "#/components/responses/NotModified" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + "500": + $ref: "#/components/responses/ServerError" + summary: Fetch a single feature + tags: + - Data + put: + summary: Update or create a feature + description: >- + Update or create a feature with Id `featureId` with a complete feature definition. + If the feature doesn't exists it is then created. - * An optional indicator about the type of the items in the collection (the - default value, if the indicator is not provided, is 'feature'). - The `limit` parameter may be used to control the subset of the selected collections - that should be returned in the response, the page size. Each page include - links to support paging (link relation `next` and/or `previous`). - ConformanceDeclaration: - content: - application/json: - example: - conformsTo: - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30 - - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson - schema: - $ref: "#/components/schemas/confClasses" + *NOTE: Optional fields that are not part of the PUT payload, will be erased + in the resource. For example if the resource has a properties.title and the + PUT payload doesn't, then the resource's properties.title will be removed.* + operationId: putFeature + tags: + - Data Management + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatchWrite" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/updateItem" + example: + id: cs3-20160503_132131_05 + geometry: + type: Polygon + coordinates: + - - - -122.308150179 + - 37.488035566 + - - -122.597502109 + - 37.538869539 + - - -122.576687533 + - 37.613537207 + - - -122.2880486 + - 37.562818007 + - - -122.308150179 + - 37.488035566 + properties: + datetime: "2016-05-03T13:22:30.040Z" + title: A CS3 item + links: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + responses: + "200": + description: Returns the updated Item + content: + application/json: + schema: + $ref: "#/components/schemas/item" + "400": + $ref: "#/components/responses/BadRequest" + "403": + $ref: "#/components/responses/PermissionDenied" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + 5XX: + $ref: "#/components/responses/ServerError" + patch: + summary: Update an existing feature by Id with a partial item definition description: >- - The URIs of all conformance classes supported by the server. - - - To support "generic" clients that want to access multiple OGC API Features - implementations - and not "just" a specific API / server, the server declares - the conformance classes it implements and conforms to. 
- Feature: - headers: - ETag: - $ref: "#/components/headers/ETag" - content: - application/json: - schema: - $ref: "#/components/schemas/item" + Use this method to update an existing feature. Requires a JSON fragment (containing + the fields to be updated) be submitted. + operationId: patchFeature + tags: + - Data Management + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatchWrite" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/partialItem" + responses: + "200": + description: Returns the updated Item + content: + application/json: + schema: + $ref: "#/components/schemas/item" + "201": + description: Returns the created Item + headers: + Location: + description: A link to the item + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/item" + "400": + $ref: "#/components/responses/BadRequest" + "403": + $ref: "#/components/responses/PermissionDenied" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + 5XX: + $ref: "#/components/responses/ServerError" + delete: + summary: Delete an existing feature by Id + description: Use this method to delete an existing feature/item. + operationId: deleteFeature + tags: + - Data Management + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/IfMatchWrite" + responses: + "200": + $ref: "#/components/responses/DeletedResource" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + 5XX: + $ref: "#/components/responses/ServerError" + /conformance: + get: description: >- - Fetch the feature with id `featureId` in the feature collection with id `collectionId` - Features: - content: - application/json: - schema: - $ref: "#/components/schemas/items" + A list of all conformance classes specified in a standard that the server + conforms to. + operationId: getConformanceDeclaration + responses: + "200": + $ref: "#/components/responses/ConformanceDeclaration" + "500": + $ref: "#/components/responses/ServerError" + summary: Information about specifications that this API conforms to + tags: + - Capabilities + /search: + get: description: >- - The response is a document consisting of features in the collection. The features - included in the response are determined by the server based on the query parameters - of the request. To support access to larger collections without overloading - the client, the API supports paged access with links to the next page, if - more features are selected that the page size. - - - The `bbox` and `datetime` parameter can be used to select only a subset of - the features in the collection (the features that are in the bounding box - or time interval). The `bbox` parameter matches all features in the collection - that are not associated with a location, too. The `datetime` parameter matches - all features in the collection that are not associated with a time stamp or - interval, too. + Retrieve Items matching filters. Intended as a shorthand API for simple queries. 
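Both forms of `/search` accept the same filters; a sketch of the POST form with a JSON body follows (the field names are assumed to mirror the GET parameters `collections`, `bbox` and `limit`; the `searchBody` schema remains the authoritative reference):

```python
import requests

BASE = "http://data.geo.admin.ch/api/stac/v0.9"   # from the landing-page example

# Field names assumed to mirror the GET /search parameters; consult the
# searchBody schema for the authoritative definition.
body = {
    "collections": ["ch.swisstopo.pixelkarte-farbe-pk200.noscale"],
    "bbox": [5.96, 45.82, 10.49, 47.81],
    "limit": 10,
}
resp = requests.post(f"{BASE}/search", json=body)
resp.raise_for_status()
print(len(resp.json().get("features", [])), "items returned")
```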
+ operationId: getSearchSTAC + parameters: + - $ref: "#/components/parameters/bbox" + - $ref: "#/components/parameters/datetime" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/ids" + - $ref: "#/components/parameters/collectionsArray" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/itemsSearchGet" + description: A feature collection. + "500": + $ref: "#/components/responses/ServerError" + summary: Search STAC items with simple filtering. + tags: + - STAC + post: + description: >- + Retrieve items matching filters. Intended as the standard, full-featured query + API. + operationId: postSearchSTAC + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/searchBody" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/itemsSearchPost" + description: A feature collection. + "500": + $ref: "#/components/responses/ServerError" + summary: Search STAC items with full-featured filtering. + tags: + - STAC + /collections/{collectionId}/items/{featureId}/assets: + get: + description: >- + Fetch assets of the item with id `featureId`. + Every asset belongs to an item. + operationId: getAssets + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + responses: + "200": + $ref: "#/components/responses/Assets" + "400": + $ref: "#/components/responses/InvalidParameter" + "404": + $ref: "#/components/responses/NotFound" + "500": + $ref: "#/components/responses/ServerError" + summary: Fetch all assets for a feature + tags: + - Data + post: + summary: Add a new asset to a feature + description: | + Create a new asset for a specific feature. - The `limit` parameter may be used to control the subset of the selected features - that should be returned in the response, the page size. Each page include - links to support paging (link relation `next` and/or `previous`). - NotModified: - description: The cached resource was not modified since last request. - InvalidParameter: - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 400 - description: "Invalid parameter" - description: A query parameter has an invalid value. - LandingPage: - content: - application/json: - example: - description: Catalog of Swiss Geodata Downloads - id: ch - links: - - href: http://data.geo.admin.ch/api/stac/v0.9/ - rel: self - type: application/json - title: this document - - href: http://data.geo.admin.ch/api/stac/v0.9/static/api.html - rel: service-doc - type: text/html - title: the API documentation - - href: http://data.geo.admin.ch/api/stac/v0.9/conformance - rel: conformance - type: application/json - title: OGC API conformance classes implemented by this server - - href: http://data.geo.admin.ch/api/stac/v0.9/collections - rel: data - type: application/json - title: Information about the feature collections - - href: http://data.geo.admin.ch/api/stac/v0.9/search - rel: search - type: application/json - title: Search across feature collections - stac_version: 0.9.0 - title: data.geo.admin.ch - schema: - $ref: "#/components/schemas/landingPage" - description: >- - The landing page provides links to the API definition (link relations `service-desc` - and `service-doc`), the Conformance declaration (path `/conformance`, link - relation `conformance`), and the Feature Collections (path `/collections`, - link relation `data`). 
- NotFound: - description: The specified resource/URI was not found - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 404 - description: "Resource not found" - BadRequest: - description: The request was malformed or semantically invalid - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 400 - description: "Invalid parameter" - PreconditionFailed: - description: Some condition specified by the request could not be met in the - server - ServerError: - description: >- - The request was syntactically and semantically valid, but an error occurred - while trying to act upon it - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 500 - description: "Internal server error" - Assets: - description: >- - The response is a document consisting of all assets of the feature. - content: - application/json: - schema: - $ref: "#/components/schemas/assets" - Asset: - description: >- - The response is a document consisting of one asset of the feature. - headers: - ETag: - $ref: "#/components/headers/ETag" - content: - application/json: - schema: - $ref: "#/components/schemas/readUpdateAsset" - DeletedResource: - description: Status of the delete resource - content: - application/json: - schema: - description: >- - Information about the deleted resource and a link to the parent resource - type: object - properties: - code: - type: integer - example: 200 - description: + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* + operationId: postAsset + tags: + - Data Management + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/createAsset" + responses: + "201": + description: Return the created Asset + headers: + Location: + description: A link to the asset + schema: type: string - example: Resource successfully deleted - links: - type: array - items: - $ref: "#/components/schemas/link" - description: >- - The array contain at least a link to the parent resource (`rel: - parent`). - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: parent - required: - - code - - links - PermissionDenied: - description: No Permission for this request - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 403 - description: "Permission denied" - schemas: - assetBase: - title: Asset - description: The `property name` defines the ID of the Asset. 
- type: object - required: - - type - - created - - updated - properties: - title: - $ref: "#/components/schemas/title" - description: - $ref: "#/components/schemas/description" - type: - $ref: "#/components/schemas/type" - href: - $ref: "#/components/schemas/href" - checksum:multihash: - $ref: "#/components/schemas/checksumMultihashReadOnly" - geoadmin:variant: - $ref: "#/components/schemas/geoadminVariant" - geoadmin:lang: - $ref: "#/components/schemas/geoadminLang" - proj:epsg: - $ref: "#/components/schemas/projEpsg" - eo:gsd: - $ref: "#/components/schemas/eoGsd" - created: - $ref: "#/components/schemas/created" - updated: - $ref: "#/components/schemas/updated" - bbox: + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/createAsset" + "400": + $ref: "#/components/responses/BadRequest" + 5XX: + $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items/{featureId}/assets/{assetId}: + get: description: >- - Only features that have a geometry that intersects the bounding box are selected. - The bounding box is provided as four numbers: - - - * Lower left corner, coordinate axis 1 - - * Lower left corner, coordinate axis 2 - - * Upper right corner, coordinate axis 1 - - * Upper right corner, coordinate axis 2 - - - The coordinate reference system of the values is WGS84 longitude/latitude - (http://www.opengis.net/def/crs/OGC/1.3/CRS84). - - - For WGS84 longitude/latitude the values are in most cases the sequence of - minimum longitude, minimum latitude, maximum longitude and maximum latitude. - However, in cases where the box spans the antimeridian the first value (west-most - box edge) is larger than the third value (east-most box edge). + Fetch the asset with id `assetId` of the feature with id `featureId` in the + feature collection with id `collectionId`. + operationId: getAsset + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" + responses: + "200": + $ref: "#/components/responses/Asset" + "304": + $ref: "#/components/responses/NotModified" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + "500": + $ref: "#/components/responses/ServerError" + summary: Fetch a single asset + tags: + - Data + put: + summary: Update or create an asset + description: >- + Update or create an asset with Id `assetId` with a complete asset definition. + If the asset doesn't exists it is then created. - Example: The bounding box of Switzerland in WGS 84 (from 5.96°E to 10.49°E - and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, - 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." - example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 - items: - type: number - maxItems: 4 - minItems: 4 - type: array - readOnly: true - bboxfilter: - description: >- - Only features that have a geometry that intersects the bounding box are selected. 
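A minimal sketch of client-side caching against the single-asset endpoint above, using the `If-None-Match` precondition and the `304 Not Modified` response (base URL and IDs are taken from the examples in this specification):

    import requests

    ASSET = ("https://data.geo.admin.ch/api/stac/v0.9"
             "/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale"
             "/items/smr50-263-2016/assets/smr50-263-2016-2056-kgrs-2.5.tiff")

    # First request: remember the ETag delivered with the asset metadata.
    first = requests.get(ASSET)
    first.raise_for_status()
    etag = first.headers["ETag"]

    # Later request: a 304 tells the client its cached copy is still valid.
    second = requests.get(ASSET, headers={"If-None-Match": etag})
    if second.status_code == 304:
        print("asset metadata unchanged, reuse the cached copy")
    else:
        print("asset metadata changed:", second.json())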
- The bounding box is provided as four numbers: - + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* + operationId: putAsset + tags: + - Data Management + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatchWrite" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/readUpdateAsset" + responses: + "200": + description: Asset has been successfully updated. + content: + application/json: + schema: + $ref: "#/components/schemas/readUpdateAsset" + "201": + description: Asset has been newly created. + headers: + Location: + description: A link to the asset + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/createAsset" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + 5XX: + $ref: "#/components/responses/ServerError" + patch: + summary: Update an existing asset by Id with a partial asset definition + description: >- + Use this method to update an existing asset. Requires a JSON fragment (containing + the fields to be updated) be submitted. - * Lower left corner, coordinate axis 1 - * Lower left corner, coordinate axis 2 + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* + operationId: patchAsset + tags: + - Data Management + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatchWrite" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/readUpdateAsset" + responses: + "200": + description: Returns the updated Asset. + content: + application/json: + schema: + $ref: "#/components/schemas/readUpdateAsset" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + 5XX: + $ref: "#/components/responses/ServerError" + delete: + summary: Delete an existing asset by Id + description: >- + Use this method to delete an existing asset. - * Upper right corner, coordinate axis 1 - * Upper right corner, coordinate axis 2 + **NOTE: Asset file on S3 will be also removed !** + operationId: deleteAsset + tags: + - Data Management + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/IfMatchWrite" + responses: + "200": + $ref: "#/components/responses/DeletedResource" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + 5XX: + $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads: + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + get: + tags: + - Asset Upload Management + summary: List all Asset's multipart uploads + description: >- + Return a list of all Asset's multipart uploads that are in progress and have + been completed or aborted. 
+ operationId: getAssetUploads + parameters: + - name: status + in: query + description: Filter the list by status. + schema: + $ref: "#/components/schemas/status" + responses: + "200": + description: List of Asset's uploads + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploads" + example: + uploads: + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnusebaJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: in-progress + number_parts: 1 + urls: + - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: 1 + expires: '2019-08-24T14:15:22Z' + created: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YaaegJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + completed: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: aborted + number_parts: 1 + created: '2019-08-24T14:15:22Z' + aborted: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + links: + - rel: next + href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + 5XX: + $ref: "#/components/responses/ServerError" + post: + tags: + - Asset Upload Management + summary: Create a new Asset's multipart upload + description: | + Create a new Asset's multipart upload. + operationId: createAssetUpload + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadCreate" + responses: + "201": + description: Created Asset's multipart upload + headers: + Location: + description: A link to the Asset's multipart upload object + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadCreate" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + 5XX: + $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}: + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + get: + tags: + - Asset Upload Management + summary: Get an Asset's multipart upload + description: | + Return an Asset's multipart upload. 
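A minimal sketch of starting a multipart upload with the endpoints above and then listing the uploads still in progress. The exact field names of the `assetUploadCreate` payload are an assumption here; `number_parts`, `checksum:multihash`, `upload_id` and `urls` follow the example objects shown above, and token authentication is assumed for the write call:

    import requests

    BASE = "https://data.geo.admin.ch/api/stac/v0.9"
    UPLOADS = (f"{BASE}/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale"
               "/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads")
    HEADERS = {"Authorization": "Token ccecf40693bfc52ba090cd46eb7f19e723fe831f"}

    # Announce a single-part upload; the server answers with presigned URL(s).
    create = requests.post(UPLOADS, headers=HEADERS, json={
        "number_parts": 1,                                               # assumed field name
        "checksum:multihash": "90e402107a7f2588a85362b9beea2a12d4514d45",
    })
    create.raise_for_status()
    upload = create.json()
    print(upload["upload_id"], [part["url"] for part in upload["urls"]])

    # List only the uploads that are still in progress.
    in_progress = requests.get(UPLOADS, headers=HEADERS, params={"status": "in-progress"})
    print(in_progress.json()["uploads"])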
+ operationId: getAssetUpload + parameters: + - $ref: "#/components/parameters/IfMatch" + - $ref: "#/components/parameters/IfNoneMatch" + responses: + "200": + description: Asset's multipart upload description. + headers: + ETag: + $ref: "#/components/headers/ETag" + content: + application/json: + schema: + $ref: "#/components/schemas/assetUpload" + examples: + inprogress: + $ref: "#/components/examples/inprogress" + completed: + $ref: "#/components/examples/completed" + aborted: + $ref: "#/components/examples/aborted" + "304": + $ref: "#/components/responses/NotModified" + "404": + $ref: "#/components/responses/NotFound" + "412": + $ref: "#/components/responses/PreconditionFailed" + "500": + $ref: "#/components/responses/ServerError" + /{presignedUrl}: + servers: + - url: http://data.geo.admin.ch/ + put: + tags: + - Asset Upload Management + summary: Upload asset file part + description: >- + Upload an Asset file part using the presigned url(s) returned by [Create a + new Asset's multipart upload](#operation/createAssetUpload). - The coordinate reference system of the values is WGS84 longitude/latitude - (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + Parts that have been uploaded but not completed can be checked using [Get + an Asset's multipart upload](#operation/getAssetUpload) - For WGS84 longitude/latitude the values are in most cases the sequence of - minimum longitude, minimum latitude, maximum longitude and maximum latitude. - However, in cases where the box spans the antimeridian the first value (west-most - box edge) is larger than the third value (east-most box edge). + A file part must be at least 5 MB except for the last one and at most 5 GB, + otherwise the complete operation will fail. - Example: The bounding box of Switzerland in WGS 84 (from 5.96°E to 10.49°E - and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, - 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." - example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 - items: - type: number - maxItems: 4 - minItems: 4 - type: array - readOnly: false - bboxFilter: - properties: - bbox: - $ref: "#/components/schemas/bboxfilter" - checksumMultihash: + *Note: this endpoint doesn't require any authentication as it is already part + of the presigned url* + operationId: uploadAssetFilePart + parameters: + - $ref: "#/components/parameters/presignedUrl" + - name: Content-MD5 + in: header + description: Asset file part content MD5. + required: true + schema: + type: string + responses: + "200": + description: Asset file uploaded part successfully + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- + tag for the selected resource. + + + This ETag is required in the complete multipart upload payload. + + + An entity-tag is an opaque identifier for different versions of a + resource over time, regardless whether multiple versions are valid + at the same time. An entity-tag consists of an opaque quoted string. 
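Continuing the create-upload sketch earlier (reusing `UPLOADS`, `HEADERS` and the `upload` response from there), a minimal sketch of uploading one part to a presigned URL and then completing the upload. The `Content-MD5` value is the base64-encoded MD5 digest of the part, the returned `ETag` is fed into the completion call, and the field names of the `assetCompleteUpload` payload are an assumption:

    import base64
    import hashlib

    import requests

    # Read the part and compute its base64-encoded MD5 digest for Content-MD5.
    with open("smr50-263-2016-2056-kgrs-2.5.tiff", "rb") as fd:
        part_data = fd.read()
    content_md5 = base64.b64encode(hashlib.md5(part_data).digest()).decode()

    # Upload the part to the presigned URL; no extra authentication is needed here.
    presigned_url = upload["urls"][0]["url"]
    put = requests.put(presigned_url, data=part_data, headers={"Content-MD5": content_md5})
    put.raise_for_status()
    etag = put.headers["ETag"]            # required later in the completion payload

    # Complete the upload with the collected part ETags (field names assumed).
    complete = requests.post(
        f"{UPLOADS}/{upload['upload_id']}/complete",
        headers=HEADERS,
        json={"parts": [{"part_number": 1, "etag": etag}]},
    )
    complete.raise_for_status()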
+ example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true + /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/complete: + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + post: + tags: + - Asset Upload Management + summary: Complete multipart upload + operationId: completeMultipartUpload description: >- - `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) - format. - example: 90e402107a7f2588a85362b9beea2a12d4514d45 - pattern: ^[a-f0-9]+$ - title: Multihash - type: string - checksumMultihashReadOnly: - description: | - `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. - example: 90e402107a7f2588a85362b9beea2a12d4514d45 - pattern: ^[a-f0-9]+$ - title: Multihash - type: string - readOnly: true - created: - description: RFC 3339 compliant datetime string, time when the object was created - example: 2018-02-12T23:20:50Z - type: string - format: date-time - readOnly: true - collectionBase: - properties: - crs: - default: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 - description: The list of coordinate reference systems supported by the service - example: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 - items: - type: string - type: array - readOnly: true - description: - description: A description of the features in the collection - example: >- - Swiss Map Raster are a conversion of the map image into a digital form - with no direct bearing on the individual map elements. + Complete the multipart upload process. After completion, the Asset metadata + are updated with the new `checksum:multihash` from the upload and the parts + are automatically deleted. The Asset `href` field is also set if it was the + first upload. + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/assetCompleteUpload" + responses: + "200": + description: Asset multipart upload completed successfully. + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadCompleted" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + 5XX: + $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/abort: + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + post: + tags: + - Asset Upload Management + summary: Abort multipart upload + operationId: abortMultipartUpload + description: >- + Abort the multipart upload process. All already uploaded parts are automatically + deleted. + responses: + "200": + description: Asset multipart upload aborted successfully. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadAborted" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + 5XX: + $ref: "#/components/responses/ServerError" + /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/parts: + parameters: + - $ref: "#/components/parameters/collectionId" + - $ref: "#/components/parameters/featureId" + - $ref: "#/components/parameters/assetId" + - $ref: "#/components/parameters/uploadId" + get: + tags: + - Asset Upload Management + summary: Get upload parts + operationId: getUploadParts + description: >- + Return the list of already uploaded parts. - The information is structured only in colour layers. Swiss Map Raster - pixel maps are ideal for finding background information for a broad variety - of screen applications, web and mobile applications and services, as well - as for geographic information systems. They can also be used as basic - maps for a variety of purposes (digital printing, plots, offset printing, - etc.). - type: string - extent: - $ref: "#/components/schemas/extent" - id: - description: Identifier of the collection used, for example, in URIs - example: ch.swisstopo.pixelkarte-farbe-pk200.noscale - type: string - itemType: - default: Feature - description: >- - Indicator about the type of the items in the collection (the default value - is 'Feature'). - type: string - readOnly: true - license: - $ref: "#/components/schemas/license" - providers: - $ref: "#/components/schemas/providers" - stac_version: - $ref: "#/components/schemas/stac_version" - summaries: - additionalProperties: - oneOf: - - items: - description: A value of any type. - title: Set of values - type: array - - description: >- - By default, only ranges with a minimum and a maximum value can be - specified. Ranges can be specified for ordinal values only, which - means they need to have a rank order. Therefore, ranges can only be - specified for numbers and some special types of strings. Examples: - grades (A to F), dates or times. Implementors are free to add other - derived statistical values to the object, for example `mean` or `stddev`. + ### Pagination + + By default all parts are returned (maximum number of parts being 100). The + user can use pagination to reduce the number of returned parts. Pagination + is done via the `limit` query parameter (see below). + parameters: + - $ref: "#/components/parameters/limit" + responses: + "200": + description: List of parts already uploaded. + content: + application/json: + schema: + $ref: "#/components/schemas/assetUploadParts" + "400": + $ref: "#/components/responses/BadRequest" + "404": + $ref: "#/components/responses/NotFound" + 5XX: + $ref: "#/components/responses/ServerError" + /get-token: + servers: + - url: http://data.geo.admin.ch/api/stac/ + post: + tags: + - Authentication + summary: >- + Request token for token authentication. + operationId: getToken + requestBody: + required: true + content: + application/json: + schema: + type: object properties: - max: - anyOf: - - type: string - - type: number - min: - anyOf: - - type: string - - type: number + username: + type: string + description: name of user for whom token is requested + password: + type: string + description: password of user for whom token is requested required: - - min - - max - title: Statistics - type: object - description: >- - Summaries are either a unique set of all available values *or* statistics. 
- Statistics by default only specify the range (minimum and maximum values), - but can optionally be accompanied by additional statistical values. The - range can specify the potential range of values, but it is recommended - to be as precise as possible. The set of values must contain at least - one element and it is strongly recommended to list all values. It is recommended - to list as many properties as reasonable so that consumers get a full - overview of the Collection. Properties that are covered by the Collection - specification (e.g. `providers` and `license`) may not be repeated in - the summaries. - type: object - readOnly: true - example: - eo:gsd: - - 10 - - 20 - geoadmin:variant: - - kgrel - - komb - - krel - proj:epsg: - - 2056 - title: - description: Human readable title of the collection - example: National Map 1:200'000 - type: string - created: - $ref: "#/components/schemas/created" - updated: - $ref: "#/components/schemas/updated" - required: - - id - - stac_version - - description - - license - - extent - - created - - updated - type: object - collection: - allOf: - - $ref: "#/components/schemas/collectionBase" - - type: object - required: - - links - properties: - links: - type: array - items: - $ref: "#/components/schemas/link" - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: items - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - collectionWrite: - title: collection - allOf: - - $ref: "#/components/schemas/collectionBase" - - type: object - properties: - links: - type: array - items: - $ref: "#/components/schemas/link" + - username + - password example: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - collections: - properties: - collections: - items: - $ref: "#/components/schemas/collection" - type: array - links: - items: - $ref: "#/components/schemas/link" - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10ab - rel: next - - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10cd - rel: previous - required: - - links - - collections - type: object - collectionsArray: - description: >- - Array of Collection IDs to include in the search for items. Only Items in - one of the provided Collections will be searched. 
- items: - type: string - type: array - collectionsFilter: - description: Only returns the collections specified - properties: - collections: - $ref: "#/components/schemas/collectionsArray" - type: object - example: - collections: - - ch.swisstopo.swisstlmregio - - ch.bfe.energieschweiz - confClasses: - properties: - conformsTo: - items: - type: string - type: array - required: - - conformsTo - type: object - datetime: - description: RFC 3339 compliant datetime string - example: 2018-02-12T23:20:50Z - type: string - format: date-time - datetimeQuery: + username: "Mickey Mouse" + password: "I_love_Minnie_Mouse" + responses: + "200": + description: Returns the token for the specified user + content: + application/json: + schema: + type: object + properties: + token: + type: string + description: generated token for specified user + example: + token: ccecf40693bfc52ba090cd46eb7f19e723fe831f + "400": + description: Wrong credentials were provided. + content: + application/json: + schema: + type: object + properties: + code: + type: string + description: + type: string + example: + code: 400 + description: "Unable to log in with provided credentials." +components: + schemas: + assetQuery: + additionalProperties: + $ref: "#/components/schemas/assetQueryProp" description: >- - Either a date-time or an interval, open or closed. Date and time expressions - adhere to RFC 3339. Open intervals are expressed using double-dots. - - Examples: - - - * A date-time: "2018-02-12T23:20:50Z" - - * A closed interval: "2018-02-12T00:00:00Z/2018-03-18T12:31:12Z" - - * Open intervals: "2018-02-12T00:00:00Z/.." or "../2018-03-18T12:31:12Z" - - - Only features that have a temporal property that intersects the value of `datetime` - are selected. + Define which properties of the asset to query and the operations to apply. - When used as URL query argument, the value must be correctly url-encoded. - example: 2018-02-12T00:00:00Z/2018-03-18T12:31:12Z - type: string - datetimeFilter: - properties: - datetime: - $ref: "#/components/schemas/datetimeQuery" - description: - description: >- - Detailed multi-line description to fully explain the catalog or collection. + The following properties can be queried: + - `type`: query for assets with this specific media type - [CommonMark 0.29](http://commonmark.org/) syntax MAY be used for rich text - representation. - type: string - eoGsd: - description: >- - GSD is the nominal Ground Sample Distance for the data, as measured in meters - on the ground. + - `proj:epsg`: query for assets with this specific epsg + - `eo:gsd`: query for assets with this specific gsd - There are many definitions of GSD. The value of this attribute should be related - to the spatial resolution at the sensor, rather than the pixel size of images - after ortho-rectification, pansharpening, or scaling. The GSD of a sensor - can vary depending on off-nadir and wavelength, so it is at the discretion - of the implementer to decide which value most accurately represents the GSD. - For example, Landsat8 optical and short-wave IR bands are all 30 meters, but - the panchromatic band is 15 meters. The eo:gsd should be 30 meters in this - case because that is nominal spatial resolution at the sensor. The Planet - PlanetScope Ortho Tile Product has an eo:gsd of 3.7 (or 4 if rounding), even - though the pixel size of the images is 3.125. For example, one might choose - for WorldView-2 the Multispectral 20° off-nadir value of 2.07 and for WorldView-3 - the Multispectral 20° off-nadir value of 1.38. 
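A minimal sketch of the token authentication flow above (credentials and token value follow the examples in this specification; how the token is then sent on write requests, here an `Authorization: Token <token>` header, is an assumption):

    import requests

    resp = requests.post(
        "http://data.geo.admin.ch/api/stac/get-token",
        json={"username": "Mickey Mouse", "password": "I_love_Minnie_Mouse"},
    )
    resp.raise_for_status()
    token = resp.json()["token"]
    headers = {"Authorization": f"Token {token}"}   # assumed header format for write requests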
- example: 2.5 - title: Ground Sample Distance - type: number - exception: - description: >- - Information about the exception: an error code plus an optional description. - properties: - code: - type: integer - example: 500 - description: - anyOf: - - type: string - - type: array - items: - anyOf: - - type: string - - type: object - - type: object - required: - - code + - `geoadmin:variant`: query for assets with this specific variant + example: + type: + eq: image/tiff type: object - extent: - description: >- - The extent of the features in the collection. In the Core only spatial and - temporal extents are specified. Extensions may add additional members to represent - other extents, for example, thermal or pressure ranges. + assetQueryFilter: + description: Allows users to query asset properties for specific values properties: - spatial: - description: The spatial extent of the features in the collection. - properties: - bbox: - description: >- - One or more bounding boxes that describe the spatial extent of the - dataset. In the Core only a single bounding box is supported. Extensions - may support additional areas. If multiple areas are provided, the - union of the bounding boxes describes the spatial extent. - items: - description: >- - Each bounding box is provided as four or six numbers, depending - on whether the coordinate reference system includes a vertical axis - (height or depth): - - - * Lower left corner, coordinate axis 1 - - * Lower left corner, coordinate axis 2 + assetQuery: + $ref: "#/components/schemas/assetQuery" + type: object + assetQueryProp: + anyOf: + - description: >- + If the object doesn't contain any of the operators, it is equivalent to + using the equals operator + - description: Match using an operator + properties: + contains: + description: >- + Find items with a property that contains the specified literal string, + e.g., matches ".*.*". A case-insensitive comparison must be + performed. + type: string + endsWith: + description: >- + Find items with a property that ends with the specified string. A case-insensitive + comparison must be performed. + type: string + eq: + description: >- + Find items with a property that is equal to the specified value. For + strings, a case-insensitive comparison must be performed. + nullable: true + oneOf: + - type: string + - type: number + - type: boolean + in: + description: >- + Find items with a property that equals at least one entry in the specified + array. A case-insensitive comparison must be performed. + items: + oneOf: + - type: string + - type: number + type: array + startsWith: + description: >- + Find items with a property that begins with the specified string. A + case-insensitive comparison must be performed. + type: string + type: object + description: Apply query operations to a specific property + assetBase: + title: Asset + description: The `property name` defines the ID of the Asset. 
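A minimal sketch of the full-featured `POST /search` defined earlier, combining a collections filter with the asset query above; that the search body accepts these filter blocks side by side is an assumption based on the `*Filter` schemas in this specification:

    import requests

    body = {
        "collections": ["ch.swisstopo.pixelkarte-farbe-pk50.noscale"],
        "assetQuery": {"eo:gsd": {"eq": 2.5}},   # only items owning an asset with a 2.5 m GSD
        "limit": 20,
    }
    resp = requests.post("https://data.geo.admin.ch/api/stac/v0.9/search", json=body)
    resp.raise_for_status()
    print([feature["id"] for feature in resp.json()["features"]])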
+ type: object + required: + - type + - created + - updated + properties: + title: + $ref: "#/components/schemas/title" + description: + $ref: "#/components/schemas/description" + type: + $ref: "#/components/schemas/type" + href: + $ref: "#/components/schemas/href" + checksum:multihash: + $ref: "#/components/schemas/checksumMultihashReadOnly" + geoadmin:variant: + $ref: "#/components/schemas/geoadminVariant" + geoadmin:lang: + $ref: "#/components/schemas/geoadminLang" + proj:epsg: + $ref: "#/components/schemas/projEpsg" + eo:gsd: + $ref: "#/components/schemas/eoGsd" + created: + $ref: "#/components/schemas/created" + updated: + $ref: "#/components/schemas/updated" + bbox: + description: | + Only features that have a geometry that intersects the bounding box are selected. + The bounding box is provided as four numbers: - * Upper right corner, coordinate axis 1 + * Lower left corner, coordinate axis 1 + * Lower left corner, coordinate axis 2 + * Upper right corner, coordinate axis 1 + * Upper right corner, coordinate axis 2 - * Upper right corner, coordinate axis 2 + The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + For WGS84 longitude/latitude the values are in most cases the sequence + of minimum longitude, minimum latitude, maximum longitude and maximum + latitude. However, in cases where the box spans the antimeridian the + first value (west-most box edge) is larger than the third value + (east-most box edge). - The coordinate reference system of the values is WGS 84 longitude/latitude - (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + Example: The bounding box of Switzerland in + WGS 84 (from 5.96°E to 10.49°E and from 45.82°N to 47.81°N) would be + represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as + `bbox=5.96,45.82,10.49,47.81`." + example: + - 5.96 + - 45.82 + - 10.49 + - 47.81 + items: + type: number + maxItems: 4 + minItems: 4 + type: array + readOnly: true + bboxfilter: + description: >- + Only features that have a geometry that intersects the bounding box are selected. + The bounding box is provided as four numbers: - For WGS 84 longitude/latitude the values are in most cases the sequence - of minimum longitude, minimum latitude, maximum longitude and maximum - latitude. However, in cases where the box spans the antimeridian - the first value (west-most box edge) is larger than the third value - (east-most box edge). + * Lower left corner, coordinate axis 1 + * Lower left corner, coordinate axis 2 - If the vertical axis is included, the third and the sixth number - are the bottom and the top of the 3-dimensional bounding box. + * Upper right corner, coordinate axis 1 + * Upper right corner, coordinate axis 2 - If a feature has multiple spatial geometry properties, it is the - decision of the server whether only a single spatial geometry property - is used to determine the extent or all relevant geometries. - example: - - 5.685114 - - 45.534903 - - 10.747775 - - 47.982586 - items: - type: number - maxItems: 6 - minItems: 4 - type: array - minItems: 1 - type: array - required: - - bbox - type: object - temporal: - description: The temporal extent of the features in the collection. - properties: - interval: - description: >- - One time interval that describe the temporal extent of the dataset. - items: - description: >- - Begin and end times of the time interval. 
- example: - - "2019-01-01T00:00:00Z" - - "2019-01-02T00:00:00Z" - items: - format: date-time - nullable: false - type: string - maxItems: 2 - minItems: 2 - type: array - minItems: 1 - maxItems: 1 - type: array - required: - - interval - type: object - required: - - spatial - - temporal - type: object - readOnly: true - geoadminLang: - enum: - - de - - it - - fr - - rm - - en - title: Product language + + The coordinate reference system of the values is WGS84 longitude/latitude + (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + + + For WGS84 longitude/latitude the values are in most cases the sequence of + minimum longitude, minimum latitude, maximum longitude and maximum latitude. + However, in cases where the box spans the antimeridian the first value (west-most + box edge) is larger than the third value (east-most box edge). + + + Example: The bounding box of Switzerland in WGS 84 (from 5.96°E to 10.49°E + and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, + 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." + example: + - 5.96 + - 45.82 + - 10.49 + - 47.81 + items: + type: number + maxItems: 4 + minItems: 4 + type: array + readOnly: false + bboxFilter: + properties: + bbox: + $ref: "#/components/schemas/bboxfilter" + checksumMultihash: + description: >- + `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) + format. + example: 90e402107a7f2588a85362b9beea2a12d4514d45 + pattern: ^[a-f0-9]+$ + title: Multihash type: string - geoadminVariant: - example: komb - title: Product variants + checksumMultihashReadOnly: + description: | + `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. + example: 90e402107a7f2588a85362b9beea2a12d4514d45 + pattern: ^[a-f0-9]+$ + title: Multihash type: string - geometryGeoJSON: - oneOf: - - $ref: "#/components/schemas/polygonGeoJSON" - geometrycollectionGeoJSON: + readOnly: true + created: + description: RFC 3339 compliant datetime string, time when the object was created + example: 2018-02-12T23:20:50Z + type: string + format: date-time + readOnly: true + collectionBase: properties: - geometries: + crs: + default: + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + description: The list of coordinate reference systems supported by the service + example: + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 items: - $ref: "#/components/schemas/geometryGeoJSON" + type: string type: array - type: - enum: - - GeometryCollection + readOnly: true + description: + description: A description of the features in the collection + example: >- + Swiss Map Raster are a conversion of the map image into a digital form + with no direct bearing on the individual map elements. + + + The information is structured only in colour layers. Swiss Map Raster + pixel maps are ideal for finding background information for a broad variety + of screen applications, web and mobile applications and services, as well + as for geographic information systems. They can also be used as basic + maps for a variety of purposes (digital printing, plots, offset printing, + etc.). + type: string + extent: + $ref: "#/components/schemas/extent" + id: + description: Identifier of the collection used, for example, in URIs + example: ch.swisstopo.pixelkarte-farbe-pk200.noscale + type: string + itemType: + default: Feature + description: >- + Indicator about the type of the items in the collection (the default value + is 'Feature'). 
type: string - required: - - type - - geometries - type: object - href: - type: string - format: url - description: Link to the asset object - readOnly: true - example: | - http://data.geo.admin.ch/ch.swisstopo.swissimage/collections/cs/items/CS3-20160503_132130_04/thumb.png - ids: - description: >- - Array of Item ids to return. All other filter parameters that further restrict - the number of search results are ignored - items: - type: string - type: array - idsFilter: - description: Only returns items that match the array of given ids - properties: - ids: - $ref: "#/components/schemas/ids" - type: object - example: - ids: - - swisstlmregio-2019 - - swisstlmregio-2020 - intersectsFilter: - description: Only returns items that intersect with the provided polygon. - properties: - intersects: - $ref: https://geojson.org/schema/Geometry.json - type: object - example: - intersects: - type: "Point" - coordinates: - - 7 - - 46 - itemBase: - description: >- - A GeoJSON Feature augmented with foreign members that contain values relevant - to a STAC entity - properties: - assets: - $ref: "#/components/schemas/itemAssets" - bbox: - $ref: "#/components/schemas/bbox" - geometry: - $ref: "#/components/schemas/geometryGeoJSON" - properties: - $ref: "#/components/schemas/itemProperties" readOnly: true + license: + $ref: "#/components/schemas/license" + providers: + $ref: "#/components/schemas/providers" stac_version: $ref: "#/components/schemas/stac_version" + summaries: + additionalProperties: + oneOf: + - items: + description: A value of any type. + title: Set of values + type: array + - description: >- + By default, only ranges with a minimum and a maximum value can be + specified. Ranges can be specified for ordinal values only, which + means they need to have a rank order. Therefore, ranges can only be + specified for numbers and some special types of strings. Examples: + grades (A to F), dates or times. Implementors are free to add other + derived statistical values to the object, for example `mean` or `stddev`. + properties: + max: + anyOf: + - type: string + - type: number + min: + anyOf: + - type: string + - type: number + required: + - min + - max + title: Statistics + type: object + description: >- + Summaries are either a unique set of all available values *or* statistics. + Statistics by default only specify the range (minimum and maximum values), + but can optionally be accompanied by additional statistical values. The + range can specify the potential range of values, but it is recommended + to be as precise as possible. The set of values must contain at least + one element and it is strongly recommended to list all values. It is recommended + to list as many properties as reasonable so that consumers get a full + overview of the Collection. Properties that are covered by the Collection + specification (e.g. `providers` and `license`) may not be repeated in + the summaries. 
+ type: object readOnly: true - type: - $ref: "#/components/schemas/itemType" + example: + eo:gsd: + - 10 + - 20 + geoadmin:variant: + - kgrel + - komb + - krel + proj:epsg: + - 2056 + title: + description: Human readable title of the collection + example: National Map 1:200'000 + type: string + created: + $ref: "#/components/schemas/created" + updated: + $ref: "#/components/schemas/updated" required: + - id - stac_version - - type - - geometry - - bbox - - properties - - assets + - description + - license + - extent + - created + - updated type: object - item: + collection: allOf: + - $ref: "#/components/schemas/collectionBase" - type: object required: - - id - links properties: - id: - $ref: "#/components/schemas/itemId" links: + type: array items: $ref: "#/components/schemas/link" - type: array example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr50-263-2016 + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale rel: self - href: https://data.geo.admin.ch/api/stac/v0.9/ rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + - href: https://data.geo.admin.ch/api/stac/v0.9/collections rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: collection - - $ref: "#/components/schemas/itemBase" - items: - description: >- - A FeatureCollection augmented with foreign members that contain values relevant - to a STAC entity + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: items + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + collections: properties: - features: + collections: items: - $ref: "#/components/schemas/item" + $ref: "#/components/schemas/collection" type: array links: items: $ref: "#/components/schemas/link" - type: array example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + - href: https://data.geo.admin.ch/api/stac/v0.9/collections rel: self - href: https://data.geo.admin.ch/api/stac/v0.9/ rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10ab + - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10ab rel: next - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10acd + - href: https://data.geo.admin.ch/api/stac/v0.9/collections?cursor=10cd rel: previous - type: - enum: - - FeatureCollection - type: string required: - - features - - type + - links + - collections type: object - itemAssets: - title: Assets - description: List of Assets attached to this feature. - additionalProperties: - $ref: "#/components/schemas/assetBase" + collectionsArray: + description: >- + Array of Collection IDs to include in the search for items. Only Items in + one of the provided Collections will be searched. 
+ items: + type: string + type: array + collectionsFilter: + description: Only returns the collections specified + properties: + collections: + $ref: "#/components/schemas/collectionsArray" type: object - readOnly: true example: - smr50-263-2016-2056-kgrs-2.5.tiff: - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - created: "2020-07-14T12:30:00Z" - eo:gsd: 2.5 - geoadmin:variant: kgrs - href: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr50-263-2016-2056-kgrs-2.5.tiff - proj:epsg: 2056 - type: image/tiff; application=geotiff - updated: "2020-07-14T12:30:00Z" - smr50-263-2016-2056-komb-2.5.tiff: - checksum:multihash: 90d402107a7f2588a85362b9beea2a12d4514d45 - created: "2020-07-14T12:30:00Z" - eo:gsd: 2.5 - geoadmin:variant: komb - href: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr50-263-2016-2056-komb-2.5.tiff - proj:epsg: "2056" - type: image/tiff; application=geotiff - updated: "2020-07-14T12:30:00Z" - smr50-263-2016-2056-krel-2.5.tiff: - checksum:multihash: 90f402107a7f2588a85362b9beea2a12d4514d45 - created: "2020-07-14T12:30:00Z" - eo:gsd: 2.5 - geoadmin:variant: krel - href: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr50-263-2016-2056-krel-2.5.tiff - proj:epsg: "2056" - type: image/tiff; application=geotiff - updated: "2020-07-14T12:30:00Z" - itemsSearch: - description: >- - A GeoJSON FeatureCollection augmented with foreign members that contain values - relevant to a STAC entity + collections: + - ch.swisstopo.swisstlmregio + - ch.bfe.energieschweiz + confClasses: properties: - features: + conformsTo: items: - $ref: "#/components/schemas/item" + type: string type: array - type: - enum: - - FeatureCollection - type: string required: - - features - - type + - conformsTo type: object - itemsSearchGet: - allOf: - - $ref: "#/components/schemas/itemsSearch" - - type: object - properties: - links: - $ref: "#/components/schemas/itemsSearchLinks" - itemsSearchPost: - allOf: - - $ref: "#/components/schemas/itemsSearch" - - type: object - properties: - links: - $ref: "#/components/schemas/itemsSearchPostLinks" - itemsSearchLinks: - description: >- - An array of links. Can be used for pagination, e.g. by providing a link with - the `next` relation type. - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/search - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab - rel: next - items: - $ref: "#/components/schemas/link" - type: array - itemsSearchPostLinks: - description: >- - An array of links. Can be used for pagination, e.g. by providing a link with - the `next` relation type. - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/search - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab - rel: next - method: POST - body: {} - merge: true - items: - $ref: "#/components/schemas/linkPostSearch" - type: array - itemId: - title: ID - description: Feature identifier (unique per collection) - example: smr200-200-4-2019 + datetime: + description: RFC 3339 compliant datetime string + example: 2018-02-12T23:20:50Z type: string - itemProperties: - title: Properties + format: date-time + datetimeQuery: description: >- - Provides the core metadata fields plus extensions + Either a date-time or an interval, open or closed. Date and time expressions + adhere to RFC 3339. Open intervals are expressed using double-dots. 
+ Examples: - The item's data timing information can be specified either with - * One datetime value in the field `datetime` + * A date-time: "2018-02-12T23:20:50Z" - * A datetime range with a `start_datetime` and an `end_datetime` + * A closed interval: "2018-02-12T00:00:00Z/2018-03-18T12:31:12Z" + * Open intervals: "2018-02-12T00:00:00Z/.." or "../2018-03-18T12:31:12Z" - One of the two is required - properties: - created: - $ref: "#/components/schemas/created" - datetime: - $ref: "#/components/schemas/datetime" - start_datetime: - $ref: "#/components/schemas/datetime" - end_datetime: - $ref: "#/components/schemas/datetime" - updated: - $ref: "#/components/schemas/updated" - title: - description: Human readable title of the Feature - example: Feature title - type: string - required: - - created - - updated - type: object - itemType: - title: type - description: The GeoJSON type - enum: - - Feature + + Only features that have a temporal property that intersects the value of `datetime` + are selected. + + + When used as URL query argument, the value must be correctly url-encoded. + example: 2018-02-12T00:00:00Z/2018-03-18T12:31:12Z type: string - readOnly: true - landingPage: + datetimeFilter: properties: - description: - example: >- - Access to data about buildings in the city of Bonn via a Web API that - conforms to the OGC API Features specification. - type: string - id: - type: string - links: - items: - $ref: "#/components/schemas/link" - type: array - stac_version: - $ref: "#/components/schemas/stac_version" - title: - example: Buildings in Bonn - type: string - required: - - links - - stac_version - - id - - description - type: object - license: + datetime: + $ref: "#/components/schemas/datetimeQuery" + description: description: >- - License(s) of the data as a SPDX [License identifier](https://spdx.org/licenses/). - Alternatively, use `proprietary` if the license is not on the SPDX license - list or `various` if multiple licenses apply. In these two cases links to - the license texts SHOULD be added, see the `license` link relation type. + Detailed multi-line description to fully explain the catalog or collection. - Non-SPDX licenses SHOULD add a link to the license text with the `license` - relation in the links section. The license text MUST NOT be provided as a - value of this field. If there is no public license URL available, it is RECOMMENDED - to host the license text and link to it. - example: proprietary + [CommonMark 0.29](http://commonmark.org/) syntax MAY be used for rich text + representation. type: string - limit: - default: 100 + eoGsd: description: >- - The `limit` parameter limits the number of results that are included in the - response. - - - To retrieve the next bunch of result, use the `next` link in the `links` section - of the response. + GSD is the nominal Ground Sample Distance for the data, as measured in meters + on the ground. - Minimum = 1. Maximum = 100. Default = 100. - example: 20 - maximum: 100 - minimum: 1 - type: integer - limitFilter: - description: Only returns maximum number of results (page size) - properties: - limit: - $ref: "#/components/schemas/limit" - type: object - linestringGeoJSON: + There are many definitions of GSD. The value of this attribute should be related + to the spatial resolution at the sensor, rather than the pixel size of images + after ortho-rectification, pansharpening, or scaling. 
The GSD of a sensor + can vary depending on off-nadir and wavelength, so it is at the discretion + of the implementer to decide which value most accurately represents the GSD. + For example, Landsat8 optical and short-wave IR bands are all 30 meters, but + the panchromatic band is 15 meters. The eo:gsd should be 30 meters in this + case because that is nominal spatial resolution at the sensor. The Planet + PlanetScope Ortho Tile Product has an eo:gsd of 3.7 (or 4 if rounding), even + though the pixel size of the images is 3.125. For example, one might choose + for WorldView-2 the Multispectral 20° off-nadir value of 2.07 and for WorldView-3 + the Multispectral 20° off-nadir value of 1.38. + example: 2.5 + title: Ground Sample Distance + type: number + exception: + description: >- + Information about the exception: an error code plus an optional description. properties: - coordinates: - items: + code: + type: integer + example: 500 + description: + anyOf: + - type: string + - type: array items: - type: number - minItems: 2 - type: array - minItems: 2 - type: array - type: - enum: - - LineString - type: string + anyOf: + - type: string + - type: object + - type: object required: - - type - - coordinates + - code type: object - link: + extent: + description: >- + The extent of the features in the collection. In the Core only spatial and + temporal extents are specified. Extensions may add additional members to represent + other extents, for example, thermal or pressure ranges. properties: - href: - example: http://data.example.com/buildings/123 - format: url - type: string - rel: - description: >- - Relationship between the current document and the linked document. + spatial: + description: The spatial extent of the features in the collection. + properties: + bbox: + description: >- + One or more bounding boxes that describe the spatial extent of the + dataset. In the Core only a single bounding box is supported. Extensions + may support additional areas. If multiple areas are provided, the + union of the bounding boxes describes the spatial extent. + items: + description: >- + Each bounding box is provided as four or six numbers, depending + on whether the coordinate reference system includes a vertical axis + (height or depth): - NOTE: the following relations are reserved and automatically generated: - `self`, `root`, `parent`, `items`, `collection`, `next`, `previous` - example: describedby - type: string - title: - example: Trierer Strasse 70, 53115 Bonn - type: string - type: - description: The media type of the link target - example: application/geo+json - type: string - method: - default: GET - description: Specifies the HTTP method that the link expects - enum: - - GET - - POST - type: string - required: - - href - - rel - title: Link - type: object - linkPostSearch: - allOf: - - $ref: "#/components/schemas/link" - - type: object - properties: - body: - default: {} - description: For `POST /search` requests, the link can specify the HTTP - body as a JSON object. - type: object - merge: - default: false - description: >- - This is only valid when the server is responding to `POST /search `request. + * Lower left corner, coordinate axis 1 + * Lower left corner, coordinate axis 2 - If merge is true, the client is expected to merge the body value into - the current request body before following the link. This avoids passing - large post bodies back and forth when following links, particularly - for navigating pages through the `POST /search` endpoint. 
- type: boolean - multilinestringGeoJSON: - properties: - coordinates: - items: - items: + * Upper right corner, coordinate axis 1 + + * Upper right corner, coordinate axis 2 + + + The coordinate reference system of the values is WGS 84 longitude/latitude + (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + + + For WGS 84 longitude/latitude the values are in most cases the sequence + of minimum longitude, minimum latitude, maximum longitude and maximum + latitude. However, in cases where the box spans the antimeridian + the first value (west-most box edge) is larger than the third value + (east-most box edge). + + + If the vertical axis is included, the third and the sixth number + are the bottom and the top of the 3-dimensional bounding box. + + + If a feature has multiple spatial geometry properties, it is the + decision of the server whether only a single spatial geometry property + is used to determine the extent or all relevant geometries. + example: + - 5.685114 + - 45.534903 + - 10.747775 + - 47.982586 + items: + type: number + maxItems: 6 + minItems: 4 + type: array + minItems: 1 + type: array + required: + - bbox + type: object + temporal: + description: The temporal extent of the features in the collection. + properties: + interval: + description: >- + One time interval that describe the temporal extent of the dataset. items: - type: number - minItems: 2 + description: >- + Begin and end times of the time interval. + example: + - "2019-01-01T00:00:00Z" + - "2019-01-02T00:00:00Z" + items: + format: date-time + nullable: false + type: string + maxItems: 2 + minItems: 2 + type: array + minItems: 1 + maxItems: 1 type: array - minItems: 2 - type: array - type: array - type: - enum: - - MultiLineString - type: string + required: + - interval + type: object required: - - type - - coordinates + - spatial + - temporal type: object - multipointGeoJSON: + readOnly: true + geoadminLang: + enum: + - de + - it + - fr + - rm + - en + title: Product language + type: string + geoadminVariant: + example: komb + title: Product variants + type: string + geometryGeoJSON: + oneOf: + - $ref: "#/components/schemas/polygonGeoJSON" + geometrycollectionGeoJSON: properties: - coordinates: + geometries: items: - items: - type: number - minItems: 2 - type: array + $ref: "#/components/schemas/geometryGeoJSON" type: array type: enum: - - MultiPoint + - GeometryCollection type: string required: - type - - coordinates + - geometries type: object - multipolygonGeoJSON: + href: + type: string + format: url + description: Link to the asset object + readOnly: true + example: | + http://data.geo.admin.ch/ch.swisstopo.swissimage/collections/cs/items/CS3-20160503_132130_04/thumb.png + ids: + description: >- + Array of Item ids to return. All other filter parameters that further restrict + the number of search results are ignored + items: + type: string + type: array + idsFilter: + description: Only returns items that match the array of given ids properties: - coordinates: - items: - items: - items: - items: - type: number - minItems: 2 - type: array - minItems: 4 - type: array - type: array - type: array - type: - enum: - - MultiPolygon - type: string - required: - - type - - coordinates + ids: + $ref: "#/components/schemas/ids" type: object - numberMatched: - description: >- - The number of features of the feature type that match the selection parameters - like `bbox`. 
- example: 127 - minimum: 0 - type: integer - numberReturned: + example: + ids: + - swisstlmregio-2019 + - swisstlmregio-2020 + intersectsFilter: + description: Only returns items that intersect with the provided polygon. + properties: + intersects: + $ref: https://geojson.org/schema/Geometry.json + type: object + example: + intersects: + type: "Point" + coordinates: + - 7 + - 46 + itemBase: description: >- - The number of features in the feature collection. - - - A server may omit this information in a response, if the information about - the number of features is not known or difficult to compute. - - - If the value is provided, the value shall be identical to the number of items - in the "features" array. - example: 10 - minimum: 0 - type: integer - pointGeoJSON: + A GeoJSON Feature augmented with foreign members that contain values relevant + to a STAC entity properties: - coordinates: - items: - type: number - minItems: 2 - type: array + assets: + $ref: "#/components/schemas/itemAssets" + bbox: + $ref: "#/components/schemas/bbox" + geometry: + $ref: "#/components/schemas/geometryGeoJSON" + properties: + $ref: "#/components/schemas/itemProperties" + readOnly: true + stac_version: + $ref: "#/components/schemas/stac_version" + readOnly: true type: - enum: - - Point - type: string + $ref: "#/components/schemas/itemType" required: + - stac_version - type - - coordinates + - geometry + - bbox + - properties + - assets type: object - polygonGeoJSON: - properties: - coordinates: - items: + item: + allOf: + - type: object + required: + - id + - links + properties: + id: + $ref: "#/components/schemas/itemId" + links: items: - items: - type: number - minItems: 2 - type: array - minItems: 4 + $ref: "#/components/schemas/link" type: array - type: array - type: - enum: - - Polygon - type: string - required: - - type - - coordinates - type: object - example: - coordinates: - - - - 7.242974548172171 - - 46.57310580640624 - - - 7.243756483316452 - - 46.35721185723752 - - - 7.698490766144817 - - 46.357085154660915 - - - 7.699524647567326 - - 46.57297861624267 - - - 7.242974548172171 - - 46.57310580640624 - type: Polygon - projEpsg: + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr50-263-2016 + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection + - $ref: "#/components/schemas/itemBase" + items: description: >- - A Coordinate Reference System (CRS) is the data reference system (sometimes - called a 'projection') used by the asset data, and can usually be referenced - using an EPSG code. If the asset data does not have a CRS, such as in the - case of non-rectified imagery with Ground Control Points, proj:epsg should - be set to null. It should also be set to null if a CRS exists, but for which - there is no valid EPSG code. - example: 2056 - title: EPSG code. 
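Since item listings are paginated through their `next`/`previous` links, a client never builds cursor URLs itself; a minimal sketch, assuming Python with the `requests` package and the collection id used in the spec examples:

import requests

url = ("https://data.geo.admin.ch/api/stac/v0.9/collections/"
       "ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?limit=100")
items = []
while url:
    response = requests.get(url)
    response.raise_for_status()
    page = response.json()
    items.extend(page["features"])
    # Pagination is link-driven: follow the `next` link until it disappears.
    url = next((link["href"] for link in page.get("links", [])
                if link.get("rel") == "next"), None)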
- type: integer - providers: + A FeatureCollection augmented with foreign members that contain values relevant + to a STAC entity + properties: + features: + items: + $ref: "#/components/schemas/item" + type: array + links: + items: + $ref: "#/components/schemas/link" + type: array + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10ab + rel: next + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items?cursor=10acd + rel: previous + type: + enum: + - FeatureCollection + type: string + required: + - features + - type + type: object + itemAssets: + title: Assets + description: List of Assets attached to this feature. + additionalProperties: + $ref: "#/components/schemas/assetBase" + type: object + readOnly: true + example: + smr50-263-2016-2056-kgrs-2.5.tiff: + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + created: "2020-07-14T12:30:00Z" + eo:gsd: 2.5 + geoadmin:variant: kgrs + href: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr50-263-2016-2056-kgrs-2.5.tiff + proj:epsg: 2056 + type: image/tiff; application=geotiff + updated: "2020-07-14T12:30:00Z" + smr50-263-2016-2056-komb-2.5.tiff: + checksum:multihash: 90d402107a7f2588a85362b9beea2a12d4514d45 + created: "2020-07-14T12:30:00Z" + eo:gsd: 2.5 + geoadmin:variant: komb + href: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr50-263-2016-2056-komb-2.5.tiff + proj:epsg: "2056" + type: image/tiff; application=geotiff + updated: "2020-07-14T12:30:00Z" + smr50-263-2016-2056-krel-2.5.tiff: + checksum:multihash: 90f402107a7f2588a85362b9beea2a12d4514d45 + created: "2020-07-14T12:30:00Z" + eo:gsd: 2.5 + geoadmin:variant: krel + href: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr50-263-2016-2056-krel-2.5.tiff + proj:epsg: "2056" + type: image/tiff; application=geotiff + updated: "2020-07-14T12:30:00Z" + itemsSearch: description: >- - A list of providers, which may include all organizations capturing or processing - the data or the hosting provider. Providers should be listed in chronological - order with the most recent provider being the last element of the list. - items: + A GeoJSON FeatureCollection augmented with foreign members that contain values + relevant to a STAC entity + properties: + features: + items: + $ref: "#/components/schemas/item" + type: array + type: + enum: + - FeatureCollection + type: string + required: + - features + - type + type: object + itemsSearchGet: + allOf: + - $ref: "#/components/schemas/itemsSearch" + - type: object properties: - description: - description: >- - Multi-line description to add further provider information such as processing - details for processors and producers, hosting details for hosts or basic - contact information. + links: + $ref: "#/components/schemas/itemsSearchLinks" + itemsSearchPost: + allOf: + - $ref: "#/components/schemas/itemsSearch" + - type: object + properties: + links: + $ref: "#/components/schemas/itemsSearchPostLinks" + itemsSearchLinks: + description: >- + An array of links. Can be used for pagination, e.g. by providing a link with + the `next` relation type. 
+ example: + - href: https://data.geo.admin.ch/api/stac/v0.9/search + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab + rel: next + items: + $ref: "#/components/schemas/link" + type: array + itemsSearchPostLinks: + description: >- + An array of links. Can be used for pagination, e.g. by providing a link with + the `next` relation type. + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/search + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab + rel: next + method: POST + body: {} + merge: true + items: + $ref: "#/components/schemas/linkPostSearch" + type: array + itemId: + title: ID + description: Feature identifier (unique per collection) + example: smr200-200-4-2019 + type: string + itemProperties: + title: Properties + description: >- + Provides the core metadata fields plus extensions - CommonMark 0.29 syntax MAY be used for rich text representation. - type: string - name: - description: The name of the organization or the individual. - type: string - roles: - description: >- - Roles of the provider. + The item's data timing information can be specified either with + * One datetime value in the field `datetime` - The provider's role(s) can be one or more of the following elements: + * A datetime range with a `start_datetime` and an `end_datetime` - * licensor: The organization that is licensing the dataset under + One of the two is required + properties: + created: + $ref: "#/components/schemas/created" + datetime: + $ref: "#/components/schemas/datetime" + start_datetime: + $ref: "#/components/schemas/datetime" + end_datetime: + $ref: "#/components/schemas/datetime" + updated: + $ref: "#/components/schemas/updated" + title: + description: Human readable title of the Feature + example: Feature title + type: string + required: + - created + - updated + type: object + itemType: + title: type + description: The GeoJSON type + enum: + - Feature + type: string + readOnly: true + landingPage: + properties: + description: + example: >- + Access to data about buildings in the city of Bonn via a Web API that + conforms to the OGC API Features specification. + type: string + id: + type: string + links: + items: + $ref: "#/components/schemas/link" + type: array + stac_version: + $ref: "#/components/schemas/stac_version" + title: + example: Buildings in Bonn + type: string + required: + - links + - stac_version + - id + - description + type: object + license: + description: >- + License(s) of the data as a SPDX [License identifier](https://spdx.org/licenses/). + Alternatively, use `proprietary` if the license is not on the SPDX license + list or `various` if multiple licenses apply. In these two cases links to + the license texts SHOULD be added, see the `license` link relation type. - the license specified in the collection's license field. + Non-SPDX licenses SHOULD add a link to the license text with the `license` + relation in the links section. The license text MUST NOT be provided as a + value of this field. If there is no public license URL available, it is RECOMMENDED + to host the license text and link to it. + example: proprietary + type: string + limit: + default: 100 + description: >- + The `limit` parameter limits the number of results that are included in the + response. - * producer: The producer of the data is the provider that + To retrieve the next bunch of result, use the `next` link in the `links` section + of the response. - initially captured and processed the source data, e.g. ESA for - Sentinel-2 data. 
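The `datetime` versus `start_datetime`/`end_datetime` rule for item properties is easiest to see on two example payload fragments (illustrative values; `created` and `updated` appear throughout this spec as server-managed response fields):

# A single acquisition time ...
properties_single = {
    "datetime": "2016-05-03T13:22:30.040Z",
    "title": "A CS3 item",          # optional human readable title
}

# ... or a closed range; `datetime` is then omitted.
properties_range = {
    "start_datetime": "2019-01-01T00:00:00Z",
    "end_datetime": "2019-01-02T00:00:00Z",
}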
- - * processor: A processor is any provider who processed data to a + Minimum = 1. Maximum = 100. Default = 100. + example: 20 + maximum: 100 + minimum: 1 + type: integer + limitFilter: + description: Only returns maximum number of results (page size) + properties: + limit: + $ref: "#/components/schemas/limit" + type: object + linestringGeoJSON: + properties: + coordinates: + items: + items: + type: number + minItems: 2 + type: array + minItems: 2 + type: array + type: + enum: + - LineString + type: string + required: + - type + - coordinates + type: object + link: + properties: + href: + example: http://data.example.com/buildings/123 + format: url + type: string + rel: + description: >- + Relationship between the current document and the linked document. - derived product. - * host: The host is the actual provider offering the data on their + NOTE: the following relations are reserved and automatically generated: + `self`, `root`, `parent`, `items`, `collection`, `next`, `previous` + example: describedby + type: string + title: + example: Trierer Strasse 70, 53115 Bonn + type: string + type: + description: The media type of the link target + example: application/geo+json + type: string + method: + default: GET + description: Specifies the HTTP method that the link expects + enum: + - GET + - POST + type: string + required: + - href + - rel + title: Link + type: object + linkPostSearch: + allOf: + - $ref: "#/components/schemas/link" + - type: object + properties: + body: + default: {} + description: For `POST /search` requests, the link can specify the HTTP + body as a JSON object. + type: object + merge: + default: false + description: >- + This is only valid when the server is responding to `POST /search `request. - storage. There should be no more than one host, specified as last - element of the list. + If merge is true, the client is expected to merge the body value into + the current request body before following the link. This avoids passing + large post bodies back and forth when following links, particularly + for navigating pages through the `POST /search` endpoint. + type: boolean + multilinestringGeoJSON: + properties: + coordinates: + items: items: - enum: - - producer - - licensor - - processor - - host - type: string + items: + type: number + minItems: 2 + type: array + minItems: 2 type: array - url: - description: >- - Homepage on which the provider describes the dataset and publishes contact - information. 
- format: url - type: string - required: - - name - title: Provider - type: object - type: array - example: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch - query: - additionalProperties: - $ref: "#/components/schemas/queryProp" - description: Define which properties to query and the operations to apply - example: - title: - eq: "Swissregio" - contains: "Swiss" - created: - lte: "2021-01-01T00:00:00.000Z" - updated: - gte: "2020-01-01T00:00:00.000Z" + type: array + type: + enum: + - MultiLineString + type: string + required: + - type + - coordinates type: object - queryFilter: - description: Allows users to query properties for specific values + multipointGeoJSON: properties: - query: - $ref: "#/components/schemas/query" + coordinates: + items: + items: + type: number + minItems: 2 + type: array + type: array + type: + enum: + - MultiPoint + type: string + required: + - type + - coordinates type: object - queryProp: - anyOf: - - description: >- - If the object doesn't contain any of the operators, it is equivalent to - using the equals operator - - description: Match using an operator - properties: - contains: - description: >- - Find items with a property that contains the specified literal string, - e.g., matches ".*.*". A case-insensitive comparison must be - performed. - type: string - endsWith: - description: >- - Find items with a property that ends with the specified string. A case-insensitive - comparison must be performed. - type: string - eq: - description: >- - Find items with a property that is equal to the specified value. For - strings, a case-insensitive comparison must be performed. - nullable: true - oneOf: - - type: string - - type: number - - type: boolean - gt: - description: Find items with a property value greater than the specified - value. - oneOf: - - format: date-time - type: string - - type: number - gte: - description: Find items with a property value greater than or equal the - specified value. - oneOf: - - format: date-time - type: string - - type: number - in: - description: >- - Find items with a property that equals at least one entry in the specified - array. A case-insensitive comparison must be performed. + multipolygonGeoJSON: + properties: + coordinates: + items: items: - oneOf: - - type: string - - type: number + items: + items: + type: number + minItems: 2 + type: array + minItems: 4 + type: array type: array - lt: - description: Find items with a property value less than the specified - value. - oneOf: - - format: date-time - type: string - - type: number - lte: - description: Find items with a property value less than or equal the specified - value. - oneOf: - - format: date-time - type: string - - type: number - neq: - description: >- - Find items that *don't* contain the specified value. For strings, a - case-insensitive comparison must be performed. - nullable: true - oneOf: - - type: string - - type: number - - type: boolean - startsWith: - description: >- - Find items with a property that begins with the specified string. A - case-insensitive comparison must be performed. - type: string - type: object + type: array + type: + enum: + - MultiPolygon + type: string + required: + - type + - coordinates + type: object + numberMatched: description: >- - Apply query operations to a specific property. The following properties are + The number of features of the feature type that match the selection parameters + like `bbox`. 
+ example: 127 + minimum: 0 + type: integer + numberReturned: + description: >- + The number of features in the feature collection. + + + A server may omit this information in a response, if the information about + the number of features is not known or difficult to compute. + + + If the value is provided, the value shall be identical to the number of items + in the "features" array. + example: 10 + minimum: 0 + type: integer + pointGeoJSON: + properties: + coordinates: + items: + type: number + minItems: 2 + type: array + type: + enum: + - Point + type: string + required: + - type + - coordinates + type: object + polygonGeoJSON: + properties: + coordinates: + items: + items: + items: + type: number + minItems: 2 + type: array + minItems: 4 + type: array + type: array + type: + enum: + - Polygon + type: string + required: + - type + - coordinates + type: object + example: + coordinates: + - - - 7.242974548172171 + - 46.57310580640624 + - - 7.243756483316452 + - 46.35721185723752 + - - 7.698490766144817 + - 46.357085154660915 + - - 7.699524647567326 + - 46.57297861624267 + - - 7.242974548172171 + - 46.57310580640624 + type: Polygon + projEpsg: + description: >- + A Coordinate Reference System (CRS) is the data reference system (sometimes + called a 'projection') used by the asset data, and can usually be referenced + using an EPSG code. If the asset data does not have a CRS, such as in the + case of non-rectified imagery with Ground Control Points, proj:epsg should + be set to null. It should also be set to null if a CRS exists, but for which + there is no valid EPSG code. + example: 2056 + title: EPSG code. + type: integer + providers: + description: >- + A list of providers, which may include all organizations capturing or processing + the data or the hosting provider. Providers should be listed in chronological + order with the most recent provider being the last element of the list. + items: + properties: + description: + description: >- + Multi-line description to add further provider information such as processing + details for processors and producers, hosting details for hosts or basic + contact information. + + + CommonMark 0.29 syntax MAY be used for rich text representation. + type: string + name: + description: The name of the organization or the individual. + type: string + roles: + description: >- + Roles of the provider. + + + The provider's role(s) can be one or more of the following elements: + + + * licensor: The organization that is licensing the dataset under + + + the license specified in the collection's license field. + + * producer: The producer of the data is the provider that + + + initially captured and processed the source data, e.g. ESA for + Sentinel-2 data. + + * processor: A processor is any provider who processed data to a + + + derived product. + + * host: The host is the actual provider offering the data on their + + + storage. There should be no more than one host, specified as last + element of the list. + items: + enum: + - producer + - licensor + - processor + - host + type: string + type: array + url: + description: >- + Homepage on which the provider describes the dataset and publishes contact + information. 
+ format: url + type: string + required: + - name + title: Provider + type: object + type: array + example: + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch + query: + additionalProperties: + $ref: "#/components/schemas/queryProp" + description: Define which properties to query and the operations to apply + example: + title: + eq: "Swissregio" + contains: "Swiss" + created: + lte: "2021-01-01T00:00:00.000Z" + updated: + gte: "2020-01-01T00:00:00.000Z" + type: object + queryFilter: + description: Allows users to query properties for specific values + properties: + query: + $ref: "#/components/schemas/query" + type: object + queryProp: + anyOf: + - description: >- + If the object doesn't contain any of the operators, it is equivalent to + using the equals operator + - description: Match using an operator + properties: + contains: + description: >- + Find items with a property that contains the specified literal string, + e.g., matches ".*.*". A case-insensitive comparison must be + performed. + type: string + endsWith: + description: >- + Find items with a property that ends with the specified string. A case-insensitive + comparison must be performed. + type: string + eq: + description: >- + Find items with a property that is equal to the specified value. For + strings, a case-insensitive comparison must be performed. + nullable: true + oneOf: + - type: string + - type: number + - type: boolean + gt: + description: Find items with a property value greater than the specified + value. + oneOf: + - format: date-time + type: string + - type: number + gte: + description: Find items with a property value greater than or equal the + specified value. + oneOf: + - format: date-time + type: string + - type: number + in: + description: >- + Find items with a property that equals at least one entry in the specified + array. A case-insensitive comparison must be performed. + items: + oneOf: + - type: string + - type: number + type: array + lt: + description: Find items with a property value less than the specified + value. + oneOf: + - format: date-time + type: string + - type: number + lte: + description: Find items with a property value less than or equal the specified + value. + oneOf: + - format: date-time + type: string + - type: number + neq: + description: >- + Find items that *don't* contain the specified value. For strings, a + case-insensitive comparison must be performed. + nullable: true + oneOf: + - type: string + - type: number + - type: boolean + startsWith: + description: >- + Find items with a property that begins with the specified string. A + case-insensitive comparison must be performed. + type: string + type: object + description: >- + Apply query operations to a specific property. The following properties are currently supported: `created`, `updated`, `title`. 
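A short sketch of the two ways to send the query filter described above, assuming Python with the `requests` package and reusing operators and values from the spec example; the POST body shape assumes `searchBody` composes the `queryFilter` and `limitFilter` defined in this file:

import json
import requests

base = "https://data.geo.admin.ch/api/stac/v0.9"
query = {
    "title": {"contains": "Swiss"},
    "created": {"lte": "2021-01-01T00:00:00.000Z"},
    "updated": {"gte": "2020-01-01T00:00:00.000Z"},
}

# POST /search: the filter travels in the JSON body ...
hits = requests.post(f"{base}/search", json={"query": query, "limit": 10}).json()

# ... GET /search: the same object is passed as a JSON string in `query`.
hits = requests.get(f"{base}/search",
                    params={"query": json.dumps(query), "limit": 10}).json()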
roles: type: array @@ -1856,9 +2551,26 @@ components: rel: item - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale rel: collection - createAsset: + collectionWrite: + title: collection allOf: - - $ref: "#/components/schemas/assetBase" + - $ref: "#/components/schemas/collectionBase" + - type: object + properties: + links: + type: array + items: + $ref: "#/components/schemas/link" + example: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography + swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + createAsset: + allOf: + - $ref: "#/components/schemas/assetBase" - type: object required: - id @@ -2018,14 +2730,14 @@ components: readOnly: true dtUploadCreated: title: created - description: Date time when the Asset's upload has been created/started. + description: Date and time when the Asset's upload has been created/started. type: string format: date-time readOnly: true dtUploadCompleted: title: completed description: | - Date time when the Asset's upload has been completed. + Date and time when the Asset's upload has been completed. *Note: this property is mutually exclusive with `aborted`* type: string @@ -2034,7 +2746,7 @@ components: dtUploadAborted: title: aborted description: | - Date time when the Asset's upload has been aborted. + Date and time when the Asset's upload has been aborted. *Note: this property is mutually exclusive with `completed`* type: string @@ -2128,7 +2840,7 @@ components: type: array description: Parts that have been uploaded items: - title: File part that have been uploaded + title: File parts that have been uploaded type: object required: - etag @@ -2285,1072 +2997,441 @@ components: type: string description: The RFC7232 ETag for the specified uploaded part. 
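Because the `etag` of every uploaded part must be echoed back when the upload is completed, a client typically records the `ETag` response header of each part PUT. A hedged sketch, assuming Python with `requests`; `upload` stands for the JSON returned when the multipart upload was created, the local part files are hypothetical, the `Content-MD5` header required by the upload endpoint is omitted here, and the exact completion payload is defined by `assetCompleteUpload`:

import requests

uploaded_parts = []
for entry in upload["urls"]:                                  # one presigned URL per part
    with open(f"part-{entry['part']}.bin", "rb") as fd:       # hypothetical local chunks
        response = requests.put(entry["url"], data=fd.read())
    response.raise_for_status()
    # Keep the ETag header: it identifies the uploaded part in the complete call.
    uploaded_parts.append({"part": entry["part"], "etag": response.headers["ETag"]})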
example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - examples: - inprogress: - summary: In progress upload example - value: - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: in-progress - number_parts: 1 - urls: - - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 - part: 1 - expires: '2019-08-24T14:15:22Z' - created: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - completed: - summary: Completed upload example - value: - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: completed - number_parts: 1 - created: '2019-08-24T14:15:22Z' - completed: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - aborted: - summary: Aborted upload example - value: - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: completed - number_parts: 1 - created: '2019-08-24T14:15:22Z' - aborted: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 -paths: - /: - get: - description: >- - The landing page provides links to the API definition, the conformance statements - and to the feature collections in this dataset. - operationId: getLandingPage - responses: - "200": - $ref: "#/components/responses/LandingPage" - "500": - $ref: "#/components/responses/ServerError" - summary: Landing page - tags: - - Capabilities - /collections: - get: - operationId: getCollections - parameters: - - $ref: "#/components/parameters/limit" - responses: - "200": - $ref: "#/components/responses/Collections" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch collections - description: The feature collections in the dataset - tags: - - Data - post: - tags: - - Data Management - summary: >- - Create a new collection - operationId: createCollection - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/collection" - example: - description: The National Map 1:200,000 is a topographic map giving - an overview of Switzerland. 
- id: ch.swisstopo.pixelkarte-farbe-pk200.noscale - license: proprietary - links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch - title: National Map 1:200'000 - responses: - "201": - description: Returns the created Collection - headers: - Location: - description: A link to the collection - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/collectionWrite" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "500": - $ref: "#/components/responses/ServerError" - /collections/{collectionId}: - get: - operationId: describeCollection - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" - responses: - "200": - $ref: "#/components/responses/Collection" - "304": - $ref: "#/components/responses/NotModified" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch a single collection - description: Describe the feature collection with id `collectionId` - tags: - - Data - put: - tags: - - Data Management - summary: Update or create a collection - description: >- - Update or create a collection with Id `collectionId` with a complete collection - definition. If the collection doesn't exists it is then created. - operationId: updateCollection - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatchWrite" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/collectionWrite" - example: - description: The National Map 1:200,000 is a topographic map giving - an overview of Switzerland. 
- id: ch.swisstopo.pixelkarte-farbe-pk200.noscale - license: proprietary - links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch - title: National Map 1:200'000 - responses: - "200": - description: Returns the updated Collection - content: - application/json: - schema: - $ref: "#/components/schemas/collection" - "201": - description: Returns the created Collection - headers: - Location: - description: A link to the collection - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/collection" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - patch: - tags: - - Data Management - summary: Partial update of a collection + parameters: + assetQuery: description: >- - Update an existing collection with Id `collectionId` with a partial collection - definition - operationId: partialUpdateCollection - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatch" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/collectionWrite" - example: - id: ch.swisstopo.pixelkarte-farbe-pk200.noscale - license: proprietary - links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch - title: National Map 1:200'000 - responses: - "200": - description: Returns the updated Collection - content: - application/json: - schema: - $ref: "#/components/schemas/collection" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - /collections/{collectionId}/items: - get: - description: >- - Fetch features of the feature collection with id `collectionId`. - - - Every feature in a dataset belongs to a collection. A dataset may consist - of multiple feature collections. A feature collection is often a collection - of features of a similar type, based on a common schema. - - - Use content negotiation to request HTML or GeoJSON. 
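As a usage note for the items endpoint, a GET with the `bbox`, `datetime` and `limit` parameters might look as follows; a sketch assuming Python with `requests`, where the bbox reuses the extent example of this spec and the datetime interval reuses the parameter example:

import requests

url = ("https://data.geo.admin.ch/api/stac/v0.9/collections/"
       "ch.swisstopo.pixelkarte-farbe-pk50.noscale/items")
params = {
    "bbox": "5.685114,45.534903,10.747775,47.982586",
    "datetime": "2018-02-12T00:00:00Z/2018-03-18T12:31:12Z",   # closed interval
    "limit": 50,
}
response = requests.get(url, params=params)
response.raise_for_status()
features = response.json()["features"]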
- operationId: getFeatures - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/limit" - - $ref: "#/components/parameters/bbox" - - $ref: "#/components/parameters/datetime" - responses: - "200": - $ref: "#/components/responses/Features" - "400": - $ref: "#/components/responses/InvalidParameter" - "404": - $ref: "#/components/responses/NotFound" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch features - tags: - - Data - post: - summary: Add a new feature to a collection - description: Create a new feature/item in a specific collection - operationId: postFeature - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/createItem" - example: - id: cs3-20160503_132131_05 - geometry: - type: Polygon - coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 - properties: - datetime: "2016-05-03T13:22:30.040Z" - title: A CS3 item - links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - responses: - "201": - description: Return the created Item. - headers: - Location: - description: A link to the item - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/item" - "400": - $ref: "#/components/responses/BadRequest" - "403": - $ref: "#/components/responses/PermissionDenied" - 5XX: - $ref: "#/components/responses/ServerError" - /collections/{collectionId}/items/{featureId}: - get: - description: >- - Fetch the feature with id `featureId` in the feature collection with id `collectionId`. - - - Use content negotiation to request HTML or GeoJSON. - operationId: getFeature - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" - responses: - "200": - $ref: "#/components/responses/Feature" - "304": - $ref: "#/components/responses/NotModified" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch a single feature - tags: - - Data - put: - summary: Update or create a feature - description: >- - Update or create a feature with Id `featureId` with a complete feature definition. - If the feature doesn't exists it is then created. - - - *NOTE: Optional fields that are not part of the PUT payload, will be erased - in the resource. 
For example if the resource has a properties.title and the - PUT payload doesn't, then the resource's properties.title will be removed.* - operationId: putFeature - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/updateItem" - example: - id: cs3-20160503_132131_05 - geometry: - type: Polygon - coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 - properties: - datetime: "2016-05-03T13:22:30.040Z" - title: A CS3 item - links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography - swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - responses: - "200": - description: Returns the updated Item - content: - application/json: - schema: - $ref: "#/components/schemas/item" - "400": - $ref: "#/components/responses/BadRequest" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - 5XX: - $ref: "#/components/responses/ServerError" - patch: - summary: Update an existing feature by Id with a partial item definition - description: >- - Use this method to update an existing feature. Requires a JSON fragment (containing - the fields to be updated) be submitted. - operationId: patchFeature - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/partialItem" - responses: - "200": - description: Returns the updated Item - content: - application/json: - schema: - $ref: "#/components/schemas/item" - "201": - description: Returns the created Item - headers: - Location: - description: A link to the item - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/item" - "400": - $ref: "#/components/responses/BadRequest" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - 5XX: - $ref: "#/components/responses/ServerError" - delete: - summary: Delete an existing feature by Id - description: Use this method to delete an existing feature/item. - operationId: deleteFeature - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" - responses: - "200": - $ref: "#/components/responses/DeletedResource" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - 5XX: - $ref: "#/components/responses/ServerError" - /conformance: - get: - description: >- - A list of all conformance classes specified in a standard that the server - conforms to. 
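The PATCH semantics for features (send only the fields to change) can be illustrated with a one-field update; a sketch assuming Python with `requests`, a hypothetical new title and write access to the item:

import requests

url = ("https://data.geo.admin.ch/api/stac/v0.9/collections/"
       "ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019")

# Only properties.title is touched; all other fields keep their current value.
response = requests.patch(url, json={"properties": {"title": "A better title"}})
response.raise_for_status()
updated_item = response.json()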
- operationId: getConformanceDeclaration - responses: - "200": - $ref: "#/components/responses/ConformanceDeclaration" - "500": - $ref: "#/components/responses/ServerError" - summary: Information about specifications that this API conforms to - tags: - - Capabilities - /search: - get: - description: >- - Retrieve Items matching filters. Intended as a shorthand API for simple queries. - operationId: getSearchSTAC - parameters: - - $ref: "#/components/parameters/bbox" - - $ref: "#/components/parameters/datetime" - - $ref: "#/components/parameters/limit" - - $ref: "#/components/parameters/ids" - - $ref: "#/components/parameters/collectionsArray" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/itemsSearchGet" - description: A feature collection. - "500": - $ref: "#/components/responses/ServerError" - summary: Search STAC items with simple filtering. - tags: - - STAC - post: - description: >- - Retrieve items matching filters. Intended as the standard, full-featured query - API. - operationId: postSearchSTAC - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/searchBody" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/itemsSearchPost" - description: A feature collection. - "500": - $ref: "#/components/responses/ServerError" - summary: Search STAC items with full-featured filtering. - tags: - - STAC - /collections/{collectionId}/items/{featureId}/assets: - get: - description: >- - Fetch assets of the item with id `featureId`. - - Every asset belongs to an item. - operationId: getAssets - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - responses: - "200": - $ref: "#/components/responses/Assets" - "400": - $ref: "#/components/responses/InvalidParameter" - "404": - $ref: "#/components/responses/NotFound" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch all assets for a feature - tags: - - Data - post: - summary: Add a new asset to a feature - description: | - Create a new asset for a specific feature. - - *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* - operationId: postAsset - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/createAsset" - responses: - "201": - description: Return the created Asset - headers: - Location: - description: A link to the asset - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/createAsset" - "400": - $ref: "#/components/responses/BadRequest" - 5XX: - $ref: "#/components/responses/ServerError" - /collections/{collectionId}/items/{featureId}/assets/{assetId}: - get: - description: >- - Fetch the asset with id `assetId` of the feature with id `featureId` in the - feature collection with id `collectionId`. 
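A small sketch of the simple GET search, assuming Python with `requests`; array parameters are sent comma separated because the spec declares them with `style: form` and `explode: false`:

import requests

params = {
    "collections": "ch.swisstopo.pixelkarte-farbe-pk50.noscale",
    "ids": "smr200-200-4-2019",   # when ids is given, other filters are ignored
    "limit": 10,
}
response = requests.get("https://data.geo.admin.ch/api/stac/v0.9/search", params=params)
response.raise_for_status()
found = response.json()["features"]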
- operationId: getAsset - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" - responses: - "200": - $ref: "#/components/responses/Asset" - "304": - $ref: "#/components/responses/NotModified" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch a single asset - tags: - - Data - put: - summary: Update or create an asset - description: >- - Update or create an asset with Id `assetId` with a complete asset definition. - If the asset doesn't exists it is then created. - - - *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* - operationId: putAsset - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/readUpdateAsset" - responses: - "200": - description: Asset has been successfully updated. - content: - application/json: - schema: - $ref: "#/components/schemas/readUpdateAsset" - "201": - description: Asset has been newly created. - headers: - Location: - description: A link to the asset - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/createAsset" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - 5XX: - $ref: "#/components/responses/ServerError" - patch: - summary: Update an existing asset by Id with a partial asset definition + Query for properties in assets (e.g. mediatype). Use the JSON form of the + assetQueryFilter used in POST. + in: query + name: assetQuery + required: false + schema: + type: string + bbox: + explode: false + in: query + name: bbox + required: false + schema: + $ref: "#/components/schemas/bbox" + style: form + example: + collectionId: + description: Local identifier of a collection + in: path + name: collectionId + required: true + schema: + type: string + collectionsArray: + explode: false + in: query + name: collections + required: false + schema: + $ref: "#/components/schemas/collectionsArray" + datetime: + explode: false + in: query + name: datetime + required: false + schema: + $ref: "#/components/schemas/datetimeQuery" + example: 2018-02-12T00%3A00%3A00Z%2F2018-03-18T12%3A31%3A12Z + style: form + featureId: + description: Local identifier of a feature + in: path + name: featureId + required: true + schema: + type: string + ids: description: >- - Use this method to update an existing asset. Requires a JSON fragment (containing - the fields to be updated) be submitted. - - - *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* - operationId: patchAsset - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/readUpdateAsset" - responses: - "200": - description: Returns the updated Asset. 
- content: - application/json: - schema: - $ref: "#/components/schemas/readUpdateAsset" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - 5XX: - $ref: "#/components/responses/ServerError" - delete: - summary: Delete an existing asset by Id + Array of Item ids to return. All other filter parameters that further restrict + the number of search results are ignored + explode: false + in: query + name: ids + required: false + schema: + $ref: "#/components/schemas/ids" + limit: + explode: false + in: query + name: limit + required: false + schema: + $ref: "#/components/schemas/limit" + style: form + query: + description: Query for properties in items. Use the JSON form of the queryFilter + used in POST. + in: query + name: query + required: false + schema: + type: string + IfNoneMatch: + name: If-None-Match + in: header + schema: + type: string description: >- - Use this method to delete an existing asset. + The RFC7232 `If-None-Match` header field makes the GET request method conditional. + It is composed of a comma separated list of ETags or value "*". - **NOTE: Asset file on S3 will be also removed !** - operationId: deleteAsset - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" - responses: - "200": - $ref: "#/components/responses/DeletedResource" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - 5XX: - $ref: "#/components/responses/ServerError" - /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads: - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - get: - tags: - - Asset Upload Management - summary: List all Asset's multipart uploads - description: >- - Return a list of all Asset's multipart uploads that are in progress and have - been completed or aborted. - operationId: getAssetUploads - parameters: - - name: status - in: query - description: Filter the list by status. 
- schema: - $ref: "#/components/schemas/status" - responses: - "200": - description: List of Asset's uploads - content: - application/json: - schema: - $ref: "#/components/schemas/assetUploads" - example: - uploads: - - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnusebaJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: in-progress - number_parts: 1 - urls: - - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 - part: 1 - expires: '2019-08-24T14:15:22Z' - created: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YaaegJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: completed - number_parts: 1 - created: '2019-08-24T14:15:22Z' - completed: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: aborted - number_parts: 1 - created: '2019-08-24T14:15:22Z' - aborted: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - links: - - rel: next - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - 5XX: - $ref: "#/components/responses/ServerError" - post: - tags: - - Asset Upload Management - summary: Create a new Asset's multipart upload - description: | - Create a new Asset's multipart upload. - operationId: createAssetUpload - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/assetUploadCreate" - responses: - "201": - description: Created Asset's multipart upload - headers: - Location: - description: A link to the Asset's multipart upload object - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/assetUploadCreate" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - 5XX: - $ref: "#/components/responses/ServerError" - /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}: - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/uploadId" - get: - tags: - - Asset Upload Management - summary: Get an Asset's multipart upload - description: | - Return an Asset's multipart upload. - operationId: getAssetUpload - parameters: - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" - responses: - "200": - description: Asset's multipart upload description. 
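Tying the upload objects together, a hedged end-to-end sketch of a single-part upload, assuming Python with `requests`; the create payload mirrors the response example above, since the authoritative request schema `assetUploadCreate` is not reproduced in this hunk, and `Content-MD5` is assumed to be the base64-encoded MD5 digest of the part:

import base64
import hashlib
import requests

asset = ("https://data.geo.admin.ch/api/stac/v0.9/collections/"
         "ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/"
         "assets/smr50-263-2016-2056-kgrs-2.5.tiff")

with open("smr50-263-2016-2056-kgrs-2.5.tiff", "rb") as fd:   # local file to upload
    part = fd.read()

# 1. Create the multipart upload (a single part for brevity).
upload = requests.post(f"{asset}/uploads", json={
    "number_parts": 1,
    "checksum:multihash": "90e402107a7f2588a85362b9beea2a12d4514d45",
}).json()

# 2. Upload the part to its presigned URL.
md5 = base64.b64encode(hashlib.md5(part).digest()).decode()
response = requests.put(upload["urls"][0]["url"], data=part,
                        headers={"Content-MD5": md5})
response.raise_for_status()
etag = response.headers["ETag"]   # required by the complete request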
- headers: - ETag: - $ref: "#/components/headers/ETag" - content: - application/json: - schema: - $ref: "#/components/schemas/assetUpload" - examples: - inprogress: - $ref: "#/components/examples/inprogress" - completed: - $ref: "#/components/examples/completed" - aborted: - $ref: "#/components/examples/aborted" - "304": - $ref: "#/components/responses/NotModified" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - /{presignedUrl}: - servers: - - url: http://data.geo.admin.ch/ - put: - tags: - - Asset Upload Management - summary: Upload asset file part + The server compares the client's ETags (sent with `If-None-Match`) with the + ETag for its current version of the resource, and if both values match (that + is, the resource has not changed), the server sends back a `304 Not Modified` + status, without a body, which tells the client that the cached version of + the response is still good to use (fresh). + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + IfMatch: + name: If-Match + in: header + schema: + type: string description: >- - Upload an Asset file part using the presigned url(s) returned by [Create a - new Asset's multipart upload](#operation/createAssetUpload). + The RFC7232 `If-Match` header field makes the GET request method conditional. + It is composed of a comma separated list of ETags or value "*". - Parts that have been uploaded but not completed can be checked using [Get - an Asset's multipart upload](#operation/getAssetUpload) + The server compares the client's ETags (sent with `If-Match`) with the ETag + for its current version of the resource, and if both values don't match (that + is, the resource has changed), the server sends back a `412 Precondition Failed` + status, without a body, which tells the client that the cached version of + the response is not good to use anymore. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + assetId: + name: assetId + in: path + description: Local identifier of an asset. + required: true + schema: + type: string + uploadId: + name: uploadId + in: path + description: Local identifier of an asset's upload. + required: true + schema: + type: string + presignedUrl: + name: presignedUrl + in: path + description: Presigned url returned by [Create a new Asset's multipart upload](#operation/createAssetUpload). + required: true + schema: + type: string + IfMatchWrite: + name: If-Match + in: header + schema: + type: string + description: >- + The RFC7232 `If-Match` header field makes the PUT/PATCH/DEL request method + conditional. It is composed of a comma separated list of ETags or value "*". - A file part must be at least 5 MB except for the last one and at most 5 GB, - otherwise the complete operation will fail. + The server compares the client's ETags (sent with `If-Match`) with the ETag + for its current version of the resource, and if both values don't match (that + is, the resource has changed), the server sends back a `412 Precondition Failed` + status, without a body, which tells the client that he would overwrite another + changes of the resource. 
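The `If-Match`/`If-None-Match` behaviour described for these headers lends itself to a short example; a sketch assuming Python with `requests`, where the write uses PATCH with a hypothetical fragment so that read-only fields need not be resent:

import requests

url = ("https://data.geo.admin.ch/api/stac/v0.9/collections/"
       "ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019")

current = requests.get(url)
etag = current.headers["ETag"]

# Conditional read: 304 means the cached copy is still fresh.
cached = requests.get(url, headers={"If-None-Match": etag})
assert cached.status_code in (200, 304)

# Conditional write: 412 means someone else changed the item in the meantime.
response = requests.patch(url, json={"properties": {"title": "A better title"}},
                          headers={"If-Match": etag})
if response.status_code == 412:
    pass   # re-fetch, re-apply the change and retry
else:
    response.raise_for_status()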
+ example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + responses: + Collection: + headers: + ETag: + $ref: "#/components/headers/ETag" + content: + application/json: + schema: + $ref: "#/components/schemas/collection" + description: >- + Information about the feature collection with id `collectionId`. - *Note: this endpoint doesn't require any authentication as it is already part - of the presigned url* - operationId: uploadAssetFilePart - parameters: - - $ref: "#/components/parameters/presignedUrl" - - name: Content-MD5 - in: header - description: Asset file part content MD5. - required: true - schema: - type: string - responses: - "200": - description: Asset file uploaded part successfully - headers: - ETag: - schema: - type: string - description: >- - The RFC7232 ETag header field in a response provides the current entity- - tag for the selected resource. + The response contains a link to the items in the collection (path `/collections/{collectionId}/items`, + link relation `items`) as well as key information about the collection. This + information includes: - This ETag is required in the complete multipart upload payload. + * A local identifier for the collection that is unique for the dataset + * A list of coordinate reference systems (CRS) in which geometries may be + returned by the server. The first CRS is the default coordinate reference + system (the default is always WGS 84 with axis order longitude/latitude) - An entity-tag is an opaque identifier for different versions of a - resource over time, regardless whether multiple versions are valid - at the same time. An entity-tag consists of an opaque quoted string. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - required: true - /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/complete: - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/uploadId" - post: - tags: - - Asset Upload Management - summary: Complete multipart upload - operationId: completeMultipartUpload + * An optional title and description for the collection + + * An optional extent that can be used to provide an indication of the spatial + and temporal extent of the collection - typically derived from the data + + * An optional indicator about the type of the items in the collection (the + default value, if the indicator is not provided, is 'feature') + Collections: + content: + application/json: + schema: + $ref: "#/components/schemas/collections" + description: >- + The feature collections shared by this API. + + + The dataset is organized as one or more feature collections. This resource + provides information about and access to the collections. + + + The response contains the list of collections. For each collection, a link + to the items in the collection (path `/collections/{collectionId}/items`, + link relation `items`) as well as key information about the collection. This + information includes: + + + * A local identifier for the collection that is unique for the dataset + + * A list of coordinate reference systems (CRS) in which geometries may be + returned by the server. 
The first CRS is the default coordinate reference + system (the default is always WGS 84 with axis order longitude/latitude) + + * An optional title and description for the collection + + * An optional extent that can be used to provide an indication of the spatial + and temporal extent of the collection - typically derived from the data + + * An optional indicator about the type of the items in the collection (the + default value, if the indicator is not provided, is 'feature'). + + The `limit` parameter may be used to control the subset of the selected collections + that should be returned in the response, the page size. Each page include + links to support paging (link relation `next` and/or `previous`). + ConformanceDeclaration: + content: + application/json: + example: + conformsTo: + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30 + - http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson + schema: + $ref: "#/components/schemas/confClasses" + description: >- + The URIs of all conformance classes supported by the server. + + + To support "generic" clients that want to access multiple OGC API Features + implementations - and not "just" a specific API / server, the server declares + the conformance classes it implements and conforms to. + Feature: + headers: + ETag: + $ref: "#/components/headers/ETag" + content: + application/json: + schema: + $ref: "#/components/schemas/item" + description: >- + Fetch the feature with id `featureId` in the feature collection with id `collectionId` + Features: + content: + application/json: + schema: + $ref: "#/components/schemas/items" + description: >- + The response is a document consisting of features in the collection. The features + included in the response are determined by the server based on the query parameters + of the request. To support access to larger collections without overloading + the client, the API supports paged access with links to the next page, if + more features are selected that the page size. + + + The `bbox` and `datetime` parameter can be used to select only a subset of + the features in the collection (the features that are in the bounding box + or time interval). The `bbox` parameter matches all features in the collection + that are not associated with a location, too. The `datetime` parameter matches + all features in the collection that are not associated with a time stamp or + interval, too. + + + The `limit` parameter may be used to control the subset of the selected features + that should be returned in the response, the page size. Each page include + links to support paging (link relation `next` and/or `previous`). + NotModified: + description: The cached resource was not modified since last request. + InvalidParameter: + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 400 + description: "Invalid parameter" + description: A query parameter has an invalid value. 
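The `bbox`, `datetime` and `limit` parameters and the `next`/`previous` paging links described above for the items responses can be consumed as in the following sketch (collection id taken from the spec examples, filter values purely illustrative):

```python
import requests

BASE = "https://data.geo.admin.ch/api/stac/v0.9"
url = f"{BASE}/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items"
params = {
    "bbox": "5.96,45.82,10.49,47.81",       # illustrative bounding box
    "datetime": "2019-01-01T00:00:00Z/..",  # illustrative open-ended interval
    "limit": 50,                            # page size
}

while url:
    page = requests.get(url, params=params)
    page.raise_for_status()
    body = page.json()
    for feature in body.get("features", []):
        print(feature["id"])
    # Follow the `next` link to the following page, if any.
    url = next((l["href"] for l in body.get("links", []) if l.get("rel") == "next"), None)
    params = None  # a `next` link already carries its own query parameters
```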
+ LandingPage: + content: + application/json: + example: + description: Catalog of Swiss Geodata Downloads + id: ch + links: + - href: http://data.geo.admin.ch/api/stac/v0.9/ + rel: self + type: application/json + title: this document + - href: http://data.geo.admin.ch/api/stac/v0.9/static/api.html + rel: service-doc + type: text/html + title: the API documentation + - href: http://data.geo.admin.ch/api/stac/v0.9/conformance + rel: conformance + type: application/json + title: OGC API conformance classes implemented by this server + - href: http://data.geo.admin.ch/api/stac/v0.9/collections + rel: data + type: application/json + title: Information about the feature collections + - href: http://data.geo.admin.ch/api/stac/v0.9/search + rel: search + type: application/json + title: Search across feature collections + stac_version: 0.9.0 + title: data.geo.admin.ch + schema: + $ref: "#/components/schemas/landingPage" + description: >- + The landing page provides links to the API definition (link relations `service-desc` + and `service-doc`), the Conformance declaration (path `/conformance`, link + relation `conformance`), and the Feature Collections (path `/collections`, + link relation `data`). + NotFound: + description: The specified resource/URI was not found + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 404 + description: "Resource not found" + BadRequest: + description: The request was malformed or semantically invalid + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 400 + description: "Invalid parameter" + PermissionDenied: + description: No Permission for this request + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 403 + description: "Permission denied" + PreconditionFailed: + description: Some condition specified by the request could not be met in the + server + ServerError: description: >- - Complete the multipart upload process. After completion, the Asset metadata - are updated with the new `checksum:multihash` from the upload and the parts - are automatically deleted. The Asset `href` field is also set if it was the - first upload. - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/assetCompleteUpload" - responses: - "200": - description: Asset multipart upload completed successfully. - content: - application/json: - schema: - $ref: "#/components/schemas/assetUploadCompleted" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - 5XX: - $ref: "#/components/responses/ServerError" - /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/abort: - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/uploadId" - post: - tags: - - Asset Upload Management - summary: Abort multipart upload - operationId: abortMultipartUpload + The request was syntactically and semantically valid, but an error occurred + while trying to act upon it + content: + application/json: + schema: + $ref: "#/components/schemas/exception" + example: + code: 500 + description: "Internal server error" + Assets: description: >- - Abort the multipart upload process. All already uploaded parts are automatically - deleted. - responses: - "200": - description: Asset multipart upload aborted successfully. 
- content: - application/json: - schema: - $ref: "#/components/schemas/assetUploadAborted" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - 5XX: - $ref: "#/components/responses/ServerError" - /collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/parts: - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/uploadId" - get: - tags: - - Asset Upload Management - summary: Get upload parts - operationId: getUploadParts + The response is a document consisting of all assets of the feature. + content: + application/json: + schema: + $ref: "#/components/schemas/assets" + Asset: description: >- - Return the list of already uploaded parts. - - - ### Pagination - - By default all parts are returned (maximum number of parts being 100). The - user can use pagination to reduce the number of returned parts. Pagination - is done via the `limit` query parameter (see below). - parameters: - - $ref: "#/components/parameters/limit" - responses: - "200": - description: List of parts already uploaded. - content: - application/json: - schema: - $ref: "#/components/schemas/assetUploadParts" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - 5XX: - $ref: "#/components/responses/ServerError" - /get-token: - servers: - - url: http://data.geo.admin.ch/api/stac/ - post: - tags: - - Authentication - summary: >- - Request token for token authentication. - operationId: getToken - requestBody: - required: true - content: - application/json: - schema: - type: object - properties: - username: - type: string - description: name of user for whom token is requested - password: - type: string - description: password of user for whom token is requested - required: - - username - - password - example: - username: "Mickey Mouse" - password: "I_love_Minnie_Mouse" - responses: - "200": - description: Returns the token for the specified user - content: - application/json: - schema: - type: object - properties: - token: - type: string - description: generated token for specified user - example: - token: ccecf40693bfc52ba090cd46eb7f19e723fe831f - "400": - description: Wrong credentials were provided. - content: - application/json: - schema: - type: object - properties: - code: - type: string - description: - type: string - example: - code: 400 - description: "Unable to log in with provided credentials." + The response is a document consisting of one asset of the feature. + headers: + ETag: + $ref: "#/components/headers/ETag" + content: + application/json: + schema: + $ref: "#/components/schemas/readUpdateAsset" + DeletedResource: + description: Status of the delete resource + content: + application/json: + schema: + description: >- + Information about the deleted resource and a link to the parent resource + type: object + properties: + code: + type: integer + example: 200 + description: + type: string + example: Resource successfully deleted + links: + type: array + items: + $ref: "#/components/schemas/link" + description: >- + The array contain at least a link to the parent resource (`rel: + parent`). 
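A client consuming the `DeletedResource` response described here might look as follows; this is only a sketch, with identifiers reused from the spec examples and `If-Match: "*"` used to require that the asset exists before deleting it:

```python
import os
import requests

BASE = "https://data.geo.admin.ch/api/stac/v0.9"
asset_url = (f"{BASE}/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale"
             "/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff")
auth = (os.environ["STAC_USER"], os.environ["STAC_PASSWORD"])

resp = requests.delete(asset_url, auth=auth, headers={"If-Match": "*"})
resp.raise_for_status()
body = resp.json()
print(body["code"], body["description"])

# The response links contain at least the parent resource (`rel: parent`).
parent = next(l["href"] for l in body["links"] if l["rel"] == "parent")
print("deleted, parent resource:", parent)
```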
+ example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: parent + required: + - code + - links + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- tag + for the selected resource. An entity-tag is an opaque identifier for different + versions of a resource over time, regardless whether multiple versions are + valid at the same time. An entity-tag consists of an opaque quoted string, + possibly prefixed by a weakness indicator. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true + examples: + inprogress: + summary: In progress upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: in-progress + number_parts: 1 + urls: + - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: 1 + expires: '2019-08-24T14:15:22Z' + created: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + completed: + summary: Completed upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + completed: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + aborted: + summary: Aborted upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + aborted: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 diff --git a/spec/transaction/components/examples.yaml b/spec/transaction/components/examples.yaml new file mode 100644 index 00000000..d1f12f5d --- /dev/null +++ b/spec/transaction/components/examples.yaml @@ -0,0 +1,33 @@ +openapi: 3.0.1 +components: + examples: + inprogress: + summary: In progress upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: in-progress + number_parts: 1 + urls: + - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: 1 + expires: '2019-08-24T14:15:22Z' + created: '2019-08-24T14:15:22Z' + checksum:multihash: 
90e402107a7f2588a85362b9beea2a12d4514d45 + completed: + summary: Completed upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + completed: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + aborted: + summary: Aborted upload example + value: + upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + aborted: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 \ No newline at end of file diff --git a/spec/transaction/components/parameters.yaml b/spec/transaction/components/parameters.yaml new file mode 100644 index 00000000..e830441f --- /dev/null +++ b/spec/transaction/components/parameters.yaml @@ -0,0 +1,39 @@ +openapi: 3.0.1 +components: + parameters: + assetId: + name: assetId + in: path + description: Local identifier of an asset. + required: true + schema: + type: string + uploadId: + name: uploadId + in: path + description: Local identifier of an asset's upload. + required: true + schema: + type: string + presignedUrl: + name: presignedUrl + in: path + description: Presigned url returned by [Create a new Asset's multipart upload](#operation/createAssetUpload). + required: true + schema: + type: string + IfMatchWrite: + name: If-Match + in: header + schema: + type: string + description: >- + The RFC7232 `If-Match` header field makes the PUT/PATCH/DEL request method conditional. It is + composed of a comma separated list of ETags or value "*". + + + The server compares the client's ETags (sent with `If-Match`) with the ETag for its + current version of the resource, and if both values don't match (that is, the resource has changed), + the server sends back a `412 Precondition Failed` status, without a body, which tells the client that + he would overwrite another changes of the resource. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" \ No newline at end of file diff --git a/spec/transaction/components/responses.yaml b/spec/transaction/components/responses.yaml new file mode 100644 index 00000000..7e6d2c12 --- /dev/null +++ b/spec/transaction/components/responses.yaml @@ -0,0 +1,47 @@ +# openapi: 3.0.1 +components: + responses: + Assets: + description: >- + The response is a document consisting of all assets of the feature. + content: + application/json: + schema: + $ref: "./schemas.yaml#/components/schemas/assets" + Asset: + description: >- + The response is a document consisting of one asset of the feature. + headers: + ETag: + $ref: "../../components/headers.yaml#/components/headers/ETag" + content: + application/json: + schema: + $ref: "./schemas.yaml#/components/schemas/readUpdateAsset" + DeletedResource: + description: Status of the delete resource + content: + application/json: + schema: + description: >- + Information about the deleted resource and a link to the parent resource + type: object + properties: + code: + type: integer + example: 200 + description: + type: string + example: Resource successfully deleted + links: + type: array + items: + $ref: "../../components/schemas.yaml#/components/schemas/link" + description: >- + The array contain at least a link to the parent resource (`rel: parent`). 
+ example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items + rel: parent + required: + - code + - links diff --git a/spec/transaction/components/schemas.yaml b/spec/transaction/components/schemas.yaml new file mode 100644 index 00000000..cd4088c8 --- /dev/null +++ b/spec/transaction/components/schemas.yaml @@ -0,0 +1,480 @@ +openapi: 3.0.1 +components: + schemas: + assetId: + type: string + pattern: ^[a-z0-9.-_]+$ + title: ID + description: >- + The asset id uniquely identifies the asset for an item + + + **Note**: `id` must be unique for the item and must be identical to the + filename. + example: smr50-263-2016-2056-kgrs-2.5.tiff + assets: + title: Assets + type: object + properties: + assets: + items: + $ref: "#/components/schemas/readUpdateAsset" + type: array + links: + items: + $ref: "../../components/schemas.yaml#/components/schemas/link" + type: array + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: item + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection + collectionWrite: + title: collection + allOf: + - $ref: "../../components/schemas.yaml#/components/schemas/collectionBase" + - type: object + properties: + links: + type: array + items: + $ref: "../../components/schemas.yaml#/components/schemas/link" + example: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + createAsset: + allOf: + - $ref: "../../components/schemas.yaml#/components/schemas/assetBase" + - type: object + required: + - id + - links + properties: + id: + $ref: "./schemas.yaml#/components/schemas/assetId" + links: + items: + $ref: "../../components/schemas.yaml#/components/schemas/link" + type: array + readOnly: true + example: + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: item + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection + readUpdateAsset: + allOf: + - $ref: "../../components/schemas.yaml#/components/schemas/assetBase" + - type: object + required: + - id + - links + properties: + id: + $ref: "./schemas.yaml#/components/schemas/assetId" + links: + items: + $ref: "../../components/schemas.yaml#/components/schemas/link" + type: array + readOnly: true + example: + - href: 
https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff + rel: self + - href: https://data.geo.admin.ch/api/stac/v0.9/ + rel: root + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets + rel: parent + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 + rel: item + - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale + rel: collection + writeItem: + allOf: + - $ref: "../../components/schemas.yaml#/components/schemas/itemBase" + - type: object + properties: + links: + items: + $ref: "../../components/schemas.yaml#/components/schemas/link" + type: array + description: >- + Add additional link to the generated ones (`self`, `root`, `parent`, `items`, + `collection`, `next`, `previous`) + createItem: + allOf: + - type: object + required: + - id + properties: + id: + $ref: "../../components/schemas.yaml#/components/schemas/itemId" + - $ref: "./schemas.yaml#/components/schemas/writeItem" + updateItem: + allOf: + - type: object + required: + - id + properties: + id: + $ref: "#/components/schemas/itemIdUpdate" + - $ref: "#/components/schemas/writeItem" + partialItem: + type: object + properties: + id: + $ref: "#/components/schemas/itemIdUpdate" + geometry: + $ref: "https://geojson.org/schema/Geometry.json" + properties: + $ref: "../../components/schemas.yaml#/components/schemas/itemProperties" + links: + type: array + items: + $ref: "../../components/schemas.yaml#/components/schemas/link" + description: >- + Add/update additional link to the generated ones (`self`, `root`, `parent`, `items`, + `collection`, `next`, `previous`) + example: + properties: + datetime: "2016-05-03T13:22:30.040Z" + partialCollection: + type: object + description: Allows for a set of partial metadata fields for a collection + properties: + id: + description: Identifier of the collection used, for example, in URIs + type: string + example: ch.swisstopo.pixelkarte-farbe-pk200.noscale + title: + description: Human readable title of the collection + type: string + example: National Map 1:200'000 + description: + description: A description of the features in the collection + type: string + example: The National Map 1:200,000 is a topographic map giving an overview of Switzerland. + links: + type: array + items: + $ref: "../../components/schemas.yaml#/components/schemas/link" + example: + - href: http://data.example.com/buildings + rel: item + - href: http://example.com/concepts/buildings.html + rel: describedBy + type: text/html + extent: + $ref: "../../components/schemas.yaml#/components/schemas/extent" + itemType: + description: >- + Indicator about the type of the items in the collection (the default value is 'feature'). + type: string + default: feature + crs: + description: The list of coordinate reference systems supported by the service + type: array + items: + type: string + default: + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + example: + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/EPSG/0/4326 + example: + title: The new title of the collection + itemIdUpdate: + description: >- + Item identifier (unique per collection. If it doesn't match the `featureId` in path + parameters, then the Item is renamed. 
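As noted for `itemIdUpdate`, an `id` in the payload that differs from the `featureId` path parameter renames the item. A sketch of such a rename through a partial update (identifiers are illustrative):

```python
import os
import requests

BASE = "https://data.geo.admin.ch/api/stac/v0.9"
collection = "ch.swisstopo.pixelkarte-farbe-pk50.noscale"
old_id, new_id = "smr200-200-4-2019", "smr200-200-4-2019-v2"  # illustrative ids
auth = (os.environ["STAC_USER"], os.environ["STAC_PASSWORD"])

# PATCH with a partial item whose `id` differs from the path parameter:
# the server renames the feature accordingly.
resp = requests.patch(
    f"{BASE}/collections/{collection}/items/{old_id}",
    auth=auth,
    json={"id": new_id},
)
resp.raise_for_status()
print("item now available at",
      f"{BASE}/collections/{collection}/items/{new_id}")
```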
+ example: smr200-200-4-2019 + type: string + uploadId: + title: ID + type: string + description: Unique Asset upload identifier + example: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + readOnly: true + dtUploadCreated: + title: created + description: Date and time when the Asset's upload has been created/started. + type: string + format: date-time + readOnly: true + dtUploadCompleted: + title: completed + description: | + Date and time when the Asset's upload has been completed. + + *Note: this property is mutually exclusive with `aborted`* + type: string + format: date-time + readOnly: true + dtUploadAborted: + title: aborted + description: | + Date and time when the Asset's upload has been aborted. + + *Note: this property is mutually exclusive with `completed`* + type: string + format: date-time + readOnly: true + assetUploads: + title: AssetUploads + type: object + required: + - uploads + - links + properties: + uploads: + description: List of uploads that are within the asset. + type: array + items: + $ref: "#/components/schemas/assetUpload" + links: + description: Next and/or previous links for the pagination. + type: array + items: + $ref: "../../components/schemas.yaml#/components/schemas/link" + example: + - rel: next + href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 + assetUpload: + title: AssetUpload + type: object + required: + - upload_id + - status + - created + - "checksum:multihash" + - number_parts + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + $ref: "#/components/schemas/status" + number_parts: + $ref: "#/components/schemas/number_parts" + urls: + type: array + description: | + Note: As soon as the multipart upload is completed or aborted, the `urls` property is removed. + items: + $ref: "#/components/schemas/multipartUploadUrl" + readOnly: true + created: + $ref: "#/components/schemas/dtUploadCreated" + completed: + $ref: "#/components/schemas/dtUploadCompleted" + aborted: + $ref: "#/components/schemas/dtUploadAborted" + "checksum:multihash": + $ref: "../../components/schemas.yaml#/components/schemas/checksumMultihash" + assetUploadCreate: + title: AssetUpload + type: object + required: + - upload_id + - status + - created + - "checksum:multihash" + - number_parts + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + $ref: "#/components/schemas/status" + number_parts: + $ref: "#/components/schemas/number_parts" + urls: + type: array + description: | + Note: As soon as the multipart upload is completed or aborted, the `urls` property is removed. + items: + $ref: "#/components/schemas/multipartUploadUrl" + readOnly: true + created: + $ref: "#/components/schemas/dtUploadCreated" + "checksum:multihash": + $ref: "../../components/schemas.yaml#/components/schemas/checksumMultihash" + assetCompleteUpload: + title: CompleteUpload + type: object + required: + - parts + properties: + parts: + type: array + description: Parts that have been uploaded + items: + title: File parts that have been uploaded + type: object + required: + - etag + - part_number + properties: + etag: + title: ETag + type: string + description: >- + ETag of the uploaded file part (returned in the header of the answer of + [Upload asset file part](#operation/uploadAssetFilePart)). 
+ example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + part_number: + $ref: "#/components/schemas/part_number" + assetUploadCompleted: + title: UploadCompleted + type: object + required: + - upload_id + - status + - number_parts + - created + - completed + - "checksum:multihash" + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + title: Status + description: Status of the Asset's multipart upload. + type: string + enum: + - completed + example: + completed + number_parts: + $ref: "#/components/schemas/number_parts" + created: + $ref: "#/components/schemas/dtUploadCreated" + completed: + $ref: "#/components/schemas/dtUploadCompleted" + "checksum:multihash": + $ref: "../../components/schemas.yaml#/components/schemas/checksumMultihash" + assetUploadAborted: + title: UploadCompleted + type: object + required: + - upload_id + - status + - number_parts + - created + - aborted + - "checksum:multihash" + properties: + upload_id: + $ref: "#/components/schemas/uploadId" + status: + title: Status + description: Status of the Asset's multipart upload. + type: string + enum: + - aborted + example: + aborted + number_parts: + $ref: "#/components/schemas/number_parts" + created: + $ref: "#/components/schemas/dtUploadCreated" + aborted: + $ref: "#/components/schemas/dtUploadAborted" + "checksum:multihash": + $ref: "../../components/schemas.yaml#/components/schemas/checksumMultihash" + assetUploadParts: + title: Parts + type: object + required: + - parts + - links + properties: + parts: + type: object + description: List of uploaded parts + required: + - etag + - part_number + - modified + - size + properties: + etag: + $ref: "#/components/schemas/uploadEtag" + part_number: + $ref: "#/components/schemas/part_number" + modified: + type: string + format: date-time + description: Date time when the part was added/modified + size: + type: integer + description: Part size in bytes + minimum: 0 + example: 1024 + links: + description: Next and/or previous links for the pagination. + type: array + items: + $ref: "../../components/schemas.yaml#/components/schemas/link" + example: + - rel: next + href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads/upload-id/parts?limit=50&offset=50 + status: + title: Status + description: Status of the Asset's multipart upload. + type: string + enum: + - in-progress + - aborted + - completed + readOnly: true + number_parts: + description: Number of parts for the Asset's multipart upload. + type: integer + minimum: 1 + maximum: 100 + part_number: + description: Number of the part. + type: integer + minimum: 1 + maximum: 100 + multipartUploadUrl: + title: MultipartUploadUrl + description: Multipart upload url. + type: object + required: + - url + - part + - expires + properties: + url: + description: Presigned URL to use to upload the Asset File part using the PUT method. 
+ type: string + format: url + example: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: + description: Part number assigned to this presigned URL. + type: integer + minimum: 1 + maximum: 100 + expires: + description: Date time when this presigned URL expires and is not valid anymore. + type: string + format: date-time + uploadEtag: + title: ETag + type: string + description: The RFC7232 ETag for the specified uploaded part. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" \ No newline at end of file diff --git a/spec/transaction/paths.yaml b/spec/transaction/paths.yaml new file mode 100644 index 00000000..7262bdea --- /dev/null +++ b/spec/transaction/paths.yaml @@ -0,0 +1,874 @@ +openapi: 3.0.1 +paths: + "/collections": + post: + tags: + - Data Management + summary: >- + Create a new collection + operationId: createCollection + requestBody: + content: + application/json: + schema: + $ref: "../components/schemas.yaml#/components/schemas/collection" + example: + description: The National Map 1:200,000 is a topographic map giving an overview of Switzerland. + id: ch.swisstopo.pixelkarte-farbe-pk200.noscale + license: proprietary + links: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + providers: + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch + title: National Map 1:200'000 + responses: + "201": + description: Returns the created Collection + headers: + Location: + description: A link to the collection + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/collectionWrite" + "403": + $ref: "../components/responses.yaml#/components/responses/PermissionDenied" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "500": + $ref: "../components/responses.yaml#/components/responses/ServerError" + + "/collections/{collectionId}": + put: + tags: + - Data Management + summary: Update or create a collection + description: >- + Update or create a collection with Id `collectionId` with a complete collection definition. + If the collection doesn't exists it is then created. + operationId: updateCollection + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "./components/parameters.yaml#/components/parameters/IfMatchWrite" + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/collectionWrite" + example: + description: The National Map 1:200,000 is a topographic map giving an overview of Switzerland. 
+ id: ch.swisstopo.pixelkarte-farbe-pk200.noscale + license: proprietary + links: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + providers: + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch + title: National Map 1:200'000 + responses: + "200": + description: Returns the updated Collection + content: + application/json: + schema: + $ref: "../components/schemas.yaml#/components/schemas/collection" + "201": + description: Returns the created Collection + headers: + Location: + description: A link to the collection + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "../components/schemas.yaml#/components/schemas/collection" + "403": + $ref: "../components/responses.yaml#/components/responses/PermissionDenied" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "../components/responses.yaml#/components/responses/PreconditionFailed" + "500": + $ref: "../components/responses.yaml#/components/responses/ServerError" + patch: + tags: + - Data Management + summary: Partial update of a collection + description: >- + Update an existing collection with Id `collectionId` with a partial collection definition + operationId: partialUpdateCollection + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/IfMatch" + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/collectionWrite" + example: + id: ch.swisstopo.pixelkarte-farbe-pk200.noscale + license: proprietary + links: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + providers: + - name: Federal Office of Topography - swisstopo + roles: + - producer + - licensor + url: https://www.swisstopo.admin.ch + title: National Map 1:200'000 + responses: + "200": + description: Returns the updated Collection + content: + application/json: + schema: + $ref: "../components/schemas.yaml#/components/schemas/collection" + "403": + $ref: "../components/responses.yaml#/components/responses/PermissionDenied" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "../components/responses.yaml#/components/responses/PreconditionFailed" + "500": + $ref: "../components/responses.yaml#/components/responses/ServerError" + "/collections/{collectionId}/items": + post: + summary: Add a new feature to a collection + description: Create a new feature/item in a specific collection + operationId: postFeature + tags: + - Data Management + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/createItem" + example: + id: cs3-20160503_132131_05 + geometry: + type: Polygon + coordinates: + - - - -122.308150179 + - 37.488035566 + - - -122.597502109 + - 
37.538869539 + - - -122.576687533 + - 37.613537207 + - - -122.2880486 + - 37.562818007 + - - -122.308150179 + - 37.488035566 + properties: + datetime: "2016-05-03T13:22:30.040Z" + title: A CS3 item + links: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + responses: + "201": + description: Return the created Item. + headers: + Location: + description: A link to the item + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "../components/schemas.yaml#/components/schemas/item" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "403": + $ref: "../components/responses.yaml#/components/responses/PermissionDenied" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + "/collections/{collectionId}/items/{featureId}": + put: + summary: Update or create a feature + description: >- + Update or create a feature with Id `featureId` with a complete feature + definition. If the feature doesn't exists it is then created. + + + *NOTE: Optional fields that are not part of the PUT payload, will be erased + in the resource. For example if the resource has a properties.title and the + PUT payload doesn't, then the resource's properties.title will be removed.* + operationId: putFeature + tags: + - Data Management + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/IfMatchWrite" + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/updateItem" + example: + id: cs3-20160503_132131_05 + geometry: + type: Polygon + coordinates: + - - - -122.308150179 + - 37.488035566 + - - -122.597502109 + - 37.538869539 + - - -122.576687533 + - 37.613537207 + - - -122.2880486 + - 37.562818007 + - - -122.308150179 + - 37.488035566 + properties: + datetime: "2016-05-03T13:22:30.040Z" + title: A CS3 item + links: + - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html + rel: license + title: Licence for the free geodata of the Federal Office of Topography swisstopo + - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df + rel: describedby + responses: + "200": + description: Returns the updated Item + content: + application/json: + schema: + $ref: "../components/schemas.yaml#/components/schemas/item" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "403": + $ref: "../components/responses.yaml#/components/responses/PermissionDenied" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "../components/responses.yaml#/components/responses/PreconditionFailed" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + patch: + summary: Update an existing feature by Id with a partial item definition + description: >- + Use this method to update an existing feature. Requires a JSON + fragment (containing the fields to be updated) be submitted. 
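Since PUT replaces the complete feature (optional fields missing from the payload are erased, as noted above) while PATCH only takes a fragment, a guarded read-modify-write with PUT could look like the following sketch (identifiers from the spec examples, field handling simplified):

```python
import os
import requests

BASE = "https://data.geo.admin.ch/api/stac/v0.9"
item_url = (f"{BASE}/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale"
            "/items/smr200-200-4-2019")
auth = (os.environ["STAC_USER"], os.environ["STAC_PASSWORD"])

# Read the current feature and remember its ETag.
current = requests.get(item_url)
current.raise_for_status()
feature = current.json()
etag = current.headers["ETag"]

# Modify it locally; every optional field that should survive must stay in the payload.
feature["properties"]["title"] = "A new title"
payload = {k: feature[k] for k in ("id", "geometry", "properties") if k in feature}

# Write the complete definition back, guarded against concurrent changes.
resp = requests.put(item_url, auth=auth, headers={"If-Match": etag}, json=payload)
resp.raise_for_status()
```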
+ operationId: patchFeature + tags: + - Data Management + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/IfMatchWrite" + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/partialItem" + responses: + "200": + description: Returns the updated Item + content: + application/json: + schema: + $ref: "../components/schemas.yaml#/components/schemas/item" + "201": + description: Returns the created Item + headers: + Location: + description: A link to the item + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "../components/schemas.yaml#/components/schemas/item" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "403": + $ref: "../components/responses.yaml#/components/responses/PermissionDenied" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "../components/responses.yaml#/components/responses/PreconditionFailed" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + delete: + summary: Delete an existing feature by Id + description: Use this method to delete an existing feature/item. + operationId: deleteFeature + tags: + - Data Management + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/IfMatchWrite" + responses: + "200": + $ref: "./components/responses.yaml#/components/responses/DeletedResource" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "../components/responses.yaml#/components/responses/PreconditionFailed" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + "/collections/{collectionId}/items/{featureId}/assets": + get: + description: >- + Fetch assets of the item with id `featureId`. + + Every asset belongs to an item. + operationId: getAssets + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + responses: + "200": + $ref: "./components/responses.yaml#/components/responses/Assets" + "400": + $ref: "../components/responses.yaml#/components/responses/InvalidParameter" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "500": + $ref: "../components/responses.yaml#/components/responses/ServerError" + summary: Fetch all assets for a feature + tags: + - Data + post: + summary: Add a new asset to a feature + description: | + Create a new asset for a specific feature. 
+ + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* + operationId: postAsset + tags: + - Data Management + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/createAsset" + responses: + "201": + description: Return the created Asset + headers: + Location: + description: A link to the asset + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/createAsset" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + + "/collections/{collectionId}/items/{featureId}/assets/{assetId}": + get: + description: >- + Fetch the asset with id `assetId` of the feature with id `featureId` + in the feature collection with id `collectionId`. + operationId: getAsset + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/assetId" + - $ref: "../components/parameters.yaml#/components/parameters/IfMatch" + - $ref: "../components/parameters.yaml#/components/parameters/IfNoneMatch" + responses: + "200": + $ref: "./components/responses.yaml#/components/responses/Asset" + "304": + $ref: "../components/responses.yaml#/components/responses/NotModified" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "../components/responses.yaml#/components/responses/PreconditionFailed" + "500": + $ref: "../components/responses.yaml#/components/responses/ServerError" + summary: Fetch a single asset + tags: + - Data + put: + summary: Update or create an asset + description: >- + Update or create an asset with Id `assetId` with a complete asset definition. + If the asset doesn't exists it is then created. + + + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* + operationId: putAsset + tags: + - Data Management + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/assetId" + - $ref: "./components/parameters.yaml#/components/parameters/IfMatchWrite" + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/readUpdateAsset" + responses: + "200": + description: Asset has been successfully updated. + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/readUpdateAsset" + "201": + description: Asset has been newly created. 
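A rough sketch of the upsert behaviour of this PUT endpoint (200 on update, 201 plus a `Location` header on creation); the asset id and metadata values are illustrative and partly reused from the curl examples in the Authentication section:

```python
import os
import requests

BASE = "https://data.geo.admin.ch/api/stac/v0.9"
asset_url = (f"{BASE}/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale"
             "/items/smr200-200-4-2019/assets/new-orthophoto.tiff")  # illustrative asset id
auth = (os.environ["STAC_USER"], os.environ["STAC_PASSWORD"])

payload = {
    "id": "new-orthophoto.tiff",
    "title": "Illustrative asset",
    "type": "image/tiff; application=geotiff",
    "proj:epsg": 2056,
    "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04",  # value reused from the curl examples
}

resp = requests.put(asset_url, auth=auth, json=payload)
if resp.status_code == 201:
    print("asset created at", resp.headers.get("Location"))
elif resp.status_code == 200:
    print("asset updated")
```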
+ headers: + Location: + description: A link to the asset + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/createAsset" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "../components/responses.yaml#/components/responses/PreconditionFailed" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + patch: + summary: Update an existing asset by Id with a partial asset definition + description: >- + Use this method to update an existing asset. Requires a JSON + fragment (containing the fields to be updated) be submitted. + + + *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* + operationId: patchAsset + tags: + - Data Management + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/assetId" + - $ref: "./components/parameters.yaml#/components/parameters/IfMatchWrite" + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/readUpdateAsset" + responses: + "200": + description: Returns the updated Asset. + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/readUpdateAsset" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "../components/responses.yaml#/components/responses/PreconditionFailed" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + delete: + summary: Delete an existing asset by Id + description: >- + Use this method to delete an existing asset. + + + **NOTE: Asset file on S3 will be also removed !** + operationId: deleteAsset + tags: + - Data Management + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/assetId" + - $ref: "./components/parameters.yaml#/components/parameters/IfMatchWrite" + responses: + "200": + $ref: "./components/responses.yaml#/components/responses/DeletedResource" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "../components/responses.yaml#/components/responses/PreconditionFailed" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + + + "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads": + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/assetId" + get: + tags: + - Asset Upload Management + summary: List all Asset's multipart uploads + description: >- + Return a list of all Asset's multipart uploads that are in progress and have been completed + or aborted. + operationId: getAssetUploads + parameters: + - name: status + in: query + description: Filter the list by status. 
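The `status` query parameter defined here lets a client look only at uploads in a given state, for example the ones still in progress. A short sketch with identifiers from the spec examples:

```python
import os
import requests

BASE = "https://data.geo.admin.ch/api/stac/v0.9"
uploads_url = (f"{BASE}/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale"
               "/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads")
auth = (os.environ["STAC_USER"], os.environ["STAC_PASSWORD"])

resp = requests.get(uploads_url, auth=auth, params={"status": "in-progress"})
resp.raise_for_status()
for upload in resp.json()["uploads"]:
    print(upload["upload_id"], upload["status"], upload["number_parts"])
```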
+ schema: + $ref: "./components/schemas.yaml#/components/schemas/status" + responses: + 200: + description: List of Asset's uploads + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/assetUploads" + example: + uploads: + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnusebaJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: in-progress + number_parts: 1 + urls: + - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 + part: 1 + expires: '2019-08-24T14:15:22Z' + created: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YaaegJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: completed + number_parts: 1 + created: '2019-08-24T14:15:22Z' + completed: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG + status: aborted + number_parts: 1 + created: '2019-08-24T14:15:22Z' + aborted: '2019-08-24T14:15:22Z' + checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + links: + - rel: next + href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + post: + tags: + - Asset Upload Management + summary: Create a new Asset's multipart upload + description: | + Create a new Asset's multipart upload. 
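For asset files larger than 5 MB the upload must be split into parts (each at least 5 MB except the last one). The following sketch creates such a multipart upload, reusing the `multihash` convention from the example in the Asset Upload Management tag description; the file name and identifiers are illustrative:

```python
import hashlib
import os

import multihash
import requests

BASE = "https://data.geo.admin.ch/api/stac/v0.9"
asset_path = ("collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale"
              "/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff")
auth = (os.environ["STAC_USER"], os.environ["STAC_PASSWORD"])

PART_SIZE = 5 * 1024 * 1024  # 5 MB minimum part size (except for the last part)

with open("smr50-263-2016-2056-kgrs-2.5.tiff", "rb") as fd:
    data = fd.read()

parts = [data[i:i + PART_SIZE] for i in range(0, len(data), PART_SIZE)]
checksum = multihash.to_hex_string(
    multihash.encode(hashlib.sha256(data).digest(), 'sha2-256'))

resp = requests.post(
    f"{BASE}/{asset_path}/uploads",
    auth=auth,
    json={"number_parts": len(parts), "checksum:multihash": checksum},
)
resp.raise_for_status()
upload = resp.json()
print(upload["upload_id"], len(upload["urls"]), "presigned urls")
```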
+ operationId: createAssetUpload + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/assetUploadCreate" + responses: + 201: + description: Created Asset's multipart upload + headers: + Location: + description: A link to the Asset's multipart upload object + schema: + type: string + format: url + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/assetUploadCreate" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}": + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/assetId" + - $ref: "./components/parameters.yaml#/components/parameters/uploadId" + get: + tags: + - Asset Upload Management + summary: Get an Asset's multipart upload + description: | + Return an Asset's multipart upload. + operationId: getAssetUpload + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/IfMatch" + - $ref: "../components/parameters.yaml#/components/parameters/IfNoneMatch" + responses: + "200": + description: Asset's multipart upload description. + headers: + ETag: + $ref: "../components/headers.yaml#/components/headers/ETag" + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/assetUpload" + examples: + inprogress: + $ref: "./components/examples.yaml#/components/examples/inprogress" + completed: + $ref: "./components/examples.yaml#/components/examples/completed" + aborted: + $ref: "./components/examples.yaml#/components/examples/aborted" + "304": + $ref: "../components/responses.yaml#/components/responses/NotModified" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "412": + $ref: "../components/responses.yaml#/components/responses/PreconditionFailed" + "500": + $ref: "../components/responses.yaml#/components/responses/ServerError" + "/{presignedUrl}": + servers: + - url: http://data.geo.admin.ch/ + put: + tags: + - Asset Upload Management + summary: Upload asset file part + description: >- + Upload an Asset file part using the presigned url(s) returned by + [Create a new Asset's multipart upload](#operation/createAssetUpload). + + + Parts that have been uploaded but not completed can be checked using + [Get an Asset's multipart upload](#operation/getAssetUpload) + + + A file part must be at least 5 MB except for the last one and at most 5 GB, otherwise the + complete operation will fail. + + + *Note: this endpoint doesn't require any authentication as it is already part of the + presigned url* + operationId: uploadAssetFilePart + parameters: + - $ref: "./components/parameters.yaml#/components/parameters/presignedUrl" + - name: Content-MD5 + in: header + description: Asset file part content MD5. + required: true + schema: + type: string + responses: + "200": + description: Asset file uploaded part successfully + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- + tag for the selected resource. + + + This ETag is required in the complete multipart upload payload. 
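Each part is then sent with a plain PUT to its presigned URL together with the mandatory `Content-MD5` header (assumed here to be the base64-encoded MD5 digest of the part), and the returned `ETag` must be kept for the complete request. A sketch that continues the creation sketch above, with `upload` and `parts` assumed from that step:

```python
import base64
import hashlib

import requests

# `upload` and `parts` are assumed from the multipart-upload creation step:
# `upload` is the JSON body returned by POST .../uploads, `parts` the list of byte chunks.
uploaded_parts = []
for url_entry, chunk in zip(upload["urls"], parts):
    content_md5 = base64.b64encode(hashlib.md5(chunk).digest()).decode()
    resp = requests.put(url_entry["url"], data=chunk,
                        headers={"Content-MD5": content_md5})
    resp.raise_for_status()
    # The ETag of every uploaded part is required by the complete request.
    uploaded_parts.append({"etag": resp.headers["ETag"],
                           "part_number": url_entry["part"]})
```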
+ + + An entity-tag is an opaque identifier for + different versions of a resource over time, regardless whether multiple + versions are valid at the same time. An entity-tag consists of an opaque + quoted string. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true + "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/complete": + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/assetId" + - $ref: "./components/parameters.yaml#/components/parameters/uploadId" + post: + tags: + - Asset Upload Management + summary: Complete multipart upload + operationId: completeMultipartUpload + description: >- + Complete the multipart upload process. After completion, the Asset metadata are updated + with the new `checksum:multihash` from the upload and the parts are automatically deleted. + The Asset `href` field is also set if it was the first upload. + requestBody: + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/assetCompleteUpload" + responses: + "200": + description: Asset multipart upload completed successfully. + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/assetUploadCompleted" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/abort": + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/assetId" + - $ref: "./components/parameters.yaml#/components/parameters/uploadId" + post: + tags: + - Asset Upload Management + summary: Abort multipart upload + operationId: abortMultipartUpload + description: >- + Abort the multipart upload process. All already uploaded parts are automatically deleted. + responses: + "200": + description: Asset multipart upload aborted successfully. + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/assetUploadAborted" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/parts": + parameters: + - $ref: "../components/parameters.yaml#/components/parameters/collectionId" + - $ref: "../components/parameters.yaml#/components/parameters/featureId" + - $ref: "./components/parameters.yaml#/components/parameters/assetId" + - $ref: "./components/parameters.yaml#/components/parameters/uploadId" + get: + tags: + - Asset Upload Management + summary: Get upload parts + operationId: getUploadParts + description: >- + Return the list of already uploaded parts. + + + ### Pagination + + By default all parts are returned (maximum number of parts being 100). The user can + use pagination to reduce the number of returned parts. Pagination is done via the `limit` + query parameter (see below). 
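Before completing the upload, the already uploaded parts can be checked; the `limit` parameter bounds the page size and further pages are reachable via the `next` link. A sketch, with the `upload_id` placeholder assumed from the creation step, identifiers from the spec examples, and `parts` treated as the list it is described as:

```python
import os
import requests

BASE = "https://data.geo.admin.ch/api/stac/v0.9"
asset_path = ("collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale"
              "/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff")
auth = (os.environ["STAC_USER"], os.environ["STAC_PASSWORD"])
upload_id = "..."  # upload_id returned when the multipart upload was created

url = f"{BASE}/{asset_path}/uploads/{upload_id}/parts"
params = {"limit": 50}
while url:
    resp = requests.get(url, auth=auth, params=params)
    resp.raise_for_status()
    body = resp.json()
    for part in body["parts"]:
        print(part["part_number"], part["size"], part["etag"])
    url = next((l["href"] for l in body.get("links", []) if l.get("rel") == "next"), None)
    params = None  # a `next` link already contains its own query parameters
```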
+ parameters: + - $ref: "../components/parameters.yaml#/components/parameters/limit" + responses: + "200": + description: List of parts already uploaded. + content: + application/json: + schema: + $ref: "./components/schemas.yaml#/components/schemas/assetUploadParts" + "400": + $ref: "../components/responses.yaml#/components/responses/BadRequest" + "404": + $ref: "../components/responses.yaml#/components/responses/NotFound" + "5XX": + $ref: "../components/responses.yaml#/components/responses/ServerError" + + + "/get-token": + servers: + - url: http://data.geo.admin.ch/api/stac/ + post: + tags: + - Authentication + summary: >- + Request token for token authentication. + operationId: getToken + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + username: + type: string + description: name of user for whom token is requested + password: + type: string + description: password of user for whom token is requested + required: + - username + - password + example: + username: "Mickey Mouse" + password: "I_love_Minnie_Mouse" + responses: + "200": + description: Returns the token for the specified user + content: + application/json: + schema: + type: object + properties: + token: + type: string + description: generated token for specified user + example: + token: ccecf40693bfc52ba090cd46eb7f19e723fe831f + "400": + description: Wrong credentials were provided. + content: + application/json: + schema: + type: object + properties: + code: + type: string + description: + type: string + example: + code: 400 + description: "Unable to log in with provided credentials." diff --git a/spec/transaction/tags.yaml b/spec/transaction/tags.yaml new file mode 100644 index 00000000..47c1dfe8 --- /dev/null +++ b/spec/transaction/tags.yaml @@ -0,0 +1,128 @@ +openapi: 3.0.1 +tags: + - name: Capabilities + - name: Data + - name: STAC + - name: Data Management + description: | + Metadata management requests. Theses requests are used to create, update or delete the STAC + metadata. + + *NOTE: these requests require authentication as described in [here](#tag/Authentication).* + - name: Asset Upload Management + description: | + Asset file can be uploaded via the STAC API using the following requests. + + *NOTE: the POST requests require authentication as described in [here](#tag/Authentication).* + + ### Example + + ```python + import os + import hashlib + + import requests + import multihash + + # variables + scheme = 'https' + hostname = 'data.geo.admin.ch' + collection = 'ch.swisstopo.pixelkarte-farbe-pk200.noscale' + item = 'smr200-200-4-2016' + asset = 'smr200-200-4-2016-2056-kgrs-10.tiff' + asset_path = f'collections/{collection}/items/{item}/assets/{asset}' + user = os.environ.get('STAC_USER', 'unknown-user') + password = os.environ.get('STAC_PASSWORD', 'unknown-password') + + with open('smr200-200-4-2016-2056-kgrs-10.tiff', 'rb') as fd: + data = fd.read() + + checksum_multihash = multihash.to_hex_string(multihash.encode(hashlib.sha256(data).digest(), 'sha2-256')) + + # 1. Create a multipart upload + response = requests.post( + f"{scheme}://{hostname}/api/stac/v0.9/{asset_path}/uploads", + auth=(user, password), + json={ + "number_parts": 1, + "checksum:multihash": checksum_multihash + } + ) + upload_id = response.json()['upload_id'] + + # 2. Upload the part using the presigned url + response = requests.put(response.json()['urls'][0]['url'], data=data) + etag = response.headers['ETag'] + + # 3. 
Complete the upload + response = requests.post( + f"{scheme}://{hostname}/api/stac/v0.9/{asset_path}/uploads/{upload_id}/complete", + auth=(user, password), + json={'parts': [{'etag': etag, 'part_number': 1}]} + ) + ``` + - name: Authentication + description: | + All write requests require authentication. There is currently three type of supported authentications: + + * [Session authentication](#section/Session-authentication) + * [Basic authentication](#section/Basic-authentication) + * [Token authentication](#section/Token-authentication) + + ## Session authentication + + When using the browsable API the user can simply use the admin interface for logging in. + Once logged in, the browsable API can be used to perform write requests. + + ## Basic authentication + + The username and password for authentication can be added to every write request the user wants to perform. + Here is an example of posting an asset using curl (_username_="MickeyMouse", _password_="I_love_Minnie_Mouse"): + + ``` + curl --request POST \ + --user MickeyMouse:I_love_Minnie_Mouse \ + --url https://data.geoadmin.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ + --header 'Content-Type: application/json' \ + --data '{ + "id": "fancy_unique_id", + "item": "swisstlmregio-2020", + "title": "My title", + "type": "application/x.filegdb+zip", + "description": "My description", + "proj:epsg": 2056, + "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" + }' + ``` + + ## Token authentication + + A user specific token for authentication can be added to every write request the user wants to perform. + Here is an example of posting an asset using curl: + + ``` + curl --request POST \ + --url https://data.geoadmin.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ + --header 'Authorization: Token ccecf40693bfc52ba090cd46eb7f19e723fe831f' \ + --header 'Content-Type: application/json' \ + --data '{ + "id": "fancy_unique_id", + "item": "swisstlmregio-2020", + "title": "My title", + "type": "application/x.filegdb+zip", + "description": "My description", + "proj:epsg": 2056, + "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" + }' + ``` + + Tokens can either be generated in the admin interface or existing users can perform a POST request + on the get-token endpoint to request a token (also see [Request token for token authentication](#operation/getToken)). + Here is an example using curl: + + ``` + curl --request POST \ + --url https://data.geoadmin.ch/api/stac/get-token \ + --header 'Content-Type: application/json' \ + --data '{"username": "MickeyMouse", "password": "I_love_Minnie_Mouse"}' + ``` diff --git a/spec/transaction/transaction.yml b/spec/transaction/transaction.yml deleted file mode 100644 index f4aa54fc..00000000 --- a/spec/transaction/transaction.yml +++ /dev/null @@ -1,1593 +0,0 @@ -openapi: 3.0.1 -paths: - "/collections": - post: - tags: - - Data Management - summary: >- - Create a new collection - operationId: createCollection - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/collection" - example: - description: The National Map 1:200,000 is a topographic map giving an overview of Switzerland. 
- id: ch.swisstopo.pixelkarte-farbe-pk200.noscale - license: proprietary - links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch - title: National Map 1:200'000 - responses: - "201": - description: Returns the created Collection - headers: - Location: - description: A link to the collection - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/collectionWrite" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "500": - $ref: "#/components/responses/ServerError" - - "/collections/{collectionId}": - put: - tags: - - Data Management - summary: Update or create a collection - description: >- - Update or create a collection with Id `collectionId` with a complete collection definition. - If the collection doesn't exists it is then created. - operationId: updateCollection - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatchWrite" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/collectionWrite" - example: - description: The National Map 1:200,000 is a topographic map giving an overview of Switzerland. - id: ch.swisstopo.pixelkarte-farbe-pk200.noscale - license: proprietary - links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch - title: National Map 1:200'000 - responses: - "200": - description: Returns the updated Collection - content: - application/json: - schema: - $ref: "#/components/schemas/collection" - "201": - description: Returns the created Collection - headers: - Location: - description: A link to the collection - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/collection" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - patch: - tags: - - Data Management - summary: Partial update of a collection - description: >- - Update an existing collection with Id `collectionId` with a partial collection definition - operationId: partialUpdateCollection - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/IfMatch" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/collectionWrite" - example: - id: ch.swisstopo.pixelkarte-farbe-pk200.noscale - license: proprietary - links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography swisstopo - - href: 
https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - providers: - - name: Federal Office of Topography - swisstopo - roles: - - producer - - licensor - url: https://www.swisstopo.admin.ch - title: National Map 1:200'000 - responses: - "200": - description: Returns the updated Collection - content: - application/json: - schema: - $ref: "#/components/schemas/collection" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - "/collections/{collectionId}/items": - post: - summary: Add a new feature to a collection - description: Create a new feature/item in a specific collection - operationId: postFeature - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/createItem" - example: - id: cs3-20160503_132131_05 - geometry: - type: Polygon - coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 - properties: - datetime: "2016-05-03T13:22:30.040Z" - title: A CS3 item - links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - responses: - "201": - description: Return the created Item. - headers: - Location: - description: A link to the item - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/item" - "400": - $ref: "#/components/responses/BadRequest" - "403": - $ref: "#/components/responses/PermissionDenied" - "5XX": - $ref: "#/components/responses/ServerError" - "/collections/{collectionId}/items/{featureId}": - put: - summary: Update or create a feature - description: >- - Update or create a feature with Id `featureId` with a complete feature - definition. If the feature doesn't exists it is then created. - - - *NOTE: Optional fields that are not part of the PUT payload, will be erased - in the resource. 
For example if the resource has a properties.title and the - PUT payload doesn't, then the resource's properties.title will be removed.* - operationId: putFeature - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/updateItem" - example: - id: cs3-20160503_132131_05 - geometry: - type: Polygon - coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 - properties: - datetime: "2016-05-03T13:22:30.040Z" - title: A CS3 item - links: - - href: https://www.swisstopo.admin.ch/en/home/meta/conditions/geodata/free-geodata.html - rel: license - title: Licence for the free geodata of the Federal Office of Topography swisstopo - - href: https://www.geocat.ch/geonetwork/srv/eng/catalog.search#/metadata/4c8c7c58-61c7-4a61-8e7a-6ffb95d183df - rel: describedby - responses: - "200": - description: Returns the updated Item - content: - application/json: - schema: - $ref: "#/components/schemas/item" - "400": - $ref: "#/components/responses/BadRequest" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "5XX": - $ref: "#/components/responses/ServerError" - patch: - summary: Update an existing feature by Id with a partial item definition - description: >- - Use this method to update an existing feature. Requires a JSON - fragment (containing the fields to be updated) be submitted. - operationId: patchFeature - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/partialItem" - responses: - "200": - description: Returns the updated Item - content: - application/json: - schema: - $ref: "#/components/schemas/item" - "201": - description: Returns the created Item - headers: - Location: - description: A link to the item - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/item" - "400": - $ref: "#/components/responses/BadRequest" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "5XX": - $ref: "#/components/responses/ServerError" - delete: - summary: Delete an existing feature by Id - description: Use this method to delete an existing feature/item. - operationId: deleteFeature - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/IfMatchWrite" - responses: - "200": - $ref: "#/components/responses/DeletedResource" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "5XX": - $ref: "#/components/responses/ServerError" - "/collections/{collectionId}/items/{featureId}/assets": - get: - description: >- - Fetch assets of the item with id `featureId`. - - Every asset belongs to an item. 
- operationId: getAssets - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - responses: - "200": - $ref: "#/components/responses/Assets" - "400": - $ref: "#/components/responses/InvalidParameter" - "404": - $ref: "#/components/responses/NotFound" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch all assets for a feature - tags: - - Data - post: - summary: Add a new asset to a feature - description: | - Create a new asset for a specific feature. - - *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* - operationId: postAsset - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/createAsset" - responses: - "201": - description: Return the created Asset - headers: - Location: - description: A link to the asset - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/createAsset" - "400": - $ref: "#/components/responses/BadRequest" - "5XX": - $ref: "#/components/responses/ServerError" - - "/collections/{collectionId}/items/{featureId}/assets/{assetId}": - get: - description: >- - Fetch the asset with id `assetId` of the feature with id `featureId` - in the feature collection with id `collectionId`. - operationId: getAsset - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" - responses: - "200": - $ref: "#/components/responses/Asset" - "304": - $ref: "#/components/responses/NotModified" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - summary: Fetch a single asset - tags: - - Data - put: - summary: Update or create an asset - description: >- - Update or create an asset with Id `assetId` with a complete asset definition. - If the asset doesn't exists it is then created. - - - *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* - operationId: putAsset - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/readUpdateAsset" - responses: - "200": - description: Asset has been successfully updated. - content: - application/json: - schema: - $ref: "#/components/schemas/readUpdateAsset" - "201": - description: Asset has been newly created. - headers: - Location: - description: A link to the asset - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/createAsset" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "5XX": - $ref: "#/components/responses/ServerError" - patch: - summary: Update an existing asset by Id with a partial asset definition - description: >- - Use this method to update an existing asset. Requires a JSON - fragment (containing the fields to be updated) be submitted. 
- - - *Note: to upload an asset file see [Asset Upload Management](#tag/Asset-Upload-Management)* - operationId: patchAsset - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/readUpdateAsset" - responses: - "200": - description: Returns the updated Asset. - content: - application/json: - schema: - $ref: "#/components/schemas/readUpdateAsset" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "5XX": - $ref: "#/components/responses/ServerError" - delete: - summary: Delete an existing asset by Id - description: >- - Use this method to delete an existing asset. - - - **NOTE: Asset file on S3 will be also removed !** - operationId: deleteAsset - tags: - - Data Management - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/IfMatchWrite" - responses: - "200": - $ref: "#/components/responses/DeletedResource" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "5XX": - $ref: "#/components/responses/ServerError" - - - "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads": - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - get: - tags: - - Asset Upload Management - summary: List all Asset's multipart uploads - description: >- - Return a list of all Asset's multipart uploads that are in progress and have been completed - or aborted. - operationId: getAssetUploads - parameters: - - name: status - in: query - description: Filter the list by status. 
- schema: - $ref: "#/components/schemas/status" - responses: - 200: - description: List of Asset's uploads - content: - application/json: - schema: - $ref: "#/components/schemas/assetUploads" - example: - uploads: - - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnusebaJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: in-progress - number_parts: 1 - urls: - - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 - part: 1 - expires: '2019-08-24T14:15:22Z' - created: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YaaegJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: completed - number_parts: 1 - created: '2019-08-24T14:15:22Z' - completed: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: aborted - number_parts: 1 - created: '2019-08-24T14:15:22Z' - aborted: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - links: - - rel: next - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "5XX": - $ref: "#/components/responses/ServerError" - post: - tags: - - Asset Upload Management - summary: Create a new Asset's multipart upload - description: | - Create a new Asset's multipart upload. - operationId: createAssetUpload - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/assetUploadCreate" - responses: - 201: - description: Created Asset's multipart upload - headers: - Location: - description: A link to the Asset's multipart upload object - schema: - type: string - format: url - content: - application/json: - schema: - $ref: "#/components/schemas/assetUploadCreate" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "5XX": - $ref: "#/components/responses/ServerError" - "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}": - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/uploadId" - get: - tags: - - Asset Upload Management - summary: Get an Asset's multipart upload - description: | - Return an Asset's multipart upload. - operationId: getAssetUpload - parameters: - - $ref: "#/components/parameters/IfMatch" - - $ref: "#/components/parameters/IfNoneMatch" - responses: - "200": - description: Asset's multipart upload description. 
- headers: - ETag: - $ref: "#/components/headers/ETag" - content: - application/json: - schema: - $ref: "#/components/schemas/assetUpload" - examples: - inprogress: - $ref: "#/components/examples/inprogress" - completed: - $ref: "#/components/examples/completed" - aborted: - $ref: "#/components/examples/aborted" - "304": - $ref: "#/components/responses/NotModified" - "404": - $ref: "#/components/responses/NotFound" - "412": - $ref: "#/components/responses/PreconditionFailed" - "500": - $ref: "#/components/responses/ServerError" - "/{presignedUrl}": - servers: - - url: http://data.geo.admin.ch/ - put: - tags: - - Asset Upload Management - summary: Upload asset file part - description: >- - Upload an Asset file part using the presigned url(s) returned by - [Create a new Asset's multipart upload](#operation/createAssetUpload). - - - Parts that have been uploaded but not completed can be checked using - [Get an Asset's multipart upload](#operation/getAssetUpload) - - - A file part must be at least 5 MB except for the last one and at most 5 GB, otherwise the - complete operation will fail. - - - *Note: this endpoint doesn't require any authentication as it is already part of the - presigned url* - operationId: uploadAssetFilePart - parameters: - - $ref: "#/components/parameters/presignedUrl" - - name: Content-MD5 - in: header - description: Asset file part content MD5. - required: true - schema: - type: string - responses: - "200": - description: Asset file uploaded part successfully - headers: - ETag: - schema: - type: string - description: >- - The RFC7232 ETag header field in a response provides the current entity- - tag for the selected resource. - - - This ETag is required in the complete multipart upload payload. - - - An entity-tag is an opaque identifier for - different versions of a resource over time, regardless whether multiple - versions are valid at the same time. An entity-tag consists of an opaque - quoted string. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - required: true - "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/complete": - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/uploadId" - post: - tags: - - Asset Upload Management - summary: Complete multipart upload - operationId: completeMultipartUpload - description: >- - Complete the multipart upload process. After completion, the Asset metadata are updated - with the new `checksum:multihash` from the upload and the parts are automatically deleted. - The Asset `href` field is also set if it was the first upload. - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/assetCompleteUpload" - responses: - "200": - description: Asset multipart upload completed successfully. 
- content: - application/json: - schema: - $ref: "#/components/schemas/assetUploadCompleted" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "5XX": - $ref: "#/components/responses/ServerError" - "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/abort": - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/uploadId" - post: - tags: - - Asset Upload Management - summary: Abort multipart upload - operationId: abortMultipartUpload - description: >- - Abort the multipart upload process. All already uploaded parts are automatically deleted. - responses: - "200": - description: Asset multipart upload aborted successfully. - content: - application/json: - schema: - $ref: "#/components/schemas/assetUploadAborted" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "5XX": - $ref: "#/components/responses/ServerError" - "/collections/{collectionId}/items/{featureId}/assets/{assetId}/uploads/{uploadId}/parts": - parameters: - - $ref: "#/components/parameters/collectionId" - - $ref: "#/components/parameters/featureId" - - $ref: "#/components/parameters/assetId" - - $ref: "#/components/parameters/uploadId" - get: - tags: - - Asset Upload Management - summary: Get upload parts - operationId: getUploadParts - description: >- - Return the list of already uploaded parts. - - - ### Pagination - - By default all parts are returned (maximum number of parts being 100). The user can - use pagination to reduce the number of returned parts. Pagination is done via the `limit` - query parameter (see below). - parameters: - - $ref: "#/components/parameters/limit" - responses: - "200": - description: List of parts already uploaded. - content: - application/json: - schema: - $ref: "#/components/schemas/assetUploadParts" - "400": - $ref: "#/components/responses/BadRequest" - "404": - $ref: "#/components/responses/NotFound" - "5XX": - $ref: "#/components/responses/ServerError" - - - "/get-token": - servers: - - url: http://data.geo.admin.ch/api/stac/ - post: - tags: - - Authentication - summary: >- - Request token for token authentication. - operationId: getToken - requestBody: - required: true - content: - application/json: - schema: - type: object - properties: - username: - type: string - description: name of user for whom token is requested - password: - type: string - description: password of user for whom token is requested - required: - - username - - password - example: - username: "Mickey Mouse" - password: "I_love_Minnie_Mouse" - responses: - "200": - description: Returns the token for the specified user - content: - application/json: - schema: - type: object - properties: - token: - type: string - description: generated token for specified user - example: - token: ccecf40693bfc52ba090cd46eb7f19e723fe831f - "400": - description: Wrong credentials were provided. - content: - application/json: - schema: - type: object - properties: - code: - type: string - description: - type: string - example: - code: 400 - description: "Unable to log in with provided credentials." 
- -components: - examples: - inprogress: - summary: In progress upload example - value: - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: in-progress - number_parts: 1 - urls: - - url: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 - part: 1 - expires: '2019-08-24T14:15:22Z' - created: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - completed: - summary: Completed upload example - value: - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: completed - number_parts: 1 - created: '2019-08-24T14:15:22Z' - completed: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - aborted: - summary: Aborted upload example - value: - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - status: completed - number_parts: 1 - created: '2019-08-24T14:15:22Z' - aborted: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 - parameters: - assetId: - name: assetId - in: path - description: Local identifier of an asset. - required: true - schema: - type: string - uploadId: - name: uploadId - in: path - description: Local identifier of an asset's upload. - required: true - schema: - type: string - presignedUrl: - name: presignedUrl - in: path - description: Presigned url returned by [Create a new Asset's multipart upload](#operation/createAssetUpload). - required: true - schema: - type: string - IfMatchWrite: - name: If-Match - in: header - schema: - type: string - description: >- - The RFC7232 `If-Match` header field makes the PUT/PATCH/DEL request method conditional. It is - composed of a comma separated list of ETags or value "*". - - - The server compares the client's ETags (sent with `If-Match`) with the ETag for its - current version of the resource, and if both values don't match (that is, the resource has changed), - the server sends back a `412 Precondition Failed` status, without a body, which tells the client that - he would overwrite another changes of the resource. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - schemas: - assetId: - type: string - pattern: ^[a-z0-9.-_]+$ - title: ID - description: >- - The asset id uniquely identifies the asset for an item - - - **Note**: `id` must be unique for the item and must be identical to the - filename. 
- example: smr50-263-2016-2056-kgrs-2.5.tiff - assets: - title: Assets - type: object - properties: - assets: - items: - $ref: "#/components/schemas/readUpdateAsset" - type: array - links: - items: - $ref: "#/components/schemas/link" - type: array - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 - rel: item - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: collection - createAsset: - allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - required: - - id - - links - properties: - id: - $ref: "#/components/schemas/assetId" - links: - items: - $ref: "#/components/schemas/link" - type: array - readOnly: true - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 - rel: item - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: collection - readUpdateAsset: - allOf: - - $ref: "#/components/schemas/assetBase" - - type: object - required: - - id - - links - properties: - id: - $ref: "#/components/schemas/assetId" - links: - items: - $ref: "#/components/schemas/link" - type: array - readOnly: true - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff - rel: self - - href: https://data.geo.admin.ch/api/stac/v0.9/ - rel: root - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets - rel: parent - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019 - rel: item - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale - rel: collection - - writeItem: - allOf: - - $ref: "#/components/schemas/itemBase" - - type: object - properties: - links: - items: - $ref: "#/components/schemas/link" - type: array - description: >- - Add additional link to the generated ones (`self`, `root`, `parent`, `items`, - `collection`, `next`, `previous`) - createItem: - allOf: - - type: object - required: - - id - properties: - id: - $ref: "#/components/schemas/itemId" - - $ref: "#/components/schemas/writeItem" - updateItem: - allOf: - - type: object - required: - - id - properties: - id: - $ref: "#/components/schemas/itemIdUpdate" - - $ref: "#/components/schemas/writeItem" - partialItem: - type: object - properties: - id: - $ref: "#/components/schemas/itemIdUpdate" - geometry: - $ref: "https://geojson.org/schema/Geometry.json" - properties: - $ref: 
"#/components/schemas/itemProperties" - links: - type: array - items: - $ref: "#/components/schemas/link" - description: >- - Add/update additional link to the generated ones (`self`, `root`, `parent`, `items`, - `collection`, `next`, `previous`) - example: - properties: - datetime: "2016-05-03T13:22:30.040Z" - partialCollection: - type: object - description: Allows for a set of partial metadata fields for a collection - properties: - id: - description: Identifier of the collection used, for example, in URIs - type: string - example: ch.swisstopo.pixelkarte-farbe-pk200.noscale - title: - description: Human readable title of the collection - type: string - example: National Map 1:200'000 - description: - description: A description of the features in the collection - type: string - example: The National Map 1:200,000 is a topographic map giving an overview of Switzerland. - links: - type: array - items: - $ref: "#/components/schemas/link" - example: - - href: http://data.example.com/buildings - rel: item - - href: http://example.com/concepts/buildings.html - rel: describedBy - type: text/html - extent: - $ref: "#/components/schemas/extent" - itemType: - description: >- - Indicator about the type of the items in the collection (the default value is 'feature'). - type: string - default: feature - crs: - description: The list of coordinate reference systems supported by the service - type: array - items: - type: string - default: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 - example: - - http://www.opengis.net/def/crs/OGC/1.3/CRS84 - - http://www.opengis.net/def/crs/EPSG/0/4326 - example: - title: The new title of the collection - itemIdUpdate: - description: >- - Item identifier (unique per collection. If it doesn't match the `featureId` in path - parameters, then the Item is renamed. - example: smr200-200-4-2019 - type: string - uploadId: - title: ID - type: string - description: Unique Asset upload identifier - example: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG - readOnly: true - dtUploadCreated: - title: created - description: Date and time when the Asset's upload has been created/started. - type: string - format: date-time - readOnly: true - dtUploadCompleted: - title: completed - description: | - Date and time when the Asset's upload has been completed. - - *Note: this property is mutually exclusive with `aborted`* - type: string - format: date-time - readOnly: true - dtUploadAborted: - title: aborted - description: | - Date and time when the Asset's upload has been aborted. - - *Note: this property is mutually exclusive with `completed`* - type: string - format: date-time - readOnly: true - assetUploads: - title: AssetUploads - type: object - required: - - uploads - - links - properties: - uploads: - description: List of uploads that are within the asset. - type: array - items: - $ref: "#/components/schemas/assetUpload" - links: - description: Next and/or previous links for the pagination. 
- type: array - items: - $ref: "#/components/schemas/link" - example: - - rel: next - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 - assetUpload: - title: AssetUpload - type: object - required: - - upload_id - - status - - created - - "checksum:multihash" - - number_parts - properties: - upload_id: - $ref: "#/components/schemas/uploadId" - status: - $ref: "#/components/schemas/status" - number_parts: - $ref: "#/components/schemas/number_parts" - urls: - type: array - description: | - Note: As soon as the multipart upload is completed or aborted, the `urls` property is removed. - items: - $ref: "#/components/schemas/multipartUploadUrl" - readOnly: true - created: - $ref: "#/components/schemas/dtUploadCreated" - completed: - $ref: "#/components/schemas/dtUploadCompleted" - aborted: - $ref: "#/components/schemas/dtUploadAborted" - "checksum:multihash": - $ref: "#/components/schemas/checksumMultihash" - assetUploadCreate: - title: AssetUpload - type: object - required: - - upload_id - - status - - created - - "checksum:multihash" - - number_parts - properties: - upload_id: - $ref: "#/components/schemas/uploadId" - status: - $ref: "#/components/schemas/status" - number_parts: - $ref: "#/components/schemas/number_parts" - urls: - type: array - description: | - Note: As soon as the multipart upload is completed or aborted, the `urls` property is removed. - items: - $ref: "#/components/schemas/multipartUploadUrl" - readOnly: true - created: - $ref: "#/components/schemas/dtUploadCreated" - "checksum:multihash": - $ref: "#/components/schemas/checksumMultihash" - assetCompleteUpload: - title: CompleteUpload - type: object - required: - - parts - properties: - parts: - type: array - description: Parts that have been uploaded - items: - title: File parts that have been uploaded - type: object - required: - - etag - - part_number - properties: - etag: - title: ETag - type: string - description: >- - ETag of the uploaded file part (returned in the header of the answer of - [Upload asset file part](#operation/uploadAssetFilePart)). - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - part_number: - $ref: "#/components/schemas/part_number" - assetUploadCompleted: - title: UploadCompleted - type: object - required: - - upload_id - - status - - number_parts - - created - - completed - - "checksum:multihash" - properties: - upload_id: - $ref: "#/components/schemas/uploadId" - status: - title: Status - description: Status of the Asset's multipart upload. - type: string - enum: - - completed - example: - completed - number_parts: - $ref: "#/components/schemas/number_parts" - created: - $ref: "#/components/schemas/dtUploadCreated" - completed: - $ref: "#/components/schemas/dtUploadCompleted" - "checksum:multihash": - $ref: "#/components/schemas/checksumMultihash" - assetUploadAborted: - title: UploadCompleted - type: object - required: - - upload_id - - status - - number_parts - - created - - aborted - - "checksum:multihash" - properties: - upload_id: - $ref: "#/components/schemas/uploadId" - status: - title: Status - description: Status of the Asset's multipart upload. 
- type: string - enum: - - aborted - example: - aborted - number_parts: - $ref: "#/components/schemas/number_parts" - created: - $ref: "#/components/schemas/dtUploadCreated" - aborted: - $ref: "#/components/schemas/dtUploadAborted" - "checksum:multihash": - $ref: "#/components/schemas/checksumMultihash" - assetUploadParts: - title: Parts - type: object - required: - - parts - - links - properties: - parts: - type: object - description: List of uploaded parts - required: - - etag - - part_number - - modified - - size - properties: - etag: - $ref: "#/components/schemas/uploadEtag" - part_number: - $ref: "#/components/schemas/part_number" - modified: - type: string - format: date-time - description: Date time when the part was added/modified - size: - type: integer - description: Part size in bytes - minimum: 0 - example: 1024 - links: - description: Next and/or previous links for the pagination. - type: array - items: - $ref: "#/components/schemas/link" - example: - - rel: next - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads/upload-id/parts?limit=50&offset=50 - status: - title: Status - description: Status of the Asset's multipart upload. - type: string - enum: - - in-progress - - aborted - - completed - readOnly: true - number_parts: - description: Number of parts for the Asset's multipart upload. - type: integer - minimum: 1 - maximum: 100 - part_number: - description: Number of the part. - type: integer - minimum: 1 - maximum: 100 - multipartUploadUrl: - title: MultipartUploadUrl - description: Multipart upload url. - type: object - required: - - url - - part - - expires - properties: - url: - description: Presigned URL to use to upload the Asset File part using the PUT method. - type: string - format: url - example: https://data.geo.admin.ch/ch.swisstopo.pixelkarte-farbe-pk50.noscale/smr200-200-4-2019/smr50-263-2016-2056-kgrs-2.5.tiff?uploadId=d77UbNnEVTaqCAyAz61AVqy7uuTGJ_YOUyPOklcRMr4ZPBthON9p6cpMREx683yQ_oeGVmGE_yTg4cmnEz3mTErEPKn0_m.3LBjo6A88Qxlj4vFrAdU6YBuourb.IqFF&partNumber=1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA4HDUVYWAM6ZB6SWO%2F20210414%2Feu-central-1%2Fs3%2Faws4_request&X-Amz-Date=20210414T112742Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d12763467eaffa0c69d601297a661a05c9f414d4008b4148fa6ba604f203be01 - part: - description: Part number assigned to this presigned URL. - type: integer - minimum: 1 - maximum: 100 - expires: - description: Date time when this presigned URL expires and is not valid anymore. - type: string - format: date-time - uploadEtag: - title: ETag - type: string - description: The RFC7232 ETag for the specified uploaded part. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - - responses: - Assets: - description: >- - The response is a document consisting of all assets of the feature. - content: - application/json: - schema: - $ref: "#/components/schemas/assets" - Asset: - description: >- - The response is a document consisting of one asset of the feature. 
- headers: - ETag: - $ref: "#/components/headers/ETag" - content: - application/json: - schema: - $ref: "#/components/schemas/readUpdateAsset" - DeletedResource: - description: Status of the delete resource - content: - application/json: - schema: - description: >- - Information about the deleted resource and a link to the parent resource - type: object - properties: - code: - type: integer - example: 200 - description: - type: string - example: Resource successfully deleted - links: - type: array - items: - $ref: "#/components/schemas/link" - description: >- - The array contain at least a link to the parent resource (`rel: parent`). - example: - - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items - rel: parent - required: - - code - - links - PermissionDenied: - description: No Permission for this request - content: - application/json: - schema: - $ref: "#/components/schemas/exception" - example: - code: 403 - description: "Permission denied" - -tags: - - name: Capabilities - description: Essential characteristics of this API - - name: Data - description: Access to data (features) - - name: STAC - description: Extension to OGC API - Features to support STAC metadata model and search API - - name: Data Management - description: | - Metadata management requests. Theses requests are used to create, update or delete the STAC - metadata. - - *NOTE: these requests require authentication as described in [here](#tag/Authentication).* - - name: Asset Upload Management - description: | - Asset file can be uploaded via the STAC API using the following requests. - - *NOTE: the POST requests require authentication as described in [here](#tag/Authentication).* - - ### Example - - ```python - import os - import hashlib - - import requests - import multihash - - # variables - scheme = 'https' - hostname = 'data.geo.admin.ch' - collection = 'ch.swisstopo.pixelkarte-farbe-pk200.noscale' - item = 'smr200-200-4-2016' - asset = 'smr200-200-4-2016-2056-kgrs-10.tiff' - asset_path = f'collections/{collection}/items/{item}/assets/{asset}' - user = os.environ.get('STAC_USER', 'unknown-user') - password = os.environ.get('STAC_PASSWORD', 'unknown-password') - - with open('smr200-200-4-2016-2056-kgrs-10.tiff', 'rb') as fd: - data = fd.read() - - checksum_multihash = multihash.to_hex_string(multihash.encode(hashlib.sha256(data).digest(), 'sha2-256')) - - # 1. Create a multipart upload - response = requests.post( - f"{scheme}://{hostname}/api/stac/v0.9/{asset_path}/uploads", - auth=(user, password), - json={ - "number_parts": 1, - "checksum:multihash": checksum_multihash - } - ) - upload_id = response.json()['upload_id'] - - # 2. Upload the part using the presigned url - response = requests.put(response.json()['urls'][0]['url'], data=data) - etag = response.headers['ETag'] - - # 3. Complete the upload - response = requests.post( - f"{scheme}://{hostname}/api/stac/v0.9/{asset_path}/uploads/{upload_id}/complete", - auth=(user, password), - json={'parts': [{'etag': etag, 'part_number': 1}]} - ) - ``` - - name: Authentication - description: | - All write requests require authentication. There is currently three type of supported authentications: - - * [Session authentication](#section/Session-authentication) - * [Basic authentication](#section/Basic-authentication) - * [Token authentication](#section/Token-authentication) - - ## Session authentication - - When using the browsable API the user can simply use the admin interface for logging in. 
- Once logged in, the browsable API can be used to perform write requests. - - ## Basic authentication - - The username and password for authentication can be added to every write request the user wants to perform. - Here is an example of posting an asset using curl (_username_="MickeyMouse", _password_="I_love_Minnie_Mouse"): - - ``` - curl --request POST \ - --user MickeyMouse:I_love_Minnie_Mouse \ - --url https://data.geoadmin.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ - --header 'Content-Type: application/json' \ - --data '{ - "id": "fancy_unique_id", - "item": "swisstlmregio-2020", - "title": "My title", - "type": "application/x.filegdb+zip", - "description": "My description", - "proj:epsg": 2056, - "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" - }' - ``` - - ## Token authentication - - A user specific token for authentication can be added to every write request the user wants to perform. - Here is an example of posting an asset using curl: - - ``` - curl --request POST \ - --url https://data.geoadmin.ch/api/stac/v0.9/collections/ch.swisstopo.swisstlmregio/items/swisstlmregio-2020/assets \ - --header 'Authorization: Token ccecf40693bfc52ba090cd46eb7f19e723fe831f' \ - --header 'Content-Type: application/json' \ - --data '{ - "id": "fancy_unique_id", - "item": "swisstlmregio-2020", - "title": "My title", - "type": "application/x.filegdb+zip", - "description": "My description", - "proj:epsg": 2056, - "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" - }' - ``` - - Tokens can either be generated in the admin interface or existing users can perform a POST request - on the get-token endpoint to request a token (also see [Request token for token authentication](#operation/getToken)). - Here is an example using curl: - - ``` - curl --request POST \ - --url https://data.geoadmin.ch/api/stac/get-token \ - --header 'Content-Type: application/json' \ - --data '{"username": "MickeyMouse", "password": "I_love_Minnie_Mouse"}' - ``` From 231e577a62ed107111fad9e3110ebfee50ff3520 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Mon, 19 Apr 2021 16:56:40 +0200 Subject: [PATCH 092/105] BGDIINF_SB-1753 collections updated property to be updated upstream currently when an item or asset changes, the collection's updated property is not updated upstream This PR implements an upstream updating of parent items or collections, when child assets or items are updated or deleted. 
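
The propagation is done in the child models' `save()`/`delete()` methods, which now always
save their parent so that the parent's `updated` timestamp (and hence its ETag) follows the
change. Below is a minimal, simplified sketch of that pattern, with illustrative model and
field names only and assuming `updated` is an auto-now timestamp; the real models in
`app/stac_api/models.py` additionally recompute extents and summaries (see the diff below).

```python
from django.db import models


class Collection(models.Model):
    name = models.CharField(max_length=255, unique=True)
    # assumption for this sketch: `updated` is refreshed on every save()
    updated = models.DateTimeField(auto_now=True)


class Item(models.Model):
    name = models.CharField(max_length=255)
    collection = models.ForeignKey(Collection, on_delete=models.CASCADE)

    def save(self, *args, **kwargs):
        # touch the parent collection so its `updated`/ETag reflect the change
        self.collection.save()
        super().save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        # deleting a child also counts as a change of the parent
        self.collection.save()
        super().delete(*args, **kwargs)
```

The same pattern applies one level further down: saving or deleting an asset saves its parent
item, which in turn saves the collection.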
--- app/stac_api/models.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/app/stac_api/models.py b/app/stac_api/models.py index 5a81c116..db08aaef 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -460,8 +460,7 @@ def save(self, *args, **kwargs): # pylint: disable=signature-differs trigger, self.geometry, self._original_values.get('geometry', None), self ) - if collection_updated: - self.collection.save() + self.collection.save() super().save(*args, **kwargs) @@ -482,8 +481,7 @@ def delete(self, *args, **kwargs): # pylint: disable=signature-differs 'delete', self.geometry, None, self ) - if collection_updated: - self.collection.save() + self.collection.save() super().delete(*args, **kwargs) @@ -646,9 +644,7 @@ def save(self, *args, **kwargs): # pylint: disable=signature-differs old_values = [self._original_values.get(field, None) for field in UPDATE_SUMMARIES_FIELDS] - if self.item.collection.update_summaries(self, trigger, old_values=old_values): - self.item.collection.save() - + self.item.collection.update_summaries(self, trigger, old_values=old_values) self.item.save() # We save the item to update its ETag super().save(*args, **kwargs) @@ -666,9 +662,9 @@ def delete(self, *args, **kwargs): # pylint: disable=signature-differs ) # It is important to use `*args, **kwargs` in signature because django might add dynamically # parameters - if self.item.collection.update_summaries(self, 'delete', old_values=None): - self.item.collection.save() + self.item.collection.update_summaries(self, 'delete', old_values=None) self.item.save() # We save the item to update its ETag + try: super().delete(*args, **kwargs) except ProtectedError as error: From 6b72c413e056ae53673b4bacb77cccc512a84f62 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Fri, 23 Apr 2021 11:31:25 +0200 Subject: [PATCH 093/105] BGDIINF_SB-1753 added unit test * added unit tests asserting collections updated property gets correctly updated on item creation, item update, item delete, assets creation, assets update and assets deletion --- app/tests/test_collection_model.py | 105 ++++++++++++++++++++++++++--- 1 file changed, 97 insertions(+), 8 deletions(-) diff --git a/app/tests/test_collection_model.py b/app/tests/test_collection_model.py index 754c8775..376ff6c4 100644 --- a/app/tests/test_collection_model.py +++ b/app/tests/test_collection_model.py @@ -1,11 +1,13 @@ import logging +from time import sleep from django.core.exceptions import ValidationError from django.test import TestCase from stac_api.models import Collection -from tests.data_factory import CollectionFactory +from tests.data_factory import Factory +from tests.utils import mock_s3_asset_file logger = logging.getLogger(__name__) @@ -13,8 +15,10 @@ class CollectionsModelTestCase(TestCase): @classmethod + @mock_s3_asset_file def setUpTestData(cls): - cls.collection = CollectionFactory().create_sample(db_create=True) + cls.factory = Factory() + cls.collection = Factory().create_collection_sample(db_create=True) def test_create_already_existing_collection(self): # try to create already existing collection twice @@ -26,14 +30,14 @@ def test_create_already_existing_collection(self): def test_create_collection_invalid_name(self): # try to create a collection with invalid collection name with self.assertRaises(ValidationError, msg="Collection with invalid name was accepted."): - CollectionFactory().create_sample(sample="collection-invalid", db_create=True) + Factory().create_collection_sample(sample="collection-invalid", 
db_create=True) def test_create_collection_missing_mandatory_fields(self): # try to create a collection with invalid collection name with self.assertRaises( ValidationError, msg="Collection with missing mandatory fields was accepted." ): - CollectionFactory().create_sample( + Factory().create_collection_sample( name="collection-missing-mandatory-fields", sample="collection-missing-mandatory-fields", db_create=True @@ -42,7 +46,7 @@ def test_create_collection_missing_mandatory_fields(self): def test_create_collection_invalid_links(self): # try to create a collection with invalid collection name with self.assertRaises(ValidationError, msg="Collection with invalid links was accepted."): - CollectionFactory().create_sample( + Factory().create_collection_sample( name="collection-invalid-links", sample="collection-invalid-links", db_create=True ) @@ -51,20 +55,105 @@ def test_create_collection_invalid_providers(self): with self.assertRaises( ValidationError, msg="Collection with invalid providers was accepted." ): - CollectionFactory().create_sample(sample="collection-invalid-providers", db_create=True) + Factory().create_collection_sample( + sample="collection-invalid-providers", db_create=True + ) def test_create_collection_with_providers_and_links(self): # try to create a valid collection with providers and links. Should not raise any errors. - CollectionFactory().create_sample( + Factory().create_collection_sample( name="collection-links-providers", sample="collection-1", db_create=True ) def test_create_collection_only_required_attributes(self): # try to create a valid collection with only the required attributes. # Should not raise any errors. - CollectionFactory().create_sample( + Factory().create_collection_sample( name="collection-required-only", sample="collection-1", db_create=True, required_only=True ) + + def test_collection_update_on_item_write_operations(self): + # assert that collection's updated property is updated when an item is + # added to the collection, this item is updated and this item is deleted + + # check collection's update on item insertion + initial_last_modified = self.collection.model.updated + sleep(0.01) + item = Factory().create_item_sample(self.collection.model, sample='item-1', db_create=True) + item.model.full_clean() + item.model.save() + self.collection.model.refresh_from_db() + self.assertGreater( + self.collection.model.updated, + initial_last_modified, + msg="Collection's updated property was not updated on item insert" + ) + + # check collection's update on item update + initial_last_modified = self.collection.model.updated + sleep(0.01) + item.model.name = "new_randon_name" + item.model.full_clean() + item.model.save() + self.collection.model.refresh_from_db() + self.assertGreater( + self.collection.model.updated, + initial_last_modified, + msg="Collection's updated property was not updated on item update" + ) + + # check collection's update on item deletion + initial_last_modified = self.collection.model.updated + sleep(0.01) + item.model.delete() + self.collection.model.refresh_from_db() + self.assertGreater( + self.collection.model.updated, + initial_last_modified, + msg="Collection's updated property was not updated on item deletion" + ) + + def test_collection_update_on_asset_write_operations(self): + # assert that collection's updated property is updated when an asset is + # added to an item of the collection, this asset is updated and this asset is deleted + + # check collection's update on asset insertion + item = 
Factory().create_item_sample(self.collection.model, sample='item-1', db_create=True) + initial_last_modified = self.collection.model.updated + sleep(0.01) + asset = Factory().create_asset_sample(item=item.model, sample='asset-1', db_create=True) + asset.model.full_clean() + asset.model.save() + self.collection.model.refresh_from_db() + self.assertGreater( + self.collection.model.updated, + initial_last_modified, + msg="Collection's updated property was not updated on asset insert" + ) + + # check collection's update on asset update + initial_last_modified = self.collection.model.updated + sleep(0.01) + asset.model.name = f"new-{asset.model.name}" + asset.model.full_clean() + asset.model.save() + self.collection.model.refresh_from_db() + self.assertGreater( + self.collection.model.updated, + initial_last_modified, + msg="Collection's updated property was not updated on asset update" + ) + + # check collection's update on asset deletion + initial_last_modified = self.collection.model.updated + sleep(0.01) + asset.model.delete() + self.collection.model.refresh_from_db() + self.assertGreater( + self.collection.model.updated, + initial_last_modified, + msg="Collection's updated property was not updated on asset deletion" + ) From feadcea856074fa2626e430c921c55a2cc3ac1b5 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Mon, 26 Apr 2021 10:53:17 +0200 Subject: [PATCH 094/105] BGDIINF_SB-1753 minor clean up --- app/stac_api/collection_temporal_extent.py | 4 ++- app/stac_api/models.py | 18 ++++-------- app/tests/test_collection_model.py | 34 ++++++++++++---------- 3 files changed, 26 insertions(+), 30 deletions(-) diff --git a/app/stac_api/collection_temporal_extent.py b/app/stac_api/collection_temporal_extent.py index 1d154b48..503b0861 100644 --- a/app/stac_api/collection_temporal_extent.py +++ b/app/stac_api/collection_temporal_extent.py @@ -55,7 +55,9 @@ def update_temporal_extent(self, item, trigger, original_item_values): logger.critical( 'Failed to update collection temporal extent; invalid trigger parameter %s', trigger, - extra={'collection', self.name, 'item', item.name} + extra={ + 'collection': self.name, 'item': item.name + } ) raise ValueError(f'Invalid trigger parameter; {trigger}') diff --git a/app/stac_api/models.py b/app/stac_api/models.py index db08aaef..876de2f6 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -254,7 +254,7 @@ def clean(self): for role in self.roles: if role not in self.allowed_roles: logger.error( - 'Invalid provider role %s', role, extra={'collection', self.collection.name} + 'Invalid provider role %s', role, extra={'collection': self.collection.name} ) raise ValidationError( _('Invalid role, must be in %(roles)s'), @@ -446,17 +446,14 @@ def save(self, *args, **kwargs): # pylint: disable=signature-differs # It is important to use `*args, **kwargs` in signature because django might add dynamically # parameters logger.debug('Saving item', extra={'collection': self.collection.name, 'item': self.name}) - collection_updated = False self.update_etag() trigger = get_save_trigger(self) - collection_updated |= self.collection.update_temporal_extent( - self, trigger, self._original_values - ) + self.collection.update_temporal_extent(self, trigger, self._original_values) - collection_updated |= self.collection.update_bbox_extent( + self.collection.update_bbox_extent( trigger, self.geometry, self._original_values.get('geometry', None), self ) @@ -471,15 +468,10 @@ def delete(self, *args, **kwargs): # pylint: disable=signature-differs # It is 
important to use `*args, **kwargs` in signature because django might add dynamically # parameters logger.debug('Deleting item', extra={'collection': self.collection.name, 'item': self.name}) - collection_updated = False - collection_updated |= self.collection.update_temporal_extent( - self, 'delete', self._original_values - ) + self.collection.update_temporal_extent(self, 'delete', self._original_values) - collection_updated |= self.collection.update_bbox_extent( - 'delete', self.geometry, None, self - ) + self.collection.update_bbox_extent('delete', self.geometry, None, self) self.collection.save() diff --git a/app/tests/test_collection_model.py b/app/tests/test_collection_model.py index 376ff6c4..d4200cab 100644 --- a/app/tests/test_collection_model.py +++ b/app/tests/test_collection_model.py @@ -18,7 +18,7 @@ class CollectionsModelTestCase(TestCase): @mock_s3_asset_file def setUpTestData(cls): cls.factory = Factory() - cls.collection = Factory().create_collection_sample(db_create=True) + cls.collection = cls.factory.create_collection_sample(db_create=True) def test_create_already_existing_collection(self): # try to create already existing collection twice @@ -30,14 +30,16 @@ def test_create_already_existing_collection(self): def test_create_collection_invalid_name(self): # try to create a collection with invalid collection name with self.assertRaises(ValidationError, msg="Collection with invalid name was accepted."): - Factory().create_collection_sample(sample="collection-invalid", db_create=True) + self.factory.create_collection_sample( + name="invalid name", sample="collection-invalid", db_create=True + ) def test_create_collection_missing_mandatory_fields(self): # try to create a collection with invalid collection name with self.assertRaises( ValidationError, msg="Collection with missing mandatory fields was accepted." ): - Factory().create_collection_sample( + self.factory.create_collection_sample( name="collection-missing-mandatory-fields", sample="collection-missing-mandatory-fields", db_create=True @@ -46,7 +48,7 @@ def test_create_collection_missing_mandatory_fields(self): def test_create_collection_invalid_links(self): # try to create a collection with invalid collection name with self.assertRaises(ValidationError, msg="Collection with invalid links was accepted."): - Factory().create_collection_sample( + self.factory.create_collection_sample( name="collection-invalid-links", sample="collection-invalid-links", db_create=True ) @@ -55,20 +57,20 @@ def test_create_collection_invalid_providers(self): with self.assertRaises( ValidationError, msg="Collection with invalid providers was accepted." ): - Factory().create_collection_sample( + self.factory.create_collection_sample( sample="collection-invalid-providers", db_create=True ) def test_create_collection_with_providers_and_links(self): # try to create a valid collection with providers and links. Should not raise any errors. - Factory().create_collection_sample( + self.factory.create_collection_sample( name="collection-links-providers", sample="collection-1", db_create=True ) def test_create_collection_only_required_attributes(self): # try to create a valid collection with only the required attributes. # Should not raise any errors. 
- Factory().create_collection_sample( + self.factory.create_collection_sample( name="collection-required-only", sample="collection-1", db_create=True, @@ -82,9 +84,9 @@ def test_collection_update_on_item_write_operations(self): # check collection's update on item insertion initial_last_modified = self.collection.model.updated sleep(0.01) - item = Factory().create_item_sample(self.collection.model, sample='item-1', db_create=True) - item.model.full_clean() - item.model.save() + item = self.factory.create_item_sample( + self.collection.model, sample='item-1', db_create=True + ) self.collection.model.refresh_from_db() self.assertGreater( self.collection.model.updated, @@ -95,7 +97,7 @@ def test_collection_update_on_item_write_operations(self): # check collection's update on item update initial_last_modified = self.collection.model.updated sleep(0.01) - item.model.name = "new_randon_name" + item.model.properties_title = f"new_{item.model.properties_title}" item.model.full_clean() item.model.save() self.collection.model.refresh_from_db() @@ -121,12 +123,12 @@ def test_collection_update_on_asset_write_operations(self): # added to an item of the collection, this asset is updated and this asset is deleted # check collection's update on asset insertion - item = Factory().create_item_sample(self.collection.model, sample='item-1', db_create=True) + item = self.factory.create_item_sample( + self.collection.model, sample='item-1', db_create=True + ) initial_last_modified = self.collection.model.updated sleep(0.01) - asset = Factory().create_asset_sample(item=item.model, sample='asset-1', db_create=True) - asset.model.full_clean() - asset.model.save() + asset = self.factory.create_asset_sample(item=item.model, sample='asset-1', db_create=True) self.collection.model.refresh_from_db() self.assertGreater( self.collection.model.updated, @@ -137,7 +139,7 @@ def test_collection_update_on_asset_write_operations(self): # check collection's update on asset update initial_last_modified = self.collection.model.updated sleep(0.01) - asset.model.name = f"new-{asset.model.name}" + asset.model.title = f"new-{asset.model.title}" asset.model.full_clean() asset.model.save() self.collection.model.refresh_from_db() From 9cde0881c233bd7362d8cad40f560dd708bdeb1e Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 29 Apr 2021 09:33:51 +0200 Subject: [PATCH 095/105] Changed invalid checksum:multihash examples in SPEC Those examples did not relfect a correct sha2-256 multihash. --- spec/components/schemas.yaml | 8 +++---- spec/static/spec/v0.9/openapi.yaml | 6 ++--- .../spec/v0.9/openapitransactional.yaml | 22 +++++++++---------- spec/transaction/components/examples.yaml | 6 ++--- spec/transaction/paths.yaml | 6 ++--- spec/transaction/tags.yaml | 4 ++-- 6 files changed, 26 insertions(+), 26 deletions(-) diff --git a/spec/components/schemas.yaml b/spec/components/schemas.yaml index 640ca414..57729908 100644 --- a/spec/components/schemas.yaml +++ b/spec/components/schemas.yaml @@ -187,7 +187,7 @@ components: `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) format. 
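As a side note to the corrected examples that follow: a `sha2-256` multihash is the 32-byte SHA-256 digest prefixed with the code byte `0x12` and the length byte `0x20`, which is why the valid values start with `1220` and are 68 hex characters long. A rough sketch of how such a value can be produced (the helper name is mine, not part of the project):

```python
import hashlib


def sha2_256_multihash(data: bytes) -> str:
    # multihash layout: <code 0x12 = sha2-256> <length 0x20 = 32 bytes> <digest>
    digest = hashlib.sha256(data).digest()
    return (bytes([0x12, 0x20]) + digest).hex()


# sha2_256_multihash(b"...") -> "1220" followed by 64 hex digits
```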
- example: 90e402107a7f2588a85362b9beea2a12d4514d45 + example: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC pattern: ^[a-f0-9]+$ title: Multihash type: string @@ -729,7 +729,7 @@ components: readOnly: true example: smr50-263-2016-2056-kgrs-2.5.tiff: - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC created: "2020-07-14T12:30:00Z" eo:gsd: 2.5 geoadmin:variant: kgrs @@ -738,7 +738,7 @@ components: type: image/tiff; application=geotiff updated: "2020-07-14T12:30:00Z" smr50-263-2016-2056-komb-2.5.tiff: - checksum:multihash: 90d402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC created: "2020-07-14T12:30:00Z" eo:gsd: 2.5 geoadmin:variant: komb @@ -747,7 +747,7 @@ components: type: image/tiff; application=geotiff updated: "2020-07-14T12:30:00Z" smr50-263-2016-2056-krel-2.5.tiff: - checksum:multihash: 90f402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC created: "2020-07-14T12:30:00Z" eo:gsd: 2.5 geoadmin:variant: krel diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index eceb10e6..12718b77 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -906,7 +906,7 @@ components: readOnly: true example: smr50-263-2016-2056-kgrs-2.5.tiff: - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC created: "2020-07-14T12:30:00Z" eo:gsd: 2.5 geoadmin:variant: kgrs @@ -915,7 +915,7 @@ components: type: image/tiff; application=geotiff updated: "2020-07-14T12:30:00Z" smr50-263-2016-2056-komb-2.5.tiff: - checksum:multihash: 90d402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC created: "2020-07-14T12:30:00Z" eo:gsd: 2.5 geoadmin:variant: komb @@ -924,7 +924,7 @@ components: type: image/tiff; application=geotiff updated: "2020-07-14T12:30:00Z" smr50-263-2016-2056-krel-2.5.tiff: - checksum:multihash: 90f402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC created: "2020-07-14T12:30:00Z" eo:gsd: 2.5 geoadmin:variant: krel diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index cf848edb..26b2e4ba 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -109,7 +109,7 @@ tags: "type": "application/x.filegdb+zip", "description": "My description", "proj:epsg": 2056, - "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" + "checksum:multihash": "12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC" }' ``` @@ -130,7 +130,7 @@ tags: "type": "application/x.filegdb+zip", "description": "My description", "proj:epsg": 2056, - "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" + "checksum:multihash": "12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC" }' ``` @@ -886,19 +886,19 @@ paths: part: 1 expires: '2019-08-24T14:15:22Z' created: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC - upload_id: 
KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YaaegJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG status: completed number_parts: 1 created: '2019-08-24T14:15:22Z' completed: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG status: aborted number_parts: 1 created: '2019-08-24T14:15:22Z' aborted: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC links: - rel: next href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 @@ -1900,7 +1900,7 @@ components: readOnly: true example: smr50-263-2016-2056-kgrs-2.5.tiff: - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC created: "2020-07-14T12:30:00Z" eo:gsd: 2.5 geoadmin:variant: kgrs @@ -1909,7 +1909,7 @@ components: type: image/tiff; application=geotiff updated: "2020-07-14T12:30:00Z" smr50-263-2016-2056-komb-2.5.tiff: - checksum:multihash: 90d402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC created: "2020-07-14T12:30:00Z" eo:gsd: 2.5 geoadmin:variant: komb @@ -1918,7 +1918,7 @@ components: type: image/tiff; application=geotiff updated: "2020-07-14T12:30:00Z" smr50-263-2016-2056-krel-2.5.tiff: - checksum:multihash: 90f402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC created: "2020-07-14T12:30:00Z" eo:gsd: 2.5 geoadmin:variant: krel @@ -3416,7 +3416,7 @@ components: part: 1 expires: '2019-08-24T14:15:22Z' created: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC completed: summary: Completed upload example value: @@ -3425,7 +3425,7 @@ components: number_parts: 1 created: '2019-08-24T14:15:22Z' completed: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC aborted: summary: Aborted upload example value: @@ -3434,4 +3434,4 @@ components: number_parts: 1 created: '2019-08-24T14:15:22Z' aborted: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC diff --git a/spec/transaction/components/examples.yaml b/spec/transaction/components/examples.yaml index d1f12f5d..713f3c83 100644 --- a/spec/transaction/components/examples.yaml +++ b/spec/transaction/components/examples.yaml @@ -12,7 +12,7 @@ components: part: 1 expires: '2019-08-24T14:15:22Z' created: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC completed: summary: Completed upload example value: @@ -21,7 +21,7 @@ components: number_parts: 1 created: '2019-08-24T14:15:22Z' completed: '2019-08-24T14:15:22Z' - 
checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC aborted: summary: Aborted upload example value: @@ -30,4 +30,4 @@ components: number_parts: 1 created: '2019-08-24T14:15:22Z' aborted: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 \ No newline at end of file + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC \ No newline at end of file diff --git a/spec/transaction/paths.yaml b/spec/transaction/paths.yaml index 7262bdea..2bdaef8b 100644 --- a/spec/transaction/paths.yaml +++ b/spec/transaction/paths.yaml @@ -575,19 +575,19 @@ paths: part: 1 expires: '2019-08-24T14:15:22Z' created: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YaaegJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG status: completed number_parts: 1 created: '2019-08-24T14:15:22Z' completed: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC - upload_id: KrFTuglD.N8ireqry_w3.oQqNwrYI7SfSXpVRiusKah0YigDnuM06hfJNIUZg4R_No0MMW9FLU2UG5anTW0boTUYVxKfBZWCFXqnQTpjnQEo1K7la39MYpjSTvIbZgnG status: aborted number_parts: 1 created: '2019-08-24T14:15:22Z' aborted: '2019-08-24T14:15:22Z' - checksum:multihash: 90e402107a7f2588a85362b9beea2a12d4514d45 + checksum:multihash: 12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC links: - rel: next href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr200-200-4-2019/assets/smr50-263-2016-2056-kgrs-2.5.tiff/uploads?cursor=0d34 diff --git a/spec/transaction/tags.yaml b/spec/transaction/tags.yaml index 47c1dfe8..fdb62f2f 100644 --- a/spec/transaction/tags.yaml +++ b/spec/transaction/tags.yaml @@ -91,7 +91,7 @@ tags: "type": "application/x.filegdb+zip", "description": "My description", "proj:epsg": 2056, - "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" + "checksum:multihash": "12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC" }' ``` @@ -112,7 +112,7 @@ tags: "type": "application/x.filegdb+zip", "description": "My description", "proj:epsg": 2056, - "checksum:multihash": "01205c3fd6978a7d0b051efaa4263a04" + "checksum:multihash": "12200ADEC47F803A8CF1055ED36750B3BA573C79A3AF7DA6D6F5A2AED03EA16AF3BC" }' ``` From f78cf2d3d16c4ac699d08b3e8bee7f1bcf09fd4c Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Tue, 27 Apr 2021 14:54:44 +0200 Subject: [PATCH 096/105] BGDIINF_SB-1764 copied https://geojson.org/schema/Geometry.json in schemas.yaml and adapted slightly --- spec/components/schemas.yaml | 144 ++++++ spec/static/spec/v0.9/openapi.yaml | 372 ++++++++++----- .../spec/v0.9/openapitransactional.yaml | 446 ++++++++++++------ spec/transaction/components/schemas.yaml | 2 +- 4 files changed, 696 insertions(+), 268 deletions(-) diff --git a/spec/components/schemas.yaml b/spec/components/schemas.yaml index 57729908..c798e9b3 100644 --- a/spec/components/schemas.yaml +++ b/spec/components/schemas.yaml @@ -818,6 +818,150 @@ components: description: Feature identifier (unique per collection) example: smr200-200-4-2019 type: string + itemGeometry: + oneOf: + - title: GeoJSON Point + type: object + 
required: + - type + - coordinates + properties: + type: + type: string + enum: + - Point + coordinates: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON LineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - LineString + coordinates: + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON Polygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Polygon + coordinates: + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON MultiPoint + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPoint + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON MultiLineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiLineString + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON MultiPolygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPolygon + coordinates: + type: array + items: + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number itemProperties: title: Properties description: >- diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index 12718b77..7e292904 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -189,6 +189,18 @@ paths: tags: - STAC components: + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- tag + for the selected resource. An entity-tag is an opaque identifier for different + versions of a resource over time, regardless whether multiple versions are + valid at the same time. An entity-tag consists of an opaque quoted string, + possibly prefixed by a weakness indicator. 
+ example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true schemas: assetQuery: additionalProperties: @@ -995,6 +1007,148 @@ components: description: Feature identifier (unique per collection) example: smr200-200-4-2019 type: string + itemGeometry: + oneOf: + - title: GeoJSON Point + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Point + coordinates: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON LineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - LineString + coordinates: + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON Polygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Polygon + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: number + minItems: 4 + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON MultiPoint + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPoint + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON MultiLineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiLineString + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON MultiPolygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPolygon + coordinates: + type: array + items: + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number itemProperties: title: Properties description: >- @@ -1520,113 +1674,6 @@ components: type: string format: date-time readOnly: true - parameters: - assetQuery: - description: >- - Query for properties in assets (e.g. mediatype). Use the JSON form of the - assetQueryFilter used in POST. - in: query - name: assetQuery - required: false - schema: - type: string - bbox: - explode: false - in: query - name: bbox - required: false - schema: - $ref: "#/components/schemas/bbox" - style: form - example: - collectionId: - description: Local identifier of a collection - in: path - name: collectionId - required: true - schema: - type: string - collectionsArray: - explode: false - in: query - name: collections - required: false - schema: - $ref: "#/components/schemas/collectionsArray" - datetime: - explode: false - in: query - name: datetime - required: false - schema: - $ref: "#/components/schemas/datetimeQuery" - example: 2018-02-12T00%3A00%3A00Z%2F2018-03-18T12%3A31%3A12Z - style: form - featureId: - description: Local identifier of a feature - in: path - name: featureId - required: true - schema: - type: string - ids: - description: >- - Array of Item ids to return. 
All other filter parameters that further restrict - the number of search results are ignored - explode: false - in: query - name: ids - required: false - schema: - $ref: "#/components/schemas/ids" - limit: - explode: false - in: query - name: limit - required: false - schema: - $ref: "#/components/schemas/limit" - style: form - query: - description: Query for properties in items. Use the JSON form of the queryFilter - used in POST. - in: query - name: query - required: false - schema: - type: string - IfNoneMatch: - name: If-None-Match - in: header - schema: - type: string - description: >- - The RFC7232 `If-None-Match` header field makes the GET request method conditional. - It is composed of a comma separated list of ETags or value "*". - - - The server compares the client's ETags (sent with `If-None-Match`) with the - ETag for its current version of the resource, and if both values match (that - is, the resource has not changed), the server sends back a `304 Not Modified` - status, without a body, which tells the client that the cached version of - the response is still good to use (fresh). - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - IfMatch: - name: If-Match - in: header - schema: - type: string - description: >- - The RFC7232 `If-Match` header field makes the GET request method conditional. - It is composed of a comma separated list of ETags or value "*". - - - The server compares the client's ETags (sent with `If-Match`) with the ETag - for its current version of the resource, and if both values don't match (that - is, the resource has changed), the server sends back a `412 Precondition Failed` - status, without a body, which tells the client that the cached version of - the response is not good to use anymore. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" responses: Collection: headers: @@ -1833,15 +1880,110 @@ components: example: code: 500 description: "Internal server error" - headers: - ETag: + parameters: + assetQuery: + description: >- + Query for properties in assets (e.g. mediatype). Use the JSON form of the + assetQueryFilter used in POST. + in: query + name: assetQuery + required: false + schema: + type: string + bbox: + explode: false + in: query + name: bbox + required: false + schema: + $ref: "#/components/schemas/bbox" + style: form + example: + collectionId: + description: Local identifier of a collection + in: path + name: collectionId + required: true + schema: + type: string + collectionsArray: + explode: false + in: query + name: collections + required: false + schema: + $ref: "#/components/schemas/collectionsArray" + datetime: + explode: false + in: query + name: datetime + required: false + schema: + $ref: "#/components/schemas/datetimeQuery" + example: 2018-02-12T00%3A00%3A00Z%2F2018-03-18T12%3A31%3A12Z + style: form + featureId: + description: Local identifier of a feature + in: path + name: featureId + required: true schema: type: string + ids: description: >- - The RFC7232 ETag header field in a response provides the current entity- tag - for the selected resource. An entity-tag is an opaque identifier for different - versions of a resource over time, regardless whether multiple versions are - valid at the same time. An entity-tag consists of an opaque quoted string, - possibly prefixed by a weakness indicator. + Array of Item ids to return. 
All other filter parameters that further restrict + the number of search results are ignored + explode: false + in: query + name: ids + required: false + schema: + $ref: "#/components/schemas/ids" + limit: + explode: false + in: query + name: limit + required: false + schema: + $ref: "#/components/schemas/limit" + style: form + query: + description: Query for properties in items. Use the JSON form of the queryFilter + used in POST. + in: query + name: query + required: false + schema: + type: string + IfNoneMatch: + name: If-None-Match + in: header + schema: + type: string + description: >- + The RFC7232 `If-None-Match` header field makes the GET request method conditional. + It is composed of a comma separated list of ETags or value "*". + + + The server compares the client's ETags (sent with `If-None-Match`) with the + ETag for its current version of the resource, and if both values match (that + is, the resource has not changed), the server sends back a `304 Not Modified` + status, without a body, which tells the client that the cached version of + the response is still good to use (fresh). + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + IfMatch: + name: If-Match + in: header + schema: + type: string + description: >- + The RFC7232 `If-Match` header field makes the GET request method conditional. + It is composed of a comma separated list of ETags or value "*". + + + The server compares the client's ETags (sent with `If-Match`) with the ETag + for its current version of the resource, and if both values don't match (that + is, the resource has changed), the server sends back a `412 Precondition Failed` + status, without a body, which tells the client that the cached version of + the response is not good to use anymore. example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - required: true diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index 26b2e4ba..121f1f69 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -1183,6 +1183,18 @@ paths: code: 400 description: "Unable to log in with provided credentials." components: + headers: + ETag: + schema: + type: string + description: >- + The RFC7232 ETag header field in a response provides the current entity- tag + for the selected resource. An entity-tag is an opaque identifier for different + versions of a resource over time, regardless whether multiple versions are + valid at the same time. An entity-tag consists of an opaque quoted string, + possibly prefixed by a weakness indicator. 
+ example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + required: true schemas: assetQuery: additionalProperties: @@ -1989,6 +2001,148 @@ components: description: Feature identifier (unique per collection) example: smr200-200-4-2019 type: string + itemGeometry: + oneOf: + - title: GeoJSON Point + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Point + coordinates: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON LineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - LineString + coordinates: + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON Polygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Polygon + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: number + minItems: 4 + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON MultiPoint + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPoint + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON MultiLineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiLineString + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number + - title: GeoJSON MultiPolygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPolygon + coordinates: + type: array + items: + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + bbox: + type: array + minItems: 4 + items: + type: number itemProperties: title: Properties description: >- @@ -2656,7 +2810,7 @@ components: id: $ref: "#/components/schemas/itemIdUpdate" geometry: - $ref: "https://geojson.org/schema/Geometry.json" + $ref: "#/components/schemas/itemGeometry" properties: $ref: "#/components/schemas/itemProperties" links: @@ -2997,150 +3151,6 @@ components: type: string description: The RFC7232 ETag for the specified uploaded part. example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - parameters: - assetQuery: - description: >- - Query for properties in assets (e.g. mediatype). Use the JSON form of the - assetQueryFilter used in POST. 
- in: query - name: assetQuery - required: false - schema: - type: string - bbox: - explode: false - in: query - name: bbox - required: false - schema: - $ref: "#/components/schemas/bbox" - style: form - example: - collectionId: - description: Local identifier of a collection - in: path - name: collectionId - required: true - schema: - type: string - collectionsArray: - explode: false - in: query - name: collections - required: false - schema: - $ref: "#/components/schemas/collectionsArray" - datetime: - explode: false - in: query - name: datetime - required: false - schema: - $ref: "#/components/schemas/datetimeQuery" - example: 2018-02-12T00%3A00%3A00Z%2F2018-03-18T12%3A31%3A12Z - style: form - featureId: - description: Local identifier of a feature - in: path - name: featureId - required: true - schema: - type: string - ids: - description: >- - Array of Item ids to return. All other filter parameters that further restrict - the number of search results are ignored - explode: false - in: query - name: ids - required: false - schema: - $ref: "#/components/schemas/ids" - limit: - explode: false - in: query - name: limit - required: false - schema: - $ref: "#/components/schemas/limit" - style: form - query: - description: Query for properties in items. Use the JSON form of the queryFilter - used in POST. - in: query - name: query - required: false - schema: - type: string - IfNoneMatch: - name: If-None-Match - in: header - schema: - type: string - description: >- - The RFC7232 `If-None-Match` header field makes the GET request method conditional. - It is composed of a comma separated list of ETags or value "*". - - - The server compares the client's ETags (sent with `If-None-Match`) with the - ETag for its current version of the resource, and if both values match (that - is, the resource has not changed), the server sends back a `304 Not Modified` - status, without a body, which tells the client that the cached version of - the response is still good to use (fresh). - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - IfMatch: - name: If-Match - in: header - schema: - type: string - description: >- - The RFC7232 `If-Match` header field makes the GET request method conditional. - It is composed of a comma separated list of ETags or value "*". - - - The server compares the client's ETags (sent with `If-Match`) with the ETag - for its current version of the resource, and if both values don't match (that - is, the resource has changed), the server sends back a `412 Precondition Failed` - status, without a body, which tells the client that the cached version of - the response is not good to use anymore. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" - assetId: - name: assetId - in: path - description: Local identifier of an asset. - required: true - schema: - type: string - uploadId: - name: uploadId - in: path - description: Local identifier of an asset's upload. - required: true - schema: - type: string - presignedUrl: - name: presignedUrl - in: path - description: Presigned url returned by [Create a new Asset's multipart upload](#operation/createAssetUpload). - required: true - schema: - type: string - IfMatchWrite: - name: If-Match - in: header - schema: - type: string - description: >- - The RFC7232 `If-Match` header field makes the PUT/PATCH/DEL request method - conditional. It is composed of a comma separated list of ETags or value "*". 
- - - The server compares the client's ETags (sent with `If-Match`) with the ETag - for its current version of the resource, and if both values don't match (that - is, the resource has changed), the server sends back a `412 Precondition Failed` - status, without a body, which tells the client that he would overwrite another - changes of the resource. - example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" responses: Collection: headers: @@ -3392,18 +3402,150 @@ components: required: - code - links - headers: - ETag: + parameters: + assetQuery: + description: >- + Query for properties in assets (e.g. mediatype). Use the JSON form of the + assetQueryFilter used in POST. + in: query + name: assetQuery + required: false + schema: + type: string + bbox: + explode: false + in: query + name: bbox + required: false + schema: + $ref: "#/components/schemas/bbox" + style: form + example: + collectionId: + description: Local identifier of a collection + in: path + name: collectionId + required: true + schema: + type: string + collectionsArray: + explode: false + in: query + name: collections + required: false + schema: + $ref: "#/components/schemas/collectionsArray" + datetime: + explode: false + in: query + name: datetime + required: false + schema: + $ref: "#/components/schemas/datetimeQuery" + example: 2018-02-12T00%3A00%3A00Z%2F2018-03-18T12%3A31%3A12Z + style: form + featureId: + description: Local identifier of a feature + in: path + name: featureId + required: true schema: type: string + ids: description: >- - The RFC7232 ETag header field in a response provides the current entity- tag - for the selected resource. An entity-tag is an opaque identifier for different - versions of a resource over time, regardless whether multiple versions are - valid at the same time. An entity-tag consists of an opaque quoted string, - possibly prefixed by a weakness indicator. + Array of Item ids to return. All other filter parameters that further restrict + the number of search results are ignored + explode: false + in: query + name: ids + required: false + schema: + $ref: "#/components/schemas/ids" + limit: + explode: false + in: query + name: limit + required: false + schema: + $ref: "#/components/schemas/limit" + style: form + query: + description: Query for properties in items. Use the JSON form of the queryFilter + used in POST. + in: query + name: query + required: false + schema: + type: string + IfNoneMatch: + name: If-None-Match + in: header + schema: + type: string + description: >- + The RFC7232 `If-None-Match` header field makes the GET request method conditional. + It is composed of a comma separated list of ETags or value "*". + + + The server compares the client's ETags (sent with `If-None-Match`) with the + ETag for its current version of the resource, and if both values match (that + is, the resource has not changed), the server sends back a `304 Not Modified` + status, without a body, which tells the client that the cached version of + the response is still good to use (fresh). example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + IfMatch: + name: If-Match + in: header + schema: + type: string + description: >- + The RFC7232 `If-Match` header field makes the GET request method conditional. + It is composed of a comma separated list of ETags or value "*". 
+ + + The server compares the client's ETags (sent with `If-Match`) with the ETag + for its current version of the resource, and if both values don't match (that + is, the resource has changed), the server sends back a `412 Precondition Failed` + status, without a body, which tells the client that the cached version of + the response is not good to use anymore. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" + assetId: + name: assetId + in: path + description: Local identifier of an asset. + required: true + schema: + type: string + uploadId: + name: uploadId + in: path + description: Local identifier of an asset's upload. required: true + schema: + type: string + presignedUrl: + name: presignedUrl + in: path + description: Presigned url returned by [Create a new Asset's multipart upload](#operation/createAssetUpload). + required: true + schema: + type: string + IfMatchWrite: + name: If-Match + in: header + schema: + type: string + description: >- + The RFC7232 `If-Match` header field makes the PUT/PATCH/DEL request method + conditional. It is composed of a comma separated list of ETags or value "*". + + + The server compares the client's ETags (sent with `If-Match`) with the ETag + for its current version of the resource, and if both values don't match (that + is, the resource has changed), the server sends back a `412 Precondition Failed` + status, without a body, which tells the client that he would overwrite another + changes of the resource. + example: "d01af8b8ebbf899e30095be8754b377ddb0f0ed0f7fddbc33ac23b0d1969736b" examples: inprogress: summary: In progress upload example diff --git a/spec/transaction/components/schemas.yaml b/spec/transaction/components/schemas.yaml index cd4088c8..3415bd3f 100644 --- a/spec/transaction/components/schemas.yaml +++ b/spec/transaction/components/schemas.yaml @@ -139,7 +139,7 @@ components: id: $ref: "#/components/schemas/itemIdUpdate" geometry: - $ref: "https://geojson.org/schema/Geometry.json" + $ref: "../../components/schemas.yaml#/components/schemas/itemGeometry" properties: $ref: "../../components/schemas.yaml#/components/schemas/itemProperties" links: From 0c81b037ffcb88dfc3178ea8f6a7299e2859948f Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Tue, 27 Apr 2021 21:51:44 +0200 Subject: [PATCH 097/105] BGDIINF_SB-1764 item's geometry for POST, PUT and PATCH and GET Description of item's geometry should now show all availabel geometry options for all possible requests, i.e. GeoJSON Point, GeoJSON LineString, GeoJSON Polygon, GeoJSON MultiPoint or GeoJSON MultiLineString or GeoJSON MultiPolygon. 
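Concretely, the `geometry` member of an item sent in a POST, PUT or PATCH body must now validate against one of the listed GeoJSON types. A sketch of such a member, reusing the sample polygon coordinates introduced later in this patch series (shown here as a Python literal for illustration only):

```python
# GeoJSON Polygon in WGS84 longitude/latitude, as accepted by the itemGeometry schema.
geometry = {
    "type": "Polygon",
    "coordinates": [[
        [7.0906823, 45.9160584],
        [7.1035698, 45.9160977],
        [7.1035146, 45.925093],
        [7.0906249, 45.9250537],
        [7.0906823, 45.9160584],  # last position repeats the first to close the ring
    ]],
}
```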
--- spec/components/schemas.yaml | 21 ++++++++++--------- spec/static/spec/v0.9/openapi.yaml | 5 +---- .../spec/v0.9/openapitransactional.yaml | 5 +---- 3 files changed, 13 insertions(+), 18 deletions(-) diff --git a/spec/components/schemas.yaml b/spec/components/schemas.yaml index c798e9b3..d88bca87 100644 --- a/spec/components/schemas.yaml +++ b/spec/components/schemas.yaml @@ -575,15 +575,15 @@ components: example: komb title: Product variants type: string - geometryGeoJSON: - oneOf: - # - $ref: "./schemas.yaml#/components/schemas/pointGeoJSON" - # - $ref: "./schemas.yaml#/components/schemas/multipointGeoJSON" - # - $ref: "./schemas.yaml#/components/schemas/linestringGeoJSON" - # - $ref: "./schemas.yaml#/components/schemas/multilinestringGeoJSON" - - $ref: "./schemas.yaml#/components/schemas/polygonGeoJSON" - # - $ref: "./schemas.yaml#/components/schemas/multipolygonGeoJSON" - # - $ref: "./schemas.yaml#/components/schemas/geometrycollectionGeoJSON" + # geometryGeoJSON: + # oneOf: + # # - $ref: "./schemas.yaml#/components/schemas/pointGeoJSON" + # # - $ref: "./schemas.yaml#/components/schemas/multipointGeoJSON" + # # - $ref: "./schemas.yaml#/components/schemas/linestringGeoJSON" + # # - $ref: "./schemas.yaml#/components/schemas/multilinestringGeoJSON" + # - $ref: "./schemas.yaml#/components/schemas/polygonGeoJSON" + # # - $ref: "./schemas.yaml#/components/schemas/multipolygonGeoJSON" + # # - $ref: "./schemas.yaml#/components/schemas/geometrycollectionGeoJSON" geometrycollectionGeoJSON: properties: geometries: @@ -644,11 +644,12 @@ components: bbox: $ref: "./schemas.yaml#/components/schemas/bbox" geometry: + $ref: "./schemas.yaml#/components/schemas/itemGeometry" # we could use the 'original' schema, but it doesn't # contain a useful example, hence we use our own # schema with appropriate example # $ref: https://geojson.org/schema/Geometry.json - $ref: "./schemas.yaml#/components/schemas/geometryGeoJSON" + # $ref: "./schemas.yaml#/components/schemas/geometryGeoJSON" properties: $ref: "./schemas.yaml#/components/schemas/itemProperties" readOnly: true diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index 7e292904..c9bb602c 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -774,9 +774,6 @@ components: example: komb title: Product variants type: string - geometryGeoJSON: - oneOf: - - $ref: "#/components/schemas/polygonGeoJSON" geometrycollectionGeoJSON: properties: geometries: @@ -837,7 +834,7 @@ components: bbox: $ref: "#/components/schemas/bbox" geometry: - $ref: "#/components/schemas/geometryGeoJSON" + $ref: "#/components/schemas/itemGeometry" properties: $ref: "#/components/schemas/itemProperties" readOnly: true diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index 121f1f69..27aba036 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -1768,9 +1768,6 @@ components: example: komb title: Product variants type: string - geometryGeoJSON: - oneOf: - - $ref: "#/components/schemas/polygonGeoJSON" geometrycollectionGeoJSON: properties: geometries: @@ -1831,7 +1828,7 @@ components: bbox: $ref: "#/components/schemas/bbox" geometry: - $ref: "#/components/schemas/geometryGeoJSON" + $ref: "#/components/schemas/itemGeometry" properties: $ref: "#/components/schemas/itemProperties" readOnly: true From 5e1b89bf4ab49b2aaa97cdbf7a119e73f54dbea6 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Thu, 
29 Apr 2021 17:21:31 +0200 Subject: [PATCH 098/105] BGDIINF_SB-1764 bbox, sbreakdown schemas for geom types, adapted bbox for items adapted bbox to fit to the sample item rather than whole CH splitted geomtypes in oneOf --- spec/components/schemas.yaml | 413 +++++++++++------- spec/static/spec/v0.9/openapi.yaml | 317 ++++++++------ .../spec/v0.9/openapitransactional.yaml | 357 ++++++++------- spec/transaction/paths.yaml | 38 +- 4 files changed, 652 insertions(+), 473 deletions(-) diff --git a/spec/components/schemas.yaml b/spec/components/schemas.yaml index d88bca87..45e26bd4 100644 --- a/spec/components/schemas.yaml +++ b/spec/components/schemas.yaml @@ -105,7 +105,6 @@ components: $ref: "./schemas.yaml#/components/schemas/updated" bbox: description: | - Only features that have a geometry that intersects the bounding box are selected. The bounding box is provided as four numbers: * Lower left corner, coordinate axis 1 @@ -127,10 +126,10 @@ components: represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 + - 7.0906249 + - 45.9160584 + - 7.1035698 + - 45.925093 items: type: number maxItems: 4 @@ -168,10 +167,10 @@ components: represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 + - 7.0906249 + - 45.9160584 + - 7.1035698 + - 45.925093 items: type: number maxItems: 4 @@ -584,20 +583,20 @@ components: # - $ref: "./schemas.yaml#/components/schemas/polygonGeoJSON" # # - $ref: "./schemas.yaml#/components/schemas/multipolygonGeoJSON" # # - $ref: "./schemas.yaml#/components/schemas/geometrycollectionGeoJSON" - geometrycollectionGeoJSON: - properties: - geometries: - items: - $ref: "./schemas.yaml#/components/schemas/geometryGeoJSON" - type: array - type: - enum: - - GeometryCollection - type: string - required: - - type - - geometries - type: object + # geometrycollectionGeoJSON: + # properties: + # geometries: + # items: + # $ref: "./schemas.yaml#/components/schemas/geometryGeoJSON" + # type: array + # type: + # enum: + # - GeometryCollection + # type: string + # required: + # - type + # - geometries + # type: object href: type: string format: url @@ -821,148 +820,248 @@ components: type: string itemGeometry: oneOf: - - title: GeoJSON Point - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - Point - coordinates: - type: array - minItems: 2 - items: - type: number - bbox: - type: array - minItems: 4 - items: - type: number - - title: GeoJSON LineString - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - LineString - coordinates: + # - $ref: "#/components/schemas/geoJsonPoint" + # - $ref: "#/components/schemas/geoJsonLineString" + - $ref: "#/components/schemas/geoJsonPolygon" + # - $ref: "#/components/schemas/geoJsonMultiPoint" + # - $ref: "#/components/schemas/geoJsonMultiLineString" + # - $ref: "#/components/schemas/geoJsonMultiPolygon" + geoJsonPoint: + title: GeoJSON Point + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Point + coordinates: + description: >- + For type "Point", the "coordinates" member is a single position. + The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). 
+ example: + [7.0906823, 45.9160584] + type: array + minItems: 2 + items: + type: number + # bbox: + # $ref: "./schemas.yaml#/components/schemas/bbox" + # type: array + # minItems: 4 + # items: + # type: number + geoJsonLineString: + title: GeoJSON LineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - LineString + coordinates: + description: >- + For type "LineString", the "coordinates" member is an array of two or + more positions. + The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + - [ + [7.0906823, 45.9160584], [7.1035698, 45.9160977] + ] + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + # bbox: + # $ref: "./schemas.yaml#/components/schemas/bbox" + # type: array + # minItems: 4 + # items: + # type: number + geoJsonPolygon: + title: GeoJSON Polygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Polygon + coordinates: + description: >- + For type "Polygon", the "coordinates" member MUST be an array of + linear ring coordinate arrays. + The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + [ + [ + [7.0906823, 45.9160584], + [7.1035698, 45.9160977], + [7.1035146, 45.925093], + [7.0906249, 45.9250537], + [7.0906823, 45.9160584] + ] + ] + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + # bbox: + # $ref: "./schemas.yaml#/components/schemas/bbox" + # example: + # 7.0906249 + # 45.9160584 + # 7.1035698 + # 45.925093 + # type: array + # minItems: 4 + # items: + # type: number + geoJsonMultiPoint: + title: GeoJSON MultiPoint + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPoint + coordinates: + description: >- + For type "MultiPoint", the "coordinates" member is an array of + positions. + The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + [ + [7.0906823, 45.9160584], + [7.1035698, 45.9160977], + [7.1035146, 45.925093] + ] + type: array + items: + type: array + minItems: 2 + items: + type: number + # bbox: + # $ref: "./schemas.yaml#/components/schemas/bbox" + # type: array + # minItems: 4 + # items: + # type: number + geoJsonMultiLineString: + title: GeoJSON MultiLineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiLineString + coordinates: + description: >- + For type "MultiLineString", the "coordinates" member is an array of + LineString coordinate arrays. + The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). 
+ example: + [ + [[7.0906823, 45.9160584], [7.1035698, 45.9160977]], + [[7.1035146, 45.925093], [7.0906249, 45.9250537]] + ] + type: array + items: + type: array + minItems: 2 + items: type: array minItems: 2 - items: - type: array - minItems: 2 - items: - type: number - bbox: - type: array - minItems: 4 items: type: number - - title: GeoJSON Polygon - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - Polygon - coordinates: - type: array - items: - type: array - minItems: 4 - items: - type: array - minItems: 2 - items: - type: number - bbox: - type: array - minItems: 4 - items: - type: number - - title: GeoJSON MultiPoint - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - MultiPoint - coordinates: - type: array - items: - type: array - minItems: 2 - items: - type: number - bbox: - type: array - minItems: 4 - items: - type: number - - title: GeoJSON MultiLineString - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - MultiLineString - coordinates: - type: array - items: - type: array - minItems: 2 - items: - type: array - minItems: 2 - items: - type: number - bbox: + # bbox: + # $ref: "./schemas.yaml#/components/schemas/bbox" + # type: array + # minItems: 4 + # items: + # type: number + geoJsonMultiPolygon: + title: GeoJSON MultiPolygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPolygon + coordinates: + description: >- + For type "MultiPolygon", the "coordinates" member is an array of + Polygon coordinate arrays. + The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + [ + [ + [ + [7.0906823, 45.9160584], + [7.1035698, 45.9160977], + [7.1035146, 45.925093], + [7.0906249, 45.9250537], + [7.0906823, 45.9160584] + ] + ], + [ + [ + [8.5816399, 45.7218735], + [8.5944806, 45.7217417], + [8.5946699, 45.7307358], + [8.581827, 45.7308676], + [8.5816399, 45.7218735] + ] + ] + ] + type: array + items: + type: array + items: type: array minItems: 4 items: - type: number - - title: GeoJSON MultiPolygon - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - MultiPolygon - coordinates: - type: array - items: - type: array + type: array + minItems: 2 items: - type: array - minItems: 4 - items: - type: array - minItems: 2 - items: - type: number - bbox: - type: array - minItems: 4 - items: - type: number + type: number + # bbox: + # $ref: "./schemas.yaml#/components/schemas/bbox" + # type: array + # minItems: 4 + # items: + # type: number itemProperties: title: Properties description: >- diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index c9bb602c..c43fda26 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -304,7 +304,6 @@ components: $ref: "#/components/schemas/updated" bbox: description: | - Only features that have a geometry that intersects the bounding box are selected. The bounding box is provided as four numbers: * Lower left corner, coordinate axis 1 @@ -326,10 +325,10 @@ components: represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." 
example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 + - 7.0906249 + - 45.9160584 + - 7.1035698 + - 45.925093 items: type: number maxItems: 4 @@ -365,10 +364,10 @@ components: and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 + - 7.0906249 + - 45.9160584 + - 7.1035698 + - 45.925093 items: type: number maxItems: 4 @@ -774,20 +773,6 @@ components: example: komb title: Product variants type: string - geometrycollectionGeoJSON: - properties: - geometries: - items: - $ref: "#/components/schemas/geometryGeoJSON" - type: array - type: - enum: - - GeometryCollection - type: string - required: - - type - - geometries - type: object href: type: string format: url @@ -1006,146 +991,202 @@ components: type: string itemGeometry: oneOf: - - title: GeoJSON Point - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - Point - coordinates: + - $ref: "#/components/schemas/geoJsonPolygon" + geoJsonPoint: + title: GeoJSON Point + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Point + coordinates: + description: >- + For type "Point", the "coordinates" member is a single position. The coordinate + reference system of the values is WGS84 longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + - 7.0906823 + - 45.9160584 + type: array + minItems: 2 + items: + type: number + geoJsonLineString: + title: GeoJSON LineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - LineString + coordinates: + description: >- + For type "LineString", the "coordinates" member is an array of two or + more positions. The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + - - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + type: array + minItems: 2 + items: type: array minItems: 2 items: type: number - bbox: - type: array - minItems: 4 - items: - type: number - - title: GeoJSON LineString - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - LineString - coordinates: + geoJsonPolygon: + title: GeoJSON Polygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Polygon + coordinates: + description: >- + For type "Polygon", the "coordinates" member MUST be an array of linear + ring coordinate arrays. The coordinate reference system of the values + is WGS84 longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). 
+ example: + - - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + - - 7.1035146 + - 45.925093 + - - 7.0906249 + - 45.9250537 + - - 7.0906823 + - 45.9160584 + type: array + items: type: array minItems: 2 - items: - type: array - minItems: 2 - items: - type: number - bbox: - type: array - minItems: 4 items: type: number - - title: GeoJSON Polygon - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - Polygon - coordinates: - type: array - items: - type: array - minItems: 2 - items: - type: number - minItems: 4 - bbox: + minItems: 4 + geoJsonMultiPoint: + title: GeoJSON MultiPoint + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPoint + coordinates: + description: >- + For type "MultiPoint", the "coordinates" member is an array of positions. + The coordinate reference system of the values is WGS84 longitude/latitude + (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + - - 7.1035146 + - 45.925093 + type: array + items: type: array - minItems: 4 + minItems: 2 items: type: number - - title: GeoJSON MultiPoint - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - MultiPoint - coordinates: + geoJsonMultiLineString: + title: GeoJSON MultiLineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiLineString + coordinates: + description: >- + For type "MultiLineString", the "coordinates" member is an array of LineString + coordinate arrays. The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + - - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + - - - 7.1035146 + - 45.925093 + - - 7.0906249 + - 45.9250537 + type: array + items: type: array + minItems: 2 items: type: array minItems: 2 items: type: number - bbox: - type: array - minItems: 4 - items: - type: number - - title: GeoJSON MultiLineString - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - MultiLineString - coordinates: + geoJsonMultiPolygon: + title: GeoJSON MultiPolygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPolygon + coordinates: + description: >- + For type "MultiPolygon", the "coordinates" member is an array of Polygon + coordinate arrays. The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). 
+ example: + - - - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + - - 7.1035146 + - 45.925093 + - - 7.0906249 + - 45.9250537 + - - 7.0906823 + - 45.9160584 + - - - - 8.5816399 + - 45.7218735 + - - 8.5944806 + - 45.7217417 + - - 8.5946699 + - 45.7307358 + - - 8.581827 + - 45.7308676 + - - 8.5816399 + - 45.7218735 + type: array + items: type: array items: type: array - minItems: 2 + minItems: 4 items: type: array minItems: 2 items: type: number - bbox: - type: array - minItems: 4 - items: - type: number - - title: GeoJSON MultiPolygon - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - MultiPolygon - coordinates: - type: array - items: - type: array - items: - type: array - minItems: 4 - items: - type: array - minItems: 2 - items: - type: number - bbox: - type: array - minItems: 4 - items: - type: number itemProperties: title: Properties description: >- diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index 27aba036..3f8b140b 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -402,16 +402,16 @@ paths: geometry: type: Polygon coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 + - - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + - - 7.1035146 + - 45.925093 + - - 7.0906249 + - 45.9250537 + - - 7.0906823 + - 45.9160584 properties: datetime: "2016-05-03T13:22:30.040Z" title: A CS3 item @@ -495,16 +495,16 @@ paths: geometry: type: Polygon coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 + - - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + - - 7.1035146 + - 45.925093 + - - 7.0906249 + - 45.9250537 + - - 7.0906823 + - 45.9160584 properties: datetime: "2016-05-03T13:22:30.040Z" title: A CS3 item @@ -1298,7 +1298,6 @@ components: $ref: "#/components/schemas/updated" bbox: description: | - Only features that have a geometry that intersects the bounding box are selected. The bounding box is provided as four numbers: * Lower left corner, coordinate axis 1 @@ -1320,10 +1319,10 @@ components: represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 + - 7.0906249 + - 45.9160584 + - 7.1035698 + - 45.925093 items: type: number maxItems: 4 @@ -1359,10 +1358,10 @@ components: and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." 
example: - - 5.96 - - 45.82 - - 10.49 - - 47.81 + - 7.0906249 + - 45.9160584 + - 7.1035698 + - 45.925093 items: type: number maxItems: 4 @@ -1768,20 +1767,6 @@ components: example: komb title: Product variants type: string - geometrycollectionGeoJSON: - properties: - geometries: - items: - $ref: "#/components/schemas/geometryGeoJSON" - type: array - type: - enum: - - GeometryCollection - type: string - required: - - type - - geometries - type: object href: type: string format: url @@ -2000,146 +1985,202 @@ components: type: string itemGeometry: oneOf: - - title: GeoJSON Point - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - Point - coordinates: + - $ref: "#/components/schemas/geoJsonPolygon" + geoJsonPoint: + title: GeoJSON Point + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Point + coordinates: + description: >- + For type "Point", the "coordinates" member is a single position. The coordinate + reference system of the values is WGS84 longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + - 7.0906823 + - 45.9160584 + type: array + minItems: 2 + items: + type: number + geoJsonLineString: + title: GeoJSON LineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - LineString + coordinates: + description: >- + For type "LineString", the "coordinates" member is an array of two or + more positions. The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + - - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + type: array + minItems: 2 + items: type: array minItems: 2 items: type: number - bbox: - type: array - minItems: 4 - items: - type: number - - title: GeoJSON LineString - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - LineString - coordinates: + geoJsonPolygon: + title: GeoJSON Polygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Polygon + coordinates: + description: >- + For type "Polygon", the "coordinates" member MUST be an array of linear + ring coordinate arrays. The coordinate reference system of the values + is WGS84 longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + - - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + - - 7.1035146 + - 45.925093 + - - 7.0906249 + - 45.9250537 + - - 7.0906823 + - 45.9160584 + type: array + items: type: array minItems: 2 - items: - type: array - minItems: 2 - items: - type: number - bbox: - type: array - minItems: 4 items: type: number - - title: GeoJSON Polygon - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - Polygon - coordinates: - type: array - items: - type: array - minItems: 2 - items: - type: number - minItems: 4 - bbox: + minItems: 4 + geoJsonMultiPoint: + title: GeoJSON MultiPoint + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPoint + coordinates: + description: >- + For type "MultiPoint", the "coordinates" member is an array of positions. + The coordinate reference system of the values is WGS84 longitude/latitude + (http://www.opengis.net/def/crs/OGC/1.3/CRS84). 
+ example: + - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + - - 7.1035146 + - 45.925093 + type: array + items: type: array - minItems: 4 + minItems: 2 items: type: number - - title: GeoJSON MultiPoint - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - MultiPoint - coordinates: + geoJsonMultiLineString: + title: GeoJSON MultiLineString + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiLineString + coordinates: + description: >- + For type "MultiLineString", the "coordinates" member is an array of LineString + coordinate arrays. The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + - - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + - - - 7.1035146 + - 45.925093 + - - 7.0906249 + - 45.9250537 + type: array + items: type: array + minItems: 2 items: type: array minItems: 2 items: type: number - bbox: - type: array - minItems: 4 - items: - type: number - - title: GeoJSON MultiLineString - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - MultiLineString - coordinates: + geoJsonMultiPolygon: + title: GeoJSON MultiPolygon + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPolygon + coordinates: + description: >- + For type "MultiPolygon", the "coordinates" member is an array of Polygon + coordinate arrays. The coordinate reference system of the values is WGS84 + longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84). + example: + - - - - 7.0906823 + - 45.9160584 + - - 7.1035698 + - 45.9160977 + - - 7.1035146 + - 45.925093 + - - 7.0906249 + - 45.9250537 + - - 7.0906823 + - 45.9160584 + - - - - 8.5816399 + - 45.7218735 + - - 8.5944806 + - 45.7217417 + - - 8.5946699 + - 45.7307358 + - - 8.581827 + - 45.7308676 + - - 8.5816399 + - 45.7218735 + type: array + items: type: array items: type: array - minItems: 2 + minItems: 4 items: type: array minItems: 2 items: type: number - bbox: - type: array - minItems: 4 - items: - type: number - - title: GeoJSON MultiPolygon - type: object - required: - - type - - coordinates - properties: - type: - type: string - enum: - - MultiPolygon - coordinates: - type: array - items: - type: array - items: - type: array - minItems: 4 - items: - type: array - minItems: 2 - items: - type: number - bbox: - type: array - minItems: 4 - items: - type: number itemProperties: title: Properties description: >- diff --git a/spec/transaction/paths.yaml b/spec/transaction/paths.yaml index 2bdaef8b..91313e75 100644 --- a/spec/transaction/paths.yaml +++ b/spec/transaction/paths.yaml @@ -175,16 +175,15 @@ paths: geometry: type: Polygon coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 + [ + [ + [7.0906823, 45.9160584], + [7.1035698, 45.9160977], + [7.1035146, 45.925093], + [7.0906249, 45.9250537], + [7.0906823, 45.9160584] + ] + ] properties: datetime: "2016-05-03T13:22:30.040Z" title: A CS3 item @@ -241,16 +240,15 @@ paths: geometry: type: Polygon coordinates: - - - - -122.308150179 - - 37.488035566 - - - -122.597502109 - - 37.538869539 - - - -122.576687533 - - 37.613537207 - - - -122.2880486 - - 37.562818007 - - - -122.308150179 - - 37.488035566 + [ + [ + [7.0906823, 45.9160584], + [7.1035698, 45.9160977], + 
[7.1035146, 45.925093], + [7.0906249, 45.9250537], + [7.0906823, 45.9160584] + ] + ] properties: datetime: "2016-05-03T13:22:30.040Z" title: A CS3 item From a535f14a20dcab5c16aecead171c4a1fd4fec21a Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Thu, 29 Apr 2021 20:46:13 +0200 Subject: [PATCH 099/105] BGDIINF_SB-1764 added coordinate description to item intersects query | removed commented out lines --- spec/components/schemas.yaml | 86 +++---------------- spec/static/spec/v0.9/openapi.yaml | 27 +++--- .../spec/v0.9/openapitransactional.yaml | 35 +++++--- 3 files changed, 52 insertions(+), 96 deletions(-) diff --git a/spec/components/schemas.yaml b/spec/components/schemas.yaml index 45e26bd4..8b3cf9e3 100644 --- a/spec/components/schemas.yaml +++ b/spec/components/schemas.yaml @@ -574,29 +574,6 @@ components: example: komb title: Product variants type: string - # geometryGeoJSON: - # oneOf: - # # - $ref: "./schemas.yaml#/components/schemas/pointGeoJSON" - # # - $ref: "./schemas.yaml#/components/schemas/multipointGeoJSON" - # # - $ref: "./schemas.yaml#/components/schemas/linestringGeoJSON" - # # - $ref: "./schemas.yaml#/components/schemas/multilinestringGeoJSON" - # - $ref: "./schemas.yaml#/components/schemas/polygonGeoJSON" - # # - $ref: "./schemas.yaml#/components/schemas/multipolygonGeoJSON" - # # - $ref: "./schemas.yaml#/components/schemas/geometrycollectionGeoJSON" - # geometrycollectionGeoJSON: - # properties: - # geometries: - # items: - # $ref: "./schemas.yaml#/components/schemas/geometryGeoJSON" - # type: array - # type: - # enum: - # - GeometryCollection - # type: string - # required: - # - type - # - geometries - # type: object href: type: string format: url @@ -625,7 +602,13 @@ components: description: Only returns items that intersect with the provided polygon. 
properties: intersects: - $ref: https://geojson.org/schema/Geometry.json + oneOf: + - $ref: "#/components/schemas/geoJsonPoint" + - $ref: "#/components/schemas/geoJsonLineString" + - $ref: "#/components/schemas/geoJsonPolygon" + - $ref: "#/components/schemas/geoJsonMultiPoint" + - $ref: "#/components/schemas/geoJsonMultiLineString" + - $ref: "#/components/schemas/geoJsonMultiPolygon" type: object example: intersects: @@ -819,10 +802,10 @@ components: example: smr200-200-4-2019 type: string itemGeometry: - oneOf: + # oneOf: # - $ref: "#/components/schemas/geoJsonPoint" # - $ref: "#/components/schemas/geoJsonLineString" - - $ref: "#/components/schemas/geoJsonPolygon" + $ref: "#/components/schemas/geoJsonPolygon" # - $ref: "#/components/schemas/geoJsonMultiPoint" # - $ref: "#/components/schemas/geoJsonMultiLineString" # - $ref: "#/components/schemas/geoJsonMultiPolygon" @@ -848,12 +831,6 @@ components: minItems: 2 items: type: number - # bbox: - # $ref: "./schemas.yaml#/components/schemas/bbox" - # type: array - # minItems: 4 - # items: - # type: number geoJsonLineString: title: GeoJSON LineString type: object @@ -882,12 +859,6 @@ components: minItems: 2 items: type: number - # bbox: - # $ref: "./schemas.yaml#/components/schemas/bbox" - # type: array - # minItems: 4 - # items: - # type: number geoJsonPolygon: title: GeoJSON Polygon type: object @@ -918,23 +889,12 @@ components: type: array items: type: array - minItems: 4 - items: - type: array - minItems: 2 + minItems: 4 items: - type: number - # bbox: - # $ref: "./schemas.yaml#/components/schemas/bbox" - # example: - # 7.0906249 - # 45.9160584 - # 7.1035698 - # 45.925093 - # type: array - # minItems: 4 - # items: - # type: number + type: array + minItems: 2 + items: + type: number geoJsonMultiPoint: title: GeoJSON MultiPoint type: object @@ -964,12 +924,6 @@ components: minItems: 2 items: type: number - # bbox: - # $ref: "./schemas.yaml#/components/schemas/bbox" - # type: array - # minItems: 4 - # items: - # type: number geoJsonMultiLineString: title: GeoJSON MultiLineString type: object @@ -1001,12 +955,6 @@ components: minItems: 2 items: type: number - # bbox: - # $ref: "./schemas.yaml#/components/schemas/bbox" - # type: array - # minItems: 4 - # items: - # type: number geoJsonMultiPolygon: title: GeoJSON MultiPolygon type: object @@ -1056,12 +1004,6 @@ components: minItems: 2 items: type: number - # bbox: - # $ref: "./schemas.yaml#/components/schemas/bbox" - # type: array - # minItems: 4 - # items: - # type: number itemProperties: title: Properties description: >- diff --git a/spec/static/spec/v0.9/openapi.yaml b/spec/static/spec/v0.9/openapi.yaml index c43fda26..2384f737 100644 --- a/spec/static/spec/v0.9/openapi.yaml +++ b/spec/static/spec/v0.9/openapi.yaml @@ -364,10 +364,10 @@ components: and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 7.0906249 - - 45.9160584 - - 7.1035698 - - 45.925093 + - 7.0906249 + - 45.9160584 + - 7.1035698 + - 45.925093 items: type: number maxItems: 4 @@ -801,7 +801,13 @@ components: description: Only returns items that intersect with the provided polygon. 
properties: intersects: - $ref: https://geojson.org/schema/Geometry.json + oneOf: + - $ref: "#/components/schemas/geoJsonPoint" + - $ref: "#/components/schemas/geoJsonLineString" + - $ref: "#/components/schemas/geoJsonPolygon" + - $ref: "#/components/schemas/geoJsonMultiPoint" + - $ref: "#/components/schemas/geoJsonMultiLineString" + - $ref: "#/components/schemas/geoJsonMultiPolygon" type: object example: intersects: @@ -990,8 +996,7 @@ components: example: smr200-200-4-2019 type: string itemGeometry: - oneOf: - - $ref: "#/components/schemas/geoJsonPolygon" + $ref: "#/components/schemas/geoJsonPolygon" geoJsonPoint: title: GeoJSON Point type: object @@ -1072,10 +1077,12 @@ components: type: array items: type: array - minItems: 2 + minItems: 4 items: - type: number - minItems: 4 + type: array + minItems: 2 + items: + type: number geoJsonMultiPoint: title: GeoJSON MultiPoint type: object diff --git a/spec/static/spec/v0.9/openapitransactional.yaml b/spec/static/spec/v0.9/openapitransactional.yaml index 3f8b140b..f920e25e 100644 --- a/spec/static/spec/v0.9/openapitransactional.yaml +++ b/spec/static/spec/v0.9/openapitransactional.yaml @@ -1319,10 +1319,10 @@ components: represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 7.0906249 - - 45.9160584 - - 7.1035698 - - 45.925093 + - 7.0906249 + - 45.9160584 + - 7.1035698 + - 45.925093 items: type: number maxItems: 4 @@ -1358,10 +1358,10 @@ components: and from 45.82°N to 47.81°N) would be represented in JSON as `[5.96, 45.82, 10.49, 47.81]` and in a query as `bbox=5.96,45.82,10.49,47.81`." example: - - 7.0906249 - - 45.9160584 - - 7.1035698 - - 45.925093 + - 7.0906249 + - 45.9160584 + - 7.1035698 + - 45.925093 items: type: number maxItems: 4 @@ -1795,7 +1795,13 @@ components: description: Only returns items that intersect with the provided polygon. 
properties: intersects: - $ref: https://geojson.org/schema/Geometry.json + oneOf: + - $ref: "#/components/schemas/geoJsonPoint" + - $ref: "#/components/schemas/geoJsonLineString" + - $ref: "#/components/schemas/geoJsonPolygon" + - $ref: "#/components/schemas/geoJsonMultiPoint" + - $ref: "#/components/schemas/geoJsonMultiLineString" + - $ref: "#/components/schemas/geoJsonMultiPolygon" type: object example: intersects: @@ -1984,8 +1990,7 @@ components: example: smr200-200-4-2019 type: string itemGeometry: - oneOf: - - $ref: "#/components/schemas/geoJsonPolygon" + $ref: "#/components/schemas/geoJsonPolygon" geoJsonPoint: title: GeoJSON Point type: object @@ -2066,10 +2071,12 @@ components: type: array items: type: array - minItems: 2 + minItems: 4 items: - type: number - minItems: 4 + type: array + minItems: 2 + items: + type: number geoJsonMultiPoint: title: GeoJSON MultiPoint type: object From 29e5ee846b8a6ac8e49a5e84396a960479f4c966 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Thu, 29 Apr 2021 20:50:04 +0200 Subject: [PATCH 100/105] BGDIINF_SB-1764 minor adaption in one comment --- spec/components/schemas.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/spec/components/schemas.yaml b/spec/components/schemas.yaml index 8b3cf9e3..d6738c01 100644 --- a/spec/components/schemas.yaml +++ b/spec/components/schemas.yaml @@ -629,9 +629,8 @@ components: $ref: "./schemas.yaml#/components/schemas/itemGeometry" # we could use the 'original' schema, but it doesn't # contain a useful example, hence we use our own - # schema with appropriate example - # $ref: https://geojson.org/schema/Geometry.json - # $ref: "./schemas.yaml#/components/schemas/geometryGeoJSON" + # schema with appropriate example. Original schemas + # were defined here: https://geojson.org/schema/Geometry.json properties: $ref: "./schemas.yaml#/components/schemas/itemProperties" readOnly: true From 5daf13e06afdb3c27da2c2f88d01fa19d88986b4 Mon Sep 17 00:00:00 2001 From: Juergen Hansmann Date: Mon, 3 May 2021 06:43:01 +0200 Subject: [PATCH 101/105] BGDIINF_SB-1764 removed unnecessary prefixes for local references | shortened and moved one comment --- spec/components/schemas.yaml | 136 +++++++++++++++++------------------ 1 file changed, 67 insertions(+), 69 deletions(-) diff --git a/spec/components/schemas.yaml b/spec/components/schemas.yaml index d6738c01..b87b51ac 100644 --- a/spec/components/schemas.yaml +++ b/spec/components/schemas.yaml @@ -3,7 +3,7 @@ components: schemas: assetQuery: additionalProperties: - $ref: "./schemas.yaml#/components/schemas/assetQueryProp" + $ref: "#/components/schemas/assetQueryProp" description: >- Define which properties of the asset to query and the operations to apply. 
@@ -25,7 +25,7 @@ components: description: Allows users to query asset properties for specific values properties: assetQuery: - $ref: "./schemas.yaml#/components/schemas/assetQuery" + $ref: "#/components/schemas/assetQuery" type: object assetQueryProp: anyOf: @@ -80,29 +80,29 @@ components: - updated properties: title: - $ref: "./schemas.yaml#/components/schemas/title" + $ref: "#/components/schemas/title" description: - $ref: "./schemas.yaml#/components/schemas/description" + $ref: "#/components/schemas/description" type: - $ref: "./schemas.yaml#/components/schemas/type" + $ref: "#/components/schemas/type" href: - $ref: "./schemas.yaml#/components/schemas/href" + $ref: "#/components/schemas/href" checksum:multihash: - $ref: "./schemas.yaml#/components/schemas/checksumMultihashReadOnly" + $ref: "#/components/schemas/checksumMultihashReadOnly" # roles: # $ref: '#/components/schemas/roles' "geoadmin:variant": - $ref: "./schemas.yaml#/components/schemas/geoadminVariant" + $ref: "#/components/schemas/geoadminVariant" "geoadmin:lang": - $ref: "./schemas.yaml#/components/schemas/geoadminLang" + $ref: "#/components/schemas/geoadminLang" "proj:epsg": - $ref: "./schemas.yaml#/components/schemas/projEpsg" + $ref: "#/components/schemas/projEpsg" "eo:gsd": - $ref: "./schemas.yaml#/components/schemas/eoGsd" + $ref: "#/components/schemas/eoGsd" created: - $ref: "./schemas.yaml#/components/schemas/created" + $ref: "#/components/schemas/created" updated: - $ref: "./schemas.yaml#/components/schemas/updated" + $ref: "#/components/schemas/updated" bbox: description: | The bounding box is provided as four numbers: @@ -180,7 +180,7 @@ components: bboxFilter: properties: bbox: - $ref: "./schemas.yaml#/components/schemas/bboxfilter" + $ref: "#/components/schemas/bboxfilter" checksumMultihash: description: >- `sha2-256` checksum of the asset in [multihash](https://multiformats.io/multihash/) @@ -231,7 +231,7 @@ components: for a variety of purposes (digital printing, plots, offset printing, etc.). 
type: string extent: - $ref: "./schemas.yaml#/components/schemas/extent" + $ref: "#/components/schemas/extent" id: description: Identifier of the collection used, for example, in URIs example: ch.swisstopo.pixelkarte-farbe-pk200.noscale @@ -243,11 +243,11 @@ components: type: string readOnly: true license: - $ref: "./schemas.yaml#/components/schemas/license" + $ref: "#/components/schemas/license" providers: - $ref: "./schemas.yaml#/components/schemas/providers" + $ref: "#/components/schemas/providers" stac_version: - $ref: "./schemas.yaml#/components/schemas/stac_version" + $ref: "#/components/schemas/stac_version" summaries: additionalProperties: oneOf: @@ -306,9 +306,9 @@ components: example: National Map 1:200'000 type: string created: - $ref: "./schemas.yaml#/components/schemas/created" + $ref: "#/components/schemas/created" updated: - $ref: "./schemas.yaml#/components/schemas/updated" + $ref: "#/components/schemas/updated" required: - id - stac_version @@ -320,7 +320,7 @@ components: type: object collection: allOf: - - $ref: "./schemas.yaml#/components/schemas/collectionBase" + - $ref: "#/components/schemas/collectionBase" - type: object required: - links @@ -328,7 +328,7 @@ components: links: type: array items: - $ref: "./schemas.yaml#/components/schemas/link" + $ref: "#/components/schemas/link" example: - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale rel: self @@ -347,11 +347,11 @@ components: properties: collections: items: - $ref: "./schemas.yaml#/components/schemas/collection" + $ref: "#/components/schemas/collection" type: array links: items: - $ref: "./schemas.yaml#/components/schemas/link" + $ref: "#/components/schemas/link" example: - href: https://data.geo.admin.ch/api/stac/v0.9/collections rel: self @@ -376,7 +376,7 @@ components: description: Only returns the collections specified properties: collections: - $ref: "./schemas.yaml#/components/schemas/collectionsArray" + $ref: "#/components/schemas/collectionsArray" type: object example: collections: @@ -421,7 +421,7 @@ components: datetimeFilter: properties: datetime: - $ref: "./schemas.yaml#/components/schemas/datetimeQuery" + $ref: "#/components/schemas/datetimeQuery" description: description: >- Detailed multi-line description to fully explain the catalog or @@ -592,7 +592,7 @@ components: description: Only returns items that match the array of given ids properties: ids: - $ref: "./schemas.yaml#/components/schemas/ids" + $ref: "#/components/schemas/ids" type: object example: ids: @@ -622,23 +622,21 @@ components: STAC entity properties: assets: - $ref: "./schemas.yaml#/components/schemas/itemAssets" + $ref: "#/components/schemas/itemAssets" bbox: - $ref: "./schemas.yaml#/components/schemas/bbox" + $ref: "#/components/schemas/bbox" geometry: - $ref: "./schemas.yaml#/components/schemas/itemGeometry" - # we could use the 'original' schema, but it doesn't - # contain a useful example, hence we use our own - # schema with appropriate example. 
Original schemas - # were defined here: https://geojson.org/schema/Geometry.json + # Original schemas were defined here: + # https://geojson.org/schema/Geometry.json + $ref: "#/components/schemas/itemGeometry" properties: - $ref: "./schemas.yaml#/components/schemas/itemProperties" + $ref: "#/components/schemas/itemProperties" readOnly: true stac_version: - $ref: "./schemas.yaml#/components/schemas/stac_version" + $ref: "#/components/schemas/stac_version" readOnly: true type: - $ref: "./schemas.yaml#/components/schemas/itemType" + $ref: "#/components/schemas/itemType" required: - stac_version - type @@ -655,10 +653,10 @@ components: - links properties: id: - $ref: "./schemas.yaml#/components/schemas/itemId" + $ref: "#/components/schemas/itemId" links: items: - $ref: "./schemas.yaml#/components/schemas/link" + $ref: "#/components/schemas/link" type: array example: - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items/smr50-263-2016 @@ -669,7 +667,7 @@ components: rel: parent - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale rel: collection - - $ref: "./schemas.yaml#/components/schemas/itemBase" + - $ref: "#/components/schemas/itemBase" items: description: >- A FeatureCollection augmented with foreign members that contain values relevant @@ -677,11 +675,11 @@ components: properties: features: items: - $ref: "./schemas.yaml#/components/schemas/item" + $ref: "#/components/schemas/item" type: array links: items: - $ref: "./schemas.yaml#/components/schemas/link" + $ref: "#/components/schemas/link" type: array example: - href: https://data.geo.admin.ch/api/stac/v0.9/collections/ch.swisstopo.pixelkarte-farbe-pk50.noscale/items @@ -706,7 +704,7 @@ components: title: Assets description: List of Assets attached to this feature. additionalProperties: - $ref: "./schemas.yaml#/components/schemas/assetBase" + $ref: "#/components/schemas/assetBase" type: object readOnly: true example: @@ -744,7 +742,7 @@ components: properties: features: items: - $ref: "./schemas.yaml#/components/schemas/item" + $ref: "#/components/schemas/item" type: array type: enum: @@ -756,18 +754,18 @@ components: type: object itemsSearchGet: allOf: - - $ref: "./schemas.yaml#/components/schemas/itemsSearch" + - $ref: "#/components/schemas/itemsSearch" - type: object properties: links: - $ref: "./schemas.yaml#/components/schemas/itemsSearchLinks" + $ref: "#/components/schemas/itemsSearchLinks" itemsSearchPost: allOf: - - $ref: "./schemas.yaml#/components/schemas/itemsSearch" + - $ref: "#/components/schemas/itemsSearch" - type: object properties: links: - $ref: "./schemas.yaml#/components/schemas/itemsSearchPostLinks" + $ref: "#/components/schemas/itemsSearchPostLinks" itemsSearchLinks: description: >- An array of links. Can be used for pagination, e.g. 
by providing a link with the `next` @@ -778,7 +776,7 @@ components: - href: https://data.geo.admin.ch/api/stac/v0.9/search?cursor=10ab rel: next items: - $ref: "./schemas.yaml#/components/schemas/link" + $ref: "#/components/schemas/link" type: array itemsSearchPostLinks: description: >- @@ -793,7 +791,7 @@ components: body: {} merge: true items: - $ref: "./schemas.yaml#/components/schemas/linkPostSearch" + $ref: "#/components/schemas/linkPostSearch" type: array itemId: title: ID @@ -1019,15 +1017,15 @@ components: One of the two is required properties: created: - $ref: "./schemas.yaml#/components/schemas/created" + $ref: "#/components/schemas/created" datetime: - $ref: "./schemas.yaml#/components/schemas/datetime" + $ref: "#/components/schemas/datetime" start_datetime: - $ref: "./schemas.yaml#/components/schemas/datetime" + $ref: "#/components/schemas/datetime" end_datetime: - $ref: "./schemas.yaml#/components/schemas/datetime" + $ref: "#/components/schemas/datetime" updated: - $ref: "./schemas.yaml#/components/schemas/updated" + $ref: "#/components/schemas/updated" title: description: Human readable title of the Feature example: Feature title @@ -1054,10 +1052,10 @@ components: type: string links: items: - $ref: "./schemas.yaml#/components/schemas/link" + $ref: "#/components/schemas/link" type: array stac_version: - $ref: "./schemas.yaml#/components/schemas/stac_version" + $ref: "#/components/schemas/stac_version" title: example: Buildings in Bonn type: string @@ -1103,7 +1101,7 @@ components: description: Only returns maximum number of results (page size) properties: limit: - $ref: "./schemas.yaml#/components/schemas/limit" + $ref: "#/components/schemas/limit" type: object linestringGeoJSON: properties: @@ -1159,7 +1157,7 @@ components: type: object linkPostSearch: allOf: - - $ref: "./schemas.yaml#/components/schemas/link" + - $ref: "#/components/schemas/link" - type: object properties: body: @@ -1384,7 +1382,7 @@ components: url: https://www.swisstopo.admin.ch query: additionalProperties: - $ref: "./schemas.yaml#/components/schemas/queryProp" + $ref: "#/components/schemas/queryProp" description: Define which properties to query and the operations to apply example: title: @@ -1400,7 +1398,7 @@ components: description: Allows users to query properties for specific values properties: query: - $ref: "./schemas.yaml#/components/schemas/query" + $ref: "#/components/schemas/query" type: object queryProp: anyOf: @@ -1488,14 +1486,14 @@ components: - thumbnail searchBody: allOf: - # - $ref: "./schemas.yaml#/components/schemas/assetQueryFilter" - - $ref: "./schemas.yaml#/components/schemas/queryFilter" - - $ref: "./schemas.yaml#/components/schemas/bboxFilter" - - $ref: "./schemas.yaml#/components/schemas/datetimeFilter" - - $ref: "./schemas.yaml#/components/schemas/intersectsFilter" - - $ref: "./schemas.yaml#/components/schemas/collectionsFilter" - - $ref: "./schemas.yaml#/components/schemas/idsFilter" - - $ref: "./schemas.yaml#/components/schemas/limitFilter" + # - $ref: "#/components/schemas/assetQueryFilter" + - $ref: "#/components/schemas/queryFilter" + - $ref: "#/components/schemas/bboxFilter" + - $ref: "#/components/schemas/datetimeFilter" + - $ref: "#/components/schemas/intersectsFilter" + - $ref: "#/components/schemas/collectionsFilter" + - $ref: "#/components/schemas/idsFilter" + - $ref: "#/components/schemas/limitFilter" description: The search criteria type: object stac_version: From 8c14e0e9ee5c0308cc07cc23df221fdc690f99e9 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Wed, 5 
May 2021 07:51:08 +0200 Subject: [PATCH 102/105] BGDIINF_SB-1810: Added admin page for asset upload This gives the possibility to list all asset uploads. --- app/stac_api/admin.py | 58 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index 23e9b43b..0267f8a3 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -10,6 +10,7 @@ from solo.admin import SingletonModelAdmin from stac_api.models import Asset +from stac_api.models import AssetUpload from stac_api.models import Collection from stac_api.models import CollectionLink from stac_api.models import ConformancePage @@ -292,3 +293,60 @@ def get_fieldsets(self, request, obj=None): # without help text fields[0][1]['fields'] = ('name', 'item_name', 'collection_name') return fields + + +@admin.register(AssetUpload) +class AssetUploadAdmin(admin.ModelAdmin): + + autocomplete_fields = ['asset'] + search_fields = [ + 'upload_id', 'asset__name', 'asset__item__name', 'asset__item__collection__name', 'status' + ] + readonly_fields = [ + 'asset_name', 'item_name', 'collection_name', 'created', 'ended', 'etag', 'status' + ] + list_display = ['upload_id', 'status', 'asset_name', 'item_name', 'collection_name'] + fieldsets = ((None, { + 'fields': ('upload_id', 'asset', 'status') + }), + ( + 'Attributes', { + 'fields': + ('number_parts', 'urls', 'checksum_multihash', 'created', 'ended') + } + )) + + def has_add_permission(self, request): + return False + + def get_fieldsets(self, request, obj=None): + fields = super().get_fieldsets(request, obj) + if obj is None: + # In case a new AssetUpload is added use the normal field 'asset' from model that have + # a help text fort the search functionality. + fields[0][1]['fields'] = ('upload_id', 'asset', 'status') + return fields + # Otherwise if this is an update operation only display the read only fields + # without help text + fields[0][1]['fields'] = ( + 'upload_id', 'asset_name', 'item_name', 'collection_name', 'status' + ) + return fields + + def collection_name(self, instance): + return instance.asset.item.collection.name + + collection_name.admin_order_field = 'asset__item__collection__name' + collection_name.short_description = 'Collection Id' + + def item_name(self, instance): + return instance.asset.item.name + + item_name.admin_order_field = 'asset__item__name' + item_name.short_description = 'Item Id' + + def asset_name(self, instance): + return instance.asset.name + + asset_name.admin_order_field = 'asset__name' + asset_name.short_description = 'Asset Id' From 676cbd6c849c415f213d061495162d8f91f029bd Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Thu, 6 May 2021 07:17:42 +0200 Subject: [PATCH 103/105] Improved admin page for Asset Upload Now all fields are read only and the "update" button is hidden. We should not change anything from the admin page for the Asset Upload and do only changes from the API. This is because the asset upload have a lots of functionality in the API view. Delete of completed/aborted asset upload is accepted. 
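For reference, the read-only behaviour described above boils down to a small Django
admin pattern: forbid add and change, keep delete, and turn a ProtectedError raised
on delete into an admin error message. Below is a minimal sketch of that pattern only;
the class name, message text and redirect target are illustrative, the actual
AssetUploadAdmin implementation is in the diff that follows.

    from django.contrib import admin, messages
    from django.db.models.deletion import ProtectedError
    from django.http import HttpResponseRedirect
    from django.urls import reverse


    class ReadOnlyModelAdmin(admin.ModelAdmin):
        """List and delete objects only; never create or edit them."""

        def has_add_permission(self, request):
            # removes the "Add" button and rejects creation requests
            return False

        def has_change_permission(self, request, obj=None):
            # renders all fields read-only and hides the save/update buttons
            return False

        def delete_view(self, request, object_id, extra_context=None):
            # deletion stays allowed; a protected object (e.g. an upload that
            # is still in progress) becomes an error message instead of a crash
            try:
                return super().delete_view(request, object_id, extra_context)
            except ProtectedError:
                self.message_user(request, "Object cannot be deleted", messages.ERROR)
                opts = self.model._meta
                changelist_url = reverse(
                    'admin:%s_%s_changelist' % (opts.app_label, opts.model_name),
                    current_app=self.admin_site.name,
                )
                return HttpResponseRedirect(changelist_url)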
--- README.md | 14 ++++--- app/stac_api/admin.py | 83 ++++++++++++++++++++++++++++++------------ app/stac_api/models.py | 2 - 3 files changed, 68 insertions(+), 31 deletions(-) diff --git a/README.md b/README.md index a3873c1f..b3c84995 100644 --- a/README.md +++ b/README.md @@ -43,6 +43,7 @@ Prerequisites on host for development and build: - python version 3.7 +- libgdal-dev - [pipenv](https://pipenv-fork.readthedocs.io/en/latest/install.html) - `docker` and `docker-compose` @@ -50,10 +51,12 @@ Prerequisites on host for development and build: If your Ubuntu distribution is missing Python 3.7, you may use the `deadsnakes` PPA and install it: - sudo add-apt-repository ppa:deadsnakes/ppa - sudo apt-get update - sudo apt-get install python3.7 - +```bash +sudo add-apt-repository ppa:deadsnakes/ppa +sudo apt-get update +sudo apt-get install python3.7 +``` + #### pipenv Generally, all modern distribution have already a [pipenv](https://pipenv-fork.readthedocs.io) package. If no, install from hand. @@ -62,7 +65,7 @@ The other services that are used (Postgres with PostGIS extension for metadata a Starting postgres and MinIO is done with a simple -``` +```bash docker-compose up ``` @@ -156,6 +159,7 @@ some default values to be able to start working with it. (From the root) ``` the ```pipenv shell``` command activate the virtual environment provided by pipenv. + ### Using a local PostGres database instead of a container To use a local postgres instance rather than a container, once you've ensured you've the needed dependencies, you should : diff --git a/app/stac_api/admin.py b/app/stac_api/admin.py index 0267f8a3..50421888 100644 --- a/app/stac_api/admin.py +++ b/app/stac_api/admin.py @@ -1,11 +1,17 @@ +import json + from admin_auto_filters.filters import AutocompleteFilter from admin_auto_filters.filters import AutocompleteFilterFactory +from django.contrib import messages from django.contrib.gis import admin from django.contrib.postgres.fields import ArrayField from django.contrib.staticfiles import finders from django.db import models +from django.db.models.deletion import ProtectedError from django.forms import Textarea +from django.http import HttpResponseRedirect +from django.urls import reverse from solo.admin import SingletonModelAdmin @@ -303,35 +309,45 @@ class AssetUploadAdmin(admin.ModelAdmin): 'upload_id', 'asset__name', 'asset__item__name', 'asset__item__collection__name', 'status' ] readonly_fields = [ - 'asset_name', 'item_name', 'collection_name', 'created', 'ended', 'etag', 'status' + 'upload_id', + 'asset_name', + 'item_name', + 'collection_name', + 'created', + 'ended', + 'etag', + 'status', + 'urls_json', + 'number_parts', + 'checksum_multihash' + ] + list_display = [ + 'short_upload_id', 'status', 'asset_name', 'item_name', 'collection_name', 'created' ] - list_display = ['upload_id', 'status', 'asset_name', 'item_name', 'collection_name'] - fieldsets = ((None, { - 'fields': ('upload_id', 'asset', 'status') - }), - ( - 'Attributes', { - 'fields': - ('number_parts', 'urls', 'checksum_multihash', 'created', 'ended') - } - )) + fieldsets = ( + (None, { + 'fields': ('upload_id', 'asset_name', 'item_name', 'collection_name', 'status') + }), + ( + 'Attributes', { + 'fields': ('number_parts', 'urls_json', 'checksum_multihash', 'created', 'ended') + } + ), + ) def has_add_permission(self, request): return False - def get_fieldsets(self, request, obj=None): - fields = super().get_fieldsets(request, obj) - if obj is None: - # In case a new AssetUpload is added use the normal field 
'asset' from model that have - # a help text fort the search functionality. - fields[0][1]['fields'] = ('upload_id', 'asset', 'status') - return fields - # Otherwise if this is an update operation only display the read only fields - # without help text - fields[0][1]['fields'] = ( - 'upload_id', 'asset_name', 'item_name', 'collection_name', 'status' - ) - return fields + def has_change_permission(self, request, obj=None): + return False + + def short_upload_id(self, instance): + if len(instance.upload_id) > 32: + return instance.upload_id[:29] + '...' + return instance.upload_id + + short_upload_id.admin_order_field = 'upload_id' + short_upload_id.short_description = 'Upload ID' def collection_name(self, instance): return instance.asset.item.collection.name @@ -350,3 +366,22 @@ def asset_name(self, instance): asset_name.admin_order_field = 'asset__name' asset_name.short_description = 'Asset Id' + + def urls_json(self, instance): + return json.dumps(instance.urls, indent=1) + + urls_json.short_description = "Urls" + + def delete_view(self, request, object_id, extra_context=None): + try: + return super().delete_view(request, object_id, extra_context) + except ProtectedError: + msg = "You cannot delete Asset Upload that are in progress" + self.message_user(request, msg, messages.ERROR) + opts = self.model._meta + return_url = reverse( + 'admin:%s_%s_change' % (opts.app_label, opts.model_name), + args=(object_id,), + current_app=self.admin_site.name, + ) + return HttpResponseRedirect(return_url) diff --git a/app/stac_api/models.py b/app/stac_api/models.py index 876de2f6..2a9acd35 100644 --- a/app/stac_api/models.py +++ b/app/stac_api/models.py @@ -4,7 +4,6 @@ import time from uuid import uuid4 -# import botocore.exceptions # Un-comment with BGDIINF_SB-1625 from multihash import encode as multihash_encode from multihash import to_hex_string @@ -29,7 +28,6 @@ from stac_api.managers import AssetUploadManager from stac_api.managers import ItemManager from stac_api.utils import get_asset_path -# from stac_api.utils import get_s3_resource # Un-comment with BGDIINF_SB-1625 from stac_api.validators import MEDIA_TYPES from stac_api.validators import validate_asset_name from stac_api.validators import validate_asset_name_with_media_type From d4b63a6c2186df310745696322149ab15f09878a Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 11 May 2021 06:39:23 +0200 Subject: [PATCH 104/105] Updated packages. 
--- Pipfile | 6 +- Pipfile.lock | 554 ++++++++++++++++++++++++++++----------------------- 2 files changed, 308 insertions(+), 252 deletions(-) diff --git a/Pipfile b/Pipfile index 6ce35878..220adcd9 100644 --- a/Pipfile +++ b/Pipfile @@ -9,7 +9,7 @@ isort = "~=4.3.21" pylint = "!=2.7.2,!=2.7.1,!=2.7.0" # These version of pylint have issues with similarities config pylint-django = "*" django-extensions = "*" -django-debug-toolbar = "*" +django-debug-toolbar = ">=3.2.1" pip = "*" tblib = "*" # needed for traceback when running tests in parallel mock = "~=4.0.2" @@ -25,8 +25,8 @@ logging-utilities = "~=1.2.0" numpy = "~=1.19.2" python-dotenv = "~=0.14.0" djangorestframework = "~=3.12.1" -Django = "~=3.1" -PyYAML = "~=5.3.1" +Django = "~=3.1.8" +PyYAML = "~=5.4" whitenoise = "~=5.2.0" djangorestframework-gis = "~=0.16" python-dateutil = "~=2.8.1" diff --git a/Pipfile.lock b/Pipfile.lock index 846f8dc3..e9c989fd 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "a7fcba27d693e9f1b737e27611e9755be5102950979660ce0eb4afcdf1ed38e6" + "sha256": "b1a6ecc254f68a6099887fe4b545d668b3eb09071a43c85051872b11f47c0119" }, "pipfile-spec": 6, "requires": { @@ -18,10 +18,10 @@ "default": { "asgiref": { "hashes": [ - "sha256:5ee950735509d04eb673bd7f7120f8fa1c9e2df495394992c73234d526907e17", - "sha256:7162a3cb30ab0609f1a4c95938fd73e8604f63bdba516a7f7d64b83ff09478f0" + "sha256:92906c611ce6c967347bbfea733f13d6313901d54dcca88195eaeb52b2a8e8ee", + "sha256:d1216dfbdfb63826470995d31caed36225dcaf34f182e0fa257a4dd9e86f1b78" ], - "version": "==3.3.1" + "version": "==3.3.4" }, "base58": { "hashes": [ @@ -61,11 +61,11 @@ }, "django": { "hashes": [ - "sha256:32ce792ee9b6a0cbbec340123e229ac9f765dff8c2a4ae9247a14b2ba3a365a7", - "sha256:baf099db36ad31f970775d0be5587cc58a6256a6771a44eb795b554d45f211b8" + "sha256:973c968e63518859732f018975364785dd96f0581b1e4b12e2a4b749415ac43a", + "sha256:cd6ec37db950a384dba3341b135394fdc776ede4d149fc7abde1e45a21ec4f22" ], "index": "pypi", - "version": "==3.1.7" + "version": "==3.1.10" }, "django-admin-autocomplete-filter": { "hashes": [ @@ -108,11 +108,11 @@ }, "djangorestframework": { "hashes": [ - "sha256:0209bafcb7b5010fdfec784034f059d512256424de2a0f084cb82b096d6dd6a7", - "sha256:0898182b4737a7b584a2c73735d89816343369f259fea932d90dc78e35d8ac33" + "sha256:6d1d59f623a5ad0509fe0d6bfe93cbdfe17b8116ebc8eda86d45f6e16e819aaf", + "sha256:f747949a8ddac876e879190df194b925c177cdeb725a099db1460872f7c0a7f2" ], "index": "pypi", - "version": "==3.12.2" + "version": "==3.12.4" }, "djangorestframework-gis": { "hashes": [ @@ -154,52 +154,58 @@ }, "greenlet": { "hashes": [ - "sha256:0a77691f0080c9da8dfc81e23f4e3cffa5accf0f5b56478951016d7cfead9196", - "sha256:0ddd77586553e3daf439aa88b6642c5f252f7ef79a39271c25b1d4bf1b7cbb85", - "sha256:111cfd92d78f2af0bc7317452bd93a477128af6327332ebf3c2be7df99566683", - "sha256:122c63ba795fdba4fc19c744df6277d9cfd913ed53d1a286f12189a0265316dd", - "sha256:181300f826625b7fd1182205b830642926f52bd8cdb08b34574c9d5b2b1813f7", - "sha256:1a1ada42a1fd2607d232ae11a7b3195735edaa49ea787a6d9e6a53afaf6f3476", - "sha256:1bb80c71de788b36cefb0c3bb6bfab306ba75073dbde2829c858dc3ad70f867c", - "sha256:1d1d4473ecb1c1d31ce8fd8d91e4da1b1f64d425c1dc965edc4ed2a63cfa67b2", - "sha256:292e801fcb3a0b3a12d8c603c7cf340659ea27fd73c98683e75800d9fd8f704c", - "sha256:2c65320774a8cd5fdb6e117c13afa91c4707548282464a18cf80243cf976b3e6", - "sha256:4365eccd68e72564c776418c53ce3c5af402bc526fe0653722bc89efd85bf12d", - 
"sha256:5352c15c1d91d22902582e891f27728d8dac3bd5e0ee565b6a9f575355e6d92f", - "sha256:58ca0f078d1c135ecf1879d50711f925ee238fe773dfe44e206d7d126f5bc664", - "sha256:5d4030b04061fdf4cbc446008e238e44936d77a04b2b32f804688ad64197953c", - "sha256:5d69bbd9547d3bc49f8a545db7a0bd69f407badd2ff0f6e1a163680b5841d2b0", - "sha256:5f297cb343114b33a13755032ecf7109b07b9a0020e841d1c3cedff6602cc139", - "sha256:62afad6e5fd70f34d773ffcbb7c22657e1d46d7fd7c95a43361de979f0a45aef", - "sha256:647ba1df86d025f5a34043451d7c4a9f05f240bee06277a524daad11f997d1e7", - "sha256:719e169c79255816cdcf6dccd9ed2d089a72a9f6c42273aae12d55e8d35bdcf8", - "sha256:7cd5a237f241f2764324396e06298b5dee0df580cf06ef4ada0ff9bff851286c", - "sha256:875d4c60a6299f55df1c3bb870ebe6dcb7db28c165ab9ea6cdc5d5af36bb33ce", - "sha256:90b6a25841488cf2cb1c8623a53e6879573010a669455046df5f029d93db51b7", - "sha256:94620ed996a7632723a424bccb84b07e7b861ab7bb06a5aeb041c111dd723d36", - "sha256:b5f1b333015d53d4b381745f5de842f19fe59728b65f0fbb662dafbe2018c3a5", - "sha256:c5b22b31c947ad8b6964d4ed66776bcae986f73669ba50620162ba7c832a6b6a", - "sha256:c93d1a71c3fe222308939b2e516c07f35a849c5047f0197442a4d6fbcb4128ee", - "sha256:cdb90267650c1edb54459cdb51dab865f6c6594c3a47ebd441bc493360c7af70", - "sha256:cfd06e0f0cc8db2a854137bd79154b61ecd940dce96fad0cba23fe31de0b793c", - "sha256:d3789c1c394944084b5e57c192889985a9f23bd985f6d15728c745d380318128", - "sha256:da7d09ad0f24270b20f77d56934e196e982af0d0a2446120cb772be4e060e1a2", - "sha256:df3e83323268594fa9755480a442cabfe8d82b21aba815a71acf1bb6c1776218", - "sha256:df8053867c831b2643b2c489fe1d62049a98566b1646b194cc815f13e27b90df", - "sha256:e1128e022d8dce375362e063754e129750323b67454cac5600008aad9f54139e", - "sha256:e6e9fdaf6c90d02b95e6b0709aeb1aba5affbbb9ccaea5502f8638e4323206be", - "sha256:eac8803c9ad1817ce3d8d15d1bb82c2da3feda6bee1153eec5c58fa6e5d3f770", - "sha256:eb333b90036358a0e2c57373f72e7648d7207b76ef0bd00a4f7daad1f79f5203", - "sha256:ed1d1351f05e795a527abc04a0d82e9aecd3bdf9f46662c36ff47b0b00ecaf06", - "sha256:f3dc68272990849132d6698f7dc6df2ab62a88b0d36e54702a8fd16c0490e44f", - "sha256:f59eded163d9752fd49978e0bab7a1ff21b1b8d25c05f0995d140cc08ac83379", - "sha256:f5e2d36c86c7b03c94b8459c3bd2c9fe2c7dab4b258b8885617d44a22e453fb7", - "sha256:f6f65bf54215e4ebf6b01e4bb94c49180a589573df643735107056f7a910275b", - "sha256:f8450d5ef759dbe59f84f2c9f77491bb3d3c44bc1a573746daf086e70b14c243", - "sha256:f97d83049715fd9dec7911860ecf0e17b48d8725de01e45de07d8ac0bd5bc378" + "sha256:03f28a5ea20201e70ab70518d151116ce939b412961c33827519ce620957d44c", + "sha256:06d7ac89e6094a0a8f8dc46aa61898e9e1aec79b0f8b47b2400dd51a44dbc832", + "sha256:06ecb43b04480e6bafc45cb1b4b67c785e183ce12c079473359e04a709333b08", + "sha256:096cb0217d1505826ba3d723e8981096f2622cde1eb91af9ed89a17c10aa1f3e", + "sha256:0c557c809eeee215b87e8a7cbfb2d783fb5598a78342c29ade561440abae7d22", + "sha256:0de64d419b1cb1bfd4ea544bedea4b535ef3ae1e150b0f2609da14bbf48a4a5f", + "sha256:14927b15c953f8f2d2a8dffa224aa78d7759ef95284d4c39e1745cf36e8cdd2c", + "sha256:16183fa53bc1a037c38d75fdc59d6208181fa28024a12a7f64bb0884434c91ea", + "sha256:206295d270f702bc27dbdbd7651e8ebe42d319139e0d90217b2074309a200da8", + "sha256:22002259e5b7828b05600a762579fa2f8b33373ad95a0ee57b4d6109d0e589ad", + "sha256:2325123ff3a8ecc10ca76f062445efef13b6cf5a23389e2df3c02a4a527b89bc", + "sha256:258f9612aba0d06785143ee1cbf2d7361801c95489c0bd10c69d163ec5254a16", + "sha256:3096286a6072553b5dbd5efbefc22297e9d06a05ac14ba017233fedaed7584a8", + "sha256:3d13da093d44dee7535b91049e44dd2b5540c2a0e15df168404d3dd2626e0ec5", + 
"sha256:408071b64e52192869129a205e5b463abda36eff0cebb19d6e63369440e4dc99", + "sha256:598bcfd841e0b1d88e32e6a5ea48348a2c726461b05ff057c1b8692be9443c6e", + "sha256:5d928e2e3c3906e0a29b43dc26d9b3d6e36921eee276786c4e7ad9ff5665c78a", + "sha256:5f75e7f237428755d00e7460239a2482fa7e3970db56c8935bd60da3f0733e56", + "sha256:60848099b76467ef09b62b0f4512e7e6f0a2c977357a036de602b653667f5f4c", + "sha256:6b1d08f2e7f2048d77343279c4d4faa7aef168b3e36039cba1917fffb781a8ed", + "sha256:70bd1bb271e9429e2793902dfd194b653221904a07cbf207c3139e2672d17959", + "sha256:76ed710b4e953fc31c663b079d317c18f40235ba2e3d55f70ff80794f7b57922", + "sha256:7920e3eccd26b7f4c661b746002f5ec5f0928076bd738d38d894bb359ce51927", + "sha256:7db68f15486d412b8e2cfcd584bf3b3a000911d25779d081cbbae76d71bd1a7e", + "sha256:8833e27949ea32d27f7e96930fa29404dd4f2feb13cce483daf52e8842ec246a", + "sha256:944fbdd540712d5377a8795c840a97ff71e7f3221d3fddc98769a15a87b36131", + "sha256:9a6b035aa2c5fcf3dbbf0e3a8a5bc75286fc2d4e6f9cfa738788b433ec894919", + "sha256:9bdcff4b9051fb1aa4bba4fceff6a5f770c6be436408efd99b76fc827f2a9319", + "sha256:a9017ff5fc2522e45562882ff481128631bf35da444775bc2776ac5c61d8bcae", + "sha256:aa4230234d02e6f32f189fd40b59d5a968fe77e80f59c9c933384fe8ba535535", + "sha256:ad80bb338cf9f8129c049837a42a43451fc7c8b57ad56f8e6d32e7697b115505", + "sha256:adb94a28225005890d4cf73648b5131e885c7b4b17bc762779f061844aabcc11", + "sha256:b3090631fecdf7e983d183d0fad7ea72cfb12fa9212461a9b708ff7907ffff47", + "sha256:b33b51ab057f8a20b497ffafdb1e79256db0c03ef4f5e3d52e7497200e11f821", + "sha256:b97c9a144bbeec7039cca44df117efcbeed7209543f5695201cacf05ba3b5857", + "sha256:be13a18cec649ebaab835dff269e914679ef329204704869f2f167b2c163a9da", + "sha256:be9768e56f92d1d7cd94185bab5856f3c5589a50d221c166cc2ad5eb134bd1dc", + "sha256:c1580087ab493c6b43e66f2bdd165d9e3c1e86ef83f6c2c44a29f2869d2c5bd5", + "sha256:c35872b2916ab5a240d52a94314c963476c989814ba9b519bc842e5b61b464bb", + "sha256:c70c7dd733a4c56838d1f1781e769081a25fade879510c5b5f0df76956abfa05", + "sha256:c767458511a59f6f597bfb0032a1c82a52c29ae228c2c0a6865cfeaeaac4c5f5", + "sha256:c87df8ae3f01ffb4483c796fe1b15232ce2b219f0b18126948616224d3f658ee", + "sha256:ca1c4a569232c063615f9e70ff9a1e2fee8c66a6fb5caf0f5e8b21a396deec3e", + "sha256:cc407b68e0a874e7ece60f6639df46309376882152345508be94da608cc0b831", + "sha256:da862b8f7de577bc421323714f63276acb2f759ab8c5e33335509f0b89e06b8f", + "sha256:dfe7eac0d253915116ed0cd160a15a88981a1d194c1ef151e862a5c7d2f853d3", + "sha256:ed1377feed808c9c1139bdb6a61bcbf030c236dd288d6fca71ac26906ab03ba6", + "sha256:f42ad188466d946f1b3afc0a9e1a266ac8926461ee0786c06baac6bd71f8a6f3", + "sha256:f92731609d6625e1cc26ff5757db4d32b6b810d2a3363b0ff94ff573e5901f6f" ], "markers": "platform_python_implementation == 'CPython'", - "version": "==1.0.0" + "version": "==1.1.0" }, "gunicorn": { "hashes": [ @@ -279,10 +285,10 @@ }, "prometheus-client": { "hashes": [ - "sha256:9da7b32f02439d8c04f7777021c304ed51d9ec180604700c1ba72a4d44dceb03", - "sha256:b08c34c328e1bf5961f0b4352668e6c8f145b4a087e09b7296ef62cbe4693d35" + "sha256:030e4f9df5f53db2292eec37c6255957eb76168c6f974e4176c711cf91ed34aa", + "sha256:b6c5a9643e3545bcbfd9451766cbaa5d9c67e7303c7bc32c750b6fa70ecb107d" ], - "version": "==0.9.0" + "version": "==0.10.1" }, "psycopg2-binary": { "hashes": [ @@ -365,22 +371,38 @@ }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", - "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - 
"sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", - "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", - "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", + "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", + "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", + "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" ], "index": "pypi", - "version": "==5.3.1" + "version": "==5.4.1" }, "requests": { "hashes": [ @@ -392,17 +414,17 @@ }, "s3transfer": { "hashes": [ - "sha256:1e28620e5b444652ed752cf87c7e0cb15b0e578972568c6609f0f18212f259ed", - "sha256:7fdddb4f22275cf1d32129e21f056337fd2a80b6ccef1664528145b72c49e6d2" + "sha256:35627b86af8ff97e7ac27975fe0a98a312814b46c6333d8a6b889627bcd80994", + "sha256:efa5bd92a897b6a8d5c1383828dca3d52d0790e0756d49740563a3fb6ed03246" ], - "version": 
"==0.3.4" + "version": "==0.3.7" }, "six": { "hashes": [ - "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", - "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "version": "==1.15.0" + "version": "==1.16.0" }, "sqlparse": { "hashes": [ @@ -411,6 +433,15 @@ ], "version": "==0.4.1" }, + "typing-extensions": { + "hashes": [ + "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", + "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", + "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" + ], + "markers": "python_version < '3.8'", + "version": "==3.10.0.0" + }, "urllib3": { "hashes": [ "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", @@ -442,76 +473,75 @@ }, "zope.interface": { "hashes": [ - "sha256:05a97ba92c1c7c26f25c9f671aa1ef85ffead6cdad13770e5b689cf983adc7e1", - "sha256:07d61722dd7d85547b7c6b0f5486b4338001fab349f2ac5cabc0b7182eb3425d", - "sha256:0a990dcc97806e5980bbb54b2e46b9cde9e48932d8e6984daf71ef1745516123", - "sha256:150e8bcb7253a34a4535aeea3de36c0bb3b1a6a47a183a95d65a194b3e07f232", - "sha256:1743bcfe45af8846b775086471c28258f4c6e9ee8ef37484de4495f15a98b549", - "sha256:1b5f6c8fff4ed32aa2dd43e84061bc8346f32d3ba6ad6e58f088fe109608f102", - "sha256:21e49123f375703cf824214939d39df0af62c47d122d955b2a8d9153ea08cfd5", - "sha256:21f579134a47083ffb5ddd1307f0405c91aa8b61ad4be6fd5af0171474fe0c45", - "sha256:27c267dc38a0f0079e96a2945ee65786d38ef111e413c702fbaaacbab6361d00", - "sha256:299bde0ab9e5c4a92f01a152b7fbabb460f31343f1416f9b7b983167ab1e33bc", - "sha256:2ab88d8f228f803fcb8cb7d222c579d13dab2d3622c51e8cf321280da01102a7", - "sha256:2ced4c35061eea623bc84c7711eedce8ecc3c2c51cd9c6afa6290df3bae9e104", - "sha256:2dcab01c660983ba5e5a612e0c935141ccbee67d2e2e14b833e01c2354bd8034", - "sha256:32546af61a9a9b141ca38d971aa6eb9800450fa6620ce6323cc30eec447861f3", - "sha256:32b40a4c46d199827d79c86bb8cb88b1bbb764f127876f2cb6f3a47f63dbada3", - "sha256:3cc94c69f6bd48ed86e8e24f358cb75095c8129827df1298518ab860115269a4", - "sha256:42b278ac0989d6f5cf58d7e0828ea6b5951464e3cf2ff229dd09a96cb6ba0c86", - "sha256:495b63fd0302f282ee6c1e6ea0f1c12cb3d1a49c8292d27287f01845ff252a96", - "sha256:4af87cdc0d4b14e600e6d3d09793dce3b7171348a094ba818e2a68ae7ee67546", - "sha256:4b94df9f2fdde7b9314321bab8448e6ad5a23b80542dcab53e329527d4099dcb", - "sha256:4c48ddb63e2b20fba4c6a2bf81b4d49e99b6d4587fb67a6cd33a2c1f003af3e3", - "sha256:4df9afd17bd5477e9f8c8b6bb8507e18dd0f8b4efe73bb99729ff203279e9e3b", - "sha256:518950fe6a5d56f94ba125107895f938a4f34f704c658986eae8255edb41163b", - "sha256:538298e4e113ccb8b41658d5a4b605bebe75e46a30ceca22a5a289cf02c80bec", - "sha256:55465121e72e208a7b69b53de791402affe6165083b2ea71b892728bd19ba9ae", - "sha256:588384d70a0f19b47409cfdb10e0c27c20e4293b74fc891df3d8eb47782b8b3e", - "sha256:6278c080d4afffc9016e14325f8734456831124e8c12caa754fd544435c08386", - "sha256:64ea6c221aeee4796860405e1aedec63424cda4202a7ad27a5066876db5b0fd2", - "sha256:681dbb33e2b40262b33fd383bae63c36d33fd79fa1a8e4092945430744ffd34a", - "sha256:6936aa9da390402d646a32a6a38d5409c2d2afb2950f045a7d02ab25a4e7d08d", - "sha256:778d0ec38bbd288b150a3ae363c8ffd88d2207a756842495e9bffd8a8afbc89a", - "sha256:8251f06a77985a2729a8bdbefbae79ee78567dddc3acbd499b87e705ca59fe24", - "sha256:83b4aa5344cce005a9cff5d0321b2e318e871cc1dfc793b66c32dd4f59e9770d", - 
"sha256:844fad925ac5c2ad4faaceb3b2520ad016b5280105c6e16e79838cf951903a7b", - "sha256:8ceb3667dd13b8133f2e4d637b5b00f240f066448e2aa89a41f4c2d78a26ce50", - "sha256:92dc0fb79675882d0b6138be4bf0cec7ea7c7eede60aaca78303d8e8dbdaa523", - "sha256:9789bd945e9f5bd026ed3f5b453d640befb8b1fc33a779c1fe8d3eb21fe3fb4a", - "sha256:a2b6d6eb693bc2fc6c484f2e5d93bd0b0da803fa77bf974f160533e555e4d095", - "sha256:aab9f1e34d810feb00bf841993552b8fcc6ae71d473c505381627143d0018a6a", - "sha256:abb61afd84f23099ac6099d804cdba9bd3b902aaaded3ffff47e490b0a495520", - "sha256:adf9ee115ae8ff8b6da4b854b4152f253b390ba64407a22d75456fe07dcbda65", - "sha256:aedc6c672b351afe6dfe17ff83ee5e7eb6ed44718f879a9328a68bdb20b57e11", - "sha256:b7a00ecb1434f8183395fac5366a21ee73d14900082ca37cf74993cf46baa56c", - "sha256:ba32f4a91c1cb7314c429b03afbf87b1fff4fb1c8db32260e7310104bd77f0c7", - "sha256:cbd0f2cbd8689861209cd89141371d3a22a11613304d1f0736492590aa0ab332", - "sha256:e4bc372b953bf6cec65a8d48482ba574f6e051621d157cf224227dbb55486b1e", - "sha256:eccac3d9aadc68e994b6d228cb0c8919fc47a5350d85a1b4d3d81d1e98baf40c", - "sha256:efd550b3da28195746bb43bd1d815058181a7ca6d9d6aa89dd37f5eefe2cacb7", - "sha256:efef581c8ba4d990770875e1a2218e856849d32ada2680e53aebc5d154a17e20", - "sha256:f057897711a630a0b7a6a03f1acf379b6ba25d37dc5dc217a97191984ba7f2fc", - "sha256:f37d45fab14ffef9d33a0dc3bc59ce0c5313e2253323312d47739192da94f5fd", - "sha256:f44906f70205d456d503105023041f1e63aece7623b31c390a0103db4de17537" - ], - "version": "==5.2.0" + "sha256:08f9636e99a9d5410181ba0729e0408d3d8748026ea938f3b970a0249daa8192", + "sha256:0b465ae0962d49c68aa9733ba92a001b2a0933c317780435f00be7ecb959c702", + "sha256:0cba8477e300d64a11a9789ed40ee8932b59f9ee05f85276dbb4b59acee5dd09", + "sha256:0cee5187b60ed26d56eb2960136288ce91bcf61e2a9405660d271d1f122a69a4", + "sha256:0ea1d73b7c9dcbc5080bb8aaffb776f1c68e807767069b9ccdd06f27a161914a", + "sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3", + "sha256:15e7d1f7a6ee16572e21e3576d2012b2778cbacf75eb4b7400be37455f5ca8bf", + "sha256:17776ecd3a1fdd2b2cd5373e5ef8b307162f581c693575ec62e7c5399d80794c", + "sha256:194d0bcb1374ac3e1e023961610dc8f2c78a0f5f634d0c737691e215569e640d", + "sha256:1c0e316c9add0db48a5b703833881351444398b04111188069a26a61cfb4df78", + "sha256:205e40ccde0f37496904572035deea747390a8b7dc65146d30b96e2dd1359a83", + "sha256:273f158fabc5ea33cbc936da0ab3d4ba80ede5351babc4f577d768e057651531", + "sha256:2876246527c91e101184f63ccd1d716ec9c46519cc5f3d5375a3351c46467c46", + "sha256:2c98384b254b37ce50eddd55db8d381a5c53b4c10ee66e1e7fe749824f894021", + "sha256:2e5a26f16503be6c826abca904e45f1a44ff275fdb7e9d1b75c10671c26f8b94", + "sha256:334701327f37c47fa628fc8b8d28c7d7730ce7daaf4bda1efb741679c2b087fc", + "sha256:3748fac0d0f6a304e674955ab1365d515993b3a0a865e16a11ec9d86fb307f63", + "sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54", + "sha256:3dd4952748521205697bc2802e4afac5ed4b02909bb799ba1fe239f77fd4e117", + "sha256:3f24df7124c323fceb53ff6168da70dbfbae1442b4f3da439cd441681f54fe25", + "sha256:469e2407e0fe9880ac690a3666f03eb4c3c444411a5a5fddfdabc5d184a79f05", + "sha256:4de4bc9b6d35c5af65b454d3e9bc98c50eb3960d5a3762c9438df57427134b8e", + "sha256:5208ebd5152e040640518a77827bdfcc73773a15a33d6644015b763b9c9febc1", + "sha256:52de7fc6c21b419078008f697fd4103dbc763288b1406b4562554bd47514c004", + "sha256:5bb3489b4558e49ad2c5118137cfeaf59434f9737fa9c5deefc72d22c23822e2", + "sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e", + 
"sha256:5dd9ca406499444f4c8299f803d4a14edf7890ecc595c8b1c7115c2342cadc5f", + "sha256:5f931a1c21dfa7a9c573ec1f50a31135ccce84e32507c54e1ea404894c5eb96f", + "sha256:63b82bb63de7c821428d513607e84c6d97d58afd1fe2eb645030bdc185440120", + "sha256:66c0061c91b3b9cf542131148ef7ecbecb2690d48d1612ec386de9d36766058f", + "sha256:6f0c02cbb9691b7c91d5009108f975f8ffeab5dff8f26d62e21c493060eff2a1", + "sha256:71aace0c42d53abe6fc7f726c5d3b60d90f3c5c055a447950ad6ea9cec2e37d9", + "sha256:7d97a4306898b05404a0dcdc32d9709b7d8832c0c542b861d9a826301719794e", + "sha256:7df1e1c05304f26faa49fa752a8c690126cf98b40b91d54e6e9cc3b7d6ffe8b7", + "sha256:8270252effc60b9642b423189a2fe90eb6b59e87cbee54549db3f5562ff8d1b8", + "sha256:867a5ad16892bf20e6c4ea2aab1971f45645ff3102ad29bd84c86027fa99997b", + "sha256:877473e675fdcc113c138813a5dd440da0769a2d81f4d86614e5d62b69497155", + "sha256:8892f89999ffd992208754851e5a052f6b5db70a1e3f7d54b17c5211e37a98c7", + "sha256:9a9845c4c6bb56e508651f005c4aeb0404e518c6f000d5a1123ab077ab769f5c", + "sha256:a1e6e96217a0f72e2b8629e271e1b280c6fa3fe6e59fa8f6701bec14e3354325", + "sha256:a8156e6a7f5e2a0ff0c5b21d6bcb45145efece1909efcbbbf48c56f8da68221d", + "sha256:a9506a7e80bcf6eacfff7f804c0ad5350c8c95b9010e4356a4b36f5322f09abb", + "sha256:af310ec8335016b5e52cae60cda4a4f2a60a788cbb949a4fbea13d441aa5a09e", + "sha256:b0297b1e05fd128d26cc2460c810d42e205d16d76799526dfa8c8ccd50e74959", + "sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7", + "sha256:d0c1bc2fa9a7285719e5678584f6b92572a5b639d0e471bb8d4b650a1a910920", + "sha256:d4d9d6c1a455d4babd320203b918ccc7fcbefe308615c521062bc2ba1aa4d26e", + "sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48", + "sha256:dd93ea5c0c7f3e25335ab7d22a507b1dc43976e1345508f845efc573d3d779d8", + "sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4", + "sha256:f7ee479e96f7ee350db1cf24afa5685a5899e2b34992fb99e1f7c1b0b758d263" + ], + "version": "==5.4.0" } }, "develop": { "asgiref": { "hashes": [ - "sha256:5ee950735509d04eb673bd7f7120f8fa1c9e2df495394992c73234d526907e17", - "sha256:7162a3cb30ab0609f1a4c95938fd73e8604f63bdba516a7f7d64b83ff09478f0" + "sha256:92906c611ce6c967347bbfea733f13d6313901d54dcca88195eaeb52b2a8e8ee", + "sha256:d1216dfbdfb63826470995d31caed36225dcaf34f182e0fa257a4dd9e86f1b78" ], - "version": "==3.3.1" + "version": "==3.3.4" }, "astroid": { "hashes": [ - "sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703", - "sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386" + "sha256:4db03ab5fc3340cf619dbc25e42c2cc3755154ce6009469766d7143d1fc2ee4e", + "sha256:8a398dfce302c13f14bab13e2b14fe385d32b73f4e4853b9bdfb64598baa1975" ], - "version": "==2.4.2" + "version": "==2.5.6" }, "boto3": { "hashes": [ @@ -586,44 +616,44 @@ }, "cryptography": { "hashes": [ - "sha256:066bc53f052dfeda2f2d7c195cf16fb3e5ff13e1b6b7415b468514b40b381a5b", - "sha256:0923ba600d00718d63a3976f23cab19aef10c1765038945628cd9be047ad0336", - "sha256:2d32223e5b0ee02943f32b19245b61a62db83a882f0e76cc564e1cec60d48f87", - "sha256:4169a27b818de4a1860720108b55a2801f32b6ae79e7f99c00d79f2a2822eeb7", - "sha256:57ad77d32917bc55299b16d3b996ffa42a1c73c6cfa829b14043c561288d2799", - "sha256:5ecf2bcb34d17415e89b546dbb44e73080f747e504273e4d4987630493cded1b", - "sha256:600cf9bfe75e96d965509a4c0b2b183f74a4fa6f5331dcb40fb7b77b7c2484df", - "sha256:66b57a9ca4b3221d51b237094b0303843b914b7d5afd4349970bb26518e350b0", - "sha256:93cfe5b7ff006de13e1e89830810ecbd014791b042cbe5eec253be11ac2b28f3", - 
"sha256:9e98b452132963678e3ac6c73f7010fe53adf72209a32854d55690acac3f6724", - "sha256:df186fcbf86dc1ce56305becb8434e4b6b7504bc724b71ad7a3239e0c9d14ef2", - "sha256:fec7fb46b10da10d9e1d078d1ff8ed9e05ae14f431fdbd11145edd0550b9a964" - ], - "version": "==3.4.6" + "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d", + "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959", + "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6", + "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873", + "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2", + "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713", + "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1", + "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177", + "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250", + "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca", + "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d", + "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9" + ], + "version": "==3.4.7" }, "django": { "hashes": [ - "sha256:32ce792ee9b6a0cbbec340123e229ac9f765dff8c2a4ae9247a14b2ba3a365a7", - "sha256:baf099db36ad31f970775d0be5587cc58a6256a6771a44eb795b554d45f211b8" + "sha256:973c968e63518859732f018975364785dd96f0581b1e4b12e2a4b749415ac43a", + "sha256:cd6ec37db950a384dba3341b135394fdc776ede4d149fc7abde1e45a21ec4f22" ], "index": "pypi", - "version": "==3.1.7" + "version": "==3.1.10" }, "django-debug-toolbar": { "hashes": [ - "sha256:84e2607d900dbd571df0a2acf380b47c088efb787dce9805aefeb407341961d2", - "sha256:9e5a25d0c965f7e686f6a8ba23613ca9ca30184daa26487706d4829f5cfb697a" + "sha256:a5ff2a54f24bf88286f9872836081078f4baa843dc3735ee88524e89f8821e33", + "sha256:e759e63e3fe2d3110e0e519639c166816368701eab4a47fed75d7de7018467b9" ], "index": "pypi", - "version": "==3.2" + "version": "==3.2.1" }, "django-extensions": { "hashes": [ - "sha256:674ad4c3b1587a884881824f40212d51829e662e52f85b012cd83d83fe1271d9", - "sha256:9507f8761ee760748938fd8af766d0608fb2738cf368adfa1b2451f61c15ae35" + "sha256:50de8977794a66a91575dd40f87d5053608f679561731845edbd325ceeb387e3", + "sha256:5f0fea7bf131ca303090352577a9e7f8bfbf5489bd9d9c8aea9401db28db34a0" ], "index": "pypi", - "version": "==3.1.1" + "version": "==3.1.3" }, "idna": { "hashes": [ @@ -656,29 +686,30 @@ }, "lazy-object-proxy": { "hashes": [ - "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d", - "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449", - "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08", - "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a", - "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50", - "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd", - "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239", - "sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb", - "sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea", - "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e", - "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156", - "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142", - "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442", - 
"sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62", - "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db", - "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531", - "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383", - "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a", - "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357", - "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4", - "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0" - ], - "version": "==1.4.3" + "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653", + "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61", + "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2", + "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837", + "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3", + "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43", + "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726", + "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3", + "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587", + "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8", + "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a", + "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd", + "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f", + "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad", + "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4", + "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b", + "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf", + "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981", + "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741", + "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e", + "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93", + "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b" + ], + "version": "==1.6.0" }, "markupsafe": { "hashes": [ @@ -761,11 +792,11 @@ }, "moto": { "hashes": [ - "sha256:4610d27ead9124eaa84a78eca7dfa25a8ccb66cf6a7cb8a8889b5ca0c7796889", - "sha256:f5db62e50a5377da4457307675281198e9ffbe9425866a88f523bef0c6e8d463" + "sha256:5ae275d6852233b69badb64f46ba2549a28e91d819f631ea275ec76b3fce7055", + "sha256:7982176837406cabb47a84785125d337b6df357ae92ae29423724d072764dd90" ], "index": "pypi", - "version": "==2.0.2" + "version": "==2.0.6" }, "pycparser": { "hashes": [ @@ -776,19 +807,19 @@ }, "pylint": { "hashes": [ - "sha256:718b74786ea7ed07aa0c58bf572154d4679f960d26e9641cc1de204a30b87fc9", - "sha256:e71c2e9614a4f06e36498f310027942b0f4f2fde20aebb01655b31edc63b9eaf" + "sha256:586d8fa9b1891f4b725f587ef267abe2a1bad89d6b184520c7f07a253dd6e217", + "sha256:f7e2072654a6b6afdf5e2fb38147d3e2d2d43c89f648637baab63e026481279b" ], "index": "pypi", - "version": "==2.6.2" + "version": "==2.8.2" }, "pylint-django": { "hashes": [ - "sha256:355dddb25ef07dbdb77a818b0860ada722aab654c24da34aab916ec26d6390ba", - "sha256:f8d77f7da47a7019cda5cb669c214f03033208f9e945094661299d2637c0da06" + "sha256:aff49d9602a39c027b4ed7521a041438893205918f405800063b7ff692b7371b", + 
"sha256:f63f717169b0c2e4e19c28f1c32c28290647330184fcb7427805ae9b6994f3fc" ], "index": "pypi", - "version": "==2.4.2" + "version": "==2.4.4" }, "pylint-plugin-utils": { "hashes": [ @@ -814,22 +845,38 @@ }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", - "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", - "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", - "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", + "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", + "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", + "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", + "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" ], "index": "pypi", - "version": "==5.3.1" + "version": "==5.4.1" }, "requests": { "hashes": 
[ @@ -841,33 +888,33 @@ }, "requests-mock": { "hashes": [ - "sha256:11215c6f4df72702aa357f205cf1e537cffd7392b3e787b58239bde5fb3db53b", - "sha256:e68f46844e4cee9d447150343c9ae875f99fa8037c6dcf5f15bf1fe9ab43d226" + "sha256:33296f228d8c5df11a7988b741325422480baddfdf5dd9318fd0eb40c3ed8595", + "sha256:5c8ef0254c14a84744be146e9799dc13ebc4f6186058112d9aeed96b131b58e2" ], "index": "pypi", - "version": "==1.8.0" + "version": "==1.9.2" }, "responses": { "hashes": [ - "sha256:3b1ea9cf026edaaf25e853abc4d3b2687d25467e9d8d41e77ee525cad0673f3e", - "sha256:cf62ab0f4119b81d485521b2c950d8aa55a885c90126488450b7acb8ee3f77ac" + "sha256:18a5b88eb24143adbf2b4100f328a2f5bfa72fbdacf12d97d41f07c26c45553d", + "sha256:b54067596f331786f5ed094ff21e8d79e6a1c68ef625180a7d34808d6f36c11b" ], "index": "pypi", - "version": "==0.13.1" + "version": "==0.13.3" }, "s3transfer": { "hashes": [ - "sha256:1e28620e5b444652ed752cf87c7e0cb15b0e578972568c6609f0f18212f259ed", - "sha256:7fdddb4f22275cf1d32129e21f056337fd2a80b6ccef1664528145b72c49e6d2" + "sha256:35627b86af8ff97e7ac27975fe0a98a312814b46c6333d8a6b889627bcd80994", + "sha256:efa5bd92a897b6a8d5c1383828dca3d52d0790e0756d49740563a3fb6ed03246" ], - "version": "==0.3.4" + "version": "==0.3.7" }, "six": { "hashes": [ - "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", - "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "version": "==1.15.0" + "version": "==1.16.0" }, "sqlparse": { "hashes": [ @@ -893,39 +940,48 @@ }, "typed-ast": { "hashes": [ - "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1", - "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d", - "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6", - "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd", - "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37", - "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151", - "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07", - "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440", - "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70", - "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496", - "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea", - "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400", - "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc", - "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606", - "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc", - "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581", - "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412", - "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a", - "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2", - "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787", - "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f", - "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937", - "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64", - "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487", - 
"sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b", - "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41", - "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a", - "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3", - "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166", - "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10" + "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace", + "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff", + "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266", + "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528", + "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6", + "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808", + "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4", + "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363", + "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341", + "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04", + "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41", + "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e", + "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3", + "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899", + "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805", + "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c", + "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c", + "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39", + "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a", + "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3", + "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7", + "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f", + "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075", + "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0", + "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40", + "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428", + "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927", + "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3", + "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f", + "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65" ], "markers": "implementation_name == 'cpython' and python_version < '3.8'", - "version": "==1.4.2" + "version": "==1.4.3" + }, + "typing-extensions": { + "hashes": [ + "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", + "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", + "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" + ], + "markers": "python_version < '3.8'", + "version": "==3.10.0.0" }, "urllib3": { "hashes": [ From aa453f16d1b5a9451da696db926ee48e7c329091 Mon Sep 17 00:00:00 2001 From: Brice Schaffner Date: Tue, 11 May 2021 08:31:53 +0200 Subject: [PATCH 105/105] Fixed pylint issues due to its update Disabled also pylint similarities than still suffer from a bug where the similarities line config 
is not taken into account when running with jobs>=2. --- .pylintrc | 1 + Pipfile | 2 +- Pipfile.lock | 4 +--- app/config/version.py | 15 +++++++-------- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/.pylintrc b/.pylintrc index 8a802228..e1ff4688 100644 --- a/.pylintrc +++ b/.pylintrc @@ -73,6 +73,7 @@ disable=missing-docstring, # yapf correct indentation continuation issue useless-object-inheritance, no-self-use, # method signature often defined by django + similarities # currently similarities has a bug when running pylint with jobs>=2 # Enable the message, report, category or checker with the given id(s). You can diff --git a/Pipfile b/Pipfile index 220adcd9..1bc17a10 100644 --- a/Pipfile +++ b/Pipfile @@ -6,7 +6,7 @@ verify_ssl = true [dev-packages] yapf = "~=0.30.0" isort = "~=4.3.21" -pylint = "!=2.7.2,!=2.7.1,!=2.7.0" # These version of pylint have issues with similarities config +pylint = "*" pylint-django = "*" django-extensions = "*" django-debug-toolbar = ">=3.2.1" diff --git a/Pipfile.lock b/Pipfile.lock index e9c989fd..465c4adf 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "b1a6ecc254f68a6099887fe4b545d668b3eb09071a43c85051872b11f47c0119" + "sha256": "630d3a40293871dce350b50acd8c60d5b7bb665ac96730f38bd921428cd36dfc" }, "pipfile-spec": 6, "requires": { @@ -447,7 +447,6 @@ "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" ], - "markers": "python_version != '3.4'", "version": "==1.26.4" }, "varint": { @@ -988,7 +987,6 @@ "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" ], - "markers": "python_version != '3.4'", "version": "==1.26.4" }, "werkzeug": { diff --git a/app/config/version.py b/app/config/version.py index c029b068..1041bffe 100644 --- a/app/config/version.py +++ b/app/config/version.py @@ -11,18 +11,17 @@ # the tag is directly related to the commit or has an additional # suffix 'v[0-9]+\.[0-9]+\.[0-9]+-beta.[0-9]-[0-9]+-gHASH' denoting # the 'distance' to the latest tag -proc = subprocess.Popen(["git", "describe", "--tags"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) -stdout, stderr = proc.communicate() +with subprocess.Popen(["git", "describe", "--tags"], stdout=subprocess.PIPE, + stderr=subprocess.PIPE) as proc: + stdout, stderr = proc.communicate() GIT_VERSION = stdout.decode('utf-8').strip() if GIT_VERSION == '': # If theres no git tag found in the history we simply use the short # version of the latest git commit hash - proc = subprocess.Popen(["git", "rev-parse", "--short", "HEAD"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - stdout, stderr = proc.communicate() + with subprocess.Popen(["git", "rev-parse", "--short", "HEAD"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) as proc: + stdout, stderr = proc.communicate() APP_VERSION = f"v_{stdout.decode('utf-8').strip()}" else: APP_VERSION = GIT_VERSION
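Note on the app/config/version.py hunk above: the patch keeps the two subprocess.Popen calls but wraps each of them in a `with` block so the stdout/stderr pipes are always closed, which is the stricter resource-handling style that newer pylint releases push for. The standalone sketch below shows the same describe-then-fallback flow written with subprocess.run instead of Popen; the function name get_version and the choice of subprocess.run are illustrative assumptions for this sketch, not part of the patch itself.

    import subprocess

    def get_version():
        """Return `git describe --tags`, or v_<short-hash> when no tag is found."""
        # check=False because `git describe` exits non-zero when the history has
        # no tag; in that case stdout is empty and we fall back to the commit hash.
        describe = subprocess.run(["git", "describe", "--tags"],
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE,
                                  check=False)
        version = describe.stdout.decode('utf-8').strip()
        if version:
            return version
        # No tag found: use the short hash of HEAD, prefixed like the module above
        rev = subprocess.run(["git", "rev-parse", "--short", "HEAD"],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             check=False)
        return f"v_{rev.stdout.decode('utf-8').strip()}"

    if __name__ == '__main__':
        print(get_version())

Both forms close the pipes deterministically; subprocess.run simply performs the communicate()/wait() bookkeeping internally instead of leaving it to the caller.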